/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Rough probability estimates, given names.  */
#define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN (REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS (REG_BR_PROB_BASE)
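/* With the usual REG_BR_PROB_BASE of 10000, PROB_VERY_UNLIKELY works
   out to 99 (just under 1%), PROB_EVEN to 5000, PROB_VERY_LIKELY to
   9901, and PROB_ALWAYS to 10000.  */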

static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor, enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;   /* Name used in the debugging dumps.  */
  const int hitrate;        /* Expected hitrate used by
                               predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate in percent to our probability representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
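/* For example, with REG_BR_PROB_BASE of 10000, HITRATE (79) is
   (79 * 10000 + 50) / 100 = 7900, i.e. a 79% predicted hitrate.  */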

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return true if BB is likely to be CPU intensive and should be
   optimized for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}

/* Return true if BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true if BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
        && INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb->predictions; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}

/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcomes
   (i.e. taken/not taken), which it gets right slightly over 75% of the
   time.  It is however notoriously poor at predicting the probability
   itself.  In general the guessed profile appears a lot flatter (with
   probabilities closer to 50%) than reality, so it is a bad idea to use
   it to drive optimizations such as those disabling dynamic branch
   prediction for well-predictable branches.

   There are two exceptions - edges leading to noreturn calls and edges
   predicted by the number-of-iterations heuristic are predicted well.
   This predicate should be able to distinguish those, but at the moment
   it simply checks for the noreturn heuristic, which is the only one
   giving a probability over 99% or below 1%.  In the future we might
   want to propagate reliability information across the CFG if we find
   it useful in multiple places.  */
static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
          || (profile_status == PROFILE_GUESSED
              && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}

/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}

static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
                         gen_rtx_CONCAT (VOIDmode,
                                         GEN_INT ((int) predictor),
                                         GEN_INT ((int) probability)),
                         REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
                  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

      i->ep_next = e->src->predictions;
      e->src->predictions = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (e->src->predictions)
    {
      struct edge_prediction **prediction = &e->src->predictions;
      while (*prediction)
        {
          if ((*prediction)->ep_edge == e)
            *prediction = (*prediction)->ep_next;
          else
            prediction = &((*prediction)->ep_next);
        }
    }
}

/* Return true when we can store a prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
          && any_condjump_p (insn)
          && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
                  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
        = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
                 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
           predictor_info[predictor].name,
           used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
        {
          fprintf (file, " hit ");
          fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
          fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
        }
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of the outgoing edges of BB.  Set them
   evenly and hope for the best.  */
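/* For instance, with the usual REG_BR_PROB_BASE of 10000, a block with
   three eligible (non-EH, non-fake) successors gets
   (10000 + 1) / 3 = 3333 on each of them.  */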
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove the now useless
   REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
             bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
        int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
        int probability = INTVAL (XEXP (XEXP (note, 0), 1));

        found = true;
        if (best_predictor > predictor)
          best_probability = probability, best_predictor = predictor;

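        /* Fold this prediction into the running combination using the
           Dempster-Shafer rule: for probabilities P and Q, both scaled
           by REG_BR_PROB_BASE,
             combined = P*Q / (P*Q + (1-P)*(1-Q)),
           and D below is the denominator of that fraction.  */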
        d = (combined_probability * probability
             + (REG_BR_PROB_BASE - combined_probability)
             * (REG_BR_PROB_BASE - probability));

        /* Use FP math to avoid overflows of 32-bit integers.  */
        if (d == 0)
          /* If one probability is 0% and one 100%, avoid division by zero.  */
          combined_probability = REG_BR_PROB_BASE / 2;
        else
          combined_probability = (((double) combined_probability) * probability
                                  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  If we didn't match anything,
     use the no_prediction heuristic; if we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
                     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
                       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
                       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
        {
          int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
          int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

          dump_prediction (dump_file, predictor, probability, bb,
                           !first_match || best_predictor == predictor);
          *pnote = XEXP (*pnote, 1);
        }
      else
        pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_BR_PROB,
                             GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG if this is a non-degenerate
         conditional jump.  */
      if (!single_succ_p (bb))
        {
          BRANCH_EDGE (bb)->probability = combined_probability;
          FALLTHRU_EDGE (bb)->probability
            = REG_BR_PROB_BASE - combined_probability;
        }
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}

/* Combine predictions into a single probability and store them into the CFG.
   Remove the now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
        nedges++;
        if (first && !second)
          second = e;
        if (!first)
          first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
        set_even_probabilities (bb);
      bb->predictions = NULL;
      if (dump_file)
        fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
                 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != first)
        probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
        best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
           + (REG_BR_PROB_BASE - combined_probability)
           * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32-bit integers.  */
      if (d == 0)
        /* If one probability is 0% and one 100%, avoid division by zero.  */
        combined_probability = REG_BR_PROB_BASE / 2;
      else
        combined_probability = (((double) combined_probability) * probability
                                * REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  If we didn't match anything,
     use the no_prediction heuristic; if we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
                       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
                       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != EDGE_SUCC (bb, 0))
        probability = REG_BR_PROB_BASE - probability;
      dump_prediction (dump_file, predictor, probability, bb,
                       !first_match || best_predictor == predictor);
    }
  bb->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Predict edge probabilities by exploiting loop structure.  The number
   of iterations of each loop is estimated using the scalar evolution
   machinery and, failing that, by brute-force evaluation of the exit
   condition.  */
static void
predict_loops (struct loops *loops_info)
{
  unsigned i;

  scev_initialize (loops_info);

  /* Walk all the natural loops and try to predict their exit edges
     and back edges.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      unsigned n_exits;
      struct loop *loop = loops_info->parray[i];
      edge *exits;
      struct tree_niter_desc niter_desc;

      exits = get_loop_exit_edges (loop, &n_exits);

      for (j = 0; j < n_exits; j++)
        {
          tree niter = NULL;

          if (number_of_iterations_exit (loop, exits[j], &niter_desc, false))
            niter = niter_desc.niter;
          if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
            niter = loop_niter_by_eval (loop, exits[j]);

          if (TREE_CODE (niter) == INTEGER_CST)
            {
              int probability;
              int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
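              /* If the exit is reached after N iterations, its edge is
                 taken once per N + 1 executions of the exit test, so we
                 predict it with probability about
                 REG_BR_PROB_BASE / (N + 1); e.g. a loop known to
                 iterate nine times gets roughly a 10% exit
                 probability.  */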
              if (host_integerp (niter, 1)
                  && tree_int_cst_lt (niter,
                                      build_int_cstu (NULL_TREE, max - 1)))
                {
                  HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
                  probability = ((REG_BR_PROB_BASE + nitercst / 2)
                                 / nitercst);
                }
              else
                probability = ((REG_BR_PROB_BASE + max / 2) / max);

              predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
            }
        }
      free (exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
        {
          int header_found = 0;
          edge e;
          edge_iterator ei;

          bb = bbs[j];

          /* Bypass loop heuristics on continue statements.  These
             statements construct loops via "non-loop" constructs
             in the source language and are better handled
             separately.  */
          if (predicted_by_p (bb, PRED_CONTINUE))
            continue;

          /* Loop branch heuristics - predict an edge back to a
             loop's head as taken.  */
          if (bb == loop->latch)
            {
              e = find_edge (loop->latch, loop->header);
              if (e)
                {
                  header_found = 1;
                  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
                }
            }

          /* Loop exit heuristics - predict an edge exiting the loop if the
             conditional has no loop header successor as not taken.  */
          if (!header_found)
            {
              /* For loops with many exits we don't want to predict all of
                 them with the same rather large probability, because if
                 all exits are considered in a row, the loop would be
                 predicted to iterate almost never.  The code dividing the
                 probability by the number of exits is very rough.  It
                 should compute the number of exits taken in each path
                 through the function (not the overall number of exits,
                 which might be a lot higher for loops with wide switch
                 statements in them) and compute the n-th root of the
                 probability.

                 We bound the minimal probability at 2% to keep
                 EDGE_PROBABILITY_RELIABLE from trusting the branch
                 prediction, as this was causing a regression in the perl
                 benchmark, which contains such a wide loop.  */

              int probability = ((REG_BR_PROB_BASE
                                  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
                                 / n_exits);
              if (probability < HITRATE (2))
                probability = HITRATE (2);
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest->index < NUM_FIXED_BLOCKS
                    || !flow_bb_inside_loop_p (loop, e->dest))
                  predict_edge (e, PRED_LOOP_EXIT, probability);
            }
        }

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  scev_finalize ();
  current_loops = NULL;
}

/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
          || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
        predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
        predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
        /* Unconditional branch.  */
        predict_insn_def (last_insn, PRED_UNCONDITIONAL,
                          cond == const0_rtx ? NOT_TAKEN : TAKEN);
        break;

      case EQ:
      case UNEQ:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of equality
           tests in FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE:
      case LTGT:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of equality
           tests in FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
        break;

      case UNORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
        break;

      case LE:
      case LT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE:
      case GT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
\f
/* Return the constant EXPR will likely have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch
   predictor, so the evidence must come from that construct plus any
   constant folding on top of it.

   We may want to implement a more involved value guess (such as value
   range propagation based prediction), but such tricks should go into a
   new implementation.  */
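/* Schematically, given GIMPLE of the form

     t_1 = n_2 > 0;
     t_3 = __builtin_expect (t_1, 1);
     if (t_3 != 0) ...

   following the SSA definition of t_3 reaches the __builtin_expect
   call, whose second argument (the expected value, 1 here) is
   returned, so the branch ends up predicted as taken.  */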

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
        return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
        {
          /* All the arguments of the PHI node must have the same expected
             constant value.  */
          int i;
          tree val = NULL, new_val;

          for (i = 0; i < PHI_NUM_ARGS (def); i++)
            {
              tree arg = PHI_ARG_DEF (def, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the expected value of this argument.  However,
                 if we can find an expected constant value for the other
                 PHI args then we can still be sure that this is
                 likely a constant.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def))
                continue;

              new_val = expr_expected_value (arg, visited);
              if (!new_val)
                return NULL;
              if (!val)
                val = new_val;
              else if (!operand_equal_p (val, new_val, false))
                return NULL;
            }
          return val;
        }
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
        return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
        return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
        {
          tree arglist = TREE_OPERAND (expr, 1);
          tree val;

          if (arglist == NULL_TREE
              || TREE_CHAIN (arglist) == NULL_TREE)
            return NULL;
          val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
          if (TREE_CONSTANT (val))
            return val;
          return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
        }
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
        return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
        return NULL;
      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
        return NULL;
      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  return NULL;
}
\f
/* Get rid of all builtin_expect calls we no longer need.  */
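/* After prediction, a statement such as

     x_1 = __builtin_expect (y_2, 1);

   is rewritten below (schematically) into

     x_1 = y_2;

   replacing the call by its first argument.  */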
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
        {
          tree stmt = bsi_stmt (bi);
          tree fndecl;
          tree arglist;

          if (TREE_CODE (stmt) == MODIFY_EXPR
              && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
              && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
              && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
              && TREE_CHAIN (arglist))
            {
              TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
              update_stmt (stmt);
            }
        }
    }
}
\f
/* Predict using the opcode of the last statement in the basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of equality
           tests in FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE_EXPR:
      case LTGT_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of equality
           tests in FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
        break;

      case UNORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
        break;

      case LE_EXPR:
      case LT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE_EXPR:
      case GT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}

/* Try to guess whether the return value indicates an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
        {
          *prediction = NOT_TAKEN;
          return PRED_NULL_RETURN;
        }
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
         errors.  */
      if (TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_sgn (val) < 0)
        {
          *prediction = NOT_TAKEN;
          return PRED_NEGATIVE_RETURN;
        }
      /* Constant return values seem to be commonly taken.
         Zero/one often represent booleans so exclude them from the
         heuristics.  */
      if (TREE_CONSTANT (val)
          && (!integer_zerop (val) && !integer_onep (val)))
        {
          *prediction = TAKEN;
          return PRED_CONST_RETURN;
        }
    }
  return PRED_NO_PREDICTION;
}

/* Find the basic block containing the return expression and look for a
   possible return value, trying to apply RETURN_PREDICTION heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt = NULL;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
        break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants),
     so we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
        pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
        if (pred != PRED_NO_PREDICTION)
          predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
                                    direction);
      }
}

/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of those basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = XNEWVEC (int, last_basic_block);
  memset (heads, ENTRY_BLOCK, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree stmt = bsi_stmt (bsi);
          switch (TREE_CODE (stmt))
            {
            case MODIFY_EXPR:
              if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
                {
                  stmt = TREE_OPERAND (stmt, 1);
                  goto call_expr;
                }
              break;
            case CALL_EXPR:
            call_expr:;
              if (call_expr_flags (stmt) & ECF_NORETURN)
                predict_paths_leading_to (bb, heads, PRED_NORETURN,
                                          NOT_TAKEN);
              break;
            default:
              break;
            }
        }
    }

  free (heads);
}

/* Predict branch probabilities and estimate the profile of the tree CFG.  */
static unsigned int
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Predict early returns to be probable, as we've already taken
             care of error returns and other cases are often used for
             fast paths through the function.  */
          if (e->dest == EXIT_BLOCK_PTR
              && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
              && !single_pred_p (bb))
            {
              edge e1;
              edge_iterator ei1;

              FOR_EACH_EDGE (e1, ei1, bb->preds)
                if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
                    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
                    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
                    && !last_basic_block_p (e1->src))
                  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
            }

          /* Look for blocks we are guarding (i.e. we dominate them,
             but they don't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
              block_stmt_iterator bi;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (bi = bsi_start (e->dest); !bsi_end_p (bi);
                   bsi_next (&bi))
                {
                  tree stmt = bsi_stmt (bi);
                  if ((TREE_CODE (stmt) == CALL_EXPR
                       || (TREE_CODE (stmt) == MODIFY_EXPR
                           && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
                      /* Constant and pure calls are hardly used to signal
                         something exceptional.  */
                      && TREE_SIDE_EFFECTS (stmt))
                    {
                      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                      break;
                    }
                }
            }
        }
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

  strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
  return 0;
}
\f
/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
          || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
              && single_succ_p (bb)
              && single_succ (bb)->next_bb == EXIT_BLOCK_PTR));
}

/* Set branch probabilities according to PRED and TAKEN.
   HEADS[bb->index] should be the index of the basic block in which we
   need to alter branch predictions (i.e. the first of our dominators
   that we do not post-dominate); we fill this information in on
   demand, so ENTRY_BLOCK may be there when it has not been computed
   yet.  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
                          enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] == ENTRY_BLOCK)
    {
      /* This is the first time we need this field in the heads array, so
         find the first dominator that we do not post-dominate (we are
         using already-known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] == ENTRY_BLOCK)
        {
          if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
            break;
          heads[next_ai->index] = ai->index;
          ai = next_ai;
          next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
        }
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
        head = next_ai->index;
      else
        head = heads[next_ai->index];
      while (next_ai != bb)
        {
          next_ai = ai;
          ai = BASIC_BLOCK (heads[ai->index]);
          heads[next_ai->index] = head;
        }
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= NUM_FIXED_BLOCKS
        && dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
\f
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* If the edge is a loopback edge, the probability that the edge will
     be reached provided the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B) ((block_info) (B)->aux)
#define EDGE_INFO(E) ((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we cannot
         look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
         directly.  Do the same for the entry block.  */
      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          bool visit = bitmap_bit_p (tovisit, e->src->index);

          if (visit && !(e->flags & EDGE_DFS_BACK))
            count++;
          else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
            fprintf (dump_file,
                     "Irreducible region hit, ignoring edge to %i->%i\n",
                     e->src->index, bb->index);
        }
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
        {
#ifdef ENABLE_CHECKING
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
                        || (e->flags & EDGE_DFS_BACK));
#endif

          FOR_EACH_EDGE (e, ei, bb->preds)
            if (EDGE_INFO (e)->back_edge)
              {
                sreal_add (&cyclic_probability, &cyclic_probability,
                           &EDGE_INFO (e)->back_edge_prob);
              }
            else if (!(e->flags & EDGE_DFS_BACK))
              {
                sreal tmp;

                /* frequency += (e->probability
                                 * BLOCK_INFO (e->src)->frequency /
                                 REG_BR_PROB_BASE);  */

                sreal_init (&tmp, e->probability, 0);
                sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
                sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
                sreal_add (&frequency, &frequency, &tmp);
              }

          if (sreal_compare (&cyclic_probability, &real_zero) == 0)
            {
              memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
                      sizeof (frequency));
            }
          else
            {
              if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
                {
                  memcpy (&cyclic_probability, &real_almost_one,
                          sizeof (real_almost_one));
                }

              /* BLOCK_INFO (bb)->frequency = frequency
                                              / (1 - cyclic_probability) */

              sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
              sreal_div (&BLOCK_INFO (bb)->frequency,
                         &frequency, &cyclic_probability);
            }
        }

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
        {
          sreal tmp;

          /* EDGE_INFO (e)->back_edge_prob
             = ((e->probability * BLOCK_INFO (bb)->frequency)
                / REG_BR_PROB_BASE); */

          sreal_init (&tmp, e->probability, 0);
          sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
          sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                     &tmp, &real_inv_br_prob_base);
        }

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_DFS_BACK)
            && BLOCK_INFO (e->dest)->npredecessors)
          {
            BLOCK_INFO (e->dest)->npredecessors--;
            if (!BLOCK_INFO (e->dest)->npredecessors)
              {
                if (!nextbb)
                  nextbb = e->dest;
                else
                  BLOCK_INFO (last)->next = e->dest;

                last = e->dest;
              }
          }
    }
}

/* Estimate probabilities of loopback edges in loops at the same nesting
   level.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for the dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
        {
          /* Find current loop back edge and mark it.  */
          e = loop_latch_edge (loop);
          EDGE_INFO (e)->back_edge = 1;
        }

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}

/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
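  /* Scale each count into the range [0, BB_FREQ_MAX], rounding to the
     nearest value; COUNT_MAX is at least 1, which also avoids division
     by zero when no block was ever executed.  */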
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}

/* Return true if the function is likely to be expensive, so there is no
   point in optimizing performance of the prologue, epilogue or doing
   inlining at the expense of code size growth.  THRESHOLD is the limit
   of the number of instructions the function can execute on average to
   still be considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     that profile feedback is available and the function has not been
     executed at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          {
            sum += bb->frequency;
            if (sum > limit)
              return true;
          }
    }

  return false;
}

/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;
      bitmap tovisit;

      if (!real_values_initialized)
        {
          real_values_initialized = 1;
          sreal_init (&real_zero, 0, 0);
          sreal_init (&real_one, 1, 0);
          sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
          sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
          sreal_init (&real_one_half, 1, -1);
          sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
          sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
        }

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      tovisit = BITMAP_ALLOC (NULL);
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          edge e;
          edge_iterator ei;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
              sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                         &EDGE_INFO (e)->back_edge_prob,
                         &real_inv_br_prob_base);
            }
        }

      /* First compute probabilities locally for each loop from innermost
         to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root, tovisit);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
        if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
          memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

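      /* Rescale so the most frequent block gets frequency BB_FREQ_MAX:
         FREQ_MAX becomes the factor BB_FREQ_MAX / (maximum frequency),
         and each block's frequency is multiplied by it and rounded to
         the nearest integer.  */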
      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          sreal tmp;

          sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
          sreal_add (&tmp, &tmp, &real_one_half);
          bb->frequency = sreal_to_int (&tmp);
        }

      free_aux_for_blocks ();
      free_aux_for_edges ();
      BITMAP_FREE (tovisit);
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether the function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
        {
          cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
          return;
        }
      if (!probably_never_executed_bb_p (bb))
        cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose the appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
         but this requires more work as the frequency needs to match
         for all generated objects, so we would need to merge the
         frequency of all instances.  For now just never set the
         frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
                    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}

static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}

struct tree_opt_pass pass_profile =
{
  "profile",                            /* name */
  gate_estimate_probability,            /* gate */
  tree_estimate_probability,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,   /* todo_flags_finish */
  0                                     /* letter */
};