predict.c (tree_predict_edge): Do not predict entry edge and single successor edge.
1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* References:
23
24 [1] "Branch Prediction for Free"
25 Ball and Larus; PLDI '93.
26 [2] "Static Branch Frequency and Program Profile Analysis"
27 Wu and Larus; MICRO-27.
28 [3] "Corpus-based Static Branch Prediction"
29 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */
30
31
32 #include "config.h"
33 #include "system.h"
34 #include "coretypes.h"
35 #include "tm.h"
36 #include "tree.h"
37 #include "rtl.h"
38 #include "tm_p.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
41 #include "insn-config.h"
42 #include "regs.h"
43 #include "flags.h"
44 #include "output.h"
45 #include "function.h"
46 #include "except.h"
47 #include "toplev.h"
48 #include "recog.h"
49 #include "expr.h"
50 #include "predict.h"
51 #include "coverage.h"
52 #include "sreal.h"
53 #include "params.h"
54 #include "target.h"
55 #include "cfgloop.h"
56 #include "tree-flow.h"
57 #include "ggc.h"
58 #include "tree-dump.h"
59 #include "tree-pass.h"
60 #include "timevar.h"
61 #include "tree-scalar-evolution.h"
62 #include "cfgloop.h"
63
64 /* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
65 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */
66 static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
67 real_inv_br_prob_base, real_one_half, real_bb_freq_max;
68
69 /* Random guesstimation given names. */
70 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 100 - 1)
71 #define PROB_EVEN (REG_BR_PROB_BASE / 2)
72 #define PROB_VERY_LIKELY (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
73 #define PROB_ALWAYS (REG_BR_PROB_BASE)
74
75 static void combine_predictions_for_insn (rtx, basic_block);
76 static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
77 static void estimate_loops_at_level (struct loop *, bitmap);
78 static void propagate_freq (struct loop *, bitmap);
79 static void estimate_bb_frequencies (struct loops *);
80 static void predict_paths_leading_to (basic_block, int *, enum br_predictor, enum prediction);
81 static bool last_basic_block_p (basic_block);
82 static void compute_function_frequency (void);
83 static void choose_function_section (void);
84 static bool can_predict_insn_p (rtx);
85
86 /* Information we hold about each branch predictor.
87 Filled using information from predict.def. */
88
89 struct predictor_info
90 {
91 const char *const name; /* Name used in the debugging dumps. */
92 const int hitrate; /* Expected hitrate used by
93 predict_insn_def call. */
94 const int flags;
95 };
96
 97 /* Use given predictor without Dempster-Shafer theory if it matches
 98    using first_match heuristics. */
99 #define PRED_FLAG_FIRST_MATCH 1
100
 101 /* Convert a hit rate given in percent to our representation. */
102
103 #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
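/* Worked example, assuming REG_BR_PROB_BASE is 10000 (its long-standing
   value): HITRATE (79) == (79 * 10000 + 50) / 100 == 7900, i.e. a predictor
   documented as 79% accurate is stored as 7900 out of 10000. */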
104
105 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
106 static const struct predictor_info predictor_info[]= {
107 #include "predict.def"
108
109 /* Upper bound on predictors. */
110 {NULL, 0, 0}
111 };
112 #undef DEF_PREDICTOR
113
 114 /* Return true if BB can be CPU intensive and should be optimized
115 for maximal performance. */
116
117 bool
118 maybe_hot_bb_p (basic_block bb)
119 {
120 if (profile_info && flag_branch_probabilities
121 && (bb->count
122 < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
123 return false;
124 if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
125 return false;
126 return true;
127 }
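/* Illustration with hypothetical numbers: if profile feedback reports
   sum_max == 1000000 and HOT_BB_COUNT_FRACTION is 10000, any block executed
   fewer than 100 times is rejected; without usable feedback, a block is
   rejected when its static frequency falls below
   BB_FREQ_MAX / HOT_BB_FREQUENCY_FRACTION. */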
128
 129 /* Return true if BB is cold and should be optimized for size. */
130
131 bool
132 probably_cold_bb_p (basic_block bb)
133 {
134 if (profile_info && flag_branch_probabilities
135 && (bb->count
136 < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
137 return true;
138 if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
139 return true;
140 return false;
141 }
142
 143 /* Return true if BB is probably never executed. */
144 bool
145 probably_never_executed_bb_p (basic_block bb)
146 {
147 if (profile_info && flag_branch_probabilities)
148 return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
149 return false;
150 }
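/* The rounding above asks whether BB executes less than half a time per run
   on average: with profile_info->runs == 100, a count of 49 rounds to 0
   (probably never executed) while a count of 50 rounds to 1. */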
151
 152 /* Return true if one of the outgoing edges is already predicted by
 153    PREDICTOR. */
154
155 bool
156 rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
157 {
158 rtx note;
159 if (!INSN_P (BB_END (bb)))
160 return false;
161 for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
162 if (REG_NOTE_KIND (note) == REG_BR_PRED
163 && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
164 return true;
165 return false;
166 }
167
 168 /* Return true if one of the outgoing edges is already predicted by
 169    PREDICTOR. */
170
171 bool
172 tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
173 {
174 struct edge_prediction *i;
175 for (i = bb->predictions; i; i = i->next)
176 if (i->predictor == predictor)
177 return true;
178 return false;
179 }
180
181 static void
182 predict_insn (rtx insn, enum br_predictor predictor, int probability)
183 {
184 gcc_assert (any_condjump_p (insn));
185 if (!flag_guess_branch_prob)
186 return;
187
188 REG_NOTES (insn)
189 = gen_rtx_EXPR_LIST (REG_BR_PRED,
190 gen_rtx_CONCAT (VOIDmode,
191 GEN_INT ((int) predictor),
192 GEN_INT ((int) probability)),
193 REG_NOTES (insn));
194 }
195
196 /* Predict insn by given predictor. */
197
198 void
199 predict_insn_def (rtx insn, enum br_predictor predictor,
200 enum prediction taken)
201 {
202 int probability = predictor_info[(int) predictor].hitrate;
203
204 if (taken != TAKEN)
205 probability = REG_BR_PROB_BASE - probability;
206
207 predict_insn (insn, predictor, probability);
208 }
209
210 /* Predict edge E with given probability if possible. */
211
212 void
213 rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
214 {
215 rtx last_insn;
216 last_insn = BB_END (e->src);
217
218 /* We can store the branch prediction information only about
219 conditional jumps. */
220 if (!any_condjump_p (last_insn))
221 return;
222
223 /* We always store probability of branching. */
224 if (e->flags & EDGE_FALLTHRU)
225 probability = REG_BR_PROB_BASE - probability;
226
227 predict_insn (last_insn, predictor, probability);
228 }
229
230 /* Predict edge E with the given PROBABILITY. */
231 void
232 tree_predict_edge (edge e, enum br_predictor predictor, int probability)
233 {
234 if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
235 && flag_guess_branch_prob)
236 {
237 struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));
238
239 i->next = e->src->predictions;
240 e->src->predictions = i;
241 i->probability = probability;
242 i->predictor = predictor;
243 i->edge = e;
244 }
245 }
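/* A hedged illustration of the guard above (names as in this file): for a
   block BB with a single successor,

     tree_predict_edge (single_succ_edge (bb), PRED_EARLY_RETURN, 5000);

   attaches nothing, since EDGE_COUNT (bb->succs) is 1; likewise for the
   edge out of ENTRY_BLOCK_PTR.  Only real conditional branches accumulate
   entries on e->src->predictions (see the log message for this revision). */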
246
247 /* Remove all predictions on given basic block that are attached
248 to edge E. */
249 void
250 remove_predictions_associated_with_edge (edge e)
251 {
252 if (e->src->predictions)
253 {
254 struct edge_prediction **prediction = &e->src->predictions;
255 while (*prediction)
256 {
257 if ((*prediction)->edge == e)
258 *prediction = (*prediction)->next;
259 else
260 prediction = &((*prediction)->next);
261 }
262 }
263 }
264
 265 /* Return true when we can store a prediction on insn INSN.
 266    At the moment we represent predictions only on conditional
 267    jumps, not on computed jumps or other complicated cases. */
268 static bool
269 can_predict_insn_p (rtx insn)
270 {
271 return (JUMP_P (insn)
272 && any_condjump_p (insn)
273 && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
274 }
275
276 /* Predict edge E by given predictor if possible. */
277
278 void
279 predict_edge_def (edge e, enum br_predictor predictor,
280 enum prediction taken)
281 {
282 int probability = predictor_info[(int) predictor].hitrate;
283
284 if (taken != TAKEN)
285 probability = REG_BR_PROB_BASE - probability;
286
287 predict_edge (e, predictor, probability);
288 }
289
290 /* Invert all branch predictions or probability notes in the INSN. This needs
291 to be done each time we invert the condition used by the jump. */
292
293 void
294 invert_br_probabilities (rtx insn)
295 {
296 rtx note;
297
298 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
299 if (REG_NOTE_KIND (note) == REG_BR_PROB)
300 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
301 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
302 XEXP (XEXP (note, 0), 1)
303 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
304 }
305
306 /* Dump information about the branch prediction to the output file. */
307
308 static void
309 dump_prediction (FILE *file, enum br_predictor predictor, int probability,
310 basic_block bb, int used)
311 {
312 edge e;
313 edge_iterator ei;
314
315 if (!file)
316 return;
317
318 FOR_EACH_EDGE (e, ei, bb->succs)
319 if (! (e->flags & EDGE_FALLTHRU))
320 break;
321
322 fprintf (file, " %s heuristics%s: %.1f%%",
323 predictor_info[predictor].name,
324 used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);
325
326 if (bb->count)
327 {
328 fprintf (file, " exec ");
329 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
330 if (e)
331 {
332 fprintf (file, " hit ");
333 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
334 fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
335 }
336 }
337
338 fprintf (file, "\n");
339 }
340
 341 /* We cannot predict the probabilities of the outgoing edges of BB. Set them
 342    evenly and hope for the best. */
343 static void
344 set_even_probabilities (basic_block bb)
345 {
346 int nedges = 0;
347 edge e;
348 edge_iterator ei;
349
350 FOR_EACH_EDGE (e, ei, bb->succs)
351 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
352 nedges ++;
353 FOR_EACH_EDGE (e, ei, bb->succs)
354 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
355 e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
356 else
357 e->probability = 0;
358 }
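/* Worked example, again assuming REG_BR_PROB_BASE is 10000: a block with
   three non-EH, non-fake successors gets (10000 + 3/2) / 3 == 3333 on each
   such edge (integer arithmetic throughout), while EH and fake edges get
   probability 0. */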
359
 360 /* Combine all REG_BR_PRED notes into a single probability and attach a
 361    REG_BR_PROB note if not already present. Remove now useless REG_BR_PRED notes. */
362
363 static void
364 combine_predictions_for_insn (rtx insn, basic_block bb)
365 {
366 rtx prob_note;
367 rtx *pnote;
368 rtx note;
369 int best_probability = PROB_EVEN;
370 int best_predictor = END_PREDICTORS;
371 int combined_probability = REG_BR_PROB_BASE / 2;
372 int d;
373 bool first_match = false;
374 bool found = false;
375
376 if (!can_predict_insn_p (insn))
377 {
378 set_even_probabilities (bb);
379 return;
380 }
381
382 prob_note = find_reg_note (insn, REG_BR_PROB, 0);
383 pnote = &REG_NOTES (insn);
384 if (dump_file)
385 fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
386 bb->index);
387
 388   /* We implement "first match" heuristics and use the probability guessed
 389      by the predictor with the smallest index. */
390 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
391 if (REG_NOTE_KIND (note) == REG_BR_PRED)
392 {
393 int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
394 int probability = INTVAL (XEXP (XEXP (note, 0), 1));
395
396 found = true;
397 if (best_predictor > predictor)
398 best_probability = probability, best_predictor = predictor;
399
400 d = (combined_probability * probability
401 + (REG_BR_PROB_BASE - combined_probability)
402 * (REG_BR_PROB_BASE - probability));
403
 404 	  /* Use FP math to avoid overflows of 32-bit integers. */
405 if (d == 0)
406 /* If one probability is 0% and one 100%, avoid division by zero. */
407 combined_probability = REG_BR_PROB_BASE / 2;
408 else
409 combined_probability = (((double) combined_probability) * probability
410 * REG_BR_PROB_BASE / d + 0.5);
411 }
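  /* A worked instance of the Dempster-Shafer combination above, assuming
     REG_BR_PROB_BASE is 10000: merging an accumulated 7000 (70%) with a new
     note of 8000 (80%) gives d = 7000*8000 + 3000*2000 == 62000000 and
     combined == 7000 * 8000 * 10000 / d + 0.5 ~= 9032, so two agreeing
     predictors reinforce each other beyond either one alone. */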
412
 413   /* Decide which heuristic to use. If we didn't match anything,
 414      use the no_prediction heuristic; if we did match, use either
 415      first match or Dempster-Shafer theory depending on the flags. */
416
417 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
418 first_match = true;
419
420 if (!found)
421 dump_prediction (dump_file, PRED_NO_PREDICTION,
422 combined_probability, bb, true);
423 else
424 {
425 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
426 bb, !first_match);
427 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
428 bb, first_match);
429 }
430
431 if (first_match)
432 combined_probability = best_probability;
433 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
434
435 while (*pnote)
436 {
437 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
438 {
439 int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
440 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
441
442 dump_prediction (dump_file, predictor, probability, bb,
443 !first_match || best_predictor == predictor);
444 *pnote = XEXP (*pnote, 1);
445 }
446 else
447 pnote = &XEXP (*pnote, 1);
448 }
449
450 if (!prob_note)
451 {
452 REG_NOTES (insn)
453 = gen_rtx_EXPR_LIST (REG_BR_PROB,
454 GEN_INT (combined_probability), REG_NOTES (insn));
455
 456       /* Save the prediction into the CFG if we are looking at a
 457          non-degenerate conditional jump. */
458 if (!single_succ_p (bb))
459 {
460 BRANCH_EDGE (bb)->probability = combined_probability;
461 FALLTHRU_EDGE (bb)->probability
462 = REG_BR_PROB_BASE - combined_probability;
463 }
464 }
465 else if (!single_succ_p (bb))
466 {
467 int prob = INTVAL (XEXP (prob_note, 0));
468
469 BRANCH_EDGE (bb)->probability = prob;
470 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
471 }
472 else
473 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
474 }
475
 476 /* Combine predictions into a single probability and store it into the CFG.
 477    Remove now useless prediction entries. */
478
479 static void
480 combine_predictions_for_bb (FILE *file, basic_block bb)
481 {
482 int best_probability = PROB_EVEN;
483 int best_predictor = END_PREDICTORS;
484 int combined_probability = REG_BR_PROB_BASE / 2;
485 int d;
486 bool first_match = false;
487 bool found = false;
488 struct edge_prediction *pred;
489 int nedges = 0;
490 edge e, first = NULL, second = NULL;
491 edge_iterator ei;
492
493 FOR_EACH_EDGE (e, ei, bb->succs)
494 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
495 {
496 nedges ++;
497 if (first && !second)
498 second = e;
499 if (!first)
500 first = e;
501 }
502
503 /* When there is no successor or only one choice, prediction is easy.
504
505 We are lazy for now and predict only basic blocks with two outgoing
 506      edges. It is possible to predict the generic case too, but we have to
507 ignore first match heuristics and do more involved combining. Implement
508 this later. */
509 if (nedges != 2)
510 {
511 if (!bb->count)
512 set_even_probabilities (bb);
513 bb->predictions = NULL;
514 if (file)
515 fprintf (file, "%i edges in bb %i predicted to even probabilities\n",
516 nedges, bb->index);
517 return;
518 }
519
520 if (file)
521 fprintf (file, "Predictions for bb %i\n", bb->index);
522
 523   /* We implement "first match" heuristics and use the probability guessed
 524      by the predictor with the smallest index. */
525 for (pred = bb->predictions; pred; pred = pred->next)
526 {
527 int predictor = pred->predictor;
528 int probability = pred->probability;
529
530 if (pred->edge != first)
531 probability = REG_BR_PROB_BASE - probability;
532
533 found = true;
534 if (best_predictor > predictor)
535 best_probability = probability, best_predictor = predictor;
536
537 d = (combined_probability * probability
538 + (REG_BR_PROB_BASE - combined_probability)
539 * (REG_BR_PROB_BASE - probability));
540
 541 	  /* Use FP math to avoid overflows of 32-bit integers. */
542 if (d == 0)
543 /* If one probability is 0% and one 100%, avoid division by zero. */
544 combined_probability = REG_BR_PROB_BASE / 2;
545 else
546 combined_probability = (((double) combined_probability) * probability
547 * REG_BR_PROB_BASE / d + 0.5);
548 }
549
 550   /* Decide which heuristic to use. If we didn't match anything,
 551      use the no_prediction heuristic; if we did match, use either
 552      first match or Dempster-Shafer theory depending on the flags. */
553
554 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
555 first_match = true;
556
557 if (!found)
558 dump_prediction (file, PRED_NO_PREDICTION, combined_probability, bb, true);
559 else
560 {
561 dump_prediction (file, PRED_DS_THEORY, combined_probability, bb,
562 !first_match);
563 dump_prediction (file, PRED_FIRST_MATCH, best_probability, bb,
564 first_match);
565 }
566
567 if (first_match)
568 combined_probability = best_probability;
569 dump_prediction (file, PRED_COMBINED, combined_probability, bb, true);
570
571 for (pred = bb->predictions; pred; pred = pred->next)
572 {
573 int predictor = pred->predictor;
574 int probability = pred->probability;
575
576 if (pred->edge != EDGE_SUCC (bb, 0))
577 probability = REG_BR_PROB_BASE - probability;
578 dump_prediction (file, predictor, probability, bb,
579 !first_match || best_predictor == predictor);
580 }
581 bb->predictions = NULL;
582
583 if (!bb->count)
584 {
585 first->probability = combined_probability;
586 second->probability = REG_BR_PROB_BASE - combined_probability;
587 }
588 }
589
 590 /* Predict edge probabilities by exploiting loop structure.
 591    When RTLSIMPLELOOPS is set, attempt to count the number of iterations by
 592    analyzing the RTL; otherwise use the tree-based approach. */
593 static void
594 predict_loops (struct loops *loops_info, bool rtlsimpleloops)
595 {
596 unsigned i;
597
598 if (!rtlsimpleloops)
599 scev_initialize (loops_info);
600
601 /* Try to predict out blocks in a loop that are not part of a
602 natural loop. */
603 for (i = 1; i < loops_info->num; i++)
604 {
605 basic_block bb, *bbs;
606 unsigned j;
607 unsigned n_exits;
608 struct loop *loop = loops_info->parray[i];
609 struct niter_desc desc;
610 unsigned HOST_WIDE_INT niter;
611 edge *exits;
612
613 exits = get_loop_exit_edges (loop, &n_exits);
614
615 if (rtlsimpleloops)
616 {
617 iv_analysis_loop_init (loop);
618 find_simple_exit (loop, &desc);
619
620 if (desc.simple_p && desc.const_iter)
621 {
622 int prob;
623 niter = desc.niter + 1;
624 if (niter == 0) /* We might overflow here. */
625 niter = desc.niter;
626
627 prob = (REG_BR_PROB_BASE
628 - (REG_BR_PROB_BASE + niter /2) / niter);
 629 	      /* The branch prediction algorithm gives 0 frequency to everything
 630 	         after the end of a loop that has 0 probability of finishing. */
631 if (prob == REG_BR_PROB_BASE)
632 prob = REG_BR_PROB_BASE - 1;
633 predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
634 prob);
635 }
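	  /* Worked example: for a loop with a known constant trip count of
	     100 (and REG_BR_PROB_BASE == 10000), prob == 10000
	     - (10000 + 50) / 100 == 9900, i.e. the loop is predicted to
	     iterate again with 99% probability at each test. */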
636 }
637 else
638 {
639 struct tree_niter_desc niter_desc;
640
641 for (j = 0; j < n_exits; j++)
642 {
643 tree niter = NULL;
644
645 if (number_of_iterations_exit (loop, exits[j], &niter_desc))
646 niter = niter_desc.niter;
647 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
648 niter = loop_niter_by_eval (loop, exits[j]);
649
650 if (TREE_CODE (niter) == INTEGER_CST)
651 {
652 int probability;
653 if (host_integerp (niter, 1)
654 && tree_int_cst_lt (niter,
655 build_int_cstu (NULL_TREE,
656 REG_BR_PROB_BASE - 1)))
657 {
658 HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
659 probability = (REG_BR_PROB_BASE + nitercst / 2) / nitercst;
660 }
661 else
662 probability = 1;
663
664 predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
665 }
666 }
667
668 }
669 free (exits);
670
671 bbs = get_loop_body (loop);
672
673 for (j = 0; j < loop->num_nodes; j++)
674 {
675 int header_found = 0;
676 edge e;
677 edge_iterator ei;
678
679 bb = bbs[j];
680
 681 	  /* Bypass loop heuristics on continue statements. These
 682 	     statements construct loops via "non-loop" constructs
 683 	     in the source language and are better handled
 684 	     separately. */
685 if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
686 || predicted_by_p (bb, PRED_CONTINUE))
687 continue;
688
689 /* Loop branch heuristics - predict an edge back to a
690 loop's head as taken. */
691 if (bb == loop->latch)
692 {
693 e = find_edge (loop->latch, loop->header);
694 if (e)
695 {
696 header_found = 1;
697 predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
698 }
699 }
700
 701 	  /* Loop exit heuristics - if the conditional has no successor going
 702 	     back to the loop header, predict any edge exiting the loop as not taken. */
703 if (!header_found)
704 FOR_EACH_EDGE (e, ei, bb->succs)
705 if (e->dest->index < 0
706 || !flow_bb_inside_loop_p (loop, e->dest))
707 predict_edge
708 (e, PRED_LOOP_EXIT,
709 (REG_BR_PROB_BASE
710 - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
711 / n_exits);
712 }
713
714 /* Free basic blocks from get_loop_body. */
715 free (bbs);
716 }
717
718 if (!rtlsimpleloops)
719 {
720 scev_finalize ();
721 current_loops = NULL;
722 }
723 }
724
725 /* Attempt to predict probabilities of BB outgoing edges using local
726 properties. */
727 static void
728 bb_estimate_probability_locally (basic_block bb)
729 {
730 rtx last_insn = BB_END (bb);
731 rtx cond;
732
733 if (! can_predict_insn_p (last_insn))
734 return;
735 cond = get_condition (last_insn, NULL, false, false);
736 if (! cond)
737 return;
738
739 /* Try "pointer heuristic."
740 A comparison ptr == 0 is predicted as false.
741 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
742 if (COMPARISON_P (cond)
743 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
744 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
745 {
746 if (GET_CODE (cond) == EQ)
747 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
748 else if (GET_CODE (cond) == NE)
749 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
750 }
751 else
752
753 /* Try "opcode heuristic."
754 EQ tests are usually false and NE tests are usually true. Also,
755 most quantities are positive, so we can make the appropriate guesses
756 about signed comparisons against zero. */
757 switch (GET_CODE (cond))
758 {
759 case CONST_INT:
760 /* Unconditional branch. */
761 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
762 cond == const0_rtx ? NOT_TAKEN : TAKEN);
763 break;
764
765 case EQ:
766 case UNEQ:
 767 	/* Floating point comparisons appear to behave in a very
 768 	   unpredictable way because of the special role of equality
 769 	   tests in FP code. */
770 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
771 ;
772 /* Comparisons with 0 are often used for booleans and there is
773 nothing useful to predict about them. */
774 else if (XEXP (cond, 1) == const0_rtx
775 || XEXP (cond, 0) == const0_rtx)
776 ;
777 else
778 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
779 break;
780
781 case NE:
782 case LTGT:
 783 	/* Floating point comparisons appear to behave in a very
 784 	   unpredictable way because of the special role of equality
 785 	   tests in FP code. */
786 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
787 ;
788 /* Comparisons with 0 are often used for booleans and there is
789 nothing useful to predict about them. */
790 else if (XEXP (cond, 1) == const0_rtx
791 || XEXP (cond, 0) == const0_rtx)
792 ;
793 else
794 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
795 break;
796
797 case ORDERED:
798 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
799 break;
800
801 case UNORDERED:
802 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
803 break;
804
805 case LE:
806 case LT:
807 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
808 || XEXP (cond, 1) == constm1_rtx)
809 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
810 break;
811
812 case GE:
813 case GT:
814 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
815 || XEXP (cond, 1) == constm1_rtx)
816 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
817 break;
818
819 default:
820 break;
821 }
822 }
823
 824 /* Statically estimate the probability that a branch will be taken and produce
 825    an estimated profile. When profile feedback is present, the never executed
 826    portions of the function get estimated. */
827
828 void
829 estimate_probability (struct loops *loops_info)
830 {
831 basic_block bb;
832
833 connect_infinite_loops_to_exit ();
834 calculate_dominance_info (CDI_DOMINATORS);
835 calculate_dominance_info (CDI_POST_DOMINATORS);
836
837 predict_loops (loops_info, true);
838
839 iv_analysis_done ();
840
841 /* Attempt to predict conditional jumps using a number of heuristics. */
842 FOR_EACH_BB (bb)
843 {
844 rtx last_insn = BB_END (bb);
845 edge e;
846 edge_iterator ei;
847
848 if (! can_predict_insn_p (last_insn))
849 continue;
850
851 FOR_EACH_EDGE (e, ei, bb->succs)
852 {
 853 	  /* Predict early returns to be probable, as we've already taken
 854 	     care of error returns and the others are often used for fast
 855 	     paths through the function. */
856 if ((e->dest == EXIT_BLOCK_PTR
857 || (single_succ_p (e->dest)
858 && single_succ (e->dest) == EXIT_BLOCK_PTR))
859 && !predicted_by_p (bb, PRED_NULL_RETURN)
860 && !predicted_by_p (bb, PRED_CONST_RETURN)
861 && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
862 && !last_basic_block_p (e->dest))
863 predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);
864
 865 	  /* Look for the block we are guarding (i.e. we dominate it,
866 but it doesn't postdominate us). */
867 if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
868 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
869 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
870 {
871 rtx insn;
872
873 /* The call heuristic claims that a guarded function call
874 is improbable. This is because such calls are often used
875 to signal exceptional situations such as printing error
876 messages. */
877 for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
878 insn = NEXT_INSN (insn))
879 if (CALL_P (insn)
 880 		    /* Constant and pure calls are hardly used to signal
 881 		       something exceptional. */
882 && ! CONST_OR_PURE_CALL_P (insn))
883 {
884 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
885 break;
886 }
887 }
888 }
889 bb_estimate_probability_locally (bb);
890 }
891
892 /* Attach the combined probability to each conditional jump. */
893 FOR_EACH_BB (bb)
894 combine_predictions_for_insn (BB_END (bb), bb);
895
896 remove_fake_edges ();
897 estimate_bb_frequencies (loops_info);
898 free_dominance_info (CDI_POST_DOMINATORS);
899 if (profile_status == PROFILE_ABSENT)
900 profile_status = PROFILE_GUESSED;
901 }
902
903 /* Set edge->probability for each successor edge of BB. */
904 void
905 guess_outgoing_edge_probabilities (basic_block bb)
906 {
907 bb_estimate_probability_locally (bb);
908 combine_predictions_for_insn (BB_END (bb), bb);
909 }
910 \f
 911 /* Return the constant that EXPR will likely have at execution time, or NULL
 912    if unknown. The function is used by the builtin_expect branch predictor,
 913    so the evidence must come from this construct plus possible constant folding.
 914
 915    We may want to implement a more involved value guess (such as value range
 916    propagation based prediction), but such tricks shall go into a new
 917    implementation. */
918
919 static tree
920 expr_expected_value (tree expr, bitmap visited)
921 {
922 if (TREE_CONSTANT (expr))
923 return expr;
924 else if (TREE_CODE (expr) == SSA_NAME)
925 {
926 tree def = SSA_NAME_DEF_STMT (expr);
927
928 /* If we were already here, break the infinite cycle. */
929 if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
930 return NULL;
931 bitmap_set_bit (visited, SSA_NAME_VERSION (expr));
932
933 if (TREE_CODE (def) == PHI_NODE)
934 {
 935 	  /* All the arguments of the PHI node must have the same expected
 936 	     constant value. */
937 int i;
938 tree val = NULL, new_val;
939
940 for (i = 0; i < PHI_NUM_ARGS (def); i++)
941 {
942 tree arg = PHI_ARG_DEF (def, i);
943
 944 	      /* If this PHI has itself as an argument, we cannot
 945 	         determine the expected value of this argument. However,
 946 	         if we can find an expected constant value for the other
 947 	         PHI args then we can still be sure that this is
 948 	         likely a constant. So be optimistic and just
 949 	         continue with the next argument. */
950 if (arg == PHI_RESULT (def))
951 continue;
952
953 new_val = expr_expected_value (arg, visited);
954 if (!new_val)
955 return NULL;
956 if (!val)
957 val = new_val;
958 else if (!operand_equal_p (val, new_val, false))
959 return NULL;
960 }
961 return val;
962 }
963 if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
964 return NULL;
965 return expr_expected_value (TREE_OPERAND (def, 1), visited);
966 }
967 else if (TREE_CODE (expr) == CALL_EXPR)
968 {
969 tree decl = get_callee_fndecl (expr);
970 if (!decl)
971 return NULL;
972 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
973 && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
974 {
975 tree arglist = TREE_OPERAND (expr, 1);
976 tree val;
977
978 if (arglist == NULL_TREE
979 || TREE_CHAIN (arglist) == NULL_TREE)
980 return NULL;
 981 	  /* Both branches returned the same value; return it directly and
 982 	     use ARGLIST instead of re-walking TREE_OPERAND (expr, 1). */
 983 	  val = TREE_VALUE (TREE_CHAIN (arglist));
 984 	  return val;
985 }
986 }
987 if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
988 {
989 tree op0, op1, res;
990 op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
991 if (!op0)
992 return NULL;
993 op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
994 if (!op1)
995 return NULL;
996 res = fold (build (TREE_CODE (expr), TREE_TYPE (expr), op0, op1));
997 if (TREE_CONSTANT (res))
998 return res;
999 return NULL;
1000 }
1001 if (UNARY_CLASS_P (expr))
1002 {
1003 tree op0, res;
1004 op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
1005 if (!op0)
1006 return NULL;
1007 res = fold (build1 (TREE_CODE (expr), TREE_TYPE (expr), op0));
1008 if (TREE_CONSTANT (res))
1009 return res;
1010 return NULL;
1011 }
1012 return NULL;
1013 }
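/* Illustrative sketch (the exact tree shape depends on earlier folding): for

     if (__builtin_expect (ptr != NULL, 1)) ...

   the CALL_EXPR case above finds BUILT_IN_EXPECT and returns its second
   argument, so the comparison has expected value 1 and the caller can
   predict the true edge as taken. */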
1014 \f
1015 /* Get rid of all builtin_expect calls we no longer need. */
1016 static void
1017 strip_builtin_expect (void)
1018 {
1019 basic_block bb;
1020 FOR_EACH_BB (bb)
1021 {
1022 block_stmt_iterator bi;
1023 for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
1024 {
1025 tree stmt = bsi_stmt (bi);
1026 tree fndecl;
1027 tree arglist;
1028
1029 if (TREE_CODE (stmt) == MODIFY_EXPR
1030 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
1031 && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
1032 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1033 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
1034 && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
1035 && TREE_CHAIN (arglist))
1036 {
1037 TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
1038 update_stmt (stmt);
1039 }
1040 }
1041 }
1042 }
1043 \f
 1044 /* Predict using the opcode of the last statement in the basic block. */
1045 static void
1046 tree_predict_by_opcode (basic_block bb)
1047 {
1048 tree stmt = last_stmt (bb);
1049 edge then_edge;
1050 tree cond;
1051 tree op0;
1052 tree type;
1053 tree val;
1054 bitmap visited;
1055 edge_iterator ei;
1056
1057 if (!stmt || TREE_CODE (stmt) != COND_EXPR)
1058 return;
1059 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1060 if (then_edge->flags & EDGE_TRUE_VALUE)
1061 break;
1062 cond = TREE_OPERAND (stmt, 0);
1063 if (!COMPARISON_CLASS_P (cond))
1064 return;
1065 op0 = TREE_OPERAND (cond, 0);
1066 type = TREE_TYPE (op0);
1067 visited = BITMAP_ALLOC (NULL);
1068 val = expr_expected_value (cond, visited);
1069 BITMAP_FREE (visited);
1070 if (val)
1071 {
1072 if (integer_zerop (val))
1073 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
1074 else
1075 predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
1076 return;
1077 }
1078 /* Try "pointer heuristic."
1079 A comparison ptr == 0 is predicted as false.
1080 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1081 if (POINTER_TYPE_P (type))
1082 {
1083 if (TREE_CODE (cond) == EQ_EXPR)
1084 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
1085 else if (TREE_CODE (cond) == NE_EXPR)
1086 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
1087 }
1088 else
1089
1090 /* Try "opcode heuristic."
1091 EQ tests are usually false and NE tests are usually true. Also,
1092 most quantities are positive, so we can make the appropriate guesses
1093 about signed comparisons against zero. */
1094 switch (TREE_CODE (cond))
1095 {
1096 case EQ_EXPR:
1097 case UNEQ_EXPR:
 1098 	/* Floating point comparisons appear to behave in a very
 1099 	   unpredictable way because of the special role of equality
 1100 	   tests in FP code. */
1101 if (FLOAT_TYPE_P (type))
1102 ;
1103 /* Comparisons with 0 are often used for booleans and there is
1104 nothing useful to predict about them. */
1105 else if (integer_zerop (op0)
1106 || integer_zerop (TREE_OPERAND (cond, 1)))
1107 ;
1108 else
1109 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
1110 break;
1111
1112 case NE_EXPR:
1113 case LTGT_EXPR:
 1114 	/* Floating point comparisons appear to behave in a very
 1115 	   unpredictable way because of the special role of equality
 1116 	   tests in FP code. */
1117 if (FLOAT_TYPE_P (type))
1118 ;
1119 /* Comparisons with 0 are often used for booleans and there is
1120 nothing useful to predict about them. */
1121 else if (integer_zerop (op0)
1122 || integer_zerop (TREE_OPERAND (cond, 1)))
1123 ;
1124 else
1125 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
1126 break;
1127
1128 case ORDERED_EXPR:
1129 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
1130 break;
1131
1132 case UNORDERED_EXPR:
1133 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
1134 break;
1135
1136 case LE_EXPR:
1137 case LT_EXPR:
1138 if (integer_zerop (TREE_OPERAND (cond, 1))
1139 || integer_onep (TREE_OPERAND (cond, 1))
1140 || integer_all_onesp (TREE_OPERAND (cond, 1))
1141 || real_zerop (TREE_OPERAND (cond, 1))
1142 || real_onep (TREE_OPERAND (cond, 1))
1143 || real_minus_onep (TREE_OPERAND (cond, 1)))
1144 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
1145 break;
1146
1147 case GE_EXPR:
1148 case GT_EXPR:
1149 if (integer_zerop (TREE_OPERAND (cond, 1))
1150 || integer_onep (TREE_OPERAND (cond, 1))
1151 || integer_all_onesp (TREE_OPERAND (cond, 1))
1152 || real_zerop (TREE_OPERAND (cond, 1))
1153 || real_onep (TREE_OPERAND (cond, 1))
1154 || real_minus_onep (TREE_OPERAND (cond, 1)))
1155 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
1156 break;
1157
1158 default:
1159 break;
1160 }
1161 }
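/* Putting the heuristics above together on small hypothetical sources:
   "if (p == NULL)" hits the pointer heuristic and the true edge is
   predicted NOT_TAKEN; "if (n > 0)" hits the opcode heuristic and is
   predicted TAKEN, reflecting the observation that most signed values
   are positive. */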
1162
 1163 /* Try to guess whether the return value indicates an error code. */
1164 static enum br_predictor
1165 return_prediction (tree val, enum prediction *prediction)
1166 {
1167 /* VOID. */
1168 if (!val)
1169 return PRED_NO_PREDICTION;
1170 /* Different heuristics for pointers and scalars. */
1171 if (POINTER_TYPE_P (TREE_TYPE (val)))
1172 {
1173 /* NULL is usually not returned. */
1174 if (integer_zerop (val))
1175 {
1176 *prediction = NOT_TAKEN;
1177 return PRED_NULL_RETURN;
1178 }
1179 }
1180 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
1181 {
1182 /* Negative return values are often used to indicate
1183 errors. */
1184 if (TREE_CODE (val) == INTEGER_CST
1185 && tree_int_cst_sgn (val) < 0)
1186 {
1187 *prediction = NOT_TAKEN;
1188 return PRED_NEGATIVE_RETURN;
1189 }
 1190       /* Constant return values seem to be commonly taken.
 1191          Zero/one often represent booleans, so exclude them from the
 1192          heuristics. */
1193 if (TREE_CONSTANT (val)
1194 && (!integer_zerop (val) && !integer_onep (val)))
1195 {
1196 *prediction = TAKEN;
1197 return PRED_NEGATIVE_RETURN;
1198 }
1199 }
1200 return PRED_NO_PREDICTION;
1201 }
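/* Examples of the classification above: "return NULL;" yields
   PRED_NULL_RETURN with NOT_TAKEN; "return -1;" yields
   PRED_NEGATIVE_RETURN with NOT_TAKEN; "return 42;" is treated as a
   likely-taken constant return, while 0 and 1 stay unpredicted since
   they usually encode booleans. */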
1202
 1203 /* Find the basic block with the return expression and look for a possible
 1204    return value, trying to apply the RETURN_PREDICTION heuristics. */
1205 static void
1206 apply_return_prediction (int *heads)
1207 {
1208 tree return_stmt;
1209 tree return_val;
1210 edge e;
1211 tree phi;
1212 int phi_num_args, i;
1213 enum br_predictor pred;
1214 enum prediction direction;
1215 edge_iterator ei;
1216
1217 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1218 {
1219 return_stmt = last_stmt (e->src);
1220 if (TREE_CODE (return_stmt) == RETURN_EXPR)
1221 break;
1222 }
1223 if (!e)
1224 return;
1225 return_val = TREE_OPERAND (return_stmt, 0);
1226 if (!return_val)
1227 return;
1228 if (TREE_CODE (return_val) == MODIFY_EXPR)
1229 return_val = TREE_OPERAND (return_val, 1);
1230 if (TREE_CODE (return_val) != SSA_NAME
1231 || !SSA_NAME_DEF_STMT (return_val)
1232 || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
1233 return;
1234 for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
1235 if (PHI_RESULT (phi) == return_val)
1236 break;
1237 if (!phi)
1238 return;
1239 phi_num_args = PHI_NUM_ARGS (phi);
1240 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
1241
 1242   /* Avoid the degenerate case where all return values from the function
 1243      belong to the same category (i.e. they are all positive constants),
 1244      so we can hardly say anything useful about them. */
1245 for (i = 1; i < phi_num_args; i++)
1246 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
1247 break;
1248 if (i != phi_num_args)
1249 for (i = 0; i < phi_num_args; i++)
1250 {
1251 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
1252 if (pred != PRED_NO_PREDICTION)
1253 predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
1254 direction);
1255 }
1256 }
1257
 1258 /* Look for basic blocks that contain unlikely-to-happen events
 1259    (such as noreturn calls) and mark all paths leading to the execution
 1260    of these basic blocks as unlikely. */
1261
1262 static void
1263 tree_bb_level_predictions (void)
1264 {
1265 basic_block bb;
1266 int *heads;
1267
1268 heads = xmalloc (sizeof (int) * last_basic_block);
1269 memset (heads, -1, sizeof (int) * last_basic_block);
1270 heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;
1271
1272 apply_return_prediction (heads);
1273
1274 FOR_EACH_BB (bb)
1275 {
 1276       block_stmt_iterator bsi;  /* Initialized by the loop below. */
1277
1278 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1279 {
1280 tree stmt = bsi_stmt (bsi);
1281 switch (TREE_CODE (stmt))
1282 {
1283 case MODIFY_EXPR:
1284 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
1285 {
1286 stmt = TREE_OPERAND (stmt, 1);
1287 goto call_expr;
1288 }
1289 break;
1290 case CALL_EXPR:
1291 call_expr:;
1292 if (call_expr_flags (stmt) & ECF_NORETURN)
1293 predict_paths_leading_to (bb, heads, PRED_NORETURN,
1294 NOT_TAKEN);
1295 break;
1296 default:
1297 break;
1298 }
1299 }
1300 }
1301
1302 free (heads);
1303 }
1304
1305 /* Predict branch probabilities and estimate profile of the tree CFG. */
1306 static void
1307 tree_estimate_probability (void)
1308 {
1309 basic_block bb;
1310 struct loops loops_info;
1311
1312 flow_loops_find (&loops_info);
1313 if (dump_file && (dump_flags & TDF_DETAILS))
1314 flow_loops_dump (&loops_info, dump_file, NULL, 0);
1315
1316 add_noreturn_fake_exit_edges ();
1317 connect_infinite_loops_to_exit ();
1318 calculate_dominance_info (CDI_DOMINATORS);
1319 calculate_dominance_info (CDI_POST_DOMINATORS);
1320
1321 tree_bb_level_predictions ();
1322
1323 mark_irreducible_loops (&loops_info);
1324 predict_loops (&loops_info, false);
1325
1326 FOR_EACH_BB (bb)
1327 {
1328 edge e;
1329 edge_iterator ei;
1330
1331 FOR_EACH_EDGE (e, ei, bb->succs)
1332 {
 1333 	  /* Predict early returns to be probable, as we've already taken
 1334 	     care of error returns and other cases are often used for
 1335 	     fast paths through the function. */
1336 if (e->dest == EXIT_BLOCK_PTR
1337 && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
1338 && !single_pred_p (bb))
1339 {
1340 edge e1;
1341 edge_iterator ei1;
1342
1343 FOR_EACH_EDGE (e1, ei1, bb->preds)
1344 if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
1345 && !predicted_by_p (e1->src, PRED_CONST_RETURN)
1346 && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
1347 && !last_basic_block_p (e1->src))
1348 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
1349 }
1350
 1351 	  /* Look for the block we are guarding (i.e. we dominate it,
 1352 	     but it doesn't postdominate us). */
1353 if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
1354 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
1355 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
1356 {
1357 block_stmt_iterator bi;
1358
1359 /* The call heuristic claims that a guarded function call
1360 is improbable. This is because such calls are often used
1361 to signal exceptional situations such as printing error
1362 messages. */
1363 for (bi = bsi_start (e->dest); !bsi_end_p (bi);
1364 bsi_next (&bi))
1365 {
1366 tree stmt = bsi_stmt (bi);
1367 if ((TREE_CODE (stmt) == CALL_EXPR
1368 || (TREE_CODE (stmt) == MODIFY_EXPR
1369 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
 1370 		    /* Constant and pure calls are hardly used to signal
 1371 		       something exceptional. */
1372 && TREE_SIDE_EFFECTS (stmt))
1373 {
1374 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
1375 break;
1376 }
1377 }
1378 }
1379 }
1380 tree_predict_by_opcode (bb);
1381 }
1382 FOR_EACH_BB (bb)
1383 combine_predictions_for_bb (dump_file, bb);
1384
1385 if (0) /* FIXME: Enable once we are pass down the profile to RTL level. */
1386 strip_builtin_expect ();
1387 estimate_bb_frequencies (&loops_info);
1388 free_dominance_info (CDI_POST_DOMINATORS);
1389 remove_fake_exit_edges ();
1390 flow_loops_free (&loops_info);
1391 if (dump_file && (dump_flags & TDF_DETAILS))
1392 dump_tree_cfg (dump_file, dump_flags);
1393 if (profile_status == PROFILE_ABSENT)
1394 profile_status = PROFILE_GUESSED;
1395 }
1396 \f
1397 /* __builtin_expect dropped tokens into the insn stream describing expected
 1398    values of registers. Generate branch probabilities based on these
1399 values. */
1400
1401 void
1402 expected_value_to_br_prob (void)
1403 {
1404 rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;
1405
1406 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1407 {
1408 switch (GET_CODE (insn))
1409 {
1410 case NOTE:
1411 /* Look for expected value notes. */
1412 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
1413 {
1414 ev = NOTE_EXPECTED_VALUE (insn);
1415 ev_reg = XEXP (ev, 0);
1416 delete_insn (insn);
1417 }
1418 continue;
1419
1420 case CODE_LABEL:
1421 /* Never propagate across labels. */
1422 ev = NULL_RTX;
1423 continue;
1424
1425 case JUMP_INSN:
1426 /* Look for simple conditional branches. If we haven't got an
1427 expected value yet, no point going further. */
1428 if (!JUMP_P (insn) || ev == NULL_RTX
1429 || ! any_condjump_p (insn))
1430 continue;
1431 break;
1432
1433 default:
1434 /* Look for insns that clobber the EV register. */
1435 if (ev && reg_set_p (ev_reg, insn))
1436 ev = NULL_RTX;
1437 continue;
1438 }
1439
1440 /* Collect the branch condition, hopefully relative to EV_REG. */
1441 /* ??? At present we'll miss things like
1442 (expected_value (eq r70 0))
1443 (set r71 -1)
1444 (set r80 (lt r70 r71))
1445 (set pc (if_then_else (ne r80 0) ...))
1446 as canonicalize_condition will render this to us as
1447 (lt r70, r71)
1448 Could use cselib to try and reduce this further. */
1449 cond = XEXP (SET_SRC (pc_set (insn)), 0);
1450 cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
1451 false, false);
1452 if (! cond || XEXP (cond, 0) != ev_reg
1453 || GET_CODE (XEXP (cond, 1)) != CONST_INT)
1454 continue;
1455
1456 /* Substitute and simplify. Given that the expression we're
1457 building involves two constants, we should wind up with either
1458 true or false. */
1459 cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
1460 XEXP (ev, 1), XEXP (cond, 1));
1461 cond = simplify_rtx (cond);
1462
1463 /* Turn the condition into a scaled branch probability. */
1464 gcc_assert (cond == const_true_rtx || cond == const0_rtx);
1465 predict_insn_def (insn, PRED_BUILTIN_EXPECT,
1466 cond == const_true_rtx ? TAKEN : NOT_TAKEN);
1467 }
1468 }
1469 \f
 1470 /* Check whether this is the last basic block of the function. Commonly
1471 there is one extra common cleanup block. */
1472 static bool
1473 last_basic_block_p (basic_block bb)
1474 {
1475 if (bb == EXIT_BLOCK_PTR)
1476 return false;
1477
1478 return (bb->next_bb == EXIT_BLOCK_PTR
1479 || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
1480 && single_succ_p (bb)
1481 && single_succ (bb)->next_bb == EXIT_BLOCK_PTR));
1482 }
1483
 1484 /* Set branch probabilities according to PRED and TAKEN. HEADS[bb->index]
 1485    should be the index of the basic block in which we need to alter the
 1486    branch predictions (i.e. the first of our dominators that we do not
 1487    post-dominate). This information is filled in on demand, so -1 may be
 1488    there in case it was not needed yet. */
1489
1490 static void
1491 predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
1492 enum prediction taken)
1493 {
1494 edge e;
1495 edge_iterator ei;
1496 int y;
1497
1498 if (heads[bb->index] < 0)
1499 {
 1500       /* This is the first time we need this field in the heads array, so
 1501          find the first dominator that we do not post-dominate (using
 1502          already known members of the heads array). */
1503 basic_block ai = bb;
1504 basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
1505 int head;
1506
1507 while (heads[next_ai->index] < 0)
1508 {
1509 if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
1510 break;
1511 heads[next_ai->index] = ai->index;
1512 ai = next_ai;
1513 next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
1514 }
1515 if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
1516 head = next_ai->index;
1517 else
1518 head = heads[next_ai->index];
1519 while (next_ai != bb)
1520 {
1521 next_ai = ai;
1522 if (heads[ai->index] == ENTRY_BLOCK)
1523 ai = ENTRY_BLOCK_PTR;
1524 else
1525 ai = BASIC_BLOCK (heads[ai->index]);
1526 heads[next_ai->index] = head;
1527 }
1528 }
1529 y = heads[bb->index];
1530
 1531   /* Now find the edge that leads to our branch and apply the prediction. */
1532
1533 if (y == last_basic_block)
1534 return;
1535 FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
1536 if (e->dest->index >= 0
1537 && dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
1538 predict_edge_def (e, pred, taken);
1539 }
1540 \f
1541 /* This is used to carry information about basic blocks. It is
1542 attached to the AUX field of the standard CFG block. */
1543
1544 typedef struct block_info_def
1545 {
1546 /* Estimated frequency of execution of basic_block. */
1547 sreal frequency;
1548
1549 /* To keep queue of basic blocks to process. */
1550 basic_block next;
1551
1552 /* Number of predecessors we need to visit first. */
1553 int npredecessors;
1554 } *block_info;
1555
1556 /* Similar information for edges. */
1557 typedef struct edge_info_def
1558 {
 1559   /* In case the edge is a loopback edge, the probability that the edge will
 1560      be reached provided that the header is. The estimated number of iterations
 1561      of the loop can then be computed as 1 / (1 - back_edge_prob). */
1562 sreal back_edge_prob;
 1563   /* True if the edge is a loopback edge in the natural loop. */
1564 unsigned int back_edge:1;
1565 } *edge_info;
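/* Worked example for back_edge_prob: if a loopback edge is predicted taken
   with probability 0.9, its loop is expected to iterate
   1 / (1 - 0.9) == 10 times, which is how propagate_freq below scales the
   header frequency. */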
1566
1567 #define BLOCK_INFO(B) ((block_info) (B)->aux)
1568 #define EDGE_INFO(E) ((edge_info) (E)->aux)
1569
1570 /* Helper function for estimate_bb_frequencies.
1571 Propagate the frequencies for LOOP. */
1572
1573 static void
1574 propagate_freq (struct loop *loop, bitmap tovisit)
1575 {
1576 basic_block head = loop->header;
1577 basic_block bb;
1578 basic_block last;
1579 unsigned i;
1580 edge e;
1581 basic_block nextbb;
1582 bitmap_iterator bi;
1583
 1584   /* For each basic block we need to visit, count the number of its
 1585      predecessors that we need to visit first. */
1586 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
1587 {
1588 edge_iterator ei;
1589 int count = 0;
1590
 1591       /* The outermost "loop" includes the exit block, which we cannot
1592 look up via BASIC_BLOCK. Detect this and use EXIT_BLOCK_PTR
1593 directly. Do the same for the entry block. */
1594 if (i == (unsigned)ENTRY_BLOCK)
1595 bb = ENTRY_BLOCK_PTR;
1596 else if (i == (unsigned)EXIT_BLOCK)
1597 bb = EXIT_BLOCK_PTR;
1598 else
1599 bb = BASIC_BLOCK (i);
1600
1601 FOR_EACH_EDGE (e, ei, bb->preds)
1602 {
1603 bool visit = bitmap_bit_p (tovisit, e->src->index);
1604
1605 if (visit && !(e->flags & EDGE_DFS_BACK))
1606 count++;
1607 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
1608 fprintf (dump_file,
 1609 		     "Irreducible region hit, ignoring edge %i->%i\n",
1610 e->src->index, bb->index);
1611 }
1612 BLOCK_INFO (bb)->npredecessors = count;
1613 }
1614
1615 memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
1616 last = head;
1617 for (bb = head; bb; bb = nextbb)
1618 {
1619 edge_iterator ei;
1620 sreal cyclic_probability, frequency;
1621
1622 memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
1623 memcpy (&frequency, &real_zero, sizeof (real_zero));
1624
1625 nextbb = BLOCK_INFO (bb)->next;
1626 BLOCK_INFO (bb)->next = NULL;
1627
1628 /* Compute frequency of basic block. */
1629 if (bb != head)
1630 {
1631 #ifdef ENABLE_CHECKING
1632 FOR_EACH_EDGE (e, ei, bb->preds)
1633 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
1634 || (e->flags & EDGE_DFS_BACK));
1635 #endif
1636
1637 FOR_EACH_EDGE (e, ei, bb->preds)
1638 if (EDGE_INFO (e)->back_edge)
1639 {
1640 sreal_add (&cyclic_probability, &cyclic_probability,
1641 &EDGE_INFO (e)->back_edge_prob);
1642 }
1643 else if (!(e->flags & EDGE_DFS_BACK))
1644 {
1645 sreal tmp;
1646
1647 /* frequency += (e->probability
1648 * BLOCK_INFO (e->src)->frequency /
1649 REG_BR_PROB_BASE); */
1650
1651 sreal_init (&tmp, e->probability, 0);
1652 sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
1653 sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
1654 sreal_add (&frequency, &frequency, &tmp);
1655 }
1656
1657 if (sreal_compare (&cyclic_probability, &real_zero) == 0)
1658 {
1659 memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
1660 sizeof (frequency));
1661 }
1662 else
1663 {
1664 if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
1665 {
1666 memcpy (&cyclic_probability, &real_almost_one,
1667 sizeof (real_almost_one));
1668 }
1669
1670 /* BLOCK_INFO (bb)->frequency = frequency
1671 / (1 - cyclic_probability) */
1672
1673 sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
1674 sreal_div (&BLOCK_INFO (bb)->frequency,
1675 &frequency, &cyclic_probability);
1676 }
1677 }
1678
1679 bitmap_clear_bit (tovisit, bb->index);
1680
1681 e = find_edge (bb, head);
1682 if (e)
1683 {
1684 sreal tmp;
1685
1686 /* EDGE_INFO (e)->back_edge_prob
1687 = ((e->probability * BLOCK_INFO (bb)->frequency)
1688 / REG_BR_PROB_BASE); */
1689
1690 sreal_init (&tmp, e->probability, 0);
1691 sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
1692 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
1693 &tmp, &real_inv_br_prob_base);
1694 }
1695
1696 /* Propagate to successor blocks. */
1697 FOR_EACH_EDGE (e, ei, bb->succs)
1698 if (!(e->flags & EDGE_DFS_BACK)
1699 && BLOCK_INFO (e->dest)->npredecessors)
1700 {
1701 BLOCK_INFO (e->dest)->npredecessors--;
1702 if (!BLOCK_INFO (e->dest)->npredecessors)
1703 {
1704 if (!nextbb)
1705 nextbb = e->dest;
1706 else
1707 BLOCK_INFO (last)->next = e->dest;
1708
1709 last = e->dest;
1710 }
1711 }
1712 }
1713 }
1714
 1715 /* Estimate probabilities of loopback edges in loops at the same nest level. */
1716
1717 static void
1718 estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
1719 {
1720 struct loop *loop;
1721
1722 for (loop = first_loop; loop; loop = loop->next)
1723 {
1724 edge e;
1725 basic_block *bbs;
1726 unsigned i;
1727
1728 estimate_loops_at_level (loop->inner, tovisit);
1729
 1730       /* Do not do this for the dummy function loop. */
1731 if (EDGE_COUNT (loop->latch->succs) > 0)
1732 {
1733 /* Find current loop back edge and mark it. */
1734 e = loop_latch_edge (loop);
1735 EDGE_INFO (e)->back_edge = 1;
1736 }
1737
1738 bbs = get_loop_body (loop);
1739 for (i = 0; i < loop->num_nodes; i++)
1740 bitmap_set_bit (tovisit, bbs[i]->index);
1741 free (bbs);
1742 propagate_freq (loop, tovisit);
1743 }
1744 }
1745
1746 /* Convert counts measured by profile driven feedback to frequencies.
1747 Return nonzero iff there was any nonzero execution count. */
1748
1749 int
1750 counts_to_freqs (void)
1751 {
1752 gcov_type count_max, true_count_max = 0;
1753 basic_block bb;
1754
1755 FOR_EACH_BB (bb)
1756 true_count_max = MAX (bb->count, true_count_max);
1757
1758 count_max = MAX (true_count_max, 1);
1759 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1760 bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
1761 return true_count_max;
1762 }
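/* Worked example of the scaling above, assuming BB_FREQ_MAX is 10000 (its
   usual value): with count_max == 1000, a block with count == 250 gets
   frequency (250 * 10000 + 500) / 1000 == 2500. */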
1763
 1764 /* Return true if the function is likely to be expensive, so there is no point
 1765    in optimizing the performance of the prologue and epilogue or doing inlining
 1766    at the expense of code size growth. THRESHOLD is the limit on the number of
 1767    instructions the function can execute on average and still not be considered expensive. */
1768
1769 bool
1770 expensive_function_p (int threshold)
1771 {
1772 unsigned int sum = 0;
1773 basic_block bb;
1774 unsigned int limit;
1775
 1776   /* We cannot compute accurately for large thresholds due to scaled
1777 frequencies. */
1778 gcc_assert (threshold <= BB_FREQ_MAX);
1779
 1780   /* Frequencies are out of range. This either means that the function contains
 1781      an internal loop executing more than BB_FREQ_MAX times, or that profile
 1782      feedback is available and the function has not been executed at all. */
1783 if (ENTRY_BLOCK_PTR->frequency == 0)
1784 return true;
1785
 1786   /* At most BB_FREQ_MAX^2, so overflow won't happen. */
1787 limit = ENTRY_BLOCK_PTR->frequency * threshold;
1788 FOR_EACH_BB (bb)
1789 {
1790 rtx insn;
1791
1792 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
1793 insn = NEXT_INSN (insn))
1794 if (active_insn_p (insn))
1795 {
1796 sum += bb->frequency;
1797 if (sum > limit)
1798 return true;
1799 }
1800 }
1801
1802 return false;
1803 }
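/* For instance, with ENTRY_BLOCK_PTR->frequency == 1000 and THRESHOLD == 20,
   LIMIT is 20000; a single block of frequency 500 containing 50 active insns
   contributes 500 * 50 == 25000, so the function is deemed expensive. */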
1804
 1805 /* Estimate basic block frequencies from the given branch probabilities. */
1806
1807 static void
1808 estimate_bb_frequencies (struct loops *loops)
1809 {
1810 basic_block bb;
1811 sreal freq_max;
1812
1813 if (!flag_branch_probabilities || !counts_to_freqs ())
1814 {
1815 static int real_values_initialized = 0;
1816 bitmap tovisit;
1817
1818 if (!real_values_initialized)
1819 {
1820 real_values_initialized = 1;
1821 sreal_init (&real_zero, 0, 0);
1822 sreal_init (&real_one, 1, 0);
1823 sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
1824 sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
1825 sreal_init (&real_one_half, 1, -1);
1826 sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
1827 sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
1828 }
1829
1830 mark_dfs_back_edges ();
1831
1832 single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;
1833
1834 /* Set up block info for each basic block. */
1835 tovisit = BITMAP_ALLOC (NULL);
1836 alloc_aux_for_blocks (sizeof (struct block_info_def));
1837 alloc_aux_for_edges (sizeof (struct edge_info_def));
1838 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1839 {
1840 edge e;
1841 edge_iterator ei;
1842
1843 FOR_EACH_EDGE (e, ei, bb->succs)
1844 {
1845 sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
1846 sreal_mul (&EDGE_INFO (e)->back_edge_prob,
1847 &EDGE_INFO (e)->back_edge_prob,
1848 &real_inv_br_prob_base);
1849 }
1850 }
1851
1852 /* First compute probabilities locally for each loop from innermost
1853 to outermost to examine probabilities for back edges. */
1854 estimate_loops_at_level (loops->tree_root, tovisit);
1855
1856 memcpy (&freq_max, &real_zero, sizeof (real_zero));
1857 FOR_EACH_BB (bb)
1858 if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
1859 memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
1860
1861 sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
1862 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1863 {
1864 sreal tmp;
1865
1866 sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
1867 sreal_add (&tmp, &tmp, &real_one_half);
1868 bb->frequency = sreal_to_int (&tmp);
1869 }
1870
1871 free_aux_for_blocks ();
1872 free_aux_for_edges ();
1873 BITMAP_FREE (tovisit);
1874 }
1875 compute_function_frequency ();
1876 if (flag_reorder_functions)
1877 choose_function_section ();
1878 }
1879
 1880 /* Decide whether the function is hot, cold, or unlikely executed. */
1881 static void
1882 compute_function_frequency (void)
1883 {
1884 basic_block bb;
1885
1886 if (!profile_info || !flag_branch_probabilities)
1887 return;
1888 cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
1889 FOR_EACH_BB (bb)
1890 {
1891 if (maybe_hot_bb_p (bb))
1892 {
1893 cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
1894 return;
1895 }
1896 if (!probably_never_executed_bb_p (bb))
1897 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
1898 }
1899 }
1900
1901 /* Choose appropriate section for the function. */
1902 static void
1903 choose_function_section (void)
1904 {
1905 if (DECL_SECTION_NAME (current_function_decl)
1906 || !targetm.have_named_sections
1907 /* Theoretically we can split the gnu.linkonce text section too,
1908 but this requires more work as the frequency needs to match
 1909      for all generated objects, so we need to merge the frequency
1910 of all instances. For now just never set frequency for these. */
1911 || DECL_ONE_ONLY (current_function_decl))
1912 return;
1913
1914 /* If we are doing the partitioning optimization, let the optimization
1915 choose the correct section into which to put things. */
1916
1917 if (flag_reorder_blocks_and_partition)
1918 return;
1919
1920 if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
1921 DECL_SECTION_NAME (current_function_decl) =
1922 build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
1923 if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
1924 DECL_SECTION_NAME (current_function_decl) =
1925 build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
1926 UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
1927 }
1928
1929
1930 struct tree_opt_pass pass_profile =
1931 {
1932 "profile", /* name */
1933 NULL, /* gate */
1934 tree_estimate_probability, /* execute */
1935 NULL, /* sub */
1936 NULL, /* next */
1937 0, /* static_pass_number */
1938 TV_BRANCH_PROB, /* tv_id */
1939 PROP_cfg, /* properties_required */
1940 0, /* properties_provided */
1941 0, /* properties_destroyed */
1942 0, /* todo_flags_start */
1943 TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
1944 0 /* letter */
1945 };