/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Names given to guesstimated probabilities.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 100 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)

static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void estimate_loops_at_level (struct loop *, bitmap);
static void propagate_freq (struct loop *, bitmap);
static void estimate_bb_frequencies (struct loops *);
static void predict_paths_leading_to (basic_block, int *, enum br_predictor, enum prediction);
static bool last_basic_block_p (basic_block);
static void compute_function_frequency (void);
static void choose_function_section (void);
static bool can_predict_insn_p (rtx);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hit rate in percent to our probability representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
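/* For instance, assuming the usual REG_BR_PROB_BASE of 10000,
   HITRATE (75) evaluates to (75 * 10000 + 50) / 100 == 7500,
   i.e. a 75% hit rate.  */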

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return true if BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}

/* Return true if BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities
      && (bb->count
	  < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true if BB is probably never executed.  */
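/* With profile feedback, this holds when the execution count, rounded to
   the nearest whole number of executions per program run, is zero.  */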
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  return false;
}

/* Return true if one of BB's outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

/* Return true if one of BB's outgoing edges is already predicted by
   PREDICTOR.  */

bool
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  for (i = bb->predictions; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}

/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken), which is predicted right slightly over 75% of the time.
   It is however notoriously poor at predicting the probability itself.
   In general the guessed profile appears a lot flatter (with probabilities
   closer to 50%) than the reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn calls and edges
   predicted by the number-of-iterations heuristics are predicted well.
   This predicate should be able to distinguish those, but at the moment
   it simply checks for the noreturn heuristic, which is the only one
   giving a probability over 99% or below 1%.  In the future we might want
   to propagate reliability information across the CFG if we find it
   useful in multiple places.  */
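/* Assuming the usual REG_BR_PROB_BASE of 10000, the test below accepts
   guessed probabilities only at or beyond HITRATE (1) == 100 (1%) and
   HITRATE (99) == 9900 (99%).  */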
static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
	  || (profile_status == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}

/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}

static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

      i->ep_next = e->src->predictions;
      e->src->predictions = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (e->src->predictions)
    {
      struct edge_prediction **prediction = &e->src->predictions;
      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    *prediction = (*prediction)->ep_next;
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (rtx insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This
   needs to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (file, " hit ");
	  fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of BB's outgoing edges.  Set them
   evenly and hope for the best.  */
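/* For example, with three eligible successor edges and the usual
   REG_BR_PROB_BASE of 10000, each edge gets (10000 + 1) / 3 == 3333;
   the probabilities sum to 9999 because of rounding.  */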
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove now-useless REG_BR_PRED
   notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32-bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
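
  /* As a worked example of the Dempster-Shafer combination above (assuming
     REG_BR_PROB_BASE == 10000): starting from the even prior of 5000 and
     combining one predictor of 7500 gives
       d = 5000 * 7500 + 5000 * 2500 == 50,000,000
       combined = 5000 * 7500 * 10000 / d == 7500,
     and folding in a second independent predictor of 7500 gives
       d = 7500 * 7500 + 2500 * 2500 == 62,500,000
       combined = 7500 * 7500 * 10000 / d == 9000,
     i.e. two 75% hints reinforce each other to 90%.  */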

  /* Decide which heuristic to use.  In case we didn't match anything,
     use the no_prediction heuristic; in case we did, use either first
     match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are seeing a
	 non-degenerate conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}

/* Combine predictions into a single probability and store them into the
   CFG.  Remove now-useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
	nedges++;
	if (first && !second)
	  second = e;
	if (!first)
	  first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first-match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
	set_even_probabilities (bb);
      bb->predictions = NULL;
      if (dump_file)
	fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
		 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use the probability guessed
     by the predictor with the smallest index.  */
  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != first)
	probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
	best_probability = probability, best_predictor = predictor;

      d = (combined_probability * probability
	   + (REG_BR_PROB_BASE - combined_probability)
	   * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32-bit integers.  */
      if (d == 0)
	/* If one probability is 0% and one 100%, avoid division by zero.  */
	combined_probability = REG_BR_PROB_BASE / 2;
      else
	combined_probability = (((double) combined_probability) * probability
				* REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use the no_prediction heuristic; in case we did, use either first
     match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
		       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
		       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != EDGE_SUCC (bb, 0))
	probability = REG_BR_PROB_BASE - probability;
      dump_prediction (dump_file, predictor, probability, bb,
		       !first_match || best_predictor == predictor);
    }
  bb->predictions = NULL;

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Predict edge probabilities by exploiting loop structure.  */
static void
predict_loops (struct loops *loops_info)
{
  unsigned i;

  scev_initialize (loops_info);

  /* Walk the natural loops and apply the loop-based heuristics to their
     exit and branch edges.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      unsigned n_exits;
      struct loop *loop = loops_info->parray[i];
      edge *exits;
      struct tree_niter_desc niter_desc;

      exits = get_loop_exit_edges (loop, &n_exits);

      for (j = 0; j < n_exits; j++)
	{
	  tree niter = NULL;

	  if (number_of_iterations_exit (loop, exits[j], &niter_desc, false))
	    niter = niter_desc.niter;
	  if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
	    niter = loop_niter_by_eval (loop, exits[j]);

	  if (TREE_CODE (niter) == INTEGER_CST)
	    {
	      int probability;
	      int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
	      if (host_integerp (niter, 1)
		  && tree_int_cst_lt (niter,
				      build_int_cstu (NULL_TREE, max - 1)))
		{
		  HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
		  probability = ((REG_BR_PROB_BASE + nitercst / 2)
				 / nitercst);
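		  /* E.g. a loop known to iterate 10 times gives
		     nitercst == 11 and, with the usual REG_BR_PROB_BASE
		     of 10000, probability == (10000 + 5) / 11 == 909,
		     so each exit test is predicted taken about 9% of
		     the time.  */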
		}
	      else
		probability = ((REG_BR_PROB_BASE + max / 2) / max);

	      predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
	    }
	}
      free (exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statements.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better handled
	     separately.  */
	  if (predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict edges exiting the loop as not
	     taken if the conditional has no successor that stays inside
	     the loop.  */
	  if (!header_found)
	    {
	      /* For a loop with many exits we don't want to predict every
		 exit with a fairly large probability, because if all exits
		 are considered in a row, the loop would be predicted to
		 iterate almost never.  The code dividing the probability
		 by the number of exits is very rough.  It should compute
		 the number of exits taken in each path through the
		 function (not the overall number of exits, which might be
		 a lot higher for loops with wide switch statements in
		 them) and compute the n-th root of the probability.

		 We limit the minimal probability to 2% to stop
		 EDGE_PROBABILITY_RELIABLE from trusting the branch
		 prediction, as this was causing a regression in the perl
		 benchmark containing such a wide loop.  */
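	      /* Purely as an illustration (not the actual predict.def
		 value): if PRED_LOOP_EXIT's hitrate were 90%, a loop with
		 four exits would give each exit edge
		 (10000 - 9000) / 4 == 250, i.e. 2.5%, just above the 2%
		 floor applied below.  */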

	      int probability = ((REG_BR_PROB_BASE
				  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
				 / n_exits);
	      if (probability < HITRATE (2))
		probability = HITRATE (2);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest->index < NUM_FIXED_BLOCKS
		    || !flow_bb_inside_loop_p (loop, e->dest))
		  predict_edge (e, PRED_LOOP_EXIT, probability);
	    }
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  scev_finalize ();
  current_loops = NULL;
}

/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	  || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
	predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
	predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
	/* Unconditional branch.  */
	predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			  cond == const0_rtx ? NOT_TAKEN : TAKEN);
	break;

      case EQ:
      case UNEQ:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE:
      case LTGT:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (XEXP (cond, 1) == const0_rtx
		 || XEXP (cond, 0) == const0_rtx)
	  ;
	else
	  predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	break;

      case UNORDERED:
	predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	break;

      case LE:
      case LT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE:
      case GT:
	if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
	    || XEXP (cond, 1) == constm1_rtx)
	  predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
\f
/* Return the constant EXPR is likely to have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch predictor,
   so the evidence must come from that construct plus possible constant
   folding.

   We may want to implement a more involved value guess (such as value range
   propagation based prediction), but such tricks should go into a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  if (TREE_CONSTANT (expr))
    return expr;
  else if (TREE_CODE (expr) == SSA_NAME)
    {
      tree def = SSA_NAME_DEF_STMT (expr);

      /* If we were already here, break the infinite cycle.  */
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
	return NULL;
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));

      if (TREE_CODE (def) == PHI_NODE)
	{
	  /* All the arguments of the PHI node must have the same expected
	     constant value.  */
	  int i;
	  tree val = NULL, new_val;

	  for (i = 0; i < PHI_NUM_ARGS (def); i++)
	    {
	      tree arg = PHI_ARG_DEF (def, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the expected value of this argument.  However,
		 if we can find an expected constant value for the other
		 PHI args then we can still be sure that this is
		 likely a constant.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def))
		continue;

	      new_val = expr_expected_value (arg, visited);
	      if (!new_val)
		return NULL;
	      if (!val)
		val = new_val;
	      else if (!operand_equal_p (val, new_val, false))
		return NULL;
	    }
	  return val;
	}
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
	return NULL;
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
    }
  else if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree decl = get_callee_fndecl (expr);
      if (!decl)
	return NULL;
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
	{
	  tree arglist = TREE_OPERAND (expr, 1);
	  tree val;

	  if (arglist == NULL_TREE
	      || TREE_CHAIN (arglist) == NULL_TREE)
	    return NULL;
	  val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	  if (TREE_CONSTANT (val))
	    return val;
	  return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
	}
    }
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
    {
      tree op0, op1, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
      if (!op1)
	return NULL;
      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  if (UNARY_CLASS_P (expr))
    {
      tree op0, res;
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
      if (!op0)
	return NULL;
      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
      if (TREE_CONSTANT (res))
	return res;
      return NULL;
    }
  return NULL;
}
\f
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
	{
	  tree stmt = bsi_stmt (bi);
	  tree fndecl;
	  tree arglist;

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
	      && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
	      && TREE_CHAIN (arglist))
	    {
	      TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
	      update_stmt (stmt);
	    }
	}
    }
}
\f
/* Predict using the opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
	predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
	predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
	break;

      case NE_EXPR:
      case LTGT_EXPR:
	/* Floating point comparisons appear to behave in a very
	   unpredictable way because of the special role of equality
	   tests in FP code.  */
	if (FLOAT_TYPE_P (type))
	  ;
	/* Comparisons with 0 are often used for booleans and there is
	   nothing useful to predict about them.  */
	else if (integer_zerop (op0)
		 || integer_zerop (TREE_OPERAND (cond, 1)))
	  ;
	else
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
	break;

      case ORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
	break;

      case UNORDERED_EXPR:
	predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
	break;

      case LE_EXPR:
      case LT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
	break;

      case GE_EXPR:
      case GT_EXPR:
	if (integer_zerop (TREE_OPERAND (cond, 1))
	    || integer_onep (TREE_OPERAND (cond, 1))
	    || integer_all_onesp (TREE_OPERAND (cond, 1))
	    || real_zerop (TREE_OPERAND (cond, 1))
	    || real_onep (TREE_OPERAND (cond, 1))
	    || real_minus_onep (TREE_OPERAND (cond, 1)))
	  predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
	break;

      default:
	break;
      }
}

/* Try to guess whether the return value means an error code.  */
static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NULL_RETURN;
	}
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
	 errors.  */
      if (TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_sgn (val) < 0)
	{
	  *prediction = NOT_TAKEN;
	  return PRED_NEGATIVE_RETURN;
	}
      /* Constant return values seem to be commonly taken.
	 Zero/one often represent booleans, so exclude them from the
	 heuristics.  */
      if (TREE_CONSTANT (val)
	  && (!integer_zerop (val) && !integer_onep (val)))
	{
	  *prediction = TAKEN;
	  return PRED_CONST_RETURN;
	}
    }
  return PRED_NO_PREDICTION;
}

/* Find the basic block with the return expression and look for a possible
   return value, trying to apply the return prediction heuristics.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt = NULL;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
	break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants),
     in which case we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
	pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
	if (pred != PRED_NO_PREDICTION)
	  predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
				    direction);
      }
}

/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of these basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  int *heads;

  heads = XCNEWVEC (int, last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  apply_return_prediction (heads);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  switch (TREE_CODE (stmt))
	    {
	    case MODIFY_EXPR:
	      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
		{
		  stmt = TREE_OPERAND (stmt, 1);
		  goto call_expr;
		}
	      break;
	    case CALL_EXPR:
	    call_expr:;
	      if (call_expr_flags (stmt) & ECF_NORETURN)
		predict_paths_leading_to (bb, heads, PRED_NORETURN,
					  NOT_TAKEN);
	      break;
	    default:
	      break;
	    }
	}
    }

  free (heads);
}

/* Predict branch probabilities and estimate the profile of the tree CFG.  */
static unsigned int
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and other cases are often used for
	     fast paths through the function.  */
	  if (e->dest == EXIT_BLOCK_PTR
	      && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
	      && !single_pred_p (bb))
	    {
	      edge e1;
	      edge_iterator ei1;

	      FOR_EACH_EDGE (e1, ei1, bb->preds)
		if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
		    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
		    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
		    && !last_basic_block_p (e1->src))
		  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
	    }

	  /* Look for the block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
	      && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
	    {
	      block_stmt_iterator bi;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (bi = bsi_start (e->dest); !bsi_end_p (bi);
		   bsi_next (&bi))
		{
		  tree stmt = bsi_stmt (bi);
		  if ((TREE_CODE (stmt) == CALL_EXPR
		       || (TREE_CODE (stmt) == MODIFY_EXPR
			   && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
		      /* Constant and pure calls are hardly used to signal
			 something exceptional.  */
		      && TREE_SIDE_EFFECTS (stmt))
		    {
		      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		      break;
		    }
		}
	    }
	}
      tree_predict_by_opcode (bb);
    }
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

  strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
  return 0;
}
\f
/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (basic_block bb)
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && single_succ_p (bb)
	      && single_succ (bb)->next_bb == EXIT_BLOCK_PTR));
}

/* Set branch probabilities of edges according to PRED and TAKEN.
   HEADS[bb->index] should be the index of the basic block in which we
   need to alter branch predictions (i.e. the first of our dominators
   such that we do not post-dominate it); we fill this information in
   on demand, so ENTRY_BLOCK marks entries not computed yet.  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
			  enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] == ENTRY_BLOCK)
    {
      /* This is the first time we need this field in the heads array;
	 find the first dominator that we do not post-dominate (we are
	 using already-known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      while (heads[next_ai->index] == ENTRY_BLOCK)
	{
	  if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
	}
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      while (next_ai != bb)
	{
	  next_ai = ai;
	  ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= NUM_FIXED_BLOCKS
	&& dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
\f
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached given that the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;
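
/* For example, a back edge reached with probability 0.9 whenever the
   header executes implies roughly 1 / (1 - 0.9) == 10 expected loop
   iterations.  */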

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      /* The outermost "loop" includes the exit block, which we cannot
	 look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
	 directly.  Do the same for the entry block.  */
      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  bool visit = bitmap_bit_p (tovisit, e->src->index);

	  if (visit && !(e->flags & EDGE_DFS_BACK))
	    count++;
	  else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
	    fprintf (dump_file,
		     "Irreducible region hit, ignoring edge %i->%i\n",
		     e->src->index, bb->index);
	}
      BLOCK_INFO (bb)->npredecessors = count;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
			|| (e->flags & EDGE_DFS_BACK));
#endif

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency /
				 REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
	{
	  sreal tmp;

	  /* EDGE_INFO (e)->back_edge_prob
	     = ((e->probability * BLOCK_INFO (bb)->frequency)
		/ REG_BR_PROB_BASE); */

	  sreal_init (&tmp, e->probability, 0);
	  sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	  sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		     &tmp, &real_inv_br_prob_base);
	}

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}

/* Estimate probabilities of loopback edges in loops at the same nest
   level.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for the dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
	{
	  /* Find current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}

/* Convert counts measured by profile-driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_EACH_BB (bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
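
  /* Scale counts linearly into the range [0, BB_FREQ_MAX], rounding to
     nearest: e.g. with BB_FREQ_MAX == 10000, a count of 5 out of a
     maximum of 20 becomes (5 * 10000 + 10) / 20 == 2500.  */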
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
  return true_count_max;
}

/* Return true if the function is likely to be expensive, so there is no
   point in optimizing the performance of the prologue or epilogue, or in
   doing inlining, at the expense of code size growth.  THRESHOLD is the
   limit of the number of instructions the function can execute on average
   to still be considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute this accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times, or
     that profile feedback is available but the function has never been
     executed.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* At most BB_FREQ_MAX^2, so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
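
  /* Each active insn below contributes its block's frequency to SUM, so
     SUM approximates (average insns executed per call) * (entry
     frequency); comparing it against LIMIT therefore tests whether the
     average executed insn count exceeds THRESHOLD.  */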
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}

/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;
      bitmap tovisit;

      if (!real_values_initialized)
	{
	  real_values_initialized = 1;
	  sreal_init (&real_zero, 0, 0);
	  sreal_init (&real_one, 1, 0);
	  sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
	  sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
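	  /* sreal_init takes a significand and a binary exponent, so
	     (1, -1) below encodes 1 * 2^-1 == 0.5.  */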
	  sreal_init (&real_one_half, 1, -1);
	  sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
	  sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
	}

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      tovisit = BITMAP_ALLOC (NULL);
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost so as to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root, tovisit);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
      BITMAP_FREE (tovisit);
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether the function is hot, cold or unlikely executed.  */
static void
compute_function_frequency (void)
{
  basic_block bb;

  if (!profile_info || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose an appropriate section for the function.  */
static void
choose_function_section (void)
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects, so we would need to merge the
	 frequencies of all instances.  For now just never set the
	 frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;

  /* If we are doing the partitioning optimization, let the optimization
     choose the correct section into which to put things.  */

  if (flag_reorder_blocks_and_partition)
    return;

  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}

static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}

struct tree_opt_pass pass_profile =
{
  "profile",				/* name */
  gate_estimate_probability,		/* gate */
  tree_estimate_probability,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BRANCH_PROB,			/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};