* predict.c (choose_function_section): Choose sections correctly.
/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "profile.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "loop.h"
#include "cfgloop.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Rough probability guesses, given names.  */
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 10 - 1)
#define PROB_EVEN		(REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
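
/* With the usual REG_BR_PROB_BASE of 10000, these come to 999, 5000,
   9001 and 10000 respectively.  */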

static bool predicted_by_p PARAMS ((basic_block,
				    enum br_predictor));
static void combine_predictions_for_insn PARAMS ((rtx, basic_block));
static void dump_prediction PARAMS ((enum br_predictor, int,
				     basic_block, int));
static void estimate_loops_at_level PARAMS ((struct loop *));
static void propagate_freq PARAMS ((struct loop *));
static void estimate_bb_frequencies PARAMS ((struct loops *));
static void counts_to_freqs PARAMS ((void));
static void process_note_predictions PARAMS ((basic_block, int *,
					      dominance_info,
					      dominance_info));
static void process_note_prediction PARAMS ((basic_block, int *,
					     dominance_info,
					     dominance_info, int, int));
static bool last_basic_block_p PARAMS ((basic_block));
static void compute_function_frequency PARAMS ((void));
static void choose_function_section PARAMS ((void));
static bool can_predict_insn_p PARAMS ((rtx));

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate given in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
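
/* For example, with the usual REG_BR_PROB_BASE of 10000,
   HITRATE (79) is 7900.  */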

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[] = {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (bb)
     basic_block bb;
{
  if (profile_info.count_profiles_merged
      && flag_branch_probabilities
      && (bb->count
	  < profile_info.max_counter_in_program
	  / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}

/* Return true in case BB is cold and should be optimized for size.  */

bool
probably_cold_bb_p (bb)
     basic_block bb;
{
  if (profile_info.count_profiles_merged
      && flag_branch_probabilities
      && (bb->count
	  < profile_info.max_counter_in_program
	  / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return true;
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return true;
  return false;
}

/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (bb)
     basic_block bb;
{
  if (profile_info.count_profiles_merged
      && flag_branch_probabilities)
    return ((bb->count + profile_info.count_profiles_merged / 2)
	    / profile_info.count_profiles_merged) == 0;
  return false;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

static bool
predicted_by_p (bb, predictor)
     basic_block bb;
     enum br_predictor predictor;
{
  rtx note;
  if (!INSN_P (bb->end))
    return false;
  for (note = REG_NOTES (bb->end); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int) predictor)
      return true;
  return false;
}

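/* Record on conditional jump INSN a prediction by PREDICTOR that the
   branch is taken with probability PROBABILITY (scaled by
   REG_BR_PROB_BASE), as a REG_BR_PRED note.  */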
void
predict_insn (insn, predictor, probability)
     rtx insn;
     enum br_predictor predictor;
     int probability;
{
  if (!any_condjump_p (insn))
    abort ();

  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
			 gen_rtx_CONCAT (VOIDmode,
					 GEN_INT ((int) predictor),
					 GEN_INT ((int) probability)),
			 REG_NOTES (insn));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (insn, predictor, taken)
     rtx insn;
     enum br_predictor predictor;
     enum prediction taken;
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
predict_edge (e, predictor, probability)
     edge e;
     enum br_predictor predictor;
     int probability;
{
  rtx last_insn;
  last_insn = e->src->end;

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Return true when we can store a prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (insn)
     rtx insn;
{
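  /* A conditional jump in a block with a single successor is degenerate
     and leaves nothing to predict, hence the check for a second
     outgoing edge.  */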
  return (GET_CODE (insn) == JUMP_INSN
	  && any_condjump_p (insn)
	  && BLOCK_FOR_INSN (insn)->succ->succ_next);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (e, predictor, taken)
     edge e;
     enum br_predictor predictor;
     enum prediction taken;
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (insn)
     rtx insn;
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (predictor, probability, bb, used)
     enum br_predictor predictor;
     int probability;
     basic_block bb;
     int used;
{
  edge e = bb->succ;

  if (!rtl_dump_file)
    return;

  while (e && (e->flags & EDGE_FALLTHRU))
    e = e->succ_next;

  fprintf (rtl_dump_file, " %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (rtl_dump_file, " exec ");
      fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
	{
	  fprintf (rtl_dump_file, " hit ");
	  fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, e->count);
	  fprintf (rtl_dump_file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (rtl_dump_file, "\n");
}

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove the now useless
   REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  rtx prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  rtx *pnote = &REG_NOTES (insn);
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement the "first match" heuristic and use the probability
     guessed by the predictor with the smallest index.  In the future we
     will use better probability combination techniques.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

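	/* Dempster-Shafer combination: with P the running combined
	   probability and Q this note's probability, both scaled by
	   REG_BR_PROB_BASE (B), the new combined probability is
	   P*Q*B / D where D = P*Q + (B-P)*(B-Q).  */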
	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use the no_prediction heuristic; in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (PRED_DS_THEORY, combined_probability, bb, !first_match);
      dump_prediction (PRED_FIRST_MATCH, best_probability, bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into the CFG in case we are looking at a
	 non-degenerate conditional jump.  */
      if (bb->succ->succ_next)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
}

/* Statically estimate the probability that a branch will be taken.
   ??? In the next revision there will be a number of other predictors added
   from the above references.  Further, each heuristic will be factored out
   into its own function for clarity (and to facilitate the combination of
   predictions).  */

void
estimate_probability (loops_info)
     struct loops *loops_info;
{
  dominance_info dominators, post_dominators;
  basic_block bb;
  unsigned i;

  connect_infinite_loops_to_exit ();
  dominators = calculate_dominance_info (CDI_DOMINATORS);
  post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);

  /* Apply the loop heuristics to each natural loop: predict the number
     of iterations where it is known, branches back to the loop head as
     taken, and exits as not taken.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      int exits;
      struct loop *loop = loops_info->parray[i];
      struct loop_desc desc;
      unsigned HOST_WIDE_INT niter;

      flow_loop_scan (loops_info, loop, LOOP_EXIT_EDGES);
      exits = loop->num_exits;

      if (simple_loop_p (loops_info, loop, &desc)
	  && desc.const_iter)
	{
	  niter = desc.niter + 1;
	  if (niter == 0)	/* We might overflow here.  */
	    niter = desc.niter;

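	  /* A loop expected to iterate NITER times continues on each
	     iteration with probability 1 - 1/NITER; the expression below
	     computes REG_BR_PROB_BASE * (1 - 1/NITER), rounding
	     REG_BR_PROB_BASE / NITER to the nearest integer.  */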
	  predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
			REG_BR_PROB_BASE
			- (REG_BR_PROB_BASE + niter / 2)
			/ niter);
	}

      bbs = get_loop_body (loop);
      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statements.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better handled
	     separately.  */
	  if (!can_predict_insn_p (bb->end)
	      || predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == loop->header
		&& e->src == loop->latch)
	      {
		header_found = 1;
		predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
	      }

	  /* Loop exit heuristics - if the conditional has no successor
	     that is the loop header, predict each edge exiting the loop
	     as not taken, splitting the probability evenly among the
	     exits.  */
	  if (!header_found)
	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest->index < 0
		  || !flow_bb_inside_loop_p (loop, e->dest))
		predict_edge
		  (e, PRED_LOOP_EXIT,
		   (REG_BR_PROB_BASE
		    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
		   / exits);
	}
    }

  /* Attempt to predict conditional jumps using a number of heuristics.  */
  FOR_EACH_BB (bb)
    {
      rtx last_insn = bb->end;
      rtx cond, earliest;
      edge e;

      if (! can_predict_insn_p (last_insn))
	continue;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Predict early returns to be probable, as we've already taken
	     care of error returns and the others are often used for fast
	     paths through the function.  */
	  if ((e->dest == EXIT_BLOCK_PTR
	       || (e->dest->succ && !e->dest->succ->succ_next
		   && e->dest->succ->dest == EXIT_BLOCK_PTR))
	      && !predicted_by_p (bb, PRED_NULL_RETURN)
	      && !predicted_by_p (bb, PRED_CONST_RETURN)
	      && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
	      && !last_basic_block_p (e->dest))
	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);

	  /* Look for a block we are guarding (i.e. we dominate it,
	     but it doesn't postdominate us).  */
	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
	      && dominated_by_p (dominators, e->dest, e->src)
	      && !dominated_by_p (post_dominators, e->src, e->dest))
	    {
	      rtx insn;

	      /* The call heuristic claims that a guarded function call
		 is improbable.  This is because such calls are often used
		 to signal exceptional situations such as printing error
		 messages.  */
	      for (insn = e->dest->head; insn != NEXT_INSN (e->dest->end);
		   insn = NEXT_INSN (insn))
		if (GET_CODE (insn) == CALL_INSN
		    /* Constant and pure calls are hardly used to signal
		       something exceptional.  */
		    && ! CONST_OR_PURE_CALL_P (insn))
		  {
		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
		    break;
		  }
	    }
	}

      cond = get_condition (last_insn, &earliest);
      if (! cond)
	continue;

      /* Try "pointer heuristic."
	 A comparison ptr == 0 is predicted as false.
	 Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
      if (GET_RTX_CLASS (GET_CODE (cond)) == '<'
	  && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
	      || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
	{
	  if (GET_CODE (cond) == EQ)
	    predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
	  else if (GET_CODE (cond) == NE)
	    predict_insn_def (last_insn, PRED_POINTER, TAKEN);
	}
      else

      /* Try "opcode heuristic."
	 EQ tests are usually false and NE tests are usually true.  Also,
	 most quantities are positive, so we can make the appropriate guesses
	 about signed comparisons against zero.  */
	switch (GET_CODE (cond))
	  {
	  case CONST_INT:
	    /* Unconditional branch.  */
	    predict_insn_def (last_insn, PRED_UNCONDITIONAL,
			      cond == const0_rtx ? NOT_TAKEN : TAKEN);
	    break;

	  case EQ:
	  case UNEQ:
	    /* Floating point comparisons appear to behave in a very
	       unpredictable way because of the special role of equality
	       tests in FP code.  */
	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	      ;
	    /* Comparisons with 0 are often used for booleans and there is
	       nothing useful to predict about them.  */
	    else if (XEXP (cond, 1) == const0_rtx
		     || XEXP (cond, 0) == const0_rtx)
	      ;
	    else
	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
	    break;

	  case NE:
	  case LTGT:
	    /* Floating point comparisons appear to behave in a very
	       unpredictable way because of the special role of equality
	       tests in FP code.  */
	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
	      ;
	    /* Comparisons with 0 are often used for booleans and there is
	       nothing useful to predict about them.  */
	    else if (XEXP (cond, 1) == const0_rtx
		     || XEXP (cond, 0) == const0_rtx)
	      ;
	    else
	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
	    break;

	  case ORDERED:
	    predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
	    break;

	  case UNORDERED:
	    predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
	    break;

	  case LE:
	  case LT:
	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
		|| XEXP (cond, 1) == constm1_rtx)
	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
	    break;

	  case GE:
	  case GT:
	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
		|| XEXP (cond, 1) == constm1_rtx)
	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
	    break;

	  default:
	    break;
	  }
    }

  /* Attach the combined probability to each conditional jump.  */
  FOR_EACH_BB (bb)
    if (GET_CODE (bb->end) == JUMP_INSN
	&& any_condjump_p (bb->end)
	&& bb->succ->succ_next != NULL)
      combine_predictions_for_insn (bb->end, bb);

  free_dominance_info (post_dominators);
  free_dominance_info (dominators);

  remove_fake_edges ();
  estimate_bb_frequencies (loops_info);
}
\f
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based on these
   values.  */
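
/* For instance, for source along the lines of

	if (__builtin_expect (x != 0, 1))
	  ...

   the expander leaves behind a NOTE_INSN_EXPECTED_VALUE note whose
   NOTE_EXPECTED_VALUE has the shape (eq (reg) (const_int 1)); below we
   match such a note against the next conditional jump involving that
   register.  (The exact source form is illustrative only.)  */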

void
expected_value_to_br_prob ()
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  */
	  if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ??? At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
			cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
\f
/* Check whether this is the last basic block of the function.  Commonly
   there is one extra common cleanup block.  */
static bool
last_basic_block_p (bb)
     basic_block bb;
{
  if (bb == EXIT_BLOCK_PTR)
    return false;

  return (bb->next_bb == EXIT_BLOCK_PTR
	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
	      && bb->succ && !bb->succ->succ_next
	      && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
}

/* Set branch probabilities according to PREDiction and FLAGS.
   HEADS[bb->index] should be the index of the basic block in which we
   need to alter branch predictions (i.e. the first of our dominators
   that we do not post-dominate); we fill this information in on
   demand, so -1 may be there in case it was not needed yet.  */

static void
process_note_prediction (bb, heads, dominators, post_dominators, pred, flags)
     basic_block bb;
     int *heads;
     dominance_info dominators;
     dominance_info post_dominators;
     int pred;
     int flags;
{
  edge e;
  int y;
  bool taken;

  taken = flags & IS_TAKEN;

  if (heads[bb->index] < 0)
    {
      /* This is the first time we need this field in the heads array;
	 so find the first dominator that we do not post-dominate (we are
	 using already-known members of the heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (dominators, bb);
      int head;

      while (heads[next_ai->index] < 0)
	{
	  if (!dominated_by_p (post_dominators, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (dominators, next_ai);
	}
      if (!dominated_by_p (post_dominators, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
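
      /* Walk back over the chain of blocks just examined and record HEAD
	 for each of them (a form of path compression), so later queries
	 starting from those blocks are answered directly.  */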
      while (next_ai != bb)
	{
	  next_ai = ai;
	  if (heads[ai->index] == ENTRY_BLOCK)
	    ai = ENTRY_BLOCK_PTR;
	  else
	    ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block || !can_predict_insn_p (BASIC_BLOCK (y)->end))
    return;
  for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
    if (e->dest->index >= 0
	&& dominated_by_p (post_dominators, e->dest, bb))
      predict_edge_def (e, pred, taken);
}

/* Gathers NOTE_INSN_PREDICTIONs in given basic block and turns them
   into branch probabilities.  For a description of the heads array, see
   process_note_prediction.  */

static void
process_note_predictions (bb, heads, dominators, post_dominators)
     basic_block bb;
     int *heads;
     dominance_info dominators;
     dominance_info post_dominators;
{
  rtx insn;
  edge e;

  /* Additionally, we check here for blocks with no successors.  */
  int contained_noreturn_call = 0;
  int was_bb_head = 0;
  int noreturn_block = 1;

  for (insn = bb->end; insn;
       was_bb_head |= (insn == bb->head), insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	{
	  if (was_bb_head)
	    break;
	  else
	    {
	      /* Noreturn calls cause the program to exit, therefore they
		 are always predicted as not taken.  */
	      if (GET_CODE (insn) == CALL_INSN
		  && find_reg_note (insn, REG_NORETURN, NULL))
		contained_noreturn_call = 1;
	      continue;
	    }
	}
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
	{
	  int alg = (int) NOTE_PREDICTION_ALG (insn);
	  /* Process single prediction note.  */
	  process_note_prediction (bb,
				   heads,
				   dominators,
				   post_dominators,
				   alg, (int) NOTE_PREDICTION_FLAGS (insn));
	  delete_insn (insn);
	}
    }
  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & EDGE_FAKE))
      noreturn_block = 0;
  if (contained_noreturn_call)
    {
      /* This block ended for some reason other than a return.  If it is
	 because of a noreturn call, this path should certainly not be
	 predicted as taken.  Otherwise it is probably some error
	 recovery.  */
      process_note_prediction (bb,
			       heads,
			       dominators,
			       post_dominators, PRED_NORETURN, NOT_TAKEN);
    }
}

/* Gathers NOTE_INSN_PREDICTIONs and turns them into
   branch probabilities.  */

void
note_prediction_to_br_prob ()
{
  basic_block bb;
  dominance_info post_dominators, dominators;
  int *heads;

  /* To enable handling of noreturn blocks.  */
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
  dominators = calculate_dominance_info (CDI_DOMINATORS);

  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  /* Process all prediction notes.  */

  FOR_EACH_BB (bb)
    process_note_predictions (bb, heads, dominators, post_dominators);

  free_dominance_info (post_dominators);
  free_dominance_info (dominators);
  free (heads);

  remove_fake_edges ();
}
\f
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached in case the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)	((block_info) (B)->aux)
#define EDGE_INFO(E)	((edge_info) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  */

static void
propagate_freq (loop)
     struct loop *loop;
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  edge e;
  basic_block nextbb;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  FOR_EACH_BB (bb)
    {
      if (BLOCK_INFO (bb)->tovisit)
	{
	  int count = 0;

	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      count++;
	    else if (BLOCK_INFO (e->src)->tovisit
		     && rtl_dump_file && !EDGE_INFO (e)->back_edge)
	      fprintf (rtl_dump_file,
		       "Irreducible region hit, ignoring edge to %i->%i\n",
		       e->src->index, bb->index);
	  BLOCK_INFO (bb)->npredecessors = count;
	}
    }

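  /* The loop header gets frequency 1 by definition; the frequencies of
     the remaining blocks are computed relative to it and rescaled
     later.  */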
  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  for (e = bb->pred; e; e = e->pred_next)
	    if (EDGE_INFO (e)->back_edge)
	      {
		sreal_add (&cyclic_probability, &cyclic_probability,
			   &EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		sreal tmp;

		/* frequency += (e->probability
				 * BLOCK_INFO (e->src)->frequency
				 / REG_BR_PROB_BASE);  */

		sreal_init (&tmp, e->probability, 0);
		sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
		sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
		sreal_add (&frequency, &frequency, &tmp);
	      }

	  if (sreal_compare (&cyclic_probability, &real_zero) == 0)
	    {
	      memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
		      sizeof (frequency));
	    }
	  else
	    {
	      if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
		{
		  memcpy (&cyclic_probability, &real_almost_one,
			  sizeof (real_almost_one));
		}

	      /* BLOCK_INFO (bb)->frequency = frequency
					      / (1 - cyclic_probability) */

	      sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
	      sreal_div (&BLOCK_INFO (bb)->frequency,
			 &frequency, &cyclic_probability);
	    }
	}

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (e->dest == head)
	  {
	    sreal tmp;

	    /* EDGE_INFO (e)->back_edge_prob
		 = ((e->probability * BLOCK_INFO (bb)->frequency)
		    / REG_BR_PROB_BASE);  */

	    sreal_init (&tmp, e->probability, 0);
	    sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
	    sreal_mul (&EDGE_INFO (e)->back_edge_prob,
		       &tmp, &real_inv_br_prob_base);
	  }

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	  }
    }
}

/* Estimate probabilities of loopback edges in loops at the same nest
   level.  */

static void
estimate_loops_at_level (first_loop)
     struct loop *first_loop;
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner);

      if (loop->latch->succ)  /* Do not do this for dummy function loop.  */
	{
	  /* Find current loop back edge and mark it.  */
	  e = loop_latch_edge (loop);
	  EDGE_INFO (e)->back_edge = 1;
	}

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
	BLOCK_INFO (bbs[i])->tovisit = 1;
      free (bbs);
      propagate_freq (loop);
    }
}
/* Convert counts measured by profile driven feedback to frequencies.  */

static void
counts_to_freqs ()
{
  gcov_type count_max = 1;
  basic_block bb;

  FOR_EACH_BB (bb)
    count_max = MAX (bb->count, count_max);

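  /* Scale so that the most executed block gets frequency BB_FREQ_MAX;
     the + count_max / 2 term rounds to the nearest integer.  */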
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
}

/* Return true if the function is likely to be expensive, so there is
   no point in optimizing performance of the prologue and epilogue or
   doing inlining at the expense of code size growth.  THRESHOLD is the
   limit of number of instructions the function can execute on average
   to still be considered inexpensive.  */

bool
expensive_function_p (threshold)
     int threshold;
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  if (threshold > BB_FREQ_MAX)
    abort ();

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has never been
     executed.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = bb->head; insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	if (active_insn_p (insn))
	  {
	    sum += bb->frequency;
	    if (sum > limit)
	      return true;
	  }
    }

  return false;
}

/* Estimate basic block frequencies from the given branch probabilities.  */

static void
estimate_bb_frequencies (loops)
     struct loops *loops;
{
  basic_block bb;
  sreal freq_max;

  if (flag_branch_probabilities)
    counts_to_freqs ();
  else
    {
      sreal_init (&real_zero, 0, 0);
      sreal_init (&real_one, 1, 0);
      sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
      sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
      sreal_init (&real_one_half, 1, -1);
      sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
      sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);

      mark_dfs_back_edges ();
      /* Fill in the probability values in flowgraph based on the REG_BR_PROB
	 notes.  */
      FOR_EACH_BB (bb)
	{
	  rtx last_insn = bb->end;

	  if (!can_predict_insn_p (last_insn))
	    {
	      /* We can predict only conditional jumps at the moment.
		 Expect each edge to be equally probable.
		 ??? In the future we want to make abnormal edges
		 improbable.  */
	      int nedges = 0;
	      edge e;

	      for (e = bb->succ; e; e = e->succ_next)
		{
		  nedges++;
		  if (e->probability != 0)
		    break;
		}
	      if (!e)
		for (e = bb->succ; e; e = e->succ_next)
		  e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
	    }
	}

      ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;

	  BLOCK_INFO (bb)->tovisit = 0;
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
	      sreal_mul (&EDGE_INFO (e)->back_edge_prob,
			 &EDGE_INFO (e)->back_edge_prob,
			 &real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
	 to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root);

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

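      /* Rescale so that the most frequent block ends up with frequency
	 BB_FREQ_MAX; after the division FREQ_MAX holds the scale factor
	 BB_FREQ_MAX / (maximum frequency).  */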
      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  sreal tmp;

	  sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
	  sreal_add (&tmp, &tmp, &real_one_half);
	  bb->frequency = sreal_to_int (&tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}

/* Decide whether the function is hot, cold or unlikely executed.  */
static void
compute_function_frequency ()
{
  basic_block bb;

  if (!profile_info.count_profiles_merged
      || !flag_branch_probabilities)
    return;
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
	{
	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
	  return;
	}
      if (!probably_never_executed_bb_p (bb))
	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
    }
}

/* Choose an appropriate section for the function.  */
static void
choose_function_section ()
{
  if (DECL_SECTION_NAME (current_function_decl)
      || !targetm.have_named_sections
      /* Theoretically we can split the gnu.linkonce text section too,
	 but this requires more work as the frequency needs to match
	 for all generated objects, so we need to merge the frequency
	 of all instances.  For now just never set frequency for these.  */
      || DECL_ONE_ONLY (current_function_decl))
    return;
  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
    DECL_SECTION_NAME (current_function_decl) =
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
}