1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
5 based on some ideas from Dain Samples of UC Berkeley.
6 Further mangling by Bob Manson, Cygnus Support.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
24
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation we generate the
36 minimal spanning tree of the BB graph; only edges that are not on the
37 spanning tree (plus the entry point) need instrumenting. From that
38 information all other edge counts can be deduced. By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
41
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
45
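/* Illustrative standalone sketch (kept out of the build by "#if 0"; it is
   not part of this pass).  It works through the scheme described above on
   a diamond-shaped CFG: counters are placed only on edges that are not on
   the spanning tree, and every other edge count is recovered by flow
   conservation.  The graph and all names below are invented for the
   example.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* CFG edges: ENTRY->A, A->B, A->C, B->D, C->D, D->EXIT, plus the
     uninstrumentable fake edge EXIT->ENTRY.  Suppose the spanning tree
     holds EXIT->ENTRY, ENTRY->A, A->B, A->C and D->EXIT, so only B->D
     and C->D carry counters at run time.  */
  long count_b_d = 70;                          /* measured */
  long count_c_d = 30;                          /* measured */

  /* Flow conservation: for every block, inflow equals outflow.  */
  long count_a_b = count_b_d;                   /* B has one in, one out */
  long count_a_c = count_c_d;                   /* C has one in, one out */
  long count_entry_a = count_a_b + count_a_c;   /* A: out = in */
  long count_d_exit = count_b_d + count_c_d;    /* D: in = out */

  printf ("ENTRY->A=%ld A->B=%ld A->C=%ld D->EXIT=%ld\n",
          count_entry_a, count_a_b, count_a_c, count_d_exit);
  return 0;
}
#endif
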
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
48
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
51
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "toplev.h"
63 #include "coverage.h"
64 #include "value-prof.h"
65 #include "tree.h"
66 #include "cfghooks.h"
67 #include "tree-flow.h"
68 #include "timevar.h"
69 #include "cfgloop.h"
70 #include "tree-pass.h"
71
72 /* Hooks for profiling. */
73 static struct profile_hooks* profile_hooks;
74
75 /* Additional information about the edges we need. */
76 struct edge_info {
77 unsigned int count_valid : 1;
78
79 /* Is on the spanning tree. */
80 unsigned int on_tree : 1;
81
82 /* Pretend this edge does not exist (it is abnormal and we've
83 inserted a fake to compensate). */
84 unsigned int ignore : 1;
85 };
86
87 struct bb_info {
88 unsigned int count_valid : 1;
89
90 /* Number of successor and predecessor edges. */
91 gcov_type succ_count;
92 gcov_type pred_count;
93 };
94
95 #define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
96 #define BB_INFO(b) ((struct bb_info *) (b)->aux)
97
98 /* Counter summary from the last set of coverage counts read. */
99
100 const struct gcov_ctr_summary *profile_info;
101
102 /* Collect statistics on the performance of this pass for the entire source
103 file. */
104
105 static int total_num_blocks;
106 static int total_num_edges;
107 static int total_num_edges_ignored;
108 static int total_num_edges_instrumented;
109 static int total_num_blocks_created;
110 static int total_num_passes;
111 static int total_num_times_called;
112 static int total_hist_br_prob[20];
113 static int total_num_never_executed;
114 static int total_num_branches;
115
116 /* Forward declarations. */
117 static void find_spanning_tree (struct edge_list *);
118 static unsigned instrument_edges (struct edge_list *);
119 static void instrument_values (histogram_values);
120 static void compute_branch_probabilities (void);
121 static void compute_value_histograms (histogram_values);
122 static gcov_type * get_exec_counts (void);
123 static basic_block find_group (basic_block);
124 static void union_groups (basic_block, basic_block);
125
126 \f
127 /* Add edge instrumentation code to the entire insn chain.
128
129 EL is the edge list of the current function; a counter is generated for
130 every edge that is neither ignored nor on the spanning tree. */
131
132 static unsigned
133 instrument_edges (struct edge_list *el)
134 {
135 unsigned num_instr_edges = 0;
136 int num_edges = NUM_EDGES (el);
137 basic_block bb;
138
139 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
140 {
141 edge e;
142 edge_iterator ei;
143
144 FOR_EACH_EDGE (e, ei, bb->succs)
145 {
146 struct edge_info *inf = EDGE_INFO (e);
147
148 if (!inf->ignore && !inf->on_tree)
149 {
150 gcc_assert (!(e->flags & EDGE_ABNORMAL));
151 if (dump_file)
152 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
153 e->src->index, e->dest->index,
154 EDGE_CRITICAL_P (e) ? " (and split)" : "");
155 (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
156 }
157 }
158 }
159
160 total_num_blocks_created += num_edges;
161 if (dump_file)
162 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
163 return num_instr_edges;
164 }
165
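/* Rough standalone illustration (kept out of the build) of what the
   instrumentation generated above amounts to at run time: each selected
   edge simply increments its own slot in a counter array.  In reality the
   counters live in libgcov-managed storage; the array and the function
   below are invented for the example.  */
#if 0
#include <stdio.h>

static long edge_counters[2];   /* one slot per instrumented edge */

static int
classify (int x)
{
  if (x > 0)
    {
      edge_counters[0]++;       /* edge: condition block -> "positive" block */
      return 1;
    }
  edge_counters[1]++;           /* edge: condition block -> "non-positive" block */
  return 0;
}

int
main (void)
{
  int i;

  for (i = -2; i <= 3; i++)
    classify (i);
  printf ("positive edge: %ld, non-positive edge: %ld\n",
          edge_counters[0], edge_counters[1]);
  return 0;
}
#endif
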
166 /* Add code to measure histograms for values in list VALUES. */
167 static void
168 instrument_values (histogram_values values)
169 {
170 unsigned i, t;
171
172 /* Emit code to generate the histograms before the insns. */
173
174 for (i = 0; i < VEC_length (histogram_value, values); i++)
175 {
176 histogram_value hist = VEC_index (histogram_value, values, i);
177 switch (hist->type)
178 {
179 case HIST_TYPE_INTERVAL:
180 t = GCOV_COUNTER_V_INTERVAL;
181 break;
182
183 case HIST_TYPE_POW2:
184 t = GCOV_COUNTER_V_POW2;
185 break;
186
187 case HIST_TYPE_SINGLE_VALUE:
188 t = GCOV_COUNTER_V_SINGLE;
189 break;
190
191 case HIST_TYPE_CONST_DELTA:
192 t = GCOV_COUNTER_V_DELTA;
193 break;
194
195 case HIST_TYPE_INDIR_CALL:
196 t = GCOV_COUNTER_V_INDIR;
197 break;
198
199 default:
200 gcc_unreachable ();
201 }
202 if (!coverage_counter_alloc (t, hist->n_counters))
203 continue;
204
205 switch (hist->type)
206 {
207 case HIST_TYPE_INTERVAL:
208 (profile_hooks->gen_interval_profiler) (hist, t, 0);
209 break;
210
211 case HIST_TYPE_POW2:
212 (profile_hooks->gen_pow2_profiler) (hist, t, 0);
213 break;
214
215 case HIST_TYPE_SINGLE_VALUE:
216 (profile_hooks->gen_one_value_profiler) (hist, t, 0);
217 break;
218
219 case HIST_TYPE_CONST_DELTA:
220 (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
221 break;
222
223 case HIST_TYPE_INDIR_CALL:
224 (profile_hooks->gen_ic_profiler) (hist, t, 0);
225 break;
226
227 default:
228 gcc_unreachable ();
229 }
230 }
231 }
232 \f
233
234 /* Read the edge (arc) execution counts of the current function from the .gcda file; return the array of counts, or NULL if none are available. */
235
236 static gcov_type *
237 get_exec_counts (void)
238 {
239 unsigned num_edges = 0;
240 basic_block bb;
241 gcov_type *counts;
242
243 /* Count the edges to be (possibly) instrumented. */
244 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
245 {
246 edge e;
247 edge_iterator ei;
248
249 FOR_EACH_EDGE (e, ei, bb->succs)
250 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
251 num_edges++;
252 }
253
254 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
255 if (!counts)
256 return NULL;
257
258 if (dump_file && profile_info)
259 fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
260 profile_info->runs, (unsigned) profile_info->sum_max);
261
262 return counts;
263 }
264 \f
265
266 /* Compute the branch probabilities for the various branches.
267 Annotate them accordingly. */
268
269 static void
270 compute_branch_probabilities (void)
271 {
272 basic_block bb;
273 int i;
274 int num_edges = 0;
275 int changes;
276 int passes;
277 int hist_br_prob[20];
278 int num_never_executed;
279 int num_branches;
280 gcov_type *exec_counts = get_exec_counts ();
281 int exec_counts_pos = 0;
282
283 /* Very simple sanity checks so we catch bugs in our profiling code. */
284 if (profile_info)
285 {
286 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
287 {
288 error ("corrupted profile info: run_max * runs < sum_max");
289 exec_counts = NULL;
290 }
291
292 if (profile_info->sum_all < profile_info->sum_max)
293 {
294 error ("corrupted profile info: sum_all is smaller than sum_max");
295 exec_counts = NULL;
296 }
297 }
298
299 /* Attach extra info block to each bb. */
300
301 alloc_aux_for_blocks (sizeof (struct bb_info));
302 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
303 {
304 edge e;
305 edge_iterator ei;
306
307 FOR_EACH_EDGE (e, ei, bb->succs)
308 if (!EDGE_INFO (e)->ignore)
309 BB_INFO (bb)->succ_count++;
310 FOR_EACH_EDGE (e, ei, bb->preds)
311 if (!EDGE_INFO (e)->ignore)
312 BB_INFO (bb)->pred_count++;
313 }
314
315 /* Avoid predicting entry on exit nodes. */
316 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
317 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
318
319 /* For each edge not on the spanning tree, set its execution count from
320 the .da file. */
321
322 /* The first count in the .da file is the number of times that the function
323 was entered. This is the exec_count for block zero. */
324
325 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
326 {
327 edge e;
328 edge_iterator ei;
329
330 FOR_EACH_EDGE (e, ei, bb->succs)
331 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
332 {
333 num_edges++;
334 if (exec_counts)
335 {
336 e->count = exec_counts[exec_counts_pos++];
337 if (e->count > profile_info->sum_max)
338 {
339 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
340 bb->index, e->dest->index);
341 }
342 }
343 else
344 e->count = 0;
345
346 EDGE_INFO (e)->count_valid = 1;
347 BB_INFO (bb)->succ_count--;
348 BB_INFO (e->dest)->pred_count--;
349 if (dump_file)
350 {
351 fprintf (dump_file, "\nRead edge from %i to %i, count:",
352 bb->index, e->dest->index);
353 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
354 (HOST_WIDEST_INT) e->count);
355 }
356 }
357 }
358
359 if (dump_file)
360 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
361
362 /* For every block in the file,
363 - if every exit/entrance edge has a known count, then set the block count
364 - if the block count is known, and every exit/entrance edge but one has
365 a known execution count, then set the count of the remaining edge
366
367 As edge counts are set, decrement the succ/pred count, but don't delete
368 the edge, that way we can easily tell when all edges are known, or only
369 one edge is unknown. */
370
371 /* The order that the basic blocks are iterated through is important.
372 Since the code that finds spanning trees starts with block 0, low numbered
373 edges are put on the spanning tree in preference to high numbered edges.
374 Hence, most instrumented edges are at the end. Graph solving works much
375 faster if we propagate numbers from the end to the start.
376
377 This takes an average of slightly more than 3 passes. */
378
379 changes = 1;
380 passes = 0;
381 while (changes)
382 {
383 passes++;
384 changes = 0;
385 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
386 {
387 struct bb_info *bi = BB_INFO (bb);
388 if (! bi->count_valid)
389 {
390 if (bi->succ_count == 0)
391 {
392 edge e;
393 edge_iterator ei;
394 gcov_type total = 0;
395
396 FOR_EACH_EDGE (e, ei, bb->succs)
397 total += e->count;
398 bb->count = total;
399 bi->count_valid = 1;
400 changes = 1;
401 }
402 else if (bi->pred_count == 0)
403 {
404 edge e;
405 edge_iterator ei;
406 gcov_type total = 0;
407
408 FOR_EACH_EDGE (e, ei, bb->preds)
409 total += e->count;
410 bb->count = total;
411 bi->count_valid = 1;
412 changes = 1;
413 }
414 }
415 if (bi->count_valid)
416 {
417 if (bi->succ_count == 1)
418 {
419 edge e;
420 edge_iterator ei;
421 gcov_type total = 0;
422
423 /* One of the counts will be invalid, but it is zero,
424 so adding it in also doesn't hurt. */
425 FOR_EACH_EDGE (e, ei, bb->succs)
426 total += e->count;
427
429 /* Search for the invalid edge, and set its count. */
429 FOR_EACH_EDGE (e, ei, bb->succs)
430 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
431 break;
432
433 /* Calculate count for remaining edge by conservation. */
434 total = bb->count - total;
435
436 gcc_assert (e);
437 EDGE_INFO (e)->count_valid = 1;
438 e->count = total;
439 bi->succ_count--;
440
441 BB_INFO (e->dest)->pred_count--;
442 changes = 1;
443 }
444 if (bi->pred_count == 1)
445 {
446 edge e;
447 edge_iterator ei;
448 gcov_type total = 0;
449
450 /* One of the counts will be invalid, but it is zero,
451 so adding it in also doesn't hurt. */
452 FOR_EACH_EDGE (e, ei, bb->preds)
453 total += e->count;
454
455 /* Search for the invalid edge, and set its count. */
456 FOR_EACH_EDGE (e, ei, bb->preds)
457 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
458 break;
459
460 /* Calculate count for remaining edge by conservation. */
461 total = bb->count - total + e->count;
462
463 gcc_assert (e);
464 EDGE_INFO (e)->count_valid = 1;
465 e->count = total;
466 bi->pred_count--;
467
468 BB_INFO (e->src)->succ_count--;
469 changes = 1;
470 }
471 }
472 }
473 }
474 if (dump_file)
475 dump_flow_info (dump_file, dump_flags);
476
477 total_num_passes += passes;
478 if (dump_file)
479 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
480
481 /* If the graph has been correctly solved, every block will have a
482 succ and pred count of zero. */
483 FOR_EACH_BB (bb)
484 {
485 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
486 }
487
488 /* For every edge, calculate its branch probability and add a reg_note
489 to the branch insn to indicate this. */
490
491 for (i = 0; i < 20; i++)
492 hist_br_prob[i] = 0;
493 num_never_executed = 0;
494 num_branches = 0;
495
496 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
497 {
498 edge e;
499 edge_iterator ei;
500
501 if (bb->count < 0)
502 {
503 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
504 bb->index, (int)bb->count);
505 bb->count = 0;
506 }
507 FOR_EACH_EDGE (e, ei, bb->succs)
508 {
509 /* A function may return twice in the case the called function is
510 setjmp or calls fork, but we can't represent this by an extra
511 edge from the entry, since an extra edge from the exit is
512 already present. We get a negative frequency from the entry
513 point. */
514 if ((e->count < 0
515 && e->dest == EXIT_BLOCK_PTR)
516 || (e->count > bb->count
517 && e->dest != EXIT_BLOCK_PTR))
518 {
519 if (block_ends_with_call_p (bb))
520 e->count = e->count < 0 ? 0 : bb->count;
521 }
522 if (e->count < 0 || e->count > bb->count)
523 {
524 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
525 e->src->index, e->dest->index,
526 (int)e->count);
527 e->count = bb->count / 2;
528 }
529 }
530 if (bb->count)
531 {
532 FOR_EACH_EDGE (e, ei, bb->succs)
533 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
534 if (bb->index >= NUM_FIXED_BLOCKS
535 && block_ends_with_condjump_p (bb)
536 && EDGE_COUNT (bb->succs) >= 2)
537 {
538 int prob;
539 edge e;
540 int index;
541
542 /* Find the branch edge. It is possible that we do have fake
543 edges here. */
544 FOR_EACH_EDGE (e, ei, bb->succs)
545 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
546 break;
547
548 prob = e->probability;
549 index = prob * 20 / REG_BR_PROB_BASE;
550
551 if (index == 20)
552 index = 19;
553 hist_br_prob[index]++;
554
555 num_branches++;
556 }
557 }
558 /* As a last resort, distribute the probabilities evenly.
559 Use a simple heuristic: if there are any normal edges,
560 give all abnormal edges a probability of 0; otherwise distribute
561 the probability over the abnormal edges (this is the case of
562 noreturn calls). */
563 else if (profile_status == PROFILE_ABSENT)
564 {
565 int total = 0;
566
567 FOR_EACH_EDGE (e, ei, bb->succs)
568 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
569 total ++;
570 if (total)
571 {
572 FOR_EACH_EDGE (e, ei, bb->succs)
573 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
574 e->probability = REG_BR_PROB_BASE / total;
575 else
576 e->probability = 0;
577 }
578 else
579 {
580 total += EDGE_COUNT (bb->succs);
581 FOR_EACH_EDGE (e, ei, bb->succs)
582 e->probability = REG_BR_PROB_BASE / total;
583 }
584 if (bb->index >= NUM_FIXED_BLOCKS
585 && block_ends_with_condjump_p (bb)
586 && EDGE_COUNT (bb->succs) >= 2)
587 num_branches++, num_never_executed++;
588 }
589 }
590 counts_to_freqs ();
591
592 if (dump_file)
593 {
594 fprintf (dump_file, "%d branches\n", num_branches);
595 fprintf (dump_file, "%d branches never executed\n",
596 num_never_executed);
597 if (num_branches)
598 for (i = 0; i < 10; i++)
599 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
600 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
601 5 * i, 5 * i + 5);
602
603 total_num_branches += num_branches;
604 total_num_never_executed += num_never_executed;
605 for (i = 0; i < 20; i++)
606 total_hist_br_prob[i] += hist_br_prob[i];
607
608 fputc ('\n', dump_file);
609 fputc ('\n', dump_file);
610 }
611
612 free_aux_for_blocks ();
613 }
614
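/* Minimal, self-contained model (kept out of the build) of the propagation
   loop in compute_branch_probabilities above: edges not on the spanning
   tree start out with measured counts, and the loop repeatedly applies
   flow conservation until every block and edge count is known.  The graph
   and all names below are invented for the example.  */
#if 0
#include <stdio.h>

#define N_BLOCKS 3              /* blocks A, B and C */
#define N_EDGES  3

struct ex_edge { int src, dst; long count; int valid; };

/* A->B is on the spanning tree (not measured); A->C and B->C were measured.  */
static struct ex_edge ex_edges[N_EDGES] = {
  { 0, 1, 0, 0 }, { 0, 2, 30, 1 }, { 1, 2, 70, 1 }
};

static long ex_bb_count[N_BLOCKS];
static int ex_bb_valid[N_BLOCKS];

/* If every edge of block B in the chosen direction has a known count, set
   the block count; if the block count is known and exactly one such edge
   is unknown, deduce that edge by conservation.  Return 1 on a change.  */

static int
ex_propagate (int b, int use_succs)
{
  long total = 0;
  int i, unknown = -1, n_unknown = 0, n_edges = 0, changed = 0;

  for (i = 0; i < N_EDGES; i++)
    if ((use_succs ? ex_edges[i].src : ex_edges[i].dst) == b)
      {
        n_edges++;
        if (ex_edges[i].valid)
          total += ex_edges[i].count;
        else
          {
            unknown = i;
            n_unknown++;
          }
      }

  if (!n_edges)
    return 0;
  if (!ex_bb_valid[b] && n_unknown == 0)
    {
      ex_bb_count[b] = total;
      ex_bb_valid[b] = 1;
      changed = 1;
    }
  if (ex_bb_valid[b] && n_unknown == 1)
    {
      ex_edges[unknown].count = ex_bb_count[b] - total;
      ex_edges[unknown].valid = 1;
      changed = 1;
    }
  return changed;
}

int
main (void)
{
  int changes = 1, passes = 0, b, i;

  while (changes)
    {
      changes = 0;
      passes++;
      for (b = 0; b < N_BLOCKS; b++)
        changes |= ex_propagate (b, 1) | ex_propagate (b, 0);
    }

  printf ("solved in %d passes\n", passes);
  for (i = 0; i < N_EDGES; i++)
    printf ("edge %d->%d: %ld\n", ex_edges[i].src, ex_edges[i].dst,
            ex_edges[i].count);
  return 0;
}
#endif
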
615 /* Load the value histograms whose descriptions are stored in the VALUES
616 array from the .gcda file. */
617
618 static void
619 compute_value_histograms (histogram_values values)
620 {
621 unsigned i, j, t, any;
622 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
623 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
624 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
625 gcov_type *aact_count;
626
627 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
628 n_histogram_counters[t] = 0;
629
630 for (i = 0; i < VEC_length (histogram_value, values); i++)
631 {
632 histogram_value hist = VEC_index (histogram_value, values, i);
633 n_histogram_counters[(int) hist->type] += hist->n_counters;
634 }
635
636 any = 0;
637 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
638 {
639 if (!n_histogram_counters[t])
640 {
641 histogram_counts[t] = NULL;
642 continue;
643 }
644
645 histogram_counts[t] =
646 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
647 n_histogram_counters[t], NULL);
648 if (histogram_counts[t])
649 any = 1;
650 act_count[t] = histogram_counts[t];
651 }
652 if (!any)
653 return;
654
655 for (i = 0; i < VEC_length (histogram_value, values); i++)
656 {
657 histogram_value hist = VEC_index (histogram_value, values, i);
658 tree stmt = hist->hvalue.stmt;
659
660 t = (int) hist->type;
661
662 aact_count = act_count[t];
663 act_count[t] += hist->n_counters;
664
665 gimple_add_histogram_value (cfun, stmt, hist);
666 hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
667 for (j = 0; j < hist->n_counters; j++)
668 hist->hvalue.counters[j] = aact_count[j];
669 }
670
671 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
672 if (histogram_counts[t])
673 free (histogram_counts[t]);
674 }
675
676 /* The entry basic block will be moved around so that it has index 1;
677 there is nothing at index 0, and the exit block gets the last index. */
678 #define BB_TO_GCOV_INDEX(bb) ((bb)->index - 1)
679 /* When passed NULL as FILE_NAME, reset the static state.
680 Otherwise, output the records needed to change the current location to
681 FILE_NAME and line LINE, opening a GCOV_TAG_LINES record for BB at *OFFSET if needed. */
682 static void
683 output_location (char const *file_name, int line,
684 gcov_position_t *offset, basic_block bb)
685 {
686 static char const *prev_file_name;
687 static int prev_line;
688 bool name_differs, line_differs;
689
690 if (!file_name)
691 {
692 prev_file_name = NULL;
693 prev_line = -1;
694 return;
695 }
696
697 name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
698 line_differs = prev_line != line;
699
700 if (name_differs || line_differs)
701 {
702 if (!*offset)
703 {
704 *offset = gcov_write_tag (GCOV_TAG_LINES);
705 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
706 name_differs = line_differs = true;
707 }
708
709 /* If this is a new source file, then output the
710 file's name to the .bb file. */
711 if (name_differs)
712 {
713 prev_file_name = file_name;
714 gcov_write_unsigned (0);
715 gcov_write_string (prev_file_name);
716 }
717 if (line_differs)
718 {
719 gcov_write_unsigned (line);
720 prev_line = line;
721 }
722 }
723 }
724
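/* Small standalone model (kept out of the build) of the caching done by
   output_location above: a record is written only when the file name or
   the line number actually changes, so several statements on the same
   line produce a single record.  The output here is plain text, not the
   real gcov record layout, and the per-block record handling is left
   out.  */
#if 0
#include <stdio.h>
#include <string.h>

static const char *prev_file;
static int prev_line = -1;

static void
note_location (const char *file, int line)
{
  int name_differs = !prev_file || strcmp (file, prev_file);
  int line_differs = prev_line != line;

  if (name_differs)
    {
      printf ("file %s\n", file);
      prev_file = file;
    }
  if (line_differs)
    {
      printf ("  line %d\n", line);
      prev_line = line;
    }
}

int
main (void)
{
  note_location ("a.c", 10);
  note_location ("a.c", 10);    /* cached: nothing written */
  note_location ("a.c", 11);    /* line record only */
  note_location ("b.c", 11);    /* file record only */
  return 0;
}
#endif
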
725 /* Instrument and/or analyze program behavior based on the program's
726 flow graph. In either case, this function works on the flow graph of
727 the function being compiled, closing it with fake edges where needed.
728
729 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
730 the flow graph that are needed to reconstruct the dynamic behavior of the
731 flow graph.
732
733 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
734 information from a data file containing edge count information from previous
735 executions of the function being compiled. In this case, the flow graph is
736 annotated with actual execution counts, which are later propagated into the
737 rtl for optimization purposes.
738
739 Main entry point of this file. */
740
741 void
742 branch_prob (void)
743 {
744 basic_block bb;
745 unsigned i;
746 unsigned num_edges, ignored_edges;
747 unsigned num_instrumented;
748 struct edge_list *el;
749 histogram_values values = NULL;
750
751 total_num_times_called++;
752
753 flow_call_edges_add (NULL);
754 add_noreturn_fake_exit_edges ();
755
756 /* We can't handle cyclic regions constructed using abnormal edges.
757 To avoid these we replace every source of abnormal edge by a fake
758 edge from entry node and every destination by fake edge to exit.
759 This keeps graph acyclic and our calculation exact for all normal
760 edges except for exit and entrance ones.
761
762 We also add fake exit edges for each call and asm statement in the
763 basic block, since it may not return. */
764
765 FOR_EACH_BB (bb)
766 {
767 int need_exit_edge = 0, need_entry_edge = 0;
768 int have_exit_edge = 0, have_entry_edge = 0;
769 edge e;
770 edge_iterator ei;
771
772 /* Functions returning multiple times are not handled by extra edges.
773 Instead we simply allow negative counts on edges from exit to the
774 block past the call, and corresponding probabilities. We can't go
775 with the extra edges because that would result in a flow graph that
776 needs to have fake edges outside the spanning tree. */
777
778 FOR_EACH_EDGE (e, ei, bb->succs)
779 {
780 block_stmt_iterator bsi;
781 tree last = NULL;
782
783 /* It may happen that there are compiler generated statements
784 without a locus at all. Go through the basic block from the
785 last to the first statement looking for a locus. */
786 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
787 {
788 last = bsi_stmt (bsi);
789 if (EXPR_LOCUS (last))
790 break;
791 }
792
793 /* An edge with a goto locus might get wrong coverage info unless
794 it is the only edge out of BB, so split such an edge off into its
795 own block. Don't do that when the locations match, so that
796 if (blah) goto something;
797 is not counted twice. */
798 if (last && EXPR_LOCUS (last)
799 && e->goto_locus
800 && !single_succ_p (bb)
801 #ifdef USE_MAPPED_LOCATION
802 && (LOCATION_FILE (e->goto_locus)
803 != LOCATION_FILE (EXPR_LOCATION (last))
804 || (LOCATION_LINE (e->goto_locus)
805 != LOCATION_LINE (EXPR_LOCATION (last)))))
806 #else
807 && (e->goto_locus->file != EXPR_LOCUS (last)->file
808 || (e->goto_locus->line != EXPR_LOCUS (last)->line)))
809 #endif
810 {
811 basic_block new = split_edge (e);
812 single_succ_edge (new)->goto_locus = e->goto_locus;
813 }
814 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
815 && e->dest != EXIT_BLOCK_PTR)
816 need_exit_edge = 1;
817 if (e->dest == EXIT_BLOCK_PTR)
818 have_exit_edge = 1;
819 }
820 FOR_EACH_EDGE (e, ei, bb->preds)
821 {
822 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
823 && e->src != ENTRY_BLOCK_PTR)
824 need_entry_edge = 1;
825 if (e->src == ENTRY_BLOCK_PTR)
826 have_entry_edge = 1;
827 }
828
829 if (need_exit_edge && !have_exit_edge)
830 {
831 if (dump_file)
832 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
833 bb->index);
834 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
835 }
836 if (need_entry_edge && !have_entry_edge)
837 {
838 if (dump_file)
839 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
840 bb->index);
841 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
842 }
843 }
844
845 el = create_edge_list ();
846 num_edges = NUM_EDGES (el);
847 alloc_aux_for_edges (sizeof (struct edge_info));
848
849 /* The basic blocks are expected to be numbered sequentially. */
850 compact_blocks ();
851
852 ignored_edges = 0;
853 for (i = 0 ; i < num_edges ; i++)
854 {
855 edge e = INDEX_EDGE (el, i);
856 e->count = 0;
857
858 /* Mark edges we've replaced by fake edges above as ignored. */
859 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
860 && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
861 {
862 EDGE_INFO (e)->ignore = 1;
863 ignored_edges++;
864 }
865 }
866
867 /* Create a spanning tree from the basic block graph, marking each edge
868 that is on the spanning tree. We insert as many abnormal and critical
869 edges as possible to minimize the number of edge splits necessary. */
870
871 find_spanning_tree (el);
872
873 /* Fake edges that are not on the tree will not be instrumented, so
874 mark them ignored. */
875 for (num_instrumented = i = 0; i < num_edges; i++)
876 {
877 edge e = INDEX_EDGE (el, i);
878 struct edge_info *inf = EDGE_INFO (e);
879
880 if (inf->ignore || inf->on_tree)
881 /*NOP*/;
882 else if (e->flags & EDGE_FAKE)
883 {
884 inf->ignore = 1;
885 ignored_edges++;
886 }
887 else
888 num_instrumented++;
889 }
890
891 total_num_blocks += n_basic_blocks;
892 if (dump_file)
893 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);
894
895 total_num_edges += num_edges;
896 if (dump_file)
897 fprintf (dump_file, "%d edges\n", num_edges);
898
899 total_num_edges_ignored += ignored_edges;
900 if (dump_file)
901 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
902
903 /* Write the data from which gcov can reconstruct the basic block
904 graph. */
905
906 /* Basic block flags */
907 if (coverage_begin_output ())
908 {
909 gcov_position_t offset;
910
911 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
912 for (i = 0; i != (unsigned) (n_basic_blocks); i++)
913 gcov_write_unsigned (0);
914 gcov_write_length (offset);
915 }
916
917 /* Keep all basic block indexes nonnegative in the gcov output.
918 Index 0 is used for entry block, last index is for exit block.
919 */
920 ENTRY_BLOCK_PTR->index = 1;
921 EXIT_BLOCK_PTR->index = last_basic_block;
922
923 /* Arcs */
924 if (coverage_begin_output ())
925 {
926 gcov_position_t offset;
927
928 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
929 {
930 edge e;
931 edge_iterator ei;
932
933 offset = gcov_write_tag (GCOV_TAG_ARCS);
934 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
935
936 FOR_EACH_EDGE (e, ei, bb->succs)
937 {
938 struct edge_info *i = EDGE_INFO (e);
939 if (!i->ignore)
940 {
941 unsigned flag_bits = 0;
942
943 if (i->on_tree)
944 flag_bits |= GCOV_ARC_ON_TREE;
945 if (e->flags & EDGE_FAKE)
946 flag_bits |= GCOV_ARC_FAKE;
947 if (e->flags & EDGE_FALLTHRU)
948 flag_bits |= GCOV_ARC_FALLTHROUGH;
949 /* On trees we don't have fallthru flags, but we can
950 recompute them from CFG shape. */
951 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
952 && e->src->next_bb == e->dest)
953 flag_bits |= GCOV_ARC_FALLTHROUGH;
954
955 gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
956 gcov_write_unsigned (flag_bits);
957 }
958 }
959
960 gcov_write_length (offset);
961 }
962 }
963
964 /* Line numbers. */
965 if (coverage_begin_output ())
966 {
967 gcov_position_t offset;
968
969 /* Initialize the output. */
970 output_location (NULL, 0, NULL, NULL);
971
972 FOR_EACH_BB (bb)
973 {
974 block_stmt_iterator bsi;
975
976 offset = 0;
977
978 if (bb == ENTRY_BLOCK_PTR->next_bb)
979 {
980 expanded_location curr_location =
981 expand_location (DECL_SOURCE_LOCATION (current_function_decl));
982 output_location (curr_location.file, curr_location.line,
983 &offset, bb);
984 }
985
986 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
987 {
988 tree stmt = bsi_stmt (bsi);
989 if (EXPR_HAS_LOCATION (stmt))
990 output_location (EXPR_FILENAME (stmt), EXPR_LINENO (stmt),
991 &offset, bb);
992 }
993
994 /* Notice GOTO expressions we eliminated while constructing the
995 CFG. */
996 if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
997 {
998 /* ??? source_locus type is marked deprecated in input.h. */
999 source_locus curr_location = single_succ_edge (bb)->goto_locus;
1000 /* ??? The FILE/LINE API is inconsistent for these cases. */
1001 #ifdef USE_MAPPED_LOCATION
1002 output_location (LOCATION_FILE (curr_location),
1003 LOCATION_LINE (curr_location), &offset, bb);
1004 #else
1005 output_location (curr_location->file, curr_location->line,
1006 &offset, bb);
1007 #endif
1008 }
1009
1010 if (offset)
1011 {
1012 /* A file of NULL indicates the end of run. */
1013 gcov_write_unsigned (0);
1014 gcov_write_string (NULL);
1015 gcov_write_length (offset);
1016 }
1017 }
1018 }
1019
1020 ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
1021 EXIT_BLOCK_PTR->index = EXIT_BLOCK;
1022 #undef BB_TO_GCOV_INDEX
1023
1024 if (flag_profile_values)
1025 find_values_to_profile (&values);
1026
1027 if (flag_branch_probabilities)
1028 {
1029 compute_branch_probabilities ();
1030 if (flag_profile_values)
1031 compute_value_histograms (values);
1032 }
1033
1034 remove_fake_edges ();
1035
1036 /* For each edge not on the spanning tree, add counting code. */
1037 if (profile_arc_flag
1038 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1039 {
1040 unsigned n_instrumented;
1041
1042 profile_hooks->init_edge_profiler ();
1043
1044 n_instrumented = instrument_edges (el);
1045
1046 gcc_assert (n_instrumented == num_instrumented);
1047
1048 if (flag_profile_values)
1049 instrument_values (values);
1050
1051 /* Commit changes done by instrumentation. */
1052 bsi_commit_edge_inserts ();
1053 }
1054
1055 free_aux_for_edges ();
1056
1057 VEC_free (histogram_value, heap, values);
1058 free_edge_list (el);
1059 if (flag_branch_probabilities)
1060 profile_status = PROFILE_READ;
1061 coverage_end_function ();
1062 }
1063 \f
1064 /* Union find algorithm implementation for the basic blocks using
1065 aux fields. */
1066
1067 static basic_block
1068 find_group (basic_block bb)
1069 {
1070 basic_block group = bb, bb1;
1071
1072 while ((basic_block) group->aux != group)
1073 group = (basic_block) group->aux;
1074
1075 /* Compress path. */
1076 while ((basic_block) bb->aux != group)
1077 {
1078 bb1 = (basic_block) bb->aux;
1079 bb->aux = (void *) group;
1080 bb = bb1;
1081 }
1082 return group;
1083 }
1084
1085 static void
1086 union_groups (basic_block bb1, basic_block bb2)
1087 {
1088 basic_block bb1g = find_group (bb1);
1089 basic_block bb2g = find_group (bb2);
1090
1091 /* ??? We have no place for the rank field. OK, let's go without it;
1092 this code is unlikely to be a performance problem anyway. */
1093 gcc_assert (bb1g != bb2g);
1094
1095 bb1g->aux = bb2g;
1096 }
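
/* Standalone sketch (kept out of the build) of the same union-find scheme
   on plain integers instead of basic blocks: the aux field becomes a
   parent array, find compresses paths like find_group, and union simply
   links one representative to the other, with no rank, as the note above
   says.  All names are invented for the example.  */
#if 0
#include <stdio.h>

#define N 6

static int parent[N];

static int
find_rep (int x)
{
  int root = x, tmp;

  while (parent[root] != root)
    root = parent[root];

  /* Compress the path, as find_group does through the aux fields.  */
  while (parent[x] != root)
    {
      tmp = parent[x];
      parent[x] = root;
      x = tmp;
    }
  return root;
}

static void
union_reps (int a, int b)
{
  parent[find_rep (a)] = find_rep (b);
}

int
main (void)
{
  int i;

  for (i = 0; i < N; i++)
    parent[i] = i;

  union_reps (0, 1);
  union_reps (2, 3);
  union_reps (1, 3);

  printf ("0 and 2 connected: %s\n",
          find_rep (0) == find_rep (2) ? "yes" : "no");
  printf ("4 and 5 connected: %s\n",
          find_rep (4) == find_rep (5) ? "yes" : "no");
  return 0;
}
#endif
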
1097 \f
1098 /* This function searches all of the edges in the program flow graph, and puts
1099 as many bad edges as possible onto the spanning tree. Bad edges include
1100 abnormal edges, which can't be instrumented at the moment. Since it is
1101 possible for fake edges to form a cycle, we will have to develop some
1102 better way in the future. Also put critical edges on the tree, since they
1103 are more expensive to instrument. */
1104
1105 static void
1106 find_spanning_tree (struct edge_list *el)
1107 {
1108 int i;
1109 int num_edges = NUM_EDGES (el);
1110 basic_block bb;
1111
1112 /* We use aux field for standard union-find algorithm. */
1113 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1114 bb->aux = bb;
1115
1116 /* Add the fake edge from exit to entry, which we can't instrument. */
1117 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1118
1119 /* First add all abnormal edges to the tree unless they form a cycle. Also
1120 add all edges to EXIT_BLOCK_PTR, to avoid inserting profiling code after
1121 the setting of the function's return value. */
1122 for (i = 0; i < num_edges; i++)
1123 {
1124 edge e = INDEX_EDGE (el, i);
1125 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1126 || e->dest == EXIT_BLOCK_PTR)
1127 && !EDGE_INFO (e)->ignore
1128 && (find_group (e->src) != find_group (e->dest)))
1129 {
1130 if (dump_file)
1131 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1132 e->src->index, e->dest->index);
1133 EDGE_INFO (e)->on_tree = 1;
1134 union_groups (e->src, e->dest);
1135 }
1136 }
1137
1138 /* Now insert all critical edges to the tree unless they form a cycle. */
1139 for (i = 0; i < num_edges; i++)
1140 {
1141 edge e = INDEX_EDGE (el, i);
1142 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1143 && find_group (e->src) != find_group (e->dest))
1144 {
1145 if (dump_file)
1146 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1147 e->src->index, e->dest->index);
1148 EDGE_INFO (e)->on_tree = 1;
1149 union_groups (e->src, e->dest);
1150 }
1151 }
1152
1153 /* And now the rest. */
1154 for (i = 0; i < num_edges; i++)
1155 {
1156 edge e = INDEX_EDGE (el, i);
1157 if (!EDGE_INFO (e)->ignore
1158 && find_group (e->src) != find_group (e->dest))
1159 {
1160 if (dump_file)
1161 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1162 e->src->index, e->dest->index);
1163 EDGE_INFO (e)->on_tree = 1;
1164 union_groups (e->src, e->dest);
1165 }
1166 }
1167
1168 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1169 bb->aux = NULL;
1170 }
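
/* Compact standalone sketch (kept out of the build) of the edge ordering
   used above: abnormal edges are offered to the spanning tree first, then
   critical edges, then everything else, and an edge is taken only when it
   does not close a cycle.  The graph, the flags and the cycle test below
   are simplified stand-ins invented for the example.  */
#if 0
#include <stdio.h>

#define EX_ABNORMAL 1
#define EX_CRITICAL 2

struct ex_edge { int src, dst, flags, on_tree; };

static struct ex_edge edges[] = {
  { 0, 1, 0, 0 },
  { 1, 2, EX_CRITICAL, 0 },
  { 1, 3, 0, 0 },
  { 2, 3, EX_ABNORMAL, 0 },
  { 3, 0, 0, 0 },
};

#define N_EDGES ((int) (sizeof (edges) / sizeof (edges[0])))
#define N_BLOCKS 4

static int parent[N_BLOCKS];

static int
find_rep (int x)
{
  while (parent[x] != x)
    x = parent[x];
  return x;
}

int
main (void)
{
  int pass, i;

  for (i = 0; i < N_BLOCKS; i++)
    parent[i] = i;

  /* Pass 0: abnormal edges; pass 1: critical edges; pass 2: the rest.  */
  for (pass = 0; pass < 3; pass++)
    for (i = 0; i < N_EDGES; i++)
      {
        struct ex_edge *e = &edges[i];
        int wanted = pass == 0 ? (e->flags & EX_ABNORMAL)
                     : pass == 1 ? (e->flags & EX_CRITICAL) : 1;

        if (wanted && !e->on_tree && find_rep (e->src) != find_rep (e->dst))
          {
            e->on_tree = 1;
            parent[find_rep (e->src)] = find_rep (e->dst);
          }
      }

  for (i = 0; i < N_EDGES; i++)
    printf ("edge %d->%d: %s\n", edges[i].src, edges[i].dst,
            edges[i].on_tree ? "on tree" : "instrumented");
  return 0;
}
#endif
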
1171 \f
1172 /* Perform file-level initialization for branch-prob processing. */
1173
1174 void
1175 init_branch_prob (void)
1176 {
1177 int i;
1178
1179 total_num_blocks = 0;
1180 total_num_edges = 0;
1181 total_num_edges_ignored = 0;
1182 total_num_edges_instrumented = 0;
1183 total_num_blocks_created = 0;
1184 total_num_passes = 0;
1185 total_num_times_called = 0;
1186 total_num_branches = 0;
1187 total_num_never_executed = 0;
1188 for (i = 0; i < 20; i++)
1189 total_hist_br_prob[i] = 0;
1190 }
1191
1192 /* Perform file-level cleanup after branch-prob processing
1193 is completed. */
1194
1195 void
1196 end_branch_prob (void)
1197 {
1198 if (dump_file)
1199 {
1200 fprintf (dump_file, "\n");
1201 fprintf (dump_file, "Total number of blocks: %d\n",
1202 total_num_blocks);
1203 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1204 fprintf (dump_file, "Total number of ignored edges: %d\n",
1205 total_num_edges_ignored);
1206 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1207 total_num_edges_instrumented);
1208 fprintf (dump_file, "Total number of blocks created: %d\n",
1209 total_num_blocks_created);
1210 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1211 total_num_passes);
1212 if (total_num_times_called != 0)
1213 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1214 (total_num_passes + (total_num_times_called >> 1))
1215 / total_num_times_called);
1216 fprintf (dump_file, "Total number of branches: %d\n",
1217 total_num_branches);
1218 fprintf (dump_file, "Total number of branches never executed: %d\n",
1219 total_num_never_executed);
1220 if (total_num_branches)
1221 {
1222 int i;
1223
1224 for (i = 0; i < 10; i++)
1225 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1226 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1227 / total_num_branches, 5*i, 5*i+5);
1228 }
1229 }
1230 }
1231
1232 /* Set up hooks to enable tree-based profiling. */
1233
1234 void
1235 tree_register_profile_hooks (void)
1236 {
1237 gcc_assert (current_ir_type () == IR_GIMPLE);
1238 profile_hooks = &tree_profile_hooks;
1239 }
1240