cfg.c (dump_flow_info): Get dump flags as an additional parameter.
[gcc.git] / gcc / profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
5 based on some ideas from Dain Samples of UC Berkeley.
6 Further mangling by Bob Manson, Cygnus Support.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
24
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation we generate the
36 BB minimal span tree, only edges that are not on the span tree
37 (plus the entry point) need instrumenting. From that information
38 all other edge counts can be deduced. By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
41
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
45
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
48
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
51
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "toplev.h"
63 #include "coverage.h"
64 #include "value-prof.h"
65 #include "tree.h"
66 #include "cfghooks.h"
67 #include "tree-flow.h"
68 #include "timevar.h"
69 #include "cfgloop.h"
70 #include "tree-pass.h"
71
/* Hooks for profiling.  */
static struct profile_hooks* profile_hooks;

/* Additional information about the edges we need.  */
struct edge_info {
  /* The execution count of this edge is known (read from the profile
     data or deduced by the graph-solving pass).  */
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

/* Additional per-basic-block bookkeeping used while solving the
   flow graph for edge counts.  */
struct bb_info {
  /* The execution count of this block is known.  */
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

/* Accessors for the aux fields, which hold the structs above while
   this pass runs.  */
#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)

/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
/* Histogram of branch probabilities in 5% buckets.  */
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;

/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type * get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
126 \f
127 /* Add edge instrumentation code to the entire insn chain.
128
129 F is the first insn of the chain.
130 NUM_BLOCKS is the number of basic blocks found in F. */
131
132 static unsigned
133 instrument_edges (struct edge_list *el)
134 {
135 unsigned num_instr_edges = 0;
136 int num_edges = NUM_EDGES (el);
137 basic_block bb;
138
139 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
140 {
141 edge e;
142 edge_iterator ei;
143
144 FOR_EACH_EDGE (e, ei, bb->succs)
145 {
146 struct edge_info *inf = EDGE_INFO (e);
147
148 if (!inf->ignore && !inf->on_tree)
149 {
150 gcc_assert (!(e->flags & EDGE_ABNORMAL));
151 if (dump_file)
152 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
153 e->src->index, e->dest->index,
154 EDGE_CRITICAL_P (e) ? " (and split)" : "");
155 (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
156 }
157 }
158 }
159
160 total_num_blocks_created += num_edges;
161 if (dump_file)
162 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
163 return num_instr_edges;
164 }
165
166 /* Add code to measure histograms for values in list VALUES. */
167 static void
168 instrument_values (histogram_values values)
169 {
170 unsigned i, t;
171
172 /* Emit code to generate the histograms before the insns. */
173
174 for (i = 0; i < VEC_length (histogram_value, values); i++)
175 {
176 histogram_value hist = VEC_index (histogram_value, values, i);
177 switch (hist->type)
178 {
179 case HIST_TYPE_INTERVAL:
180 t = GCOV_COUNTER_V_INTERVAL;
181 break;
182
183 case HIST_TYPE_POW2:
184 t = GCOV_COUNTER_V_POW2;
185 break;
186
187 case HIST_TYPE_SINGLE_VALUE:
188 t = GCOV_COUNTER_V_SINGLE;
189 break;
190
191 case HIST_TYPE_CONST_DELTA:
192 t = GCOV_COUNTER_V_DELTA;
193 break;
194
195 default:
196 gcc_unreachable ();
197 }
198 if (!coverage_counter_alloc (t, hist->n_counters))
199 continue;
200
201 switch (hist->type)
202 {
203 case HIST_TYPE_INTERVAL:
204 (profile_hooks->gen_interval_profiler) (hist, t, 0);
205 break;
206
207 case HIST_TYPE_POW2:
208 (profile_hooks->gen_pow2_profiler) (hist, t, 0);
209 break;
210
211 case HIST_TYPE_SINGLE_VALUE:
212 (profile_hooks->gen_one_value_profiler) (hist, t, 0);
213 break;
214
215 case HIST_TYPE_CONST_DELTA:
216 (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
217 break;
218
219 default:
220 gcc_unreachable ();
221 }
222 }
223 VEC_free (histogram_value, heap, values);
224 }
225 \f
226
227 /* Computes hybrid profile for all matching entries in da_file. */
228
229 static gcov_type *
230 get_exec_counts (void)
231 {
232 unsigned num_edges = 0;
233 basic_block bb;
234 gcov_type *counts;
235
236 /* Count the edges to be (possibly) instrumented. */
237 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
238 {
239 edge e;
240 edge_iterator ei;
241
242 FOR_EACH_EDGE (e, ei, bb->succs)
243 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
244 num_edges++;
245 }
246
247 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
248 if (!counts)
249 return NULL;
250
251 if (dump_file && profile_info)
252 fprintf(dump_file, "Merged %u profiles with maximal count %u.\n",
253 profile_info->runs, (unsigned) profile_info->sum_max);
254
255 return counts;
256 }
257 \f
258
259 /* Compute the branch probabilities for the various branches.
260 Annotate them accordingly. */
261
262 static void
263 compute_branch_probabilities (void)
264 {
265 basic_block bb;
266 int i;
267 int num_edges = 0;
268 int changes;
269 int passes;
270 int hist_br_prob[20];
271 int num_never_executed;
272 int num_branches;
273 gcov_type *exec_counts = get_exec_counts ();
274 int exec_counts_pos = 0;
275
276 /* Very simple sanity checks so we catch bugs in our profiling code. */
277 if (profile_info)
278 {
279 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
280 {
281 error ("corrupted profile info: run_max * runs < sum_max");
282 exec_counts = NULL;
283 }
284
285 if (profile_info->sum_all < profile_info->sum_max)
286 {
287 error ("corrupted profile info: sum_all is smaller than sum_max");
288 exec_counts = NULL;
289 }
290 }
291
292 /* Attach extra info block to each bb. */
293
294 alloc_aux_for_blocks (sizeof (struct bb_info));
295 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
296 {
297 edge e;
298 edge_iterator ei;
299
300 FOR_EACH_EDGE (e, ei, bb->succs)
301 if (!EDGE_INFO (e)->ignore)
302 BB_INFO (bb)->succ_count++;
303 FOR_EACH_EDGE (e, ei, bb->preds)
304 if (!EDGE_INFO (e)->ignore)
305 BB_INFO (bb)->pred_count++;
306 }
307
308 /* Avoid predicting entry on exit nodes. */
309 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
310 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
311
312 /* For each edge not on the spanning tree, set its execution count from
313 the .da file. */
314
315 /* The first count in the .da file is the number of times that the function
316 was entered. This is the exec_count for block zero. */
317
318 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
319 {
320 edge e;
321 edge_iterator ei;
322
323 FOR_EACH_EDGE (e, ei, bb->succs)
324 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
325 {
326 num_edges++;
327 if (exec_counts)
328 {
329 e->count = exec_counts[exec_counts_pos++];
330 if (e->count > profile_info->sum_max)
331 {
332 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
333 bb->index, e->dest->index);
334 }
335 }
336 else
337 e->count = 0;
338
339 EDGE_INFO (e)->count_valid = 1;
340 BB_INFO (bb)->succ_count--;
341 BB_INFO (e->dest)->pred_count--;
342 if (dump_file)
343 {
344 fprintf (dump_file, "\nRead edge from %i to %i, count:",
345 bb->index, e->dest->index);
346 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
347 (HOST_WIDEST_INT) e->count);
348 }
349 }
350 }
351
352 if (dump_file)
353 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
354
355 /* For every block in the file,
356 - if every exit/entrance edge has a known count, then set the block count
357 - if the block count is known, and every exit/entrance edge but one has
358 a known execution count, then set the count of the remaining edge
359
360 As edge counts are set, decrement the succ/pred count, but don't delete
361 the edge, that way we can easily tell when all edges are known, or only
362 one edge is unknown. */
363
364 /* The order that the basic blocks are iterated through is important.
365 Since the code that finds spanning trees starts with block 0, low numbered
366 edges are put on the spanning tree in preference to high numbered edges.
367 Hence, most instrumented edges are at the end. Graph solving works much
368 faster if we propagate numbers from the end to the start.
369
370 This takes an average of slightly more than 3 passes. */
371
372 changes = 1;
373 passes = 0;
374 while (changes)
375 {
376 passes++;
377 changes = 0;
378 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
379 {
380 struct bb_info *bi = BB_INFO (bb);
381 if (! bi->count_valid)
382 {
383 if (bi->succ_count == 0)
384 {
385 edge e;
386 edge_iterator ei;
387 gcov_type total = 0;
388
389 FOR_EACH_EDGE (e, ei, bb->succs)
390 total += e->count;
391 bb->count = total;
392 bi->count_valid = 1;
393 changes = 1;
394 }
395 else if (bi->pred_count == 0)
396 {
397 edge e;
398 edge_iterator ei;
399 gcov_type total = 0;
400
401 FOR_EACH_EDGE (e, ei, bb->preds)
402 total += e->count;
403 bb->count = total;
404 bi->count_valid = 1;
405 changes = 1;
406 }
407 }
408 if (bi->count_valid)
409 {
410 if (bi->succ_count == 1)
411 {
412 edge e;
413 edge_iterator ei;
414 gcov_type total = 0;
415
416 /* One of the counts will be invalid, but it is zero,
417 so adding it in also doesn't hurt. */
418 FOR_EACH_EDGE (e, ei, bb->succs)
419 total += e->count;
420
421 /* Seedgeh for the invalid edge, and set its count. */
422 FOR_EACH_EDGE (e, ei, bb->succs)
423 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
424 break;
425
426 /* Calculate count for remaining edge by conservation. */
427 total = bb->count - total;
428
429 gcc_assert (e);
430 EDGE_INFO (e)->count_valid = 1;
431 e->count = total;
432 bi->succ_count--;
433
434 BB_INFO (e->dest)->pred_count--;
435 changes = 1;
436 }
437 if (bi->pred_count == 1)
438 {
439 edge e;
440 edge_iterator ei;
441 gcov_type total = 0;
442
443 /* One of the counts will be invalid, but it is zero,
444 so adding it in also doesn't hurt. */
445 FOR_EACH_EDGE (e, ei, bb->preds)
446 total += e->count;
447
448 /* Search for the invalid edge, and set its count. */
449 FOR_EACH_EDGE (e, ei, bb->preds)
450 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
451 break;
452
453 /* Calculate count for remaining edge by conservation. */
454 total = bb->count - total + e->count;
455
456 gcc_assert (e);
457 EDGE_INFO (e)->count_valid = 1;
458 e->count = total;
459 bi->pred_count--;
460
461 BB_INFO (e->src)->succ_count--;
462 changes = 1;
463 }
464 }
465 }
466 }
467 if (dump_file)
468 dump_flow_info (dump_file, dump_flags);
469
470 total_num_passes += passes;
471 if (dump_file)
472 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
473
474 /* If the graph has been correctly solved, every block will have a
475 succ and pred count of zero. */
476 FOR_EACH_BB (bb)
477 {
478 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
479 }
480
481 /* For every edge, calculate its branch probability and add a reg_note
482 to the branch insn to indicate this. */
483
484 for (i = 0; i < 20; i++)
485 hist_br_prob[i] = 0;
486 num_never_executed = 0;
487 num_branches = 0;
488
489 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
490 {
491 edge e;
492 edge_iterator ei;
493 rtx note;
494
495 if (bb->count < 0)
496 {
497 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
498 bb->index, (int)bb->count);
499 bb->count = 0;
500 }
501 FOR_EACH_EDGE (e, ei, bb->succs)
502 {
503 /* Function may return twice in the cased the called function is
504 setjmp or calls fork, but we can't represent this by extra
505 edge from the entry, since extra edge from the exit is
506 already present. We get negative frequency from the entry
507 point. */
508 if ((e->count < 0
509 && e->dest == EXIT_BLOCK_PTR)
510 || (e->count > bb->count
511 && e->dest != EXIT_BLOCK_PTR))
512 {
513 if (block_ends_with_call_p (bb))
514 e->count = e->count < 0 ? 0 : bb->count;
515 }
516 if (e->count < 0 || e->count > bb->count)
517 {
518 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
519 e->src->index, e->dest->index,
520 (int)e->count);
521 e->count = bb->count / 2;
522 }
523 }
524 if (bb->count)
525 {
526 FOR_EACH_EDGE (e, ei, bb->succs)
527 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
528 if (bb->index >= NUM_FIXED_BLOCKS
529 && block_ends_with_condjump_p (bb)
530 && EDGE_COUNT (bb->succs) >= 2)
531 {
532 int prob;
533 edge e;
534 int index;
535
536 /* Find the branch edge. It is possible that we do have fake
537 edges here. */
538 FOR_EACH_EDGE (e, ei, bb->succs)
539 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
540 break;
541
542 prob = e->probability;
543 index = prob * 20 / REG_BR_PROB_BASE;
544
545 if (index == 20)
546 index = 19;
547 hist_br_prob[index]++;
548
549 /* Do this for RTL only. */
550 if (!ir_type ())
551 {
552 note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
553 /* There may be already note put by some other pass, such
554 as builtin_expect expander. */
555 if (note)
556 XEXP (note, 0) = GEN_INT (prob);
557 else
558 REG_NOTES (BB_END (bb))
559 = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
560 REG_NOTES (BB_END (bb)));
561 }
562 num_branches++;
563 }
564 }
565 /* Otherwise try to preserve the existing REG_BR_PROB probabilities
566 tree based profile guessing put into code. BB can be the
567 ENTRY_BLOCK, and it can have multiple (fake) successors in
568 EH cases, but it still has no code; don't crash in this case. */
569 else if (profile_status == PROFILE_ABSENT
570 && !ir_type ()
571 && EDGE_COUNT (bb->succs) > 1
572 && BB_END (bb)
573 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
574 {
575 int prob = INTVAL (XEXP (note, 0));
576
577 BRANCH_EDGE (bb)->probability = prob;
578 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
579 }
580 /* As a last resort, distribute the probabilities evenly.
581 Use simple heuristics that if there are normal edges,
582 give all abnormals frequency of 0, otherwise distribute the
583 frequency over abnormals (this is the case of noreturn
584 calls). */
585 else if (profile_status == PROFILE_ABSENT)
586 {
587 int total = 0;
588
589 FOR_EACH_EDGE (e, ei, bb->succs)
590 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
591 total ++;
592 if (total)
593 {
594 FOR_EACH_EDGE (e, ei, bb->succs)
595 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
596 e->probability = REG_BR_PROB_BASE / total;
597 else
598 e->probability = 0;
599 }
600 else
601 {
602 total += EDGE_COUNT (bb->succs);
603 FOR_EACH_EDGE (e, ei, bb->succs)
604 e->probability = REG_BR_PROB_BASE / total;
605 }
606 if (bb->index >= NUM_FIXED_BLOCKS
607 && block_ends_with_condjump_p (bb)
608 && EDGE_COUNT (bb->succs) >= 2)
609 num_branches++, num_never_executed;
610 }
611 }
612 counts_to_freqs ();
613
614 if (dump_file)
615 {
616 fprintf (dump_file, "%d branches\n", num_branches);
617 fprintf (dump_file, "%d branches never executed\n",
618 num_never_executed);
619 if (num_branches)
620 for (i = 0; i < 10; i++)
621 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
622 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
623 5 * i, 5 * i + 5);
624
625 total_num_branches += num_branches;
626 total_num_never_executed += num_never_executed;
627 for (i = 0; i < 20; i++)
628 total_hist_br_prob[i] += hist_br_prob[i];
629
630 fputc ('\n', dump_file);
631 fputc ('\n', dump_file);
632 }
633
634 free_aux_for_blocks ();
635 }
636
637 /* Load value histograms values whose description is stored in VALUES array
638 from .gcda file. */
639
640 static void
641 compute_value_histograms (histogram_values values)
642 {
643 unsigned i, j, t, any;
644 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
645 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
646 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
647 gcov_type *aact_count;
648
649 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
650 n_histogram_counters[t] = 0;
651
652 for (i = 0; i < VEC_length (histogram_value, values); i++)
653 {
654 histogram_value hist = VEC_index (histogram_value, values, i);
655 n_histogram_counters[(int) hist->type] += hist->n_counters;
656 }
657
658 any = 0;
659 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
660 {
661 if (!n_histogram_counters[t])
662 {
663 histogram_counts[t] = NULL;
664 continue;
665 }
666
667 histogram_counts[t] =
668 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
669 n_histogram_counters[t], NULL);
670 if (histogram_counts[t])
671 any = 1;
672 act_count[t] = histogram_counts[t];
673 }
674 if (!any)
675 return;
676
677 for (i = 0; i < VEC_length (histogram_value, values); i++)
678 {
679 histogram_value hist = VEC_index (histogram_value, values, i);
680 tree stmt = hist->hvalue.stmt;
681 stmt_ann_t ann = get_stmt_ann (stmt);
682
683 t = (int) hist->type;
684
685 aact_count = act_count[t];
686 act_count[t] += hist->n_counters;
687
688 hist->hvalue.next = ann->histograms;
689 ann->histograms = hist;
690 hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
691 for (j = 0; j < hist->n_counters; j++)
692 hist->hvalue.counters[j] = aact_count[j];
693 }
694
695 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
696 if (histogram_counts[t])
697 free (histogram_counts[t]);
698 }
699
/* The entry basic block will be moved around so that it has index=1,
   there is nothing at index 0 and the exit is at n_basic_block.  */
#define BB_TO_GCOV_INDEX(bb) ((bb)->index - 1)
/* When passed NULL as file_name, initialize.
   When passed something else, output the necessary commands to change
   the current file to FILE_NAME and the current line to LINE, writing
   into the GCOV_TAG_LINES record whose position is tracked in *OFFSET
   (a zero *OFFSET means the record for BB has not been opened yet).  */
static void
output_location (char const *file_name, int line,
		 gcov_position_t *offset, basic_block bb)
{
  /* State carried between calls so that redundant file/line records
     are suppressed; reset by the initialization call.  */
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  /* NULL file name: reset the memoized state and return.  */
  if (!file_name)
    {
      prev_file_name = NULL;
      prev_line = -1;
      return;
    }

  name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      /* First location for this BB: open the lines record and force
	 both the file name and the line to be emitted.  */
      if (!*offset)
	{
	  *offset = gcov_write_tag (GCOV_TAG_LINES);
	  gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
	  name_differs = line_differs=true;
	}

      /* If this is a new source file, then output the
	 file's name to the .bb file.  */
      if (name_differs)
	{
	  prev_file_name = file_name;
	  gcov_write_unsigned (0);
	  gcov_write_string (prev_file_name);
	}
      if (line_differs)
	{
	  gcov_write_unsigned (line);
	  prev_line = line;
	}
    }
}
748
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
	 Instead we simply allow negative counts on edges from exit to the
	 block past call and corresponding probabilities.  We can't go
	 with the extra edges because that would result in flowgraph that
	 needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  block_stmt_iterator bsi;
	  tree last = NULL;

	  /* It may happen that there are compiler generated statements
	     without a locus at all.  Go through the basic block from the
	     last to the first statement looking for a locus.  */
	  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
	    {
	      last = bsi_stmt (bsi);
	      if (EXPR_LOCUS (last))
		break;
	    }

	  /* Edge with goto locus might get wrong coverage info unless
	     it is the only edge out of BB.
	     Don't do that when the locuses match, so
	     if (blah) goto something;
	     is not computed twice.  */
	  if (last && EXPR_LOCUS (last)
	      && e->goto_locus
	      && !single_succ_p (bb)
#ifdef USE_MAPPED_LOCATION
	      && (LOCATION_FILE (e->goto_locus)
		  != LOCATION_FILE (EXPR_LOCATION (last))
		  || (LOCATION_LINE (e->goto_locus)
		      != LOCATION_LINE (EXPR_LOCATION (last)))))
#else
	      && (e->goto_locus->file != EXPR_LOCUS (last)->file
		  || (e->goto_locus->line != EXPR_LOCUS (last)->line)))
#endif
	    {
	      basic_block new = split_edge (e);
	      single_succ_edge (new)->goto_locus = e->goto_locus;
	    }
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->dest != EXIT_BLOCK_PTR)
	    need_exit_edge = 1;
	  if (e->dest == EXIT_BLOCK_PTR)
	    have_exit_edge = 1;
	}
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->src != ENTRY_BLOCK_PTR)
	    need_entry_edge = 1;
	  if (e->src == ENTRY_BLOCK_PTR)
	    have_entry_edge = 1;
	}

      if (need_exit_edge && !have_exit_edge)
	{
	  if (dump_file)
	    fprintf (dump_file, "Adding fake exit edge to bb %i\n",
		     bb->index);
	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	}
      if (need_entry_edge && !have_entry_edge)
	{
	  if (dump_file)
	    fprintf (dump_file, "Adding fake entry edge to bb %i\n",
		     bb->index);
	  make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
	}
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	  && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
	{
	  EDGE_INFO (e)->ignore = 1;
	  ignored_edges++;
	}
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
	/*NOP*/;
      else if (e->flags & EDGE_FAKE)
	{
	  inf->ignore = 1;
	  ignored_edges++;
	}
      else
	num_instrumented++;
    }

  total_num_blocks += n_basic_blocks;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
	gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.
     Temporarily renumber the entry/exit blocks; restored below.  */
  ENTRY_BLOCK_PTR->index = 1;
  EXIT_BLOCK_PTR->index = last_basic_block;

  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  offset = gcov_write_tag (GCOV_TAG_ARCS);
	  gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      struct edge_info *i = EDGE_INFO (e);
	      if (!i->ignore)
		{
		  unsigned flag_bits = 0;

		  if (i->on_tree)
		    flag_bits |= GCOV_ARC_ON_TREE;
		  if (e->flags & EDGE_FAKE)
		    flag_bits |= GCOV_ARC_FAKE;
		  if (e->flags & EDGE_FALLTHRU)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;
		  /* On trees we don't have fallthru flags, but we can
		     recompute them from CFG shape.  */
		  if (ir_type ()
		      && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
		      && e->src->next_bb == e->dest)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;

		  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
		  gcov_write_unsigned (flag_bits);
		}
	    }

	  gcov_write_length (offset);
	}
    }

  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      if (!ir_type ())
	{
	  gcov_position_t offset;

	  FOR_EACH_BB (bb)
	    {
	      rtx insn = BB_HEAD (bb);
	      int ignore_next_note = 0;

	      offset = 0;

	      /* We are looking for line number notes.  Search backward
		 before basic block to find correct ones.  */
	      insn = prev_nonnote_insn (insn);
	      if (!insn)
		insn = get_insns ();
	      else
		insn = NEXT_INSN (insn);

	      while (insn != BB_END (bb))
		{
		  if (NOTE_P (insn))
		    {
		      /* Must ignore the line number notes that
			 immediately follow the end of an inline function
			 to avoid counting it twice.  There is a note
			 before the call, and one after the call.  */
		      if (NOTE_LINE_NUMBER (insn)
			  == NOTE_INSN_REPEATED_LINE_NUMBER)
			ignore_next_note = 1;
		      else if (NOTE_LINE_NUMBER (insn) <= 0)
			/*NOP*/;
		      else if (ignore_next_note)
			ignore_next_note = 0;
		      else
			{
			  expanded_location s;
			  NOTE_EXPANDED_LOCATION (s, insn);
			  output_location (s.file, s.line, &offset, bb);
			}
		    }
		  insn = NEXT_INSN (insn);
		}

	      if (offset)
		{
		  /* A file of NULL indicates the end of run.  */
		  gcov_write_unsigned (0);
		  gcov_write_string (NULL);
		  gcov_write_length (offset);
		}
	    }
	}
      else
	{
	  gcov_position_t offset;

	  FOR_EACH_BB (bb)
	    {
	      block_stmt_iterator bsi;

	      offset = 0;

	      /* The first block gets the location of the function
		 declaration itself.  */
	      if (bb == ENTRY_BLOCK_PTR->next_bb)
		{
		  expanded_location curr_location =
		    expand_location (DECL_SOURCE_LOCATION
				     (current_function_decl));
		  output_location (curr_location.file, curr_location.line,
				   &offset, bb);
		}

	      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		{
		  tree stmt = bsi_stmt (bsi);
		  if (EXPR_HAS_LOCATION (stmt))
		    output_location (EXPR_FILENAME (stmt),
				     EXPR_LINENO (stmt),
				     &offset, bb);
		}

	      /* Notice GOTO expressions we eliminated while constructing the
		 CFG.  */
	      if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
		{
		  /* ??? source_locus type is marked deprecated in input.h.  */
		  source_locus curr_location = single_succ_edge (bb)->goto_locus;
		  /* ??? The FILE/LINE API is inconsistent for these cases.  */
#ifdef USE_MAPPED_LOCATION
		  output_location (LOCATION_FILE (curr_location),
				   LOCATION_LINE (curr_location),
				   &offset, bb);
#else
		  output_location (curr_location->file, curr_location->line,
				   &offset, bb);
#endif
		}

	      if (offset)
		{
		  /* A file of NULL indicates the end of run.  */
		  gcov_write_unsigned (0);
		  gcov_write_string (NULL);
		  gcov_write_length (offset);
		}
	    }
	}
    }

  /* Restore the entry/exit block indexes renumbered above.  */
  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
	compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
	instrument_values (values);

      /* Commit changes done by instrumentation.  */
      if (ir_type ())
	bsi_commit_edge_inserts ();
      else
	{
	  commit_edge_insertions_watch_calls ();
	  allocate_reg_info (max_reg_num (), FALSE, FALSE);
	}
    }

  free_aux_for_edges ();

  if (!ir_type ())
    {
      /* Re-merge split basic blocks and the mess introduced by
	 insert_insn_on_edge.  */
      cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
      if (dump_file)
	dump_flow_info (dump_file, dump_flags);
    }

  free_edge_list (el);
  if (flag_branch_probabilities)
    profile_status = PROFILE_READ;
  coverage_end_function ();
}
1161 \f
1162 /* Union find algorithm implementation for the basic blocks using
1163 aux fields. */
1164
1165 static basic_block
1166 find_group (basic_block bb)
1167 {
1168 basic_block group = bb, bb1;
1169
1170 while ((basic_block) group->aux != group)
1171 group = (basic_block) group->aux;
1172
1173 /* Compress path. */
1174 while ((basic_block) bb->aux != group)
1175 {
1176 bb1 = (basic_block) bb->aux;
1177 bb->aux = (void *) group;
1178 bb = bb1;
1179 }
1180 return group;
1181 }
1182
1183 static void
1184 union_groups (basic_block bb1, basic_block bb2)
1185 {
1186 basic_block bb1g = find_group (bb1);
1187 basic_block bb2g = find_group (bb2);
1188
1189 /* ??? I don't have a place for the rank field. OK. Lets go w/o it,
1190 this code is unlikely going to be performance problem anyway. */
1191 gcc_assert (bb1g != bb2g);
1192
1193 bb1g->aux = bb2g;
1194 }
1195 \f
1196 /* This function searches all of the edges in the program flow graph, and puts
1197 as many bad edges as possible onto the spanning tree. Bad edges include
1198 abnormals edges, which can't be instrumented at the moment. Since it is
1199 possible for fake edges to form a cycle, we will have to develop some
1200 better way in the future. Also put critical edges to the tree, since they
1201 are more expensive to instrument. */
1202
1203 static void
1204 find_spanning_tree (struct edge_list *el)
1205 {
1206 int i;
1207 int num_edges = NUM_EDGES (el);
1208 basic_block bb;
1209
1210 /* We use aux field for standard union-find algorithm. */
1211 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1212 bb->aux = bb;
1213
1214 /* Add fake edge exit to entry we can't instrument. */
1215 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1216
1217 /* First add all abnormal edges to the tree unless they form a cycle. Also
1218 add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
1219 setting return value from function. */
1220 for (i = 0; i < num_edges; i++)
1221 {
1222 edge e = INDEX_EDGE (el, i);
1223 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1224 || e->dest == EXIT_BLOCK_PTR)
1225 && !EDGE_INFO (e)->ignore
1226 && (find_group (e->src) != find_group (e->dest)))
1227 {
1228 if (dump_file)
1229 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1230 e->src->index, e->dest->index);
1231 EDGE_INFO (e)->on_tree = 1;
1232 union_groups (e->src, e->dest);
1233 }
1234 }
1235
1236 /* Now insert all critical edges to the tree unless they form a cycle. */
1237 for (i = 0; i < num_edges; i++)
1238 {
1239 edge e = INDEX_EDGE (el, i);
1240 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1241 && find_group (e->src) != find_group (e->dest))
1242 {
1243 if (dump_file)
1244 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1245 e->src->index, e->dest->index);
1246 EDGE_INFO (e)->on_tree = 1;
1247 union_groups (e->src, e->dest);
1248 }
1249 }
1250
1251 /* And now the rest. */
1252 for (i = 0; i < num_edges; i++)
1253 {
1254 edge e = INDEX_EDGE (el, i);
1255 if (!EDGE_INFO (e)->ignore
1256 && find_group (e->src) != find_group (e->dest))
1257 {
1258 if (dump_file)
1259 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1260 e->src->index, e->dest->index);
1261 EDGE_INFO (e)->on_tree = 1;
1262 union_groups (e->src, e->dest);
1263 }
1264 }
1265
1266 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1267 bb->aux = NULL;
1268 }
1269 \f
1270 /* Perform file-level initialization for branch-prob processing. */
1271
1272 void
1273 init_branch_prob (void)
1274 {
1275 int i;
1276
1277 total_num_blocks = 0;
1278 total_num_edges = 0;
1279 total_num_edges_ignored = 0;
1280 total_num_edges_instrumented = 0;
1281 total_num_blocks_created = 0;
1282 total_num_passes = 0;
1283 total_num_times_called = 0;
1284 total_num_branches = 0;
1285 total_num_never_executed = 0;
1286 for (i = 0; i < 20; i++)
1287 total_hist_br_prob[i] = 0;
1288 }
1289
/* Performs file-level cleanup after branch-prob processing
   is completed.  Emits the accumulated whole-compilation statistics
   (block/edge counts, instrumentation totals, and the branch
   probability histogram) to the dump file, if one is open.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
	       total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
	       total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
	       total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
	       total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
	       total_num_passes);
      if (total_num_times_called != 0)
	/* Adding half the divisor first rounds the integer average to
	   the nearest whole number instead of truncating.  */
	fprintf (dump_file, "Average number of graph solution passes: %d\n",
		 (total_num_passes + (total_num_times_called >> 1))
		 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
	       total_num_branches);
      fprintf (dump_file, "Total number of branches never executed: %d\n",
	       total_num_never_executed);
      if (total_num_branches)
	{
	  int i;

	  /* The histogram has 20 buckets of 5% each; buckets I and 19-I
	     are folded together here — presumably so a branch and its
	     complementary edge land in the same printed range (the fill
	     code is elsewhere in this file; verify there).  */
	  for (i = 0; i < 10; i++)
	    fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
		     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
		     / total_num_branches, 5*i, 5*i+5);
	}
    }
}
1329
/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  /* Tree-based hooks are only meaningful while the function is still
     in tree/GIMPLE form, which ir_type () reports.  */
  gcc_assert (ir_type ());
  profile_hooks = &tree_profile_hooks;
}
1338
1339 \f
/* Do branch profiling and static profile estimation passes.
   Entry point executed by the pass manager via pass_branch_prob.  */
static void
rest_of_handle_branch_prob (void)
{
  struct loops loops;

  /* Discover and record the loop depth at the head of each basic
     block.  The loop infrastructure does the real job for us.  */
  flow_loops_find (&loops);

  if (dump_file)
    flow_loops_dump (&loops, dump_file, NULL, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (flag_guess_branch_prob
      && profile_status == PROFILE_ABSENT)
    estimate_probability (&loops);

  /* Release the loop structures and the dominator information the
     loop discovery computed; neither is needed past this pass.  */
  flow_loops_free (&loops);
  free_dominance_info (CDI_DOMINATORS);
}
1361
/* Pass-manager descriptor for the branch probability pass ("bp"),
   which runs rest_of_handle_branch_prob above.  No gate: the pass
   always executes; it dumps the function afterwards (TODO_dump_func).  */
struct tree_opt_pass pass_branch_prob =
{
  "bp",                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_branch_prob,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'b'                                   /* letter */
};
1378
1379
1380