[gcc.git] / gcc / profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
5 based on some ideas from Dain Samples of UC Berkeley.
6 Further mangling by Bob Manson, Cygnus Support.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
24
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation we generate the
36 BB minimal spanning tree; only edges that are not on the spanning tree
37 (plus the entry point) need instrumenting. From that information
38 all other edge counts can be deduced. By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
41
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
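
/* The following is an illustrative sketch only, not part of GCC (kept out
   of the build with #if 0): a minimal, self-contained example of the
   deduction described above.  On a diamond-shaped CFG
   (ENTRY -> A; A -> B; A -> C; B -> D; C -> D; D -> EXIT) a spanning tree
   can be chosen so that only the non-tree edges ENTRY -> A and C -> D need
   counters; every other count then follows from flow conservation (a
   block's count equals both the sum of its incoming edge counts and the
   sum of its outgoing edge counts).  All names below are hypothetical.  */
#if 0
#include <assert.h>

static void
deduce_diamond_counts (long entry_to_a, long c_to_d)
{
  long a = entry_to_a;       /* A is entered only from ENTRY.  */
  long c = c_to_d;           /* C's only exit is C -> D.  */
  long a_to_c = c;           /* C is entered only from A.  */
  long a_to_b = a - a_to_c;  /* A's two exits must sum to A's count.  */
  long b_to_d = a_to_b;      /* B has a single entry and a single exit.  */
  long d = b_to_d + c_to_d;  /* D is entered from B and from C.  */
  long d_to_exit = d;        /* D's only exit.  */

  /* E.g. deduce_diamond_counts (100, 40) recovers A->B == 60,
     B->D == 60 and D->EXIT == 100.  */
  assert (a_to_b + a_to_c == a);
  assert (d_to_exit == d);
}
#endif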
45
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
48
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
51
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "toplev.h"
63 #include "coverage.h"
64 #include "value-prof.h"
65 #include "tree.h"
66 #include "cfghooks.h"
67 #include "tree-flow.h"
68 #include "timevar.h"
69 #include "cfgloop.h"
70 #include "tree-pass.h"
71
72 /* Hooks for profiling. */
73 static struct profile_hooks* profile_hooks;
74
75 /* File for profiling debug output. */
76 static inline FILE*
77 profile_dump_file (void) {
78 return profile_hooks->profile_dump_file ();
79 }
80
81 /* Additional information about the edges we need. */
82 struct edge_info {
83 unsigned int count_valid : 1;
84
85 /* Is on the spanning tree. */
86 unsigned int on_tree : 1;
87
88 /* Pretend this edge does not exist (it is abnormal and we've
89 inserted a fake to compensate). */
90 unsigned int ignore : 1;
91 };
92
93 struct bb_info {
94 unsigned int count_valid : 1;
95
96 /* Number of successor and predecessor edges. */
97 gcov_type succ_count;
98 gcov_type pred_count;
99 };
100
101 #define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
102 #define BB_INFO(b) ((struct bb_info *) (b)->aux)
103
104 /* Counter summary from the last set of coverage counts read. */
105
106 const struct gcov_ctr_summary *profile_info;
107
108 /* Collect statistics on the performance of this pass for the entire source
109 file. */
110
111 static int total_num_blocks;
112 static int total_num_edges;
113 static int total_num_edges_ignored;
114 static int total_num_edges_instrumented;
115 static int total_num_blocks_created;
116 static int total_num_passes;
117 static int total_num_times_called;
118 static int total_hist_br_prob[20];
119 static int total_num_never_executed;
120 static int total_num_branches;
121
122 /* Forward declarations. */
123 static void find_spanning_tree (struct edge_list *);
124 static unsigned instrument_edges (struct edge_list *);
125 static void instrument_values (histogram_values);
126 static void compute_branch_probabilities (void);
127 static void compute_value_histograms (histogram_values);
128 static gcov_type * get_exec_counts (void);
129 static basic_block find_group (basic_block);
130 static void union_groups (basic_block, basic_block);
131
132 \f
133 /* Add edge instrumentation code to the function.
134 
135 EL is the list of the function's CFG edges; instrument every edge that is
136 neither ignored nor on the spanning tree, and return the number instrumented. */
137
138 static unsigned
139 instrument_edges (struct edge_list *el)
140 {
141 unsigned num_instr_edges = 0;
142 int num_edges = NUM_EDGES (el);
143 basic_block bb;
144
145 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
146 {
147 edge e;
148 edge_iterator ei;
149
150 FOR_EACH_EDGE (e, ei, bb->succs)
151 {
152 struct edge_info *inf = EDGE_INFO (e);
153
154 if (!inf->ignore && !inf->on_tree)
155 {
156 gcc_assert (!(e->flags & EDGE_ABNORMAL));
157 if (dump_file)
158 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
159 e->src->index, e->dest->index,
160 EDGE_CRITICAL_P (e) ? " (and split)" : "");
161 (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
162 }
163 }
164 }
165
166 total_num_blocks_created += num_edges;
167 if (dump_file)
168 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
169 return num_instr_edges;
170 }
171
172 /* Add code to measure histograms for values in list VALUES. */
173 static void
174 instrument_values (histogram_values values)
175 {
176 unsigned i, t;
177
178 /* Emit code to generate the histograms before the insns. */
179
180 for (i = 0; i < VEC_length (histogram_value, values); i++)
181 {
182 histogram_value hist = VEC_index (histogram_value, values, i);
183 switch (hist->type)
184 {
185 case HIST_TYPE_INTERVAL:
186 t = GCOV_COUNTER_V_INTERVAL;
187 break;
188
189 case HIST_TYPE_POW2:
190 t = GCOV_COUNTER_V_POW2;
191 break;
192
193 case HIST_TYPE_SINGLE_VALUE:
194 t = GCOV_COUNTER_V_SINGLE;
195 break;
196
197 case HIST_TYPE_CONST_DELTA:
198 t = GCOV_COUNTER_V_DELTA;
199 break;
200
201 default:
202 gcc_unreachable ();
203 }
204 if (!coverage_counter_alloc (t, hist->n_counters))
205 continue;
206
207 switch (hist->type)
208 {
209 case HIST_TYPE_INTERVAL:
210 (profile_hooks->gen_interval_profiler) (hist, t, 0);
211 break;
212
213 case HIST_TYPE_POW2:
214 (profile_hooks->gen_pow2_profiler) (hist, t, 0);
215 break;
216
217 case HIST_TYPE_SINGLE_VALUE:
218 (profile_hooks->gen_one_value_profiler) (hist, t, 0);
219 break;
220
221 case HIST_TYPE_CONST_DELTA:
222 (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
223 break;
224
225 default:
226 gcc_unreachable ();
227 }
228 }
229 VEC_free (histogram_value, heap, values);
230 }
231 \f
232
233 /* Read the execution counts for the instrumented edges from the .gcda file, and set profile_info to the counter summary. Return the array of counts, or NULL if they are unavailable. */
234
235 static gcov_type *
236 get_exec_counts (void)
237 {
238 unsigned num_edges = 0;
239 basic_block bb;
240 gcov_type *counts;
241
242 /* Count the edges to be (possibly) instrumented. */
243 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
244 {
245 edge e;
246 edge_iterator ei;
247
248 FOR_EACH_EDGE (e, ei, bb->succs)
249 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
250 num_edges++;
251 }
252
253 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
254 if (!counts)
255 return NULL;
256
257 if (dump_file && profile_info)
258 fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
259 profile_info->runs, (unsigned) profile_info->sum_max);
260
261 return counts;
262 }
263 \f
264
265 /* Compute the branch probabilities for the various branches.
266 Annotate them accordingly. */
267
268 static void
269 compute_branch_probabilities (void)
270 {
271 basic_block bb;
272 int i;
273 int num_edges = 0;
274 int changes;
275 int passes;
276 int hist_br_prob[20];
277 int num_never_executed;
278 int num_branches;
279 gcov_type *exec_counts = get_exec_counts ();
280 int exec_counts_pos = 0;
281
282 /* Very simple sanity checks so we catch bugs in our profiling code. */
283 if (profile_info)
284 {
285 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
286 {
287 error ("corrupted profile info: run_max * runs < sum_max");
288 exec_counts = NULL;
289 }
290
291 if (profile_info->sum_all < profile_info->sum_max)
292 {
293 error ("corrupted profile info: sum_all is smaller than sum_max");
294 exec_counts = NULL;
295 }
296 }
297
298 /* Attach extra info block to each bb. */
299
300 alloc_aux_for_blocks (sizeof (struct bb_info));
301 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
302 {
303 edge e;
304 edge_iterator ei;
305
306 FOR_EACH_EDGE (e, ei, bb->succs)
307 if (!EDGE_INFO (e)->ignore)
308 BB_INFO (bb)->succ_count++;
309 FOR_EACH_EDGE (e, ei, bb->preds)
310 if (!EDGE_INFO (e)->ignore)
311 BB_INFO (bb)->pred_count++;
312 }
313
314 /* Avoid predicting entry on exit nodes. */
315 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
316 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
317
318 /* For each edge not on the spanning tree, set its execution count from
319 the .gcda file. */
320
321 /* The first count in the .gcda file is the number of times that the function
322 was entered. This is the exec_count for block zero. */
323
324 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
325 {
326 edge e;
327 edge_iterator ei;
328
329 FOR_EACH_EDGE (e, ei, bb->succs)
330 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
331 {
332 num_edges++;
333 if (exec_counts)
334 {
335 e->count = exec_counts[exec_counts_pos++];
336 if (e->count > profile_info->sum_max)
337 {
338 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
339 bb->index, e->dest->index);
340 }
341 }
342 else
343 e->count = 0;
344
345 EDGE_INFO (e)->count_valid = 1;
346 BB_INFO (bb)->succ_count--;
347 BB_INFO (e->dest)->pred_count--;
348 if (dump_file)
349 {
350 fprintf (dump_file, "\nRead edge from %i to %i, count:",
351 bb->index, e->dest->index);
352 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
353 (HOST_WIDEST_INT) e->count);
354 }
355 }
356 }
357
358 if (dump_file)
359 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
360
361 /* For every block in the file,
362 - if every exit/entrance edge has a known count, then set the block count
363 - if the block count is known, and every exit/entrance edge but one has
364 a known execution count, then set the count of the remaining edge
365
366 As edge counts are set, decrement the succ/pred count, but don't delete
367 the edge, that way we can easily tell when all edges are known, or only
368 one edge is unknown. */
369
370 /* The order that the basic blocks are iterated through is important.
371 Since the code that finds spanning trees starts with block 0, low numbered
372 edges are put on the spanning tree in preference to high numbered edges.
373 Hence, most instrumented edges are at the end. Graph solving works much
374 faster if we propagate numbers from the end to the start.
375
376 This takes an average of slightly more than 3 passes. */
377
378 changes = 1;
379 passes = 0;
380 while (changes)
381 {
382 passes++;
383 changes = 0;
384 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
385 {
386 struct bb_info *bi = BB_INFO (bb);
387 if (! bi->count_valid)
388 {
389 if (bi->succ_count == 0)
390 {
391 edge e;
392 edge_iterator ei;
393 gcov_type total = 0;
394
395 FOR_EACH_EDGE (e, ei, bb->succs)
396 total += e->count;
397 bb->count = total;
398 bi->count_valid = 1;
399 changes = 1;
400 }
401 else if (bi->pred_count == 0)
402 {
403 edge e;
404 edge_iterator ei;
405 gcov_type total = 0;
406
407 FOR_EACH_EDGE (e, ei, bb->preds)
408 total += e->count;
409 bb->count = total;
410 bi->count_valid = 1;
411 changes = 1;
412 }
413 }
414 if (bi->count_valid)
415 {
416 if (bi->succ_count == 1)
417 {
418 edge e;
419 edge_iterator ei;
420 gcov_type total = 0;
421
422 /* One of the counts will be invalid, but it is zero,
423 so adding it in also doesn't hurt. */
424 FOR_EACH_EDGE (e, ei, bb->succs)
425 total += e->count;
426
427 /* Search for the invalid edge, and set its count. */
428 FOR_EACH_EDGE (e, ei, bb->succs)
429 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
430 break;
431
432 /* Calculate count for remaining edge by conservation. */
433 total = bb->count - total;
434
435 gcc_assert (e);
436 EDGE_INFO (e)->count_valid = 1;
437 e->count = total;
438 bi->succ_count--;
439
440 BB_INFO (e->dest)->pred_count--;
441 changes = 1;
442 }
443 if (bi->pred_count == 1)
444 {
445 edge e;
446 edge_iterator ei;
447 gcov_type total = 0;
448
449 /* One of the counts will be invalid, but it is zero,
450 so adding it in also doesn't hurt. */
451 FOR_EACH_EDGE (e, ei, bb->preds)
452 total += e->count;
453
454 /* Search for the invalid edge, and set its count. */
455 FOR_EACH_EDGE (e, ei, bb->preds)
456 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
457 break;
458
459 /* Calculate count for remaining edge by conservation. */
460 total = bb->count - total + e->count;
461
462 gcc_assert (e);
463 EDGE_INFO (e)->count_valid = 1;
464 e->count = total;
465 bi->pred_count--;
466
467 BB_INFO (e->src)->succ_count--;
468 changes = 1;
469 }
470 }
471 }
472 }
473 if (dump_file)
474 dump_flow_info (dump_file);
475
476 total_num_passes += passes;
477 if (dump_file)
478 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
479
480 /* If the graph has been correctly solved, every block will have a
481 succ and pred count of zero. */
482 FOR_EACH_BB (bb)
483 {
484 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
485 }
486
487 /* For every edge, calculate its branch probability and add a reg_note
488 to the branch insn to indicate this. */
489
490 for (i = 0; i < 20; i++)
491 hist_br_prob[i] = 0;
492 num_never_executed = 0;
493 num_branches = 0;
494
495 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
496 {
497 edge e;
498 edge_iterator ei;
499 rtx note;
500
501 if (bb->count < 0)
502 {
503 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
504 bb->index, (int)bb->count);
505 bb->count = 0;
506 }
507 FOR_EACH_EDGE (e, ei, bb->succs)
508 {
509 /* The function may return twice in the case the called function is
510 setjmp or calls fork, but we can't represent this by an extra
511 edge from the entry, since an extra edge from the exit is
512 already present. We get negative frequency from the entry
513 point. */
514 if ((e->count < 0
515 && e->dest == EXIT_BLOCK_PTR)
516 || (e->count > bb->count
517 && e->dest != EXIT_BLOCK_PTR))
518 {
519 if (block_ends_with_call_p (bb))
520 e->count = e->count < 0 ? 0 : bb->count;
521 }
522 if (e->count < 0 || e->count > bb->count)
523 {
524 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
525 e->src->index, e->dest->index,
526 (int)e->count);
527 e->count = bb->count / 2;
528 }
529 }
530 if (bb->count)
531 {
532 FOR_EACH_EDGE (e, ei, bb->succs)
533 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
534 if (bb->index >= 0
535 && block_ends_with_condjump_p (bb)
536 && EDGE_COUNT (bb->succs) >= 2)
537 {
538 int prob;
539 edge e;
540 int index;
541
542 /* Find the branch edge. It is possible that we do have fake
543 edges here. */
544 FOR_EACH_EDGE (e, ei, bb->succs)
545 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
546 break;
547
548 prob = e->probability;
549 index = prob * 20 / REG_BR_PROB_BASE;
550
551 if (index == 20)
552 index = 19;
553 hist_br_prob[index]++;
554
555 /* Do this for RTL only. */
556 if (!ir_type ())
557 {
558 note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
559 /* There may already be a note put there by some other pass, such
560 as the builtin_expect expander. */
561 if (note)
562 XEXP (note, 0) = GEN_INT (prob);
563 else
564 REG_NOTES (BB_END (bb))
565 = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
566 REG_NOTES (BB_END (bb)));
567 }
568 num_branches++;
569 }
570 }
571 /* Otherwise try to preserve the existing REG_BR_PROB probabilities
572 that tree-based profile guessing put into the code. BB can be the
573 ENTRY_BLOCK, and it can have multiple (fake) successors in
574 EH cases, but it still has no code; don't crash in this case. */
575 else if (profile_status == PROFILE_ABSENT
576 && !ir_type ()
577 && EDGE_COUNT (bb->succs) > 1
578 && BB_END (bb)
579 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
580 {
581 int prob = INTVAL (XEXP (note, 0));
582
583 BRANCH_EDGE (bb)->probability = prob;
584 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
585 }
586 /* As a last resort, distribute the probabilities evenly.
587 Use the simple heuristic that if there are normal edges,
588 give all abnormal edges a probability of 0; otherwise distribute
589 the probability over the abnormal edges (this is the case of
590 noreturn calls). */
591 else if (profile_status == PROFILE_ABSENT)
592 {
593 int total = 0;
594
595 FOR_EACH_EDGE (e, ei, bb->succs)
596 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
597 total ++;
598 if (total)
599 {
600 FOR_EACH_EDGE (e, ei, bb->succs)
601 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
602 e->probability = REG_BR_PROB_BASE / total;
603 else
604 e->probability = 0;
605 }
606 else
607 {
608 total += EDGE_COUNT (bb->succs);
609 FOR_EACH_EDGE (e, ei, bb->succs)
610 e->probability = REG_BR_PROB_BASE / total;
611 }
612 if (bb->index >= 0
613 && block_ends_with_condjump_p (bb)
614 && EDGE_COUNT (bb->succs) >= 2)
615 num_branches++, num_never_executed++;
616 }
617 }
618 counts_to_freqs ();
619
620 if (dump_file)
621 {
622 fprintf (dump_file, "%d branches\n", num_branches);
623 fprintf (dump_file, "%d branches never executed\n",
624 num_never_executed);
625 if (num_branches)
626 for (i = 0; i < 10; i++)
627 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
628 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
629 5 * i, 5 * i + 5);
630
631 total_num_branches += num_branches;
632 total_num_never_executed += num_never_executed;
633 for (i = 0; i < 20; i++)
634 total_hist_br_prob[i] += hist_br_prob[i];
635
636 fputc ('\n', dump_file);
637 fputc ('\n', dump_file);
638 }
639
640 free_aux_for_blocks ();
641 }
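
/* Illustrative sketch only, not part of GCC (kept out of the build with
   #if 0): the two small computations used in compute_branch_probabilities
   above, shown in isolation.  The first scales an edge count to a
   REG_BR_PROB_BASE-relative probability with round-to-nearest; the second
   maps such a probability to one of the 20 buckets of hist_br_prob.  The
   constant 10000 mirrors REG_BR_PROB_BASE; the function names are
   hypothetical.  */
#if 0
static int
scaled_probability (long long edge_count, long long block_count)
{
  /* Round to nearest by adding half the divisor before dividing,
     as done above.  */
  return (int) ((edge_count * 10000 + block_count / 2) / block_count);
}

static int
probability_bucket (int prob)
{
  int index = prob * 20 / 10000;

  /* A probability of exactly 10000 (an always-taken branch) would index
     bucket 20, so it is folded into the last bucket.  */
  return index == 20 ? 19 : index;
}

/* For example, scaled_probability (1, 3) == 3333 and lands in bucket 6,
   which contributes to the 30-35% line of the dump output above.  */
#endif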
642
643 /* Load the value histograms whose descriptions are stored in the VALUES
644 array from the .gcda file. */
645
646 static void
647 compute_value_histograms (histogram_values values)
648 {
649 unsigned i, j, t, any;
650 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
651 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
652 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
653 gcov_type *aact_count;
654 histogram_value hist = 0;
655
656 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
657 n_histogram_counters[t] = 0;
658
659 for (i = 0; i < VEC_length (histogram_value, values); i++)
660 {
661 hist = VEC_index (histogram_value, values, i);
662 n_histogram_counters[(int) hist->type] += hist->n_counters;
663 }
664
665 any = 0;
666 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
667 {
668 if (!n_histogram_counters[t])
669 {
670 histogram_counts[t] = NULL;
671 continue;
672 }
673
674 histogram_counts[t] =
675 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
676 n_histogram_counters[t], NULL);
677 if (histogram_counts[t])
678 any = 1;
679 act_count[t] = histogram_counts[t];
680 }
681 if (!any)
682 return;
683
684 for (i = 0; i < VEC_length (histogram_value, values); i++)
685 {
686 stmt_ann_t ann;
687 
688 hist = VEC_index (histogram_value, values, i);
689 ann = get_stmt_ann (hist->hvalue.stmt);
690 t = (int) hist->type;
691
692 aact_count = act_count[t];
693 act_count[t] += hist->n_counters;
694
695 hist->hvalue.next = ann->histograms;
696 ann->histograms = hist;
697 hist->hvalue.counters =
698 xmalloc (sizeof (gcov_type) * hist->n_counters);
699 for (j = 0; j < hist->n_counters; j++)
700 hist->hvalue.counters[j] = aact_count[j];
701 }
702
703 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
704 if (histogram_counts[t])
705 free (histogram_counts[t]);
706 }
707
708 #define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)
709 /* When passed NULL as FILE_NAME, reset the static state below.
710 Otherwise, emit the gcov records that associate basic block BB with
711 FILE_NAME and LINE, opening a GCOV_TAG_LINES record at *OFFSET if needed. */
712 static void
713 output_location (char const *file_name, int line,
714 gcov_position_t *offset, basic_block bb)
715 {
716 static char const *prev_file_name;
717 static int prev_line;
718 bool name_differs, line_differs;
719
720 if (!file_name)
721 {
722 prev_file_name = NULL;
723 prev_line = -1;
724 return;
725 }
726
727 name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
728 line_differs = prev_line != line;
729
730 if (name_differs || line_differs)
731 {
732 if (!*offset)
733 {
734 *offset = gcov_write_tag (GCOV_TAG_LINES);
735 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
736 name_differs = line_differs = true;
737 }
738
739 /* If this is a new source file, then output the
740 file's name to the notes file. */
741 if (name_differs)
742 {
743 prev_file_name = file_name;
744 gcov_write_unsigned (0);
745 gcov_write_string (prev_file_name);
746 }
747 if (line_differs)
748 {
749 gcov_write_unsigned (line);
750 prev_line = line;
751 }
752 }
753 }
754
755 /* Instrument and/or analyze program behavior based on program flow graph.
756 In either case, this function builds a flow graph for the function being
757 compiled. The flow graph is stored in BB_GRAPH.
758
759 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
760 the flow graph that are needed to reconstruct the dynamic behavior of the
761 flow graph.
762
763 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
764 information from a data file containing edge count information from previous
765 executions of the function being compiled. In this case, the flow graph is
766 annotated with actual execution counts, which are later propagated into the
767 rtl for optimization purposes.
768
769 Main entry point of this file. */
770
771 void
772 branch_prob (void)
773 {
774 basic_block bb;
775 unsigned i;
776 unsigned num_edges, ignored_edges;
777 unsigned num_instrumented;
778 struct edge_list *el;
779 histogram_values values = NULL;
780
781 total_num_times_called++;
782
783 flow_call_edges_add (NULL);
784 add_noreturn_fake_exit_edges ();
785
786 /* We can't handle cyclic regions constructed using abnormal edges.
787 To avoid these we replace every source of an abnormal edge by a fake
788 edge from the entry node, and every destination by a fake edge to the exit.
789 This keeps the graph acyclic and our calculation exact for all normal
790 edges except for the exit and entrance ones.
791 
792 We also add a fake exit edge for each call and asm statement in a
793 basic block, since it may not return. */
794
795 FOR_EACH_BB (bb)
796 {
797 int need_exit_edge = 0, need_entry_edge = 0;
798 int have_exit_edge = 0, have_entry_edge = 0;
799 edge e;
800 edge_iterator ei;
801
802 /* Functions returning multiple times are not handled by extra edges.
803 Instead we simply allow negative counts on edges from the exit to the
804 block past the call, and corresponding probabilities. We can't go
805 with the extra edges because that would result in a flowgraph that
806 needs to have fake edges outside the spanning tree. */
807
808 FOR_EACH_EDGE (e, ei, bb->succs)
809 {
810 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
811 && e->dest != EXIT_BLOCK_PTR)
812 need_exit_edge = 1;
813 if (e->dest == EXIT_BLOCK_PTR)
814 have_exit_edge = 1;
815 }
816 FOR_EACH_EDGE (e, ei, bb->preds)
817 {
818 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
819 && e->src != ENTRY_BLOCK_PTR)
820 need_entry_edge = 1;
821 if (e->src == ENTRY_BLOCK_PTR)
822 have_entry_edge = 1;
823 }
824
825 if (need_exit_edge && !have_exit_edge)
826 {
827 if (dump_file)
828 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
829 bb->index);
830 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
831 }
832 if (need_entry_edge && !have_entry_edge)
833 {
834 if (dump_file)
835 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
836 bb->index);
837 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
838 }
839 }
840
841 el = create_edge_list ();
842 num_edges = NUM_EDGES (el);
843 alloc_aux_for_edges (sizeof (struct edge_info));
844
845 /* The basic blocks are expected to be numbered sequentially. */
846 compact_blocks ();
847
848 ignored_edges = 0;
849 for (i = 0 ; i < num_edges ; i++)
850 {
851 edge e = INDEX_EDGE (el, i);
852 e->count = 0;
853
854 /* Mark edges we've replaced by fake edges above as ignored. */
855 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
856 && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
857 {
858 EDGE_INFO (e)->ignore = 1;
859 ignored_edges++;
860 }
861 }
862
863 /* Create spanning tree from basic block graph, mark each edge that is
864 on the spanning tree. We insert as many abnormal and critical edges
865 as possible to minimize the number of edge splits necessary. */
866
867 find_spanning_tree (el);
868
869 /* Fake edges that are not on the tree will not be instrumented, so
870 mark them ignored. */
871 for (num_instrumented = i = 0; i < num_edges; i++)
872 {
873 edge e = INDEX_EDGE (el, i);
874 struct edge_info *inf = EDGE_INFO (e);
875
876 if (inf->ignore || inf->on_tree)
877 /*NOP*/;
878 else if (e->flags & EDGE_FAKE)
879 {
880 inf->ignore = 1;
881 ignored_edges++;
882 }
883 else
884 num_instrumented++;
885 }
886
887 total_num_blocks += n_basic_blocks + 2;
888 if (dump_file)
889 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);
890
891 total_num_edges += num_edges;
892 if (dump_file)
893 fprintf (dump_file, "%d edges\n", num_edges);
894
895 total_num_edges_ignored += ignored_edges;
896 if (dump_file)
897 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
898
899 /* Write the data from which gcov can reconstruct the basic block
900 graph. */
901
902 /* Basic block flags */
903 if (coverage_begin_output ())
904 {
905 gcov_position_t offset;
906
907 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
908 for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
909 gcov_write_unsigned (0);
910 gcov_write_length (offset);
911 }
912
913 /* Keep all basic block indexes nonnegative in the gcov output.
914 Index 0 is used for entry block, last index is for exit block.
915 */
916 ENTRY_BLOCK_PTR->index = -1;
917 EXIT_BLOCK_PTR->index = last_basic_block;
918
919 /* Arcs */
920 if (coverage_begin_output ())
921 {
922 gcov_position_t offset;
923
924 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
925 {
926 edge e;
927 edge_iterator ei;
928
929 offset = gcov_write_tag (GCOV_TAG_ARCS);
930 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
931
932 FOR_EACH_EDGE (e, ei, bb->succs)
933 {
934 struct edge_info *i = EDGE_INFO (e);
935 if (!i->ignore)
936 {
937 unsigned flag_bits = 0;
938
939 if (i->on_tree)
940 flag_bits |= GCOV_ARC_ON_TREE;
941 if (e->flags & EDGE_FAKE)
942 flag_bits |= GCOV_ARC_FAKE;
943 if (e->flags & EDGE_FALLTHRU)
944 flag_bits |= GCOV_ARC_FALLTHROUGH;
945 /* On trees we don't have fallthru flags, but we can
946 recompute them from CFG shape. */
947 if (ir_type ()
948 && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
949 && e->src->next_bb == e->dest)
950 flag_bits |= GCOV_ARC_FALLTHROUGH;
951
952 gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
953 gcov_write_unsigned (flag_bits);
954 }
955 }
956
957 gcov_write_length (offset);
958 }
959 }
960
961 /* Line numbers. */
962 if (coverage_begin_output ())
963 {
964 /* Initialize the output. */
965 output_location (NULL, 0, NULL, NULL);
966
967 if (!ir_type ())
968 {
969 gcov_position_t offset;
970
971 FOR_EACH_BB (bb)
972 {
973 rtx insn = BB_HEAD (bb);
974 int ignore_next_note = 0;
975
976 offset = 0;
977
978 /* We are looking for line number notes. Search backward
979 before basic block to find correct ones. */
980 insn = prev_nonnote_insn (insn);
981 if (!insn)
982 insn = get_insns ();
983 else
984 insn = NEXT_INSN (insn);
985
986 while (insn != BB_END (bb))
987 {
988 if (NOTE_P (insn))
989 {
990 /* Must ignore the line number notes that
991 immediately follow the end of an inline function
992 to avoid counting it twice. There is a note
993 before the call, and one after the call. */
994 if (NOTE_LINE_NUMBER (insn)
995 == NOTE_INSN_REPEATED_LINE_NUMBER)
996 ignore_next_note = 1;
997 else if (NOTE_LINE_NUMBER (insn) <= 0)
998 /*NOP*/;
999 else if (ignore_next_note)
1000 ignore_next_note = 0;
1001 else
1002 {
1003 expanded_location s;
1004 NOTE_EXPANDED_LOCATION (s, insn);
1005 output_location (s.file, s.line, &offset, bb);
1006 }
1007 }
1008 insn = NEXT_INSN (insn);
1009 }
1010
1011 if (offset)
1012 {
1013 /* A file of NULL indicates the end of run. */
1014 gcov_write_unsigned (0);
1015 gcov_write_string (NULL);
1016 gcov_write_length (offset);
1017 }
1018 }
1019 }
1020 else
1021 {
1022 gcov_position_t offset;
1023
1024 FOR_EACH_BB (bb)
1025 {
1026 block_stmt_iterator bsi;
1027
1028 offset = 0;
1029
1030 if (bb == ENTRY_BLOCK_PTR->next_bb)
1031 {
1032 expanded_location curr_location =
1033 expand_location (DECL_SOURCE_LOCATION
1034 (current_function_decl));
1035 output_location (curr_location.file, curr_location.line,
1036 &offset, bb);
1037 }
1038
1039 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1040 {
1041 tree stmt = bsi_stmt (bsi);
1042 if (EXPR_HAS_LOCATION (stmt))
1043 output_location (EXPR_FILENAME (stmt),
1044 EXPR_LINENO (stmt),
1045 &offset, bb);
1046 }
1047
1048 /* Notice GOTO expressions we eliminated while constructing the
1049 CFG. */
1050 if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
1051 {
1052 /* ??? source_locus type is marked deprecated in input.h. */
1053 source_locus curr_location = single_succ_edge (bb)->goto_locus;
1054 /* ??? The FILE/LINE API is inconsistent for these cases. */
1055 #ifdef USE_MAPPED_LOCATION
1056 output_location (LOCATION_FILE (curr_location),
1057 LOCATION_LINE (curr_location),
1058 &offset, bb);
1059 #else
1060 output_location (curr_location->file, curr_location->line,
1061 &offset, bb);
1062 #endif
1063 }
1064
1065 if (offset)
1066 {
1067 /* A file of NULL indicates the end of run. */
1068 gcov_write_unsigned (0);
1069 gcov_write_string (NULL);
1070 gcov_write_length (offset);
1071 }
1072 }
1073 }
1074 }
1075
1076 ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
1077 EXIT_BLOCK_PTR->index = EXIT_BLOCK;
1078 #undef BB_TO_GCOV_INDEX
1079
1080 if (flag_profile_values)
1081 find_values_to_profile (&values);
1082
1083 if (flag_branch_probabilities)
1084 {
1085 compute_branch_probabilities ();
1086 if (flag_profile_values)
1087 compute_value_histograms (values);
1088 }
1089
1090 remove_fake_edges ();
1091
1092 /* For each edge not on the spanning tree, add counting code. */
1093 if (profile_arc_flag
1094 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1095 {
1096 unsigned n_instrumented;
1097
1098 profile_hooks->init_edge_profiler ();
1099
1100 n_instrumented = instrument_edges (el);
1101
1102 gcc_assert (n_instrumented == num_instrumented);
1103
1104 if (flag_profile_values)
1105 instrument_values (values);
1106
1107 /* Commit changes done by instrumentation. */
1108 if (ir_type ())
1109 bsi_commit_edge_inserts ();
1110 else
1111 {
1112 commit_edge_insertions_watch_calls ();
1113 allocate_reg_info (max_reg_num (), FALSE, FALSE);
1114 }
1115 }
1116
1117 free_aux_for_edges ();
1118
1119 if (!ir_type ())
1120 {
1121 /* Re-merge split basic blocks and the mess introduced by
1122 insert_insn_on_edge. */
1123 cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
1124 if (profile_dump_file())
1125 dump_flow_info (profile_dump_file());
1126 }
1127
1128 free_edge_list (el);
1129 if (flag_branch_probabilities)
1130 profile_status = PROFILE_READ;
1131 coverage_end_function ();
1132 }
1133 \f
1134 /* Union find algorithm implementation for the basic blocks using
1135 aux fields. */
1136
1137 static basic_block
1138 find_group (basic_block bb)
1139 {
1140 basic_block group = bb, bb1;
1141
1142 while ((basic_block) group->aux != group)
1143 group = (basic_block) group->aux;
1144
1145 /* Compress path. */
1146 while ((basic_block) bb->aux != group)
1147 {
1148 bb1 = (basic_block) bb->aux;
1149 bb->aux = (void *) group;
1150 bb = bb1;
1151 }
1152 return group;
1153 }
1154
1155 static void
1156 union_groups (basic_block bb1, basic_block bb2)
1157 {
1158 basic_block bb1g = find_group (bb1);
1159 basic_block bb2g = find_group (bb2);
1160
1161 /* ??? I don't have a place for the rank field. OK, let's go without it;
1162 this code is unlikely to be a performance problem anyway. */
1163 gcc_assert (bb1g != bb2g);
1164
1165 bb1g->aux = bb2g;
1166 }
1167 \f
1168 /* This function searches all of the edges in the program flow graph, and puts
1169 as many bad edges as possible onto the spanning tree. Bad edges include
1170 abnormal edges, which can't be instrumented at the moment. Since it is
1171 possible for fake edges to form a cycle, we will have to develop some
1172 better way in the future. Critical edges are also put on the tree, since
1173 they are more expensive to instrument. */
1174
1175 static void
1176 find_spanning_tree (struct edge_list *el)
1177 {
1178 int i;
1179 int num_edges = NUM_EDGES (el);
1180 basic_block bb;
1181
1182 /* We use aux field for standard union-find algorithm. */
1183 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1184 bb->aux = bb;
1185
1186 /* Add the fake exit-to-entry edge to the tree; we can't instrument it. */
1187 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1188
1189 /* First add all abnormal edges to the tree unless they form a cycle. Also
1190 add all edges to EXIT_BLOCK_PTR, to avoid inserting profiling code after
1191 the setting of the function's return value. */
1192 for (i = 0; i < num_edges; i++)
1193 {
1194 edge e = INDEX_EDGE (el, i);
1195 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1196 || e->dest == EXIT_BLOCK_PTR)
1197 && !EDGE_INFO (e)->ignore
1198 && (find_group (e->src) != find_group (e->dest)))
1199 {
1200 if (dump_file)
1201 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1202 e->src->index, e->dest->index);
1203 EDGE_INFO (e)->on_tree = 1;
1204 union_groups (e->src, e->dest);
1205 }
1206 }
1207
1208 /* Now insert all critical edges to the tree unless they form a cycle. */
1209 for (i = 0; i < num_edges; i++)
1210 {
1211 edge e = INDEX_EDGE (el, i);
1212 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1213 && find_group (e->src) != find_group (e->dest))
1214 {
1215 if (dump_file)
1216 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1217 e->src->index, e->dest->index);
1218 EDGE_INFO (e)->on_tree = 1;
1219 union_groups (e->src, e->dest);
1220 }
1221 }
1222
1223 /* And now the rest. */
1224 for (i = 0; i < num_edges; i++)
1225 {
1226 edge e = INDEX_EDGE (el, i);
1227 if (!EDGE_INFO (e)->ignore
1228 && find_group (e->src) != find_group (e->dest))
1229 {
1230 if (dump_file)
1231 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1232 e->src->index, e->dest->index);
1233 EDGE_INFO (e)->on_tree = 1;
1234 union_groups (e->src, e->dest);
1235 }
1236 }
1237
1238 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1239 bb->aux = NULL;
1240 }
1241 \f
1242 /* Perform file-level initialization for branch-prob processing. */
1243
1244 void
1245 init_branch_prob (void)
1246 {
1247 int i;
1248
1249 total_num_blocks = 0;
1250 total_num_edges = 0;
1251 total_num_edges_ignored = 0;
1252 total_num_edges_instrumented = 0;
1253 total_num_blocks_created = 0;
1254 total_num_passes = 0;
1255 total_num_times_called = 0;
1256 total_num_branches = 0;
1257 total_num_never_executed = 0;
1258 for (i = 0; i < 20; i++)
1259 total_hist_br_prob[i] = 0;
1260 }
1261
1262 /* Perform file-level cleanup after branch-prob processing
1263 is completed. */
1264
1265 void
1266 end_branch_prob (void)
1267 {
1268 if (dump_file)
1269 {
1270 fprintf (dump_file, "\n");
1271 fprintf (dump_file, "Total number of blocks: %d\n",
1272 total_num_blocks);
1273 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1274 fprintf (dump_file, "Total number of ignored edges: %d\n",
1275 total_num_edges_ignored);
1276 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1277 total_num_edges_instrumented);
1278 fprintf (dump_file, "Total number of blocks created: %d\n",
1279 total_num_blocks_created);
1280 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1281 total_num_passes);
1282 if (total_num_times_called != 0)
1283 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1284 (total_num_passes + (total_num_times_called >> 1))
1285 / total_num_times_called);
1286 fprintf (dump_file, "Total number of branches: %d\n",
1287 total_num_branches);
1288 fprintf (dump_file, "Total number of branches never executed: %d\n",
1289 total_num_never_executed);
1290 if (total_num_branches)
1291 {
1292 int i;
1293
1294 for (i = 0; i < 10; i++)
1295 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1296 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1297 / total_num_branches, 5*i, 5*i+5);
1298 }
1299 }
1300 }
1301
1302 /* Set up hooks to enable tree-based profiling. */
1303
1304 void
1305 tree_register_profile_hooks (void)
1306 {
1307 gcc_assert (ir_type ());
1308 profile_hooks = &tree_profile_hooks;
1309 }
1310
1311 \f
1312 /* Do branch profiling and static profile estimation passes. */
1313 static void
1314 rest_of_handle_branch_prob (void)
1315 {
1316 struct loops loops;
1317
1318 /* Discover and record the loop depth at the head of each basic
1319 block. The loop infrastructure does the real job for us. */
1320 flow_loops_find (&loops);
1321
1322 if (dump_file)
1323 flow_loops_dump (&loops, dump_file, NULL, 0);
1324
1325 /* Estimate using heuristics if no profiling info is available. */
1326 if (flag_guess_branch_prob
1327 && profile_status == PROFILE_ABSENT)
1328 estimate_probability (&loops);
1329
1330 flow_loops_free (&loops);
1331 free_dominance_info (CDI_DOMINATORS);
1332 }
1333
1334 struct tree_opt_pass pass_branch_prob =
1335 {
1336 "bp", /* name */
1337 NULL, /* gate */
1338 rest_of_handle_branch_prob, /* execute */
1339 NULL, /* sub */
1340 NULL, /* next */
1341 0, /* static_pass_number */
1342 TV_BRANCH_PROB, /* tv_id */
1343 0, /* properties_required */
1344 0, /* properties_provided */
1345 0, /* properties_destroyed */
1346 0, /* todo_flags_start */
1347 TODO_dump_func, /* todo_flags_finish */
1348 'b' /* letter */
1349 };
1350
1351
1352