basic-block.h: Changed basic block numbering so that the entry block is 0 and the...
[gcc.git] / gcc / profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
5 based on some ideas from Dain Samples of UC Berkeley.
6 Further mangling by Bob Manson, Cygnus Support.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
24
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation we generate the
36 BB minimal span tree, only edges that are not on the span tree
37 (plus the entry point) need instrumenting. From that information
38 all other edge counts can be deduced. By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
41
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
45
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
48
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
51
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "toplev.h"
63 #include "coverage.h"
64 #include "value-prof.h"
65 #include "tree.h"
66 #include "cfghooks.h"
67 #include "tree-flow.h"
68 #include "timevar.h"
69 #include "cfgloop.h"
70 #include "tree-pass.h"
71
72 /* Hooks for profiling. */
73 static struct profile_hooks* profile_hooks;
74
75 /* File for profiling debug output. */
76 static inline FILE*
77 profile_dump_file (void) {
78 return profile_hooks->profile_dump_file ();
79 }
80
/* Additional information about the edges we need.  */
struct edge_info {
  /* The execution count of this edge is known: either it was read
     from the profile data (instrumented edge) or it was derived by
     flow conservation during graph solving.  */
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};
92
/* Per-basic-block bookkeeping used while solving the flow graph.  */
struct bb_info {
  /* The execution count of this block is known.  */
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

/* Accessors for the auxiliary data hung off edges and blocks via
   their AUX pointers (allocated by alloc_aux_for_edges/blocks).  */
#define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
#define BB_INFO(b) ((struct bb_info *) (b)->aux)
103
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;		/* Basic blocks processed.  */
static int total_num_edges;		/* Edges seen.  */
static int total_num_edges_ignored;	/* Edges replaced by fakes.  */
static int total_num_edges_instrumented; /* Edges given counters.  */
static int total_num_blocks_created;	/* Blocks added by edge splitting.  */
static int total_num_passes;		/* Graph-solver iterations.  */
static int total_num_times_called;	/* Invocations of branch_prob.  */
static int total_hist_br_prob[20];	/* Branch-probability histogram,
					   5% buckets.  */
static int total_num_never_executed;	/* Branches with zero count.  */
static int total_num_branches;		/* Conditional branches seen.  */

/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type * get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);
131
132 \f
/* Add edge instrumentation code to the current function.

   EL is the edge list of the current function's flow graph.  Every
   edge that is neither ignored nor on the spanning tree receives a
   counter.  Returns the number of edges instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* Include the entry block so edges leaving the entry point are
     considered as well.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  struct edge_info *inf = EDGE_INFO (e);

	  /* Only non-ignored, non-tree edges carry counters; all tree
	     edge counts are reconstructed from these at read time.  */
	  if (!inf->ignore && !inf->on_tree)
	    {
	      /* Abnormal edges can never be instrumented (no control
		 path to place code); they must be on the tree.  */
	      gcc_assert (!(e->flags & EDGE_ABNORMAL));
	      if (dump_file)
		fprintf (dump_file, "Edge %d to %d instrumented%s\n",
			 e->src->index, e->dest->index,
			 EDGE_CRITICAL_P (e) ? " (and split)" : "");
	      (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
	    }
	}
    }

  /* NOTE(review): this accumulates the total edge count, not a count
     of blocks actually created by splitting -- apparently a deliberate
     overestimate for the summary statistics; confirm before relying
     on total_num_blocks_created.  */
  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
171
172 /* Add code to measure histograms for values in list VALUES. */
173 static void
174 instrument_values (histogram_values values)
175 {
176 unsigned i, t;
177
178 /* Emit code to generate the histograms before the insns. */
179
180 for (i = 0; i < VEC_length (histogram_value, values); i++)
181 {
182 histogram_value hist = VEC_index (histogram_value, values, i);
183 switch (hist->type)
184 {
185 case HIST_TYPE_INTERVAL:
186 t = GCOV_COUNTER_V_INTERVAL;
187 break;
188
189 case HIST_TYPE_POW2:
190 t = GCOV_COUNTER_V_POW2;
191 break;
192
193 case HIST_TYPE_SINGLE_VALUE:
194 t = GCOV_COUNTER_V_SINGLE;
195 break;
196
197 case HIST_TYPE_CONST_DELTA:
198 t = GCOV_COUNTER_V_DELTA;
199 break;
200
201 default:
202 gcc_unreachable ();
203 }
204 if (!coverage_counter_alloc (t, hist->n_counters))
205 continue;
206
207 switch (hist->type)
208 {
209 case HIST_TYPE_INTERVAL:
210 (profile_hooks->gen_interval_profiler) (hist, t, 0);
211 break;
212
213 case HIST_TYPE_POW2:
214 (profile_hooks->gen_pow2_profiler) (hist, t, 0);
215 break;
216
217 case HIST_TYPE_SINGLE_VALUE:
218 (profile_hooks->gen_one_value_profiler) (hist, t, 0);
219 break;
220
221 case HIST_TYPE_CONST_DELTA:
222 (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
223 break;
224
225 default:
226 gcc_unreachable ();
227 }
228 }
229 VEC_free (histogram_value, heap, values);
230 }
231 \f
232
233 /* Computes hybrid profile for all matching entries in da_file. */
234
235 static gcov_type *
236 get_exec_counts (void)
237 {
238 unsigned num_edges = 0;
239 basic_block bb;
240 gcov_type *counts;
241
242 /* Count the edges to be (possibly) instrumented. */
243 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
244 {
245 edge e;
246 edge_iterator ei;
247
248 FOR_EACH_EDGE (e, ei, bb->succs)
249 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
250 num_edges++;
251 }
252
253 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
254 if (!counts)
255 return NULL;
256
257 if (dump_file && profile_info)
258 fprintf(dump_file, "Merged %u profiles with maximal count %u.\n",
259 profile_info->runs, (unsigned) profile_info->sum_max);
260
261 return counts;
262 }
263 \f
264
265 /* Compute the branch probabilities for the various branches.
266 Annotate them accordingly. */
267
268 static void
269 compute_branch_probabilities (void)
270 {
271 basic_block bb;
272 int i;
273 int num_edges = 0;
274 int changes;
275 int passes;
276 int hist_br_prob[20];
277 int num_never_executed;
278 int num_branches;
279 gcov_type *exec_counts = get_exec_counts ();
280 int exec_counts_pos = 0;
281
282 /* Very simple sanity checks so we catch bugs in our profiling code. */
283 if (profile_info)
284 {
285 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
286 {
287 error ("corrupted profile info: run_max * runs < sum_max");
288 exec_counts = NULL;
289 }
290
291 if (profile_info->sum_all < profile_info->sum_max)
292 {
293 error ("corrupted profile info: sum_all is smaller than sum_max");
294 exec_counts = NULL;
295 }
296 }
297
298 /* Attach extra info block to each bb. */
299
300 alloc_aux_for_blocks (sizeof (struct bb_info));
301 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
302 {
303 edge e;
304 edge_iterator ei;
305
306 FOR_EACH_EDGE (e, ei, bb->succs)
307 if (!EDGE_INFO (e)->ignore)
308 BB_INFO (bb)->succ_count++;
309 FOR_EACH_EDGE (e, ei, bb->preds)
310 if (!EDGE_INFO (e)->ignore)
311 BB_INFO (bb)->pred_count++;
312 }
313
314 /* Avoid predicting entry on exit nodes. */
315 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
316 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
317
318 /* For each edge not on the spanning tree, set its execution count from
319 the .da file. */
320
321 /* The first count in the .da file is the number of times that the function
322 was entered. This is the exec_count for block zero. */
323
324 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
325 {
326 edge e;
327 edge_iterator ei;
328
329 FOR_EACH_EDGE (e, ei, bb->succs)
330 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
331 {
332 num_edges++;
333 if (exec_counts)
334 {
335 e->count = exec_counts[exec_counts_pos++];
336 if (e->count > profile_info->sum_max)
337 {
338 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
339 bb->index, e->dest->index);
340 }
341 }
342 else
343 e->count = 0;
344
345 EDGE_INFO (e)->count_valid = 1;
346 BB_INFO (bb)->succ_count--;
347 BB_INFO (e->dest)->pred_count--;
348 if (dump_file)
349 {
350 fprintf (dump_file, "\nRead edge from %i to %i, count:",
351 bb->index, e->dest->index);
352 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
353 (HOST_WIDEST_INT) e->count);
354 }
355 }
356 }
357
358 if (dump_file)
359 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
360
361 /* For every block in the file,
362 - if every exit/entrance edge has a known count, then set the block count
363 - if the block count is known, and every exit/entrance edge but one has
364 a known execution count, then set the count of the remaining edge
365
366 As edge counts are set, decrement the succ/pred count, but don't delete
367 the edge, that way we can easily tell when all edges are known, or only
368 one edge is unknown. */
369
370 /* The order that the basic blocks are iterated through is important.
371 Since the code that finds spanning trees starts with block 0, low numbered
372 edges are put on the spanning tree in preference to high numbered edges.
373 Hence, most instrumented edges are at the end. Graph solving works much
374 faster if we propagate numbers from the end to the start.
375
376 This takes an average of slightly more than 3 passes. */
377
378 changes = 1;
379 passes = 0;
380 while (changes)
381 {
382 passes++;
383 changes = 0;
384 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
385 {
386 struct bb_info *bi = BB_INFO (bb);
387 if (! bi->count_valid)
388 {
389 if (bi->succ_count == 0)
390 {
391 edge e;
392 edge_iterator ei;
393 gcov_type total = 0;
394
395 FOR_EACH_EDGE (e, ei, bb->succs)
396 total += e->count;
397 bb->count = total;
398 bi->count_valid = 1;
399 changes = 1;
400 }
401 else if (bi->pred_count == 0)
402 {
403 edge e;
404 edge_iterator ei;
405 gcov_type total = 0;
406
407 FOR_EACH_EDGE (e, ei, bb->preds)
408 total += e->count;
409 bb->count = total;
410 bi->count_valid = 1;
411 changes = 1;
412 }
413 }
414 if (bi->count_valid)
415 {
416 if (bi->succ_count == 1)
417 {
418 edge e;
419 edge_iterator ei;
420 gcov_type total = 0;
421
422 /* One of the counts will be invalid, but it is zero,
423 so adding it in also doesn't hurt. */
424 FOR_EACH_EDGE (e, ei, bb->succs)
425 total += e->count;
426
427 /* Seedgeh for the invalid edge, and set its count. */
428 FOR_EACH_EDGE (e, ei, bb->succs)
429 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
430 break;
431
432 /* Calculate count for remaining edge by conservation. */
433 total = bb->count - total;
434
435 gcc_assert (e);
436 EDGE_INFO (e)->count_valid = 1;
437 e->count = total;
438 bi->succ_count--;
439
440 BB_INFO (e->dest)->pred_count--;
441 changes = 1;
442 }
443 if (bi->pred_count == 1)
444 {
445 edge e;
446 edge_iterator ei;
447 gcov_type total = 0;
448
449 /* One of the counts will be invalid, but it is zero,
450 so adding it in also doesn't hurt. */
451 FOR_EACH_EDGE (e, ei, bb->preds)
452 total += e->count;
453
454 /* Search for the invalid edge, and set its count. */
455 FOR_EACH_EDGE (e, ei, bb->preds)
456 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
457 break;
458
459 /* Calculate count for remaining edge by conservation. */
460 total = bb->count - total + e->count;
461
462 gcc_assert (e);
463 EDGE_INFO (e)->count_valid = 1;
464 e->count = total;
465 bi->pred_count--;
466
467 BB_INFO (e->src)->succ_count--;
468 changes = 1;
469 }
470 }
471 }
472 }
473 if (dump_file)
474 dump_flow_info (dump_file);
475
476 total_num_passes += passes;
477 if (dump_file)
478 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
479
480 /* If the graph has been correctly solved, every block will have a
481 succ and pred count of zero. */
482 FOR_EACH_BB (bb)
483 {
484 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
485 }
486
487 /* For every edge, calculate its branch probability and add a reg_note
488 to the branch insn to indicate this. */
489
490 for (i = 0; i < 20; i++)
491 hist_br_prob[i] = 0;
492 num_never_executed = 0;
493 num_branches = 0;
494
495 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
496 {
497 edge e;
498 edge_iterator ei;
499 rtx note;
500
501 if (bb->count < 0)
502 {
503 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
504 bb->index, (int)bb->count);
505 bb->count = 0;
506 }
507 FOR_EACH_EDGE (e, ei, bb->succs)
508 {
509 /* Function may return twice in the cased the called function is
510 setjmp or calls fork, but we can't represent this by extra
511 edge from the entry, since extra edge from the exit is
512 already present. We get negative frequency from the entry
513 point. */
514 if ((e->count < 0
515 && e->dest == EXIT_BLOCK_PTR)
516 || (e->count > bb->count
517 && e->dest != EXIT_BLOCK_PTR))
518 {
519 if (block_ends_with_call_p (bb))
520 e->count = e->count < 0 ? 0 : bb->count;
521 }
522 if (e->count < 0 || e->count > bb->count)
523 {
524 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
525 e->src->index, e->dest->index,
526 (int)e->count);
527 e->count = bb->count / 2;
528 }
529 }
530 if (bb->count)
531 {
532 FOR_EACH_EDGE (e, ei, bb->succs)
533 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
534 if (bb->index >= NUM_FIXED_BLOCKS
535 && block_ends_with_condjump_p (bb)
536 && EDGE_COUNT (bb->succs) >= 2)
537 {
538 int prob;
539 edge e;
540 int index;
541
542 /* Find the branch edge. It is possible that we do have fake
543 edges here. */
544 FOR_EACH_EDGE (e, ei, bb->succs)
545 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
546 break;
547
548 prob = e->probability;
549 index = prob * 20 / REG_BR_PROB_BASE;
550
551 if (index == 20)
552 index = 19;
553 hist_br_prob[index]++;
554
555 /* Do this for RTL only. */
556 if (!ir_type ())
557 {
558 note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
559 /* There may be already note put by some other pass, such
560 as builtin_expect expander. */
561 if (note)
562 XEXP (note, 0) = GEN_INT (prob);
563 else
564 REG_NOTES (BB_END (bb))
565 = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
566 REG_NOTES (BB_END (bb)));
567 }
568 num_branches++;
569 }
570 }
571 /* Otherwise try to preserve the existing REG_BR_PROB probabilities
572 tree based profile guessing put into code. BB can be the
573 ENTRY_BLOCK, and it can have multiple (fake) successors in
574 EH cases, but it still has no code; don't crash in this case. */
575 else if (profile_status == PROFILE_ABSENT
576 && !ir_type ()
577 && EDGE_COUNT (bb->succs) > 1
578 && BB_END (bb)
579 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
580 {
581 int prob = INTVAL (XEXP (note, 0));
582
583 BRANCH_EDGE (bb)->probability = prob;
584 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
585 }
586 /* As a last resort, distribute the probabilities evenly.
587 Use simple heuristics that if there are normal edges,
588 give all abnormals frequency of 0, otherwise distribute the
589 frequency over abnormals (this is the case of noreturn
590 calls). */
591 else if (profile_status == PROFILE_ABSENT)
592 {
593 int total = 0;
594
595 FOR_EACH_EDGE (e, ei, bb->succs)
596 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
597 total ++;
598 if (total)
599 {
600 FOR_EACH_EDGE (e, ei, bb->succs)
601 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
602 e->probability = REG_BR_PROB_BASE / total;
603 else
604 e->probability = 0;
605 }
606 else
607 {
608 total += EDGE_COUNT (bb->succs);
609 FOR_EACH_EDGE (e, ei, bb->succs)
610 e->probability = REG_BR_PROB_BASE / total;
611 }
612 if (bb->index >= NUM_FIXED_BLOCKS
613 && block_ends_with_condjump_p (bb)
614 && EDGE_COUNT (bb->succs) >= 2)
615 num_branches++, num_never_executed;
616 }
617 }
618 counts_to_freqs ();
619
620 if (dump_file)
621 {
622 fprintf (dump_file, "%d branches\n", num_branches);
623 fprintf (dump_file, "%d branches never executed\n",
624 num_never_executed);
625 if (num_branches)
626 for (i = 0; i < 10; i++)
627 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
628 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
629 5 * i, 5 * i + 5);
630
631 total_num_branches += num_branches;
632 total_num_never_executed += num_never_executed;
633 for (i = 0; i < 20; i++)
634 total_hist_br_prob[i] += hist_br_prob[i];
635
636 fputc ('\n', dump_file);
637 fputc ('\n', dump_file);
638 }
639
640 free_aux_for_blocks ();
641 }
642
/* Load value histograms values whose description is stored in VALUES array
   from .gcda file.

   Counters of each histogram kind are stored consecutively in the
   data file, so we first total the counters needed per kind, read
   each kind's counter array in one call, and then hand out slices of
   those arrays to the individual histograms in order.  */

static void
compute_value_histograms (histogram_values values)
{
  unsigned i, j, t, any;
  /* Number of counters needed for each histogram kind.  */
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  /* Base of the counter array read for each kind (NULL if none).  */
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  /* Cursor into each kind's counter array; advanced per histogram.  */
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  /* First pass: total the counters required per histogram kind.  */
  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  /* Read the counter arrays, one per kind that is actually used.  */
  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
	{
	  histogram_counts[t] = NULL;
	  continue;
	}

      histogram_counts[t] =
	get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
			     n_histogram_counters[t], NULL);
      if (histogram_counts[t])
	any = 1;
      act_count[t] = histogram_counts[t];
    }
  /* No profile data for any kind: nothing to attach.  */
  if (!any)
    return;

  /* Second pass: give each histogram its slice of the counters and
     splice it onto its statement's annotation list.  */
  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      tree stmt = hist->hvalue.stmt;
      stmt_ann_t ann = get_stmt_ann (stmt);

      t = (int) hist->type;

      /* Take this histogram's slice and advance the cursor.  */
      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      /* Push onto the front of the statement's histogram list.  */
      hist->hvalue.next = ann->histograms;
      ann->histograms = hist;
      /* Copy the counters so they outlive the arrays freed below.  */
      hist->hvalue.counters =
	 xmalloc (sizeof (gcov_type) * hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
	hist->hvalue.counters[j] = aact_count[j];
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}
706
707 /* The entry basic block will be moved around so that it has index=1,
708 there is nothing at index 0 and the exit is at n_basic_block. */
709 #define BB_TO_GCOV_INDEX(bb) ((bb)->index - 1)
710 /* When passed NULL as file_name, initialize.
711 When passed something else, output the necessary commands to change
712 line to LINE and offset to FILE_NAME. */
713 static void
714 output_location (char const *file_name, int line,
715 gcov_position_t *offset, basic_block bb)
716 {
717 static char const *prev_file_name;
718 static int prev_line;
719 bool name_differs, line_differs;
720
721 if (!file_name)
722 {
723 prev_file_name = NULL;
724 prev_line = -1;
725 return;
726 }
727
728 name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
729 line_differs = prev_line != line;
730
731 if (name_differs || line_differs)
732 {
733 if (!*offset)
734 {
735 *offset = gcov_write_tag (GCOV_TAG_LINES);
736 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
737 name_differs = line_differs=true;
738 }
739
740 /* If this is a new source file, then output the
741 file's name to the .bb file. */
742 if (name_differs)
743 {
744 prev_file_name = file_name;
745 gcov_write_unsigned (0);
746 gcov_write_string (prev_file_name);
747 }
748 if (line_differs)
749 {
750 gcov_write_unsigned (line);
751 prev_line = line;
752 }
753 }
754 }
755
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
	 Instead we simply allow negative counts on edges from exit to the
	 block past call and corresponding probabilities.  We can't go
	 with the extra edges because that would result in flowgraph that
	 needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  block_stmt_iterator bsi;
	  tree last = NULL;

	  /* It may happen that there are compiler generated statements
	     without a locus at all.  Go through the basic block from the
	     last to the first statement looking for a locus.  */
	  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
	    {
	      last = bsi_stmt (bsi);
	      if (EXPR_LOCUS (last))
		break;
	    }

	  /* Edge with goto locus might get wrong coverage info unless
	     it is the only edge out of BB.
	     Don't do that when the locuses match, so
	     if (blah) goto something;
	     is not computed twice.  */
	  if (last && EXPR_LOCUS (last)
	      && e->goto_locus
	      && !single_succ_p (bb)
#ifdef USE_MAPPED_LOCATION
	      && (LOCATION_FILE (e->goto_locus)
		  != LOCATION_FILE (EXPR_LOCATION (last))
		  || (LOCATION_LINE (e->goto_locus)
		      != LOCATION_LINE (EXPR_LOCATION (last)))))
#else
	      && (e->goto_locus->file != EXPR_LOCUS (last)->file
		  || (e->goto_locus->line != EXPR_LOCUS (last)->line)))
#endif
	    {
	      /* Split the edge so the goto gets its own block (and
		 therefore its own line record).  */
	      basic_block new = split_edge (e);
	      single_succ_edge (new)->goto_locus = e->goto_locus;
	    }
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->dest != EXIT_BLOCK_PTR)
	    need_exit_edge = 1;
	  if (e->dest == EXIT_BLOCK_PTR)
	    have_exit_edge = 1;
	}
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->src != ENTRY_BLOCK_PTR)
	    need_entry_edge = 1;
	  if (e->src == ENTRY_BLOCK_PTR)
	    have_entry_edge = 1;
	}

      if (need_exit_edge && !have_exit_edge)
	{
	  if (dump_file)
	    fprintf (dump_file, "Adding fake exit edge to bb %i\n",
		     bb->index);
	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	}
      if (need_entry_edge && !have_entry_edge)
	{
	  if (dump_file)
	    fprintf (dump_file, "Adding fake entry edge to bb %i\n",
		     bb->index);
	  make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
	}
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	  && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
	{
	  EDGE_INFO (e)->ignore = 1;
	  ignored_edges++;
	}
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
	/*NOP*/;
      else if (e->flags & EDGE_FAKE)
	{
	  inf->ignore = 1;
	  ignored_edges++;
	}
      else
	num_instrumented++;
    }

  total_num_blocks += n_basic_blocks;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      /* The flags are currently unused; one zero word per block.  */
      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks); i++)
	gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

   /* Keep all basic block indexes nonnegative in the gcov output.
      Index 0 is used for entry block, last index is for exit block.
      The indexes are restored to ENTRY_BLOCK/EXIT_BLOCK below, after
      all gcov records have been written.  */
  ENTRY_BLOCK_PTR->index = 1;
  EXIT_BLOCK_PTR->index = last_basic_block;

  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      /* One GCOV_TAG_ARCS record per block (exit block excluded),
	 listing each non-ignored successor and its flag bits.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  offset = gcov_write_tag (GCOV_TAG_ARCS);
	  gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      struct edge_info *i = EDGE_INFO (e);
	      if (!i->ignore)
		{
		  unsigned flag_bits = 0;

		  if (i->on_tree)
		    flag_bits |= GCOV_ARC_ON_TREE;
		  if (e->flags & EDGE_FAKE)
		    flag_bits |= GCOV_ARC_FAKE;
		  if (e->flags & EDGE_FALLTHRU)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;
		  /* On trees we don't have fallthru flags, but we can
		     recompute them from CFG shape.  */
		  if (ir_type ()
		      && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
		      && e->src->next_bb == e->dest)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;

		  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
		  gcov_write_unsigned (flag_bits);
	        }
	    }

	  gcov_write_length (offset);
	}
    }

  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      if (!ir_type ())
	{
	  /* RTL path: recover source locations from line number notes
	     in the insn stream.  */
	  gcov_position_t offset;

	  FOR_EACH_BB (bb)
	    {
	      rtx insn = BB_HEAD (bb);
	      int ignore_next_note = 0;

	      offset = 0;

	      /* We are looking for line number notes.  Search backward
		 before basic block to find correct ones.  */
	      insn = prev_nonnote_insn (insn);
	      if (!insn)
		insn = get_insns ();
	      else
		insn = NEXT_INSN (insn);

	      while (insn != BB_END (bb))
		{
		  if (NOTE_P (insn))
		    {
		      /* Must ignore the line number notes that
			 immediately follow the end of an inline function
			 to avoid counting it twice.  There is a note
			 before the call, and one after the call.  */
		      if (NOTE_LINE_NUMBER (insn)
			  == NOTE_INSN_REPEATED_LINE_NUMBER)
			ignore_next_note = 1;
		      else if (NOTE_LINE_NUMBER (insn) <= 0)
			/*NOP*/;
		      else if (ignore_next_note)
			ignore_next_note = 0;
		      else
			{
			  expanded_location s;
			  NOTE_EXPANDED_LOCATION (s, insn);
			  output_location (s.file, s.line, &offset, bb);
			}
		    }
		  insn = NEXT_INSN (insn);
		}

	      if (offset)
		{
		  /* A file of NULL indicates the end of run.  */
		  gcov_write_unsigned (0);
		  gcov_write_string (NULL);
		  gcov_write_length (offset);
		}
	    }
	}
      else
	{
	  /* Tree path: locations come from statement EXPR_LOCATIONs.  */
	  gcov_position_t offset;

	  FOR_EACH_BB (bb)
	    {
	      block_stmt_iterator bsi;

	      offset = 0;

	      /* Attribute the function declaration's location to the
		 first real block.  */
	      if (bb == ENTRY_BLOCK_PTR->next_bb)
		{
		  expanded_location curr_location =
		    expand_location (DECL_SOURCE_LOCATION
				     (current_function_decl));
		  output_location (curr_location.file, curr_location.line,
				   &offset, bb);
		}

	      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		{
		  tree stmt = bsi_stmt (bsi);
		  if (EXPR_HAS_LOCATION (stmt))
		    output_location (EXPR_FILENAME (stmt),
				     EXPR_LINENO (stmt),
				     &offset, bb);
		}

	      /* Notice GOTO expressions we eliminated while constructing the
		 CFG.  */
	      if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
		{
		  /* ??? source_locus type is marked deprecated in input.h.  */
		  source_locus curr_location = single_succ_edge (bb)->goto_locus;
		  /* ??? The FILE/LINE API is inconsistent for these cases.  */
#ifdef USE_MAPPED_LOCATION
		  output_location (LOCATION_FILE (curr_location),
				   LOCATION_LINE (curr_location),
				   &offset, bb);
#else
		  output_location (curr_location->file, curr_location->line,
				   &offset, bb);
#endif
		}

	      if (offset)
		{
		  /* A file of NULL indicates the end of run.  */
		  gcov_write_unsigned (0);
		  gcov_write_string (NULL);
		  gcov_write_length (offset);
		}
	    }
	}
    }

  /* Restore the indexes hijacked for gcov output above.  */
  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
	compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
	instrument_values (values);

      /* Commit changes done by instrumentation.  */
      if (ir_type ())
	bsi_commit_edge_inserts ();
      else
	{
	  commit_edge_insertions_watch_calls ();
	  allocate_reg_info (max_reg_num (), FALSE, FALSE);
	}
    }

  free_aux_for_edges ();

  if (!ir_type ())
    {
      /* Re-merge split basic blocks and the mess introduced by
	 insert_insn_on_edge.  */
      cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
      if (profile_dump_file())
	dump_flow_info (profile_dump_file());
    }

  free_edge_list (el);
  if (flag_branch_probabilities)
    profile_status = PROFILE_READ;
  coverage_end_function ();
}
1168 \f
1169 /* Union find algorithm implementation for the basic blocks using
1170 aux fields. */
1171
1172 static basic_block
1173 find_group (basic_block bb)
1174 {
1175 basic_block group = bb, bb1;
1176
1177 while ((basic_block) group->aux != group)
1178 group = (basic_block) group->aux;
1179
1180 /* Compress path. */
1181 while ((basic_block) bb->aux != group)
1182 {
1183 bb1 = (basic_block) bb->aux;
1184 bb->aux = (void *) group;
1185 bb = bb1;
1186 }
1187 return group;
1188 }
1189
1190 static void
1191 union_groups (basic_block bb1, basic_block bb2)
1192 {
1193 basic_block bb1g = find_group (bb1);
1194 basic_block bb2g = find_group (bb2);
1195
1196 /* ??? I don't have a place for the rank field. OK. Lets go w/o it,
1197 this code is unlikely going to be performance problem anyway. */
1198 gcc_assert (bb1g != bb2g);
1199
1200 bb1g->aux = bb2g;
1201 }
1202 \f
1203 /* This function searches all of the edges in the program flow graph, and puts
1204 as many bad edges as possible onto the spanning tree. Bad edges include
1205 abnormals edges, which can't be instrumented at the moment. Since it is
1206 possible for fake edges to form a cycle, we will have to develop some
1207 better way in the future. Also put critical edges to the tree, since they
1208 are more expensive to instrument. */
1209
1210 static void
1211 find_spanning_tree (struct edge_list *el)
1212 {
1213 int i;
1214 int num_edges = NUM_EDGES (el);
1215 basic_block bb;
1216
1217 /* We use aux field for standard union-find algorithm. */
1218 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1219 bb->aux = bb;
1220
1221 /* Add fake edge exit to entry we can't instrument. */
1222 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1223
1224 /* First add all abnormal edges to the tree unless they form a cycle. Also
1225 add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
1226 setting return value from function. */
1227 for (i = 0; i < num_edges; i++)
1228 {
1229 edge e = INDEX_EDGE (el, i);
1230 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1231 || e->dest == EXIT_BLOCK_PTR)
1232 && !EDGE_INFO (e)->ignore
1233 && (find_group (e->src) != find_group (e->dest)))
1234 {
1235 if (dump_file)
1236 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1237 e->src->index, e->dest->index);
1238 EDGE_INFO (e)->on_tree = 1;
1239 union_groups (e->src, e->dest);
1240 }
1241 }
1242
1243 /* Now insert all critical edges to the tree unless they form a cycle. */
1244 for (i = 0; i < num_edges; i++)
1245 {
1246 edge e = INDEX_EDGE (el, i);
1247 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1248 && find_group (e->src) != find_group (e->dest))
1249 {
1250 if (dump_file)
1251 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1252 e->src->index, e->dest->index);
1253 EDGE_INFO (e)->on_tree = 1;
1254 union_groups (e->src, e->dest);
1255 }
1256 }
1257
1258 /* And now the rest. */
1259 for (i = 0; i < num_edges; i++)
1260 {
1261 edge e = INDEX_EDGE (el, i);
1262 if (!EDGE_INFO (e)->ignore
1263 && find_group (e->src) != find_group (e->dest))
1264 {
1265 if (dump_file)
1266 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1267 e->src->index, e->dest->index);
1268 EDGE_INFO (e)->on_tree = 1;
1269 union_groups (e->src, e->dest);
1270 }
1271 }
1272
1273 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1274 bb->aux = NULL;
1275 }
1276 \f
1277 /* Perform file-level initialization for branch-prob processing. */
1278
1279 void
1280 init_branch_prob (void)
1281 {
1282 int i;
1283
1284 total_num_blocks = 0;
1285 total_num_edges = 0;
1286 total_num_edges_ignored = 0;
1287 total_num_edges_instrumented = 0;
1288 total_num_blocks_created = 0;
1289 total_num_passes = 0;
1290 total_num_times_called = 0;
1291 total_num_branches = 0;
1292 total_num_never_executed = 0;
1293 for (i = 0; i < 20; i++)
1294 total_hist_br_prob[i] = 0;
1295 }
1296
1297 /* Performs file-level cleanup after branch-prob processing
1298 is completed. */
1299
1300 void
1301 end_branch_prob (void)
1302 {
1303 if (dump_file)
1304 {
1305 fprintf (dump_file, "\n");
1306 fprintf (dump_file, "Total number of blocks: %d\n",
1307 total_num_blocks);
1308 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1309 fprintf (dump_file, "Total number of ignored edges: %d\n",
1310 total_num_edges_ignored);
1311 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1312 total_num_edges_instrumented);
1313 fprintf (dump_file, "Total number of blocks created: %d\n",
1314 total_num_blocks_created);
1315 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1316 total_num_passes);
1317 if (total_num_times_called != 0)
1318 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1319 (total_num_passes + (total_num_times_called >> 1))
1320 / total_num_times_called);
1321 fprintf (dump_file, "Total number of branches: %d\n",
1322 total_num_branches);
1323 fprintf (dump_file, "Total number of branches never executed: %d\n",
1324 total_num_never_executed);
1325 if (total_num_branches)
1326 {
1327 int i;
1328
1329 for (i = 0; i < 10; i++)
1330 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1331 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1332 / total_num_branches, 5*i, 5*i+5);
1333 }
1334 }
1335 }
1336
/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  /* Tree-level instrumentation is only valid while the function is
     still in tree/GIMPLE form, hence the assertion on ir_type.  */
  gcc_assert (ir_type ());
  profile_hooks = &tree_profile_hooks;
}
1345
1346 \f
1347 /* Do branch profiling and static profile estimation passes. */
1348 static void
1349 rest_of_handle_branch_prob (void)
1350 {
1351 struct loops loops;
1352
1353 /* Discover and record the loop depth at the head of each basic
1354 block. The loop infrastructure does the real job for us. */
1355 flow_loops_find (&loops);
1356
1357 if (dump_file)
1358 flow_loops_dump (&loops, dump_file, NULL, 0);
1359
1360 /* Estimate using heuristics if no profiling info is available. */
1361 if (flag_guess_branch_prob
1362 && profile_status == PROFILE_ABSENT)
1363 estimate_probability (&loops);
1364
1365 flow_loops_free (&loops);
1366 free_dominance_info (CDI_DOMINATORS);
1367 }
1368
/* Pass descriptor for the branch-probability pass; its work is done
   by rest_of_handle_branch_prob above.  */
struct tree_opt_pass pass_branch_prob =
{
  "bp",                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_branch_prob,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'b'                                   /* letter */
};
1385
1386
1387