gcc/profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
4 Free Software Foundation, Inc.
5 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
6 based on some ideas from Dain Samples of UC Berkeley.
7 Further mangling by Bob Manson, Cygnus Support.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation we generate the
36 minimal spanning tree of the BB graph; only edges that are not on the tree
37 (plus the entry point) need instrumenting. From that information
38 all other edge counts can be deduced. By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
41
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
45
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
48
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
51
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "basic-block.h"
63 #include "diagnostic-core.h"
64 #include "coverage.h"
65 #include "value-prof.h"
66 #include "tree.h"
67 #include "cfghooks.h"
68 #include "tree-flow.h"
69 #include "timevar.h"
70 #include "cfgloop.h"
71 #include "tree-pass.h"
72
73 #include "profile.h"
74
75 struct bb_info {
76 unsigned int count_valid : 1;
77
78 /* Number of successor and predecessor edges. */
79 gcov_type succ_count;
80 gcov_type pred_count;
81 };
82
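/* Accessor for the per-block scratch data the graph solver keeps in each
   block's aux field (allocated via alloc_aux_for_blocks in
   compute_branch_probabilities below).  */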
83 #define BB_INFO(b) ((struct bb_info *) (b)->aux)
84
85
86 /* Counter summary from the last set of coverage counts read. */
87
88 const struct gcov_ctr_summary *profile_info;
89
90 /* Collect statistics on the performance of this pass for the entire source
91 file. */
92
93 static int total_num_blocks;
94 static int total_num_edges;
95 static int total_num_edges_ignored;
96 static int total_num_edges_instrumented;
97 static int total_num_blocks_created;
98 static int total_num_passes;
99 static int total_num_times_called;
100 static int total_hist_br_prob[20];
101 static int total_num_branches;
102
103 /* Forward declarations. */
104 static void find_spanning_tree (struct edge_list *);
105
106 /* Add edge instrumentation code to the current function.
107
108 EL is the edge list of the function's CFG. Edges that are neither
109 ignored nor on the spanning tree are instrumented; their number is returned. */
110
111 static unsigned
112 instrument_edges (struct edge_list *el)
113 {
114 unsigned num_instr_edges = 0;
115 int num_edges = NUM_EDGES (el);
116 basic_block bb;
117
118 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
119 {
120 edge e;
121 edge_iterator ei;
122
123 FOR_EACH_EDGE (e, ei, bb->succs)
124 {
125 struct edge_info *inf = EDGE_INFO (e);
126
127 if (!inf->ignore && !inf->on_tree)
128 {
129 gcc_assert (!(e->flags & EDGE_ABNORMAL));
130 if (dump_file)
131 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
132 e->src->index, e->dest->index,
133 EDGE_CRITICAL_P (e) ? " (and split)" : "");
134 gimple_gen_edge_profiler (num_instr_edges++, e);
135 }
136 }
137 }
138
139 total_num_blocks_created += num_edges;
140 if (dump_file)
141 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
142 return num_instr_edges;
143 }
144
145 /* Add code to measure histograms for values in list VALUES. */
146 static void
147 instrument_values (histogram_values values)
148 {
149 unsigned i, t;
150
151 /* Emit code to generate the histograms before the insns. */
152
153 for (i = 0; i < VEC_length (histogram_value, values); i++)
154 {
155 histogram_value hist = VEC_index (histogram_value, values, i);
156 switch (hist->type)
157 {
158 case HIST_TYPE_INTERVAL:
159 t = GCOV_COUNTER_V_INTERVAL;
160 break;
161
162 case HIST_TYPE_POW2:
163 t = GCOV_COUNTER_V_POW2;
164 break;
165
166 case HIST_TYPE_SINGLE_VALUE:
167 t = GCOV_COUNTER_V_SINGLE;
168 break;
169
170 case HIST_TYPE_CONST_DELTA:
171 t = GCOV_COUNTER_V_DELTA;
172 break;
173
174 case HIST_TYPE_INDIR_CALL:
175 t = GCOV_COUNTER_V_INDIR;
176 break;
177
178 case HIST_TYPE_AVERAGE:
179 t = GCOV_COUNTER_AVERAGE;
180 break;
181
182 case HIST_TYPE_IOR:
183 t = GCOV_COUNTER_IOR;
184 break;
185
186 default:
187 gcc_unreachable ();
188 }
189 if (!coverage_counter_alloc (t, hist->n_counters))
190 continue;
191
192 switch (hist->type)
193 {
194 case HIST_TYPE_INTERVAL:
195 gimple_gen_interval_profiler (hist, t, 0);
196 break;
197
198 case HIST_TYPE_POW2:
199 gimple_gen_pow2_profiler (hist, t, 0);
200 break;
201
202 case HIST_TYPE_SINGLE_VALUE:
203 gimple_gen_one_value_profiler (hist, t, 0);
204 break;
205
206 case HIST_TYPE_CONST_DELTA:
207 gimple_gen_const_delta_profiler (hist, t, 0);
208 break;
209
210 case HIST_TYPE_INDIR_CALL:
211 gimple_gen_ic_profiler (hist, t, 0);
212 break;
213
214 case HIST_TYPE_AVERAGE:
215 gimple_gen_average_profiler (hist, t, 0);
216 break;
217
218 case HIST_TYPE_IOR:
219 gimple_gen_ior_profiler (hist, t, 0);
220 break;
221
222 default:
223 gcc_unreachable ();
224 }
225 }
226 }
227 \f
228
229 /* Read the arc execution counts for the current function from the .gcda file.
230
231 CFG_CHECKSUM and LINENO_CHECKSUM are the precomputed checksums used to match the profile data. */
232
233 static gcov_type *
234 get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
235 {
236 unsigned num_edges = 0;
237 basic_block bb;
238 gcov_type *counts;
239
240 /* Count the edges to be (possibly) instrumented. */
241 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
242 {
243 edge e;
244 edge_iterator ei;
245
246 FOR_EACH_EDGE (e, ei, bb->succs)
247 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
248 num_edges++;
249 }
250
251 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
252 lineno_checksum, &profile_info);
253 if (!counts)
254 return NULL;
255
256 if (dump_file && profile_info)
257 fprintf(dump_file, "Merged %u profiles with maximal count %u.\n",
258 profile_info->runs, (unsigned) profile_info->sum_max);
259
260 return counts;
261 }
262
263
264 static bool
265 is_edge_inconsistent (VEC(edge,gc) *edges)
266 {
267 edge e;
268 edge_iterator ei;
269 FOR_EACH_EDGE (e, ei, edges)
270 {
271 if (!EDGE_INFO (e)->ignore)
272 {
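/* A negative count is tolerated only on a fake edge leaving a block that
   ends with a call; see the setjmp/fork note in
   compute_branch_probabilities.  */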
273 if (e->count < 0
274 && (!(e->flags & EDGE_FAKE)
275 || !block_ends_with_call_p (e->src)))
276 {
277 if (dump_file)
278 {
279 fprintf (dump_file,
280 "Edge %i->%i is inconsistent, count"HOST_WIDEST_INT_PRINT_DEC,
281 e->src->index, e->dest->index, e->count);
282 dump_bb (e->src, dump_file, 0);
283 dump_bb (e->dest, dump_file, 0);
284 }
285 return true;
286 }
287 }
288 }
289 return false;
290 }
291
292 static void
293 correct_negative_edge_counts (void)
294 {
295 basic_block bb;
296 edge e;
297 edge_iterator ei;
298
299 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
300 {
301 FOR_EACH_EDGE (e, ei, bb->succs)
302 {
303 if (e->count < 0)
304 e->count = 0;
305 }
306 }
307 }
308
309 /* Check consistency.
310 Return true if inconsistency is found. */
311 static bool
312 is_inconsistent (void)
313 {
314 basic_block bb;
315 bool inconsistent = false;
316 FOR_EACH_BB (bb)
317 {
318 inconsistent |= is_edge_inconsistent (bb->preds);
319 if (!dump_file && inconsistent)
320 return true;
321 inconsistent |= is_edge_inconsistent (bb->succs);
322 if (!dump_file && inconsistent)
323 return true;
324 if (bb->count < 0)
325 {
326 if (dump_file)
327 {
328 fprintf (dump_file, "BB %i count is negative "
329 HOST_WIDEST_INT_PRINT_DEC,
330 bb->index,
331 bb->count);
332 dump_bb (bb, dump_file, 0);
333 }
334 inconsistent = true;
335 }
336 if (bb->count != sum_edge_counts (bb->preds))
337 {
338 if (dump_file)
339 {
340 fprintf (dump_file, "BB %i count does not match sum of incoming edges "
341 HOST_WIDEST_INT_PRINT_DEC" should be " HOST_WIDEST_INT_PRINT_DEC,
342 bb->index,
343 bb->count,
344 sum_edge_counts (bb->preds));
345 dump_bb (bb, dump_file, 0);
346 }
347 inconsistent = true;
348 }
349 if (bb->count != sum_edge_counts (bb->succs) &&
350 ! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb)))
351 {
352 if (dump_file)
353 {
354 fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
355 HOST_WIDEST_INT_PRINT_DEC" should be " HOST_WIDEST_INT_PRINT_DEC,
356 bb->index,
357 bb->count,
358 sum_edge_counts (bb->succs));
359 dump_bb (bb, dump_file, 0);
360 }
361 inconsistent = true;
362 }
363 if (!dump_file && inconsistent)
364 return true;
365 }
366
367 return inconsistent;
368 }
369
370 /* Set each basic block count to the sum of its outgoing edge counts */
371 static void
372 set_bb_counts (void)
373 {
374 basic_block bb;
375 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
376 {
377 bb->count = sum_edge_counts (bb->succs);
378 gcc_assert (bb->count >= 0);
379 }
380 }
381
382 /* Reads profile data and returns total number of edge counts read */
383 static int
384 read_profile_edge_counts (gcov_type *exec_counts)
385 {
386 basic_block bb;
387 int num_edges = 0;
388 int exec_counts_pos = 0;
389 /* For each edge not on the spanning tree, set its execution count from
390 the .da file. */
391 /* The first count in the .da file is the number of times that the function
392 was entered. This is the exec_count for block zero. */
393
394 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
395 {
396 edge e;
397 edge_iterator ei;
398
399 FOR_EACH_EDGE (e, ei, bb->succs)
400 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
401 {
402 num_edges++;
403 if (exec_counts)
404 {
405 e->count = exec_counts[exec_counts_pos++];
406 if (e->count > profile_info->sum_max)
407 {
408 if (flag_profile_correction)
409 {
410 static bool informed = 0;
411 if (!informed)
412 inform (input_location,
413 "corrupted profile info: edge count exceeds maximal count");
414 informed = 1;
415 }
416 else
417 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
418 bb->index, e->dest->index);
419 }
420 }
421 else
422 e->count = 0;
423
424 EDGE_INFO (e)->count_valid = 1;
425 BB_INFO (bb)->succ_count--;
426 BB_INFO (e->dest)->pred_count--;
427 if (dump_file)
428 {
429 fprintf (dump_file, "\nRead edge from %i to %i, count:",
430 bb->index, e->dest->index);
431 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
432 (HOST_WIDEST_INT) e->count);
433 }
434 }
435 }
436
437 return num_edges;
438 }
439
440 #define OVERLAP_BASE 10000
441
442 /* Compare the static estimated profile to the actual profile, and
443 return the "degree of overlap" measure between them.
444
445 Degree of overlap is a number between 0 and OVERLAP_BASE. It is
446 the sum, over all basic blocks, of the block's minimum relative weight
447 between the two profiles. An overlap of OVERLAP_BASE means the two
448 profiles are identical. */
449
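/* For example (hypothetical numbers): if the measured profile puts 60% and
   40% of its total count on two blocks while the static estimate puts 50%
   on each, the overlap is min (60, 50) + min (40, 50) = 90% of OVERLAP_BASE.  */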
450 static int
451 compute_frequency_overlap (void)
452 {
453 gcov_type count_total = 0, freq_total = 0;
454 int overlap = 0;
455 basic_block bb;
456
457 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
458 {
459 count_total += bb->count;
460 freq_total += bb->frequency;
461 }
462
463 if (count_total == 0 || freq_total == 0)
464 return 0;
465
466 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
467 overlap += MIN (bb->count * OVERLAP_BASE / count_total,
468 bb->frequency * OVERLAP_BASE / freq_total);
469
470 return overlap;
471 }
472
473 /* Compute the branch probabilities for the various branches.
474 Annotate them accordingly.
475
476 CFG_CHECKSUM is the precomputed checksum for the CFG. */
477
478 static void
479 compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
480 {
481 basic_block bb;
482 int i;
483 int num_edges = 0;
484 int changes;
485 int passes;
486 int hist_br_prob[20];
487 int num_branches;
488 gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
489 int inconsistent = 0;
490
491 /* Very simple sanity checks so we catch bugs in our profiling code. */
492 if (!profile_info)
493 return;
494 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
495 {
496 error ("corrupted profile info: run_max * runs < sum_max");
497 exec_counts = NULL;
498 }
499
500 if (profile_info->sum_all < profile_info->sum_max)
501 {
502 error ("corrupted profile info: sum_all is smaller than sum_max");
503 exec_counts = NULL;
504 }
505
506 /* Attach extra info block to each bb. */
507 alloc_aux_for_blocks (sizeof (struct bb_info));
508 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
509 {
510 edge e;
511 edge_iterator ei;
512
513 FOR_EACH_EDGE (e, ei, bb->succs)
514 if (!EDGE_INFO (e)->ignore)
515 BB_INFO (bb)->succ_count++;
516 FOR_EACH_EDGE (e, ei, bb->preds)
517 if (!EDGE_INFO (e)->ignore)
518 BB_INFO (bb)->pred_count++;
519 }
520
521 /* Avoid predicting entry on exit nodes. */
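/* The entry block has no predecessor edges and the exit block has no
   successor edges, so the "all edges known" rule below would otherwise set
   their counts to zero immediately.  A sentinel of 2 can never drop to 0
   or 1, so their counts are instead derived from the instrumented side.  */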
522 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
523 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
524
525 num_edges = read_profile_edge_counts (exec_counts);
526
527 if (dump_file)
528 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
529
530 /* For every block in the file,
531 - if every exit/entrance edge has a known count, then set the block count
532 - if the block count is known, and every exit/entrance edge but one has
533 a known execution count, then set the count of the remaining edge
534
535 As edge counts are set, decrement the succ/pred count, but don't delete
536 the edge, that way we can easily tell when all edges are known, or only
537 one edge is unknown. */
538
539 /* The order that the basic blocks are iterated through is important.
540 Since the code that finds spanning trees starts with block 0, low numbered
541 edges are put on the spanning tree in preference to high numbered edges.
542 Hence, most instrumented edges are at the end. Graph solving works much
543 faster if we propagate numbers from the end to the start.
544
545 This takes an average of slightly more than 3 passes. */
546
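/* Illustration (not from the original sources): for a diamond
   if (c) A; else B; where only the arc into A and the function entry arc
   carry counters, the condition block's count follows from its single
   known predecessor, the arc into B follows by conservation once only one
   successor remains unknown, and the join block is the sum of A and B.  */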
547 changes = 1;
548 passes = 0;
549 while (changes)
550 {
551 passes++;
552 changes = 0;
553 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
554 {
555 struct bb_info *bi = BB_INFO (bb);
556 if (! bi->count_valid)
557 {
558 if (bi->succ_count == 0)
559 {
560 edge e;
561 edge_iterator ei;
562 gcov_type total = 0;
563
564 FOR_EACH_EDGE (e, ei, bb->succs)
565 total += e->count;
566 bb->count = total;
567 bi->count_valid = 1;
568 changes = 1;
569 }
570 else if (bi->pred_count == 0)
571 {
572 edge e;
573 edge_iterator ei;
574 gcov_type total = 0;
575
576 FOR_EACH_EDGE (e, ei, bb->preds)
577 total += e->count;
578 bb->count = total;
579 bi->count_valid = 1;
580 changes = 1;
581 }
582 }
583 if (bi->count_valid)
584 {
585 if (bi->succ_count == 1)
586 {
587 edge e;
588 edge_iterator ei;
589 gcov_type total = 0;
590
591 /* One of the counts will be invalid, but it is zero,
592 so adding it in also doesn't hurt. */
593 FOR_EACH_EDGE (e, ei, bb->succs)
594 total += e->count;
595
596 /* Search for the invalid edge, and set its count. */
597 FOR_EACH_EDGE (e, ei, bb->succs)
598 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
599 break;
600
601 /* Calculate count for remaining edge by conservation. */
602 total = bb->count - total;
603
604 gcc_assert (e);
605 EDGE_INFO (e)->count_valid = 1;
606 e->count = total;
607 bi->succ_count--;
608
609 BB_INFO (e->dest)->pred_count--;
610 changes = 1;
611 }
612 if (bi->pred_count == 1)
613 {
614 edge e;
615 edge_iterator ei;
616 gcov_type total = 0;
617
618 /* One of the counts will be invalid, but it is zero,
619 so adding it in also doesn't hurt. */
620 FOR_EACH_EDGE (e, ei, bb->preds)
621 total += e->count;
622
623 /* Search for the invalid edge, and set its count. */
624 FOR_EACH_EDGE (e, ei, bb->preds)
625 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
626 break;
627
628 /* Calculate count for remaining edge by conservation. */
629 total = bb->count - total + e->count;
630
631 gcc_assert (e);
632 EDGE_INFO (e)->count_valid = 1;
633 e->count = total;
634 bi->pred_count--;
635
636 BB_INFO (e->src)->succ_count--;
637 changes = 1;
638 }
639 }
640 }
641 }
642 if (dump_file)
643 {
644 int overlap = compute_frequency_overlap ();
645 dump_flow_info (dump_file, dump_flags);
646 fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
647 overlap / (OVERLAP_BASE / 100),
648 overlap % (OVERLAP_BASE / 100));
649 }
650
651 total_num_passes += passes;
652 if (dump_file)
653 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
654
655 /* If the graph has been correctly solved, every block will have a
656 succ and pred count of zero. */
657 FOR_EACH_BB (bb)
658 {
659 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
660 }
661
662 /* Check for inconsistent basic block counts */
663 inconsistent = is_inconsistent ();
664
665 if (inconsistent)
666 {
667 if (flag_profile_correction)
668 {
669 /* Inconsistency detected. Make it flow-consistent. */
670 static int informed = 0;
671 if (informed == 0)
672 {
673 informed = 1;
674 inform (input_location, "correcting inconsistent profile data");
675 }
676 correct_negative_edge_counts ();
677 /* Set bb counts to the sum of the outgoing edge counts */
678 set_bb_counts ();
679 if (dump_file)
680 fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
681 mcf_smooth_cfg ();
682 }
683 else
684 error ("corrupted profile info: profile data is not flow-consistent");
685 }
686
687 /* For every edge, calculate its branch probability and annotate the
688 edge with it. */
689
690 for (i = 0; i < 20; i++)
691 hist_br_prob[i] = 0;
692 num_branches = 0;
693
694 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
695 {
696 edge e;
697 edge_iterator ei;
698
699 if (bb->count < 0)
700 {
701 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
702 bb->index, (int)bb->count);
703 bb->count = 0;
704 }
705 FOR_EACH_EDGE (e, ei, bb->succs)
706 {
707 /* A function may return twice in the case the called function is
708 setjmp or calls fork, but we can't represent this by an extra
709 edge from the entry, since an extra edge from the exit is
710 already present. We get a negative frequency from the entry
711 point. */
712 if ((e->count < 0
713 && e->dest == EXIT_BLOCK_PTR)
714 || (e->count > bb->count
715 && e->dest != EXIT_BLOCK_PTR))
716 {
717 if (block_ends_with_call_p (bb))
718 e->count = e->count < 0 ? 0 : bb->count;
719 }
720 if (e->count < 0 || e->count > bb->count)
721 {
722 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
723 e->src->index, e->dest->index,
724 (int)e->count);
725 e->count = bb->count / 2;
726 }
727 }
728 if (bb->count)
729 {
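/* Scale the edge count to REG_BR_PROB_BASE; adding bb->count / 2 before
   dividing rounds to the nearest value rather than truncating.  */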
730 FOR_EACH_EDGE (e, ei, bb->succs)
731 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
732 if (bb->index >= NUM_FIXED_BLOCKS
733 && block_ends_with_condjump_p (bb)
734 && EDGE_COUNT (bb->succs) >= 2)
735 {
736 int prob;
737 edge e;
738 int index;
739
740 /* Find the branch edge. It is possible that we do have fake
741 edges here. */
742 FOR_EACH_EDGE (e, ei, bb->succs)
743 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
744 break;
745
746 prob = e->probability;
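/* Bucket the probability into twenty 5% bins; an always-taken branch
   would index bin 20, which the clamp below folds into bin 19.  */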
747 index = prob * 20 / REG_BR_PROB_BASE;
748
749 if (index == 20)
750 index = 19;
751 hist_br_prob[index]++;
752
753 num_branches++;
754 }
755 }
756 /* As a last resort, distribute the probabilities evenly.
757 Use a simple heuristic: if there are normal edges,
758 give all abnormal edges a probability of 0; otherwise distribute
759 the probability over the abnormal edges (this is the case of
760 noreturn calls). */
761 else if (profile_status == PROFILE_ABSENT)
762 {
763 int total = 0;
764
765 FOR_EACH_EDGE (e, ei, bb->succs)
766 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
767 total ++;
768 if (total)
769 {
770 FOR_EACH_EDGE (e, ei, bb->succs)
771 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
772 e->probability = REG_BR_PROB_BASE / total;
773 else
774 e->probability = 0;
775 }
776 else
777 {
778 total += EDGE_COUNT (bb->succs);
779 FOR_EACH_EDGE (e, ei, bb->succs)
780 e->probability = REG_BR_PROB_BASE / total;
781 }
782 if (bb->index >= NUM_FIXED_BLOCKS
783 && block_ends_with_condjump_p (bb)
784 && EDGE_COUNT (bb->succs) >= 2)
785 num_branches++;
786 }
787 }
788 counts_to_freqs ();
789 profile_status = PROFILE_READ;
790 compute_function_frequency ();
791
792 if (dump_file)
793 {
794 fprintf (dump_file, "%d branches\n", num_branches);
795 if (num_branches)
796 for (i = 0; i < 10; i++)
797 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
798 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
799 5 * i, 5 * i + 5);
800
801 total_num_branches += num_branches;
802 for (i = 0; i < 20; i++)
803 total_hist_br_prob[i] += hist_br_prob[i];
804
805 fputc ('\n', dump_file);
806 fputc ('\n', dump_file);
807 }
808
809 free_aux_for_blocks ();
810 }
811
812 /* Load the value histograms whose descriptions are stored in the VALUES
813 array from the .gcda file.
814
815 CFG_CHECKSUM is the precomputed checksum for the CFG. */
816
817 static void
818 compute_value_histograms (histogram_values values, unsigned cfg_checksum,
819 unsigned lineno_checksum)
820 {
821 unsigned i, j, t, any;
822 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
823 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
824 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
825 gcov_type *aact_count;
826
827 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
828 n_histogram_counters[t] = 0;
829
830 for (i = 0; i < VEC_length (histogram_value, values); i++)
831 {
832 histogram_value hist = VEC_index (histogram_value, values, i);
833 n_histogram_counters[(int) hist->type] += hist->n_counters;
834 }
835
836 any = 0;
837 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
838 {
839 if (!n_histogram_counters[t])
840 {
841 histogram_counts[t] = NULL;
842 continue;
843 }
844
845 histogram_counts[t] =
846 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
847 n_histogram_counters[t], cfg_checksum,
848 lineno_checksum, NULL);
849 if (histogram_counts[t])
850 any = 1;
851 act_count[t] = histogram_counts[t];
852 }
853 if (!any)
854 return;
855
856 for (i = 0; i < VEC_length (histogram_value, values); i++)
857 {
858 histogram_value hist = VEC_index (histogram_value, values, i);
859 gimple stmt = hist->hvalue.stmt;
860
861 t = (int) hist->type;
862
863 aact_count = act_count[t];
864 act_count[t] += hist->n_counters;
865
866 gimple_add_histogram_value (cfun, stmt, hist);
867 hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
868 for (j = 0; j < hist->n_counters; j++)
869 hist->hvalue.counters[j] = aact_count[j];
870 }
871
872 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
873 free (histogram_counts[t]);
874 }
875
876 /* The entry basic block will be moved around so that it has index=1,
877 there is nothing at index 0 and the exit is at n_basic_block. */
878 #define BB_TO_GCOV_INDEX(bb) ((bb)->index - 1)
879 /* When passed NULL as FILE_NAME, initialize.
880 When passed something else, output the records needed to switch
881 the current file to FILE_NAME and the current line to LINE. */
882 static void
883 output_location (char const *file_name, int line,
884 gcov_position_t *offset, basic_block bb)
885 {
886 static char const *prev_file_name;
887 static int prev_line;
888 bool name_differs, line_differs;
889
890 if (!file_name)
891 {
892 prev_file_name = NULL;
893 prev_line = -1;
894 return;
895 }
896
897 name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
898 line_differs = prev_line != line;
899
900 if (name_differs || line_differs)
901 {
902 if (!*offset)
903 {
904 *offset = gcov_write_tag (GCOV_TAG_LINES);
905 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
906 name_differs = line_differs=true;
907 }
908
909 /* If this is a new source file, then output the
910 file's name to the notes (.gcno) file. */
911 if (name_differs)
912 {
913 prev_file_name = file_name;
914 gcov_write_unsigned (0);
915 gcov_write_string (prev_file_name);
916 }
917 if (line_differs)
918 {
919 gcov_write_unsigned (line);
920 prev_line = line;
921 }
922 }
923 }
924
925 /* Instrument and/or analyze program behavior based on program flow graph.
926 In either case, this function builds a flow graph for the function being
927 compiled. The flow graph is kept in the function's CFG data structures.
928
929 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
930 the flow graph that are needed to reconstruct the dynamic behavior of the
931 flow graph.
932
933 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
934 information from a data file containing edge count information from previous
935 executions of the function being compiled. In this case, the flow graph is
936 annotated with actual execution counts, which are later propagated into the
937 rtl for optimization purposes.
938
939 Main entry point of this file. */
940
941 void
942 branch_prob (void)
943 {
944 basic_block bb;
945 unsigned i;
946 unsigned num_edges, ignored_edges;
947 unsigned num_instrumented;
948 struct edge_list *el;
949 histogram_values values = NULL;
950 unsigned cfg_checksum, lineno_checksum;
951
952 total_num_times_called++;
953
954 flow_call_edges_add (NULL);
955 add_noreturn_fake_exit_edges ();
956
957 /* We can't handle cyclic regions constructed using abnormal edges.
958 To avoid these we replace every source of abnormal edge by a fake
959 edge from entry node and every destination by fake edge to exit.
960 This keeps graph acyclic and our calculation exact for all normal
961 edges except for exit and entrance ones.
962
963 We also add fake exit edges for each call and asm statement in the
964 basic block, since it may not return. */
965
966 FOR_EACH_BB (bb)
967 {
968 int need_exit_edge = 0, need_entry_edge = 0;
969 int have_exit_edge = 0, have_entry_edge = 0;
970 edge e;
971 edge_iterator ei;
972
973 /* Functions returning multiple times are not handled by extra edges.
974 Instead we simply allow negative counts on edges from the exit to the
975 block past the call, and corresponding probabilities. We can't go
976 with the extra edges because that would result in a flowgraph that
977 needs to have fake edges outside the spanning tree. */
978
979 FOR_EACH_EDGE (e, ei, bb->succs)
980 {
981 gimple_stmt_iterator gsi;
982 gimple last = NULL;
983
984 /* It may happen that there are compiler generated statements
985 without a locus at all. Go through the basic block from the
986 last to the first statement looking for a locus. */
987 for (gsi = gsi_last_nondebug_bb (bb);
988 !gsi_end_p (gsi);
989 gsi_prev_nondebug (&gsi))
990 {
991 last = gsi_stmt (gsi);
992 if (gimple_has_location (last))
993 break;
994 }
995
996 /* An edge with a goto locus might get wrong coverage info unless
997 it is the only edge out of BB.
998 Don't split when the locations match, so that
999 if (blah) goto something;
1000 is not counted twice. */
1001 if (last
1002 && gimple_has_location (last)
1003 && e->goto_locus != UNKNOWN_LOCATION
1004 && !single_succ_p (bb)
1005 && (LOCATION_FILE (e->goto_locus)
1006 != LOCATION_FILE (gimple_location (last))
1007 || (LOCATION_LINE (e->goto_locus)
1008 != LOCATION_LINE (gimple_location (last)))))
1009 {
1010 basic_block new_bb = split_edge (e);
1011 edge ne = single_succ_edge (new_bb);
1012 ne->goto_locus = e->goto_locus;
1013 ne->goto_block = e->goto_block;
1014 }
1015 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1016 && e->dest != EXIT_BLOCK_PTR)
1017 need_exit_edge = 1;
1018 if (e->dest == EXIT_BLOCK_PTR)
1019 have_exit_edge = 1;
1020 }
1021 FOR_EACH_EDGE (e, ei, bb->preds)
1022 {
1023 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1024 && e->src != ENTRY_BLOCK_PTR)
1025 need_entry_edge = 1;
1026 if (e->src == ENTRY_BLOCK_PTR)
1027 have_entry_edge = 1;
1028 }
1029
1030 if (need_exit_edge && !have_exit_edge)
1031 {
1032 if (dump_file)
1033 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
1034 bb->index);
1035 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
1036 }
1037 if (need_entry_edge && !have_entry_edge)
1038 {
1039 if (dump_file)
1040 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
1041 bb->index);
1042 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
1043 /* Avoid bbs that have both fake entry edge and also some
1044 exit edge. One of those edges wouldn't be added to the
1045 spanning tree, but we can't instrument any of them. */
1046 if (have_exit_edge || need_exit_edge)
1047 {
1048 gimple_stmt_iterator gsi;
1049 gimple first;
1050 tree fndecl;
1051
1052 gsi = gsi_after_labels (bb);
1053 gcc_checking_assert (!gsi_end_p (gsi));
1054 first = gsi_stmt (gsi);
1055 if (is_gimple_debug (first))
1056 {
1057 gsi_next_nondebug (&gsi);
1058 gcc_checking_assert (!gsi_end_p (gsi));
1059 first = gsi_stmt (gsi);
1060 }
1061 /* Don't split the bbs containing __builtin_setjmp_receiver
1062 or __builtin_setjmp_dispatcher calls. These are very
1063 special and don't expect anything to be inserted before
1064 them. */
1065 if (!is_gimple_call (first)
1066 || (fndecl = gimple_call_fndecl (first)) == NULL
1067 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL
1068 || (DECL_FUNCTION_CODE (fndecl) != BUILT_IN_SETJMP_RECEIVER
1069 && (DECL_FUNCTION_CODE (fndecl)
1070 != BUILT_IN_SETJMP_DISPATCHER)))
1071 {
1072 if (dump_file)
1073 fprintf (dump_file, "Splitting bb %i after labels\n",
1074 bb->index);
1075 split_block_after_labels (bb);
1076 }
1077 }
1078 }
1079 }
1080
1081 el = create_edge_list ();
1082 num_edges = NUM_EDGES (el);
1083 alloc_aux_for_edges (sizeof (struct edge_info));
1084
1085 /* The basic blocks are expected to be numbered sequentially. */
1086 compact_blocks ();
1087
1088 ignored_edges = 0;
1089 for (i = 0 ; i < num_edges ; i++)
1090 {
1091 edge e = INDEX_EDGE (el, i);
1092 e->count = 0;
1093
1094 /* Mark edges we've replaced by fake edges above as ignored. */
1095 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1096 && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
1097 {
1098 EDGE_INFO (e)->ignore = 1;
1099 ignored_edges++;
1100 }
1101 }
1102
1103 /* Create spanning tree from basic block graph, mark each edge that is
1104 on the spanning tree. We insert as many abnormal and critical edges
1105 as possible to minimize number of edge splits necessary. */
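/* (A spanning tree over N blocks has N - 1 edges, so of the E non-ignored
   edges only roughly E - N + 1 need counters; the rest follow by flow
   conservation.)  */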
1106
1107 find_spanning_tree (el);
1108
1109 /* Fake edges that are not on the tree will not be instrumented, so
1110 mark them ignored. */
1111 for (num_instrumented = i = 0; i < num_edges; i++)
1112 {
1113 edge e = INDEX_EDGE (el, i);
1114 struct edge_info *inf = EDGE_INFO (e);
1115
1116 if (inf->ignore || inf->on_tree)
1117 /*NOP*/;
1118 else if (e->flags & EDGE_FAKE)
1119 {
1120 inf->ignore = 1;
1121 ignored_edges++;
1122 }
1123 else
1124 num_instrumented++;
1125 }
1126
1127 total_num_blocks += n_basic_blocks;
1128 if (dump_file)
1129 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);
1130
1131 total_num_edges += num_edges;
1132 if (dump_file)
1133 fprintf (dump_file, "%d edges\n", num_edges);
1134
1135 total_num_edges_ignored += ignored_edges;
1136 if (dump_file)
1137 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
1138
1139
1140 /* Compute two different checksums. Note that we want to compute
1141 the checksum in only one place, since it depends on the shape
1142 of the control flow, which can change during
1143 various transformations. */
1144 cfg_checksum = coverage_compute_cfg_checksum ();
1145 lineno_checksum = coverage_compute_lineno_checksum ();
1146
1147 /* Write the data from which gcov can reconstruct the basic block
1148 graph and function line numbers */
1149
1150 if (coverage_begin_function (lineno_checksum, cfg_checksum))
1151 {
1152 gcov_position_t offset;
1153
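/* gcov_write_tag reserves a length word for the record and returns its
   position; gcov_write_length later backpatches the length once the
   record body has been written (see gcov-io.h).  */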
1154 /* Basic block flags */
1155 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
1156 for (i = 0; i != (unsigned) (n_basic_blocks); i++)
1157 gcov_write_unsigned (0);
1158 gcov_write_length (offset);
1159
1160 /* Keep all basic block indexes nonnegative in the gcov output.
1161 Index 0 is used for entry block, last index is for exit
1162 block. */
1163 ENTRY_BLOCK_PTR->index = 1;
1164 EXIT_BLOCK_PTR->index = last_basic_block;
1165
1166 /* Arcs */
1167 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1168 {
1169 edge e;
1170 edge_iterator ei;
1171
1172 offset = gcov_write_tag (GCOV_TAG_ARCS);
1173 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
1174
1175 FOR_EACH_EDGE (e, ei, bb->succs)
1176 {
1177 struct edge_info *i = EDGE_INFO (e);
1178 if (!i->ignore)
1179 {
1180 unsigned flag_bits = 0;
1181
1182 if (i->on_tree)
1183 flag_bits |= GCOV_ARC_ON_TREE;
1184 if (e->flags & EDGE_FAKE)
1185 flag_bits |= GCOV_ARC_FAKE;
1186 if (e->flags & EDGE_FALLTHRU)
1187 flag_bits |= GCOV_ARC_FALLTHROUGH;
1188 /* On trees we don't have fallthru flags, but we can
1189 recompute them from CFG shape. */
1190 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
1191 && e->src->next_bb == e->dest)
1192 flag_bits |= GCOV_ARC_FALLTHROUGH;
1193
1194 gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
1195 gcov_write_unsigned (flag_bits);
1196 }
1197 }
1198
1199 gcov_write_length (offset);
1200 }
1201
1202 ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
1203 EXIT_BLOCK_PTR->index = EXIT_BLOCK;
1204
1205 /* Line numbers. */
1206 /* Initialize the output. */
1207 output_location (NULL, 0, NULL, NULL);
1208
1209 FOR_EACH_BB (bb)
1210 {
1211 gimple_stmt_iterator gsi;
1212 gcov_position_t offset = 0;
1213
1214 if (bb == ENTRY_BLOCK_PTR->next_bb)
1215 {
1216 expanded_location curr_location =
1217 expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1218 output_location (curr_location.file, curr_location.line,
1219 &offset, bb);
1220 }
1221
1222 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1223 {
1224 gimple stmt = gsi_stmt (gsi);
1225 if (gimple_has_location (stmt))
1226 output_location (gimple_filename (stmt), gimple_lineno (stmt),
1227 &offset, bb);
1228 }
1229
1230 /* Notice GOTO expressions eliminated while constructing the CFG. */
1231 if (single_succ_p (bb)
1232 && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION)
1233 {
1234 expanded_location curr_location
1235 = expand_location (single_succ_edge (bb)->goto_locus);
1236 output_location (curr_location.file, curr_location.line,
1237 &offset, bb);
1238 }
1239
1240 if (offset)
1241 {
1242 /* A file of NULL indicates the end of run. */
1243 gcov_write_unsigned (0);
1244 gcov_write_string (NULL);
1245 gcov_write_length (offset);
1246 }
1247 }
1248 }
1249
1250 #undef BB_TO_GCOV_INDEX
1251
1252 if (flag_profile_values)
1253 gimple_find_values_to_profile (&values);
1254
1255 if (flag_branch_probabilities)
1256 {
1257 compute_branch_probabilities (cfg_checksum, lineno_checksum);
1258 if (flag_profile_values)
1259 compute_value_histograms (values, cfg_checksum, lineno_checksum);
1260 }
1261
1262 remove_fake_edges ();
1263
1264 /* For each edge not on the spanning tree, add counting code. */
1265 if (profile_arc_flag
1266 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1267 {
1268 unsigned n_instrumented;
1269
1270 gimple_init_edge_profiler ();
1271
1272 n_instrumented = instrument_edges (el);
1273
1274 gcc_assert (n_instrumented == num_instrumented);
1275
1276 if (flag_profile_values)
1277 instrument_values (values);
1278
1279 /* Commit changes done by instrumentation. */
1280 gsi_commit_edge_inserts ();
1281 }
1282
1283 free_aux_for_edges ();
1284
1285 VEC_free (histogram_value, heap, values);
1286 free_edge_list (el);
1287 coverage_end_function (lineno_checksum, cfg_checksum);
1288 }
1289 \f
1290 /* Union find algorithm implementation for the basic blocks using
1291 aux fields. */
1292
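/* Each block's aux field points toward its set's representative, and a
   representative points to itself; find_group follows the chain and
   compresses the path on the way back down.  */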
1293 static basic_block
1294 find_group (basic_block bb)
1295 {
1296 basic_block group = bb, bb1;
1297
1298 while ((basic_block) group->aux != group)
1299 group = (basic_block) group->aux;
1300
1301 /* Compress path. */
1302 while ((basic_block) bb->aux != group)
1303 {
1304 bb1 = (basic_block) bb->aux;
1305 bb->aux = (void *) group;
1306 bb = bb1;
1307 }
1308 return group;
1309 }
1310
1311 static void
1312 union_groups (basic_block bb1, basic_block bb2)
1313 {
1314 basic_block bb1g = find_group (bb1);
1315 basic_block bb2g = find_group (bb2);
1316
1317 /* ??? I don't have a place for the rank field. OK, let's go without it;
1318 this code is unlikely to be a performance problem anyway. */
1319 gcc_assert (bb1g != bb2g);
1320
1321 bb1g->aux = bb2g;
1322 }
1323 \f
1324 /* This function searches all of the edges in the program flow graph, and puts
1325 as many bad edges as possible onto the spanning tree. Bad edges include
1326 abnormal edges, which can't be instrumented at the moment. Since it is
1327 possible for fake edges to form a cycle, we will have to develop some
1328 better way in the future. Critical edges are also put on the tree, since
1329 they are more expensive to instrument. */
1330
1331 static void
1332 find_spanning_tree (struct edge_list *el)
1333 {
1334 int i;
1335 int num_edges = NUM_EDGES (el);
1336 basic_block bb;
1337
1338 /* We use the aux field for the standard union-find algorithm. */
1339 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1340 bb->aux = bb;
1341
1342 /* Add the fake exit-to-entry edge, which we can't instrument, to the tree. */
1343 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1344
1345 /* First add all abnormal edges to the tree unless they form a cycle. Also
1346 add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code after
1347 the function's return value has been set. */
1348 for (i = 0; i < num_edges; i++)
1349 {
1350 edge e = INDEX_EDGE (el, i);
1351 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1352 || e->dest == EXIT_BLOCK_PTR)
1353 && !EDGE_INFO (e)->ignore
1354 && (find_group (e->src) != find_group (e->dest)))
1355 {
1356 if (dump_file)
1357 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1358 e->src->index, e->dest->index);
1359 EDGE_INFO (e)->on_tree = 1;
1360 union_groups (e->src, e->dest);
1361 }
1362 }
1363
1364 /* Now insert all critical edges to the tree unless they form a cycle. */
1365 for (i = 0; i < num_edges; i++)
1366 {
1367 edge e = INDEX_EDGE (el, i);
1368 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1369 && find_group (e->src) != find_group (e->dest))
1370 {
1371 if (dump_file)
1372 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1373 e->src->index, e->dest->index);
1374 EDGE_INFO (e)->on_tree = 1;
1375 union_groups (e->src, e->dest);
1376 }
1377 }
1378
1379 /* And now the rest. */
1380 for (i = 0; i < num_edges; i++)
1381 {
1382 edge e = INDEX_EDGE (el, i);
1383 if (!EDGE_INFO (e)->ignore
1384 && find_group (e->src) != find_group (e->dest))
1385 {
1386 if (dump_file)
1387 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1388 e->src->index, e->dest->index);
1389 EDGE_INFO (e)->on_tree = 1;
1390 union_groups (e->src, e->dest);
1391 }
1392 }
1393
1394 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1395 bb->aux = NULL;
1396 }
1397 \f
1398 /* Perform file-level initialization for branch-prob processing. */
1399
1400 void
1401 init_branch_prob (void)
1402 {
1403 int i;
1404
1405 total_num_blocks = 0;
1406 total_num_edges = 0;
1407 total_num_edges_ignored = 0;
1408 total_num_edges_instrumented = 0;
1409 total_num_blocks_created = 0;
1410 total_num_passes = 0;
1411 total_num_times_called = 0;
1412 total_num_branches = 0;
1413 for (i = 0; i < 20; i++)
1414 total_hist_br_prob[i] = 0;
1415 }
1416
1417 /* Performs file-level cleanup after branch-prob processing
1418 is completed. */
1419
1420 void
1421 end_branch_prob (void)
1422 {
1423 if (dump_file)
1424 {
1425 fprintf (dump_file, "\n");
1426 fprintf (dump_file, "Total number of blocks: %d\n",
1427 total_num_blocks);
1428 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1429 fprintf (dump_file, "Total number of ignored edges: %d\n",
1430 total_num_edges_ignored);
1431 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1432 total_num_edges_instrumented);
1433 fprintf (dump_file, "Total number of blocks created: %d\n",
1434 total_num_blocks_created);
1435 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1436 total_num_passes);
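/* Adding half of the divisor before dividing rounds the average to the
   nearest integer.  */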
1437 if (total_num_times_called != 0)
1438 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1439 (total_num_passes + (total_num_times_called >> 1))
1440 / total_num_times_called);
1441 fprintf (dump_file, "Total number of branches: %d\n",
1442 total_num_branches);
1443 if (total_num_branches)
1444 {
1445 int i;
1446
1447 for (i = 0; i < 10; i++)
1448 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1449 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1450 / total_num_branches, 5*i, 5*i+5);
1451 }
1452 }
1453 }