Remove a layer of indirection from hash_table
[gcc.git] / gcc / profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990-2014 Free Software Foundation, Inc.
3 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4 based on some ideas from Dain Samples of UC Berkeley.
5 Further mangling by Bob Manson, Cygnus Support.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* Generate basic block profile instrumentation and auxiliary files.
24 Profile generation is optimized, so that not all arcs in the basic
25 block graph need instrumenting. First, the BB graph is closed with
26 one entry (function start), and one exit (function exit). Any
27 ABNORMAL_EDGE cannot be instrumented (because there is no control
28 path to place the code). We close the graph by inserting fake
29 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
30 edges that do not go to the exit_block. We ignore such abnormal
31 edges. Naturally these fake edges are never directly traversed,
32 and so *cannot* be directly instrumented. Some other graph
33 massaging is done. To optimize the instrumentation we generate the
34 BB minimal span tree, only edges that are not on the span tree
35 (plus the entry point) need instrumenting. From that information
36 all other edge counts can be deduced. By construction all fake
37 edges must be on the spanning tree. We also attempt to place
38 EDGE_CRITICAL edges on the spanning tree.
39
40 The auxiliary files generated are <dumpbase>.gcno (at compile time)
41 and <dumpbase>.gcda (at run time). The format is
42 described in full in gcov-io.h. */
43
44 /* ??? Register allocation should use basic block execution counts to
45 give preference to the most commonly executed blocks. */
46
47 /* ??? Should calculate branch probabilities before instrumenting code, since
48 then we can use arc counts to help decide which arcs to instrument. */
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "flags.h"
56 #include "regs.h"
57 #include "expr.h"
58 #include "function.h"
59 #include "basic-block.h"
60 #include "diagnostic-core.h"
61 #include "coverage.h"
62 #include "value-prof.h"
63 #include "tree.h"
64 #include "tree-ssa-alias.h"
65 #include "internal-fn.h"
66 #include "gimple-expr.h"
67 #include "is-a.h"
68 #include "gimple.h"
69 #include "gimple-iterator.h"
70 #include "tree-cfg.h"
71 #include "cfgloop.h"
72 #include "dumpfile.h"
73 #include "cgraph.h"
74
75 #include "profile.h"
76
/* Per-basic-block bookkeeping hung off each block's AUX field while the
   edge-count flow equations are being solved.  */
struct bb_info {
  /* Nonzero once this block's execution count has been determined.  */
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

/* Access the bb_info structure attached to block B's AUX field.  */
#define BB_INFO(b) ((struct bb_info *) (b)->aux)
86
87
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Counter working set information computed from the current counter
   summary.  Not initialized unless profile_info summary is non-NULL.  */
static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
/* Histogram of branch probabilities in 5% buckets (see the dump loop in
   compute_branch_probabilities, which prints ranges 5*i .. 5*i+5).  */
static int total_hist_br_prob[20];
static int total_num_branches;
108
109 /* Forward declarations. */
110 static void find_spanning_tree (struct edge_list *);
111
/* Add edge instrumentation to the current function's CFG.

   EL is the list of all edges.  Every edge that is neither marked
   ignored nor placed on the spanning tree receives an edge profiler
   (gimple_gen_edge_profiler) with a sequentially assigned counter
   index.  Returns the number of edges instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  struct edge_info *inf = EDGE_INFO (e);

	  if (!inf->ignore && !inf->on_tree)
	    {
	      /* Abnormal edges can never be instrumented; they must have
		 ended up on the spanning tree or been ignored.  */
	      gcc_assert (!(e->flags & EDGE_ABNORMAL));
	      if (dump_file)
		fprintf (dump_file, "Edge %d to %d instrumented%s\n",
			 e->src->index, e->dest->index,
			 EDGE_CRITICAL_P (e) ? " (and split)" : "");
	      gimple_gen_edge_profiler (num_instr_edges++, e);
	    }
	}
    }

  /* NOTE(review): the total edge count is accumulated into the
     "blocks created" statistic — confirm against the dump report.  */
  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
150
/* Add code to measure histograms for values in list VALUES.  Each
   histogram is dispatched on its type to the matching gimple_gen_*
   profiler generator.  */
static void
instrument_values (histogram_values values)
{
  unsigned i;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);

      /* Skip this histogram if its counters cannot be allocated.  */
      if (!coverage_counter_alloc (t, hist->n_counters))
	continue;

      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  gimple_gen_interval_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_POW2:
	  gimple_gen_pow2_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  gimple_gen_one_value_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_CONST_DELTA:
	  gimple_gen_const_delta_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_INDIR_CALL:
	  gimple_gen_ic_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_AVERAGE:
	  gimple_gen_average_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_IOR:
	  gimple_gen_ior_profiler (hist, t, 0);
	  break;

	case HIST_TYPE_TIME_PROFILE:
	  {
	    /* The time profiler is not tied to a statement; insert it on
	       the (split) edge out of the entry block.  */
	    basic_block bb =
	      split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	    gimple_stmt_iterator gsi = gsi_start_bb (bb);

	    gimple_gen_time_profiler (t, 0, gsi);
	    break;
	  }

	default:
	  gcc_unreachable ();
	}
    }
}
212 \f
213
/* Fill the working set information into the profile_info structure.
   No-op when no profile summary has been read.  */

void
get_working_sets (void)
{
  unsigned ws_ix, pctinc, pct;
  gcov_working_set_t *ws_info;

  if (!profile_info)
    return;

  compute_working_sets (profile_info, gcov_working_sets);

  if (dump_file)
    {
      fprintf (dump_file, "Counter working sets:\n");
      /* Multiply the percentage by 100 to avoid float.  */
      pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
      for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
	   ws_ix++, pct += pctinc)
	{
	  /* The last entry is reported as 99.90% rather than 100%.  */
	  if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
	    pct = 9990;
	  ws_info = &gcov_working_sets[ws_ix];
	  /* Print out the percentage using int arithmetic to avoid float.  */
	  fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
		   "%"PRId64 "\n",
		   pct / 100, pct - (pct / 100 * 100),
		   ws_info->num_counters,
		   (int64_t)ws_info->min_counter);
	}
    }
}
247
248 /* Given a the desired percentage of the full profile (sum_all from the
249 summary), multiplied by 10 to avoid float in PCT_TIMES_10, returns
250 the corresponding working set information. If an exact match for
251 the percentage isn't found, the closest value is used. */
252
253 gcov_working_set_t *
254 find_working_set (unsigned pct_times_10)
255 {
256 unsigned i;
257 if (!profile_info)
258 return NULL;
259 gcc_assert (pct_times_10 <= 1000);
260 if (pct_times_10 >= 999)
261 return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
262 i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
263 if (!i)
264 return &gcov_working_sets[0];
265 return &gcov_working_sets[i - 1];
266 }
267
/* Computes hybrid profile for all matching entries in da_file.
   Returns the edge-counter array, or NULL when no counts could be read.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static gcov_type *
get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
	  num_edges++;
    }

  /* get_coverage_counts also points PROFILE_INFO at the merged
     counter summary.  */
  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
				lineno_checksum, &profile_info);
  if (!counts)
    return NULL;

  get_working_sets ();

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
	     profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
303
304
/* Return true if any non-ignored edge in EDGES has a negative count.
   Negative counts on fake edges leaving a block that ends in a call are
   tolerated (such edges legitimately go negative — see the setjmp/fork
   discussion in compute_branch_probabilities).  When dumping, the
   offending blocks are printed before returning.  */
static bool
is_edge_inconsistent (vec<edge, va_gc> *edges)
{
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, edges)
    {
      if (!EDGE_INFO (e)->ignore)
	{
	  if (e->count < 0
	      && (!(e->flags & EDGE_FAKE)
		  || !block_ends_with_call_p (e->src)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file,
			   "Edge %i->%i is inconsistent, count%"PRId64,
			   e->src->index, e->dest->index, e->count);
		  dump_bb (dump_file, e->src, 0, TDF_DETAILS);
		  dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
		}
	      return true;
	    }
	}
    }
  return false;
}
332
333 static void
334 correct_negative_edge_counts (void)
335 {
336 basic_block bb;
337 edge e;
338 edge_iterator ei;
339
340 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
341 {
342 FOR_EACH_EDGE (e, ei, bb->succs)
343 {
344 if (e->count < 0)
345 e->count = 0;
346 }
347 }
348 }
349
/* Check consistency of the solved profile.
   Return true if inconsistency is found.  Without a dump file the scan
   stops at the first problem; with one, every problem is reported.  */
static bool
is_inconsistent (void)
{
  basic_block bb;
  bool inconsistent = false;
  FOR_EACH_BB_FN (bb, cfun)
    {
      inconsistent |= is_edge_inconsistent (bb->preds);
      if (!dump_file && inconsistent)
	return true;
      inconsistent |= is_edge_inconsistent (bb->succs);
      if (!dump_file && inconsistent)
	return true;
      /* A block cannot execute a negative number of times.  */
      if (bb->count < 0)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "BB %i count is negative "
		       "%"PRId64,
		       bb->index,
		       bb->count);
	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
	    }
	  inconsistent = true;
	}
      /* Flow conservation: count must equal the sum over incoming edges.  */
      if (bb->count != sum_edge_counts (bb->preds))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "BB %i count does not match sum of incoming edges "
		       "%"PRId64" should be %"PRId64,
		       bb->index,
		       bb->count,
		       sum_edge_counts (bb->preds));
	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
	    }
	  inconsistent = true;
	}
      /* Likewise over outgoing edges, except for a block that ends in a
	 call and has an edge to the exit block — its fake exit edge may
	 carry a mismatched count.  */
      if (bb->count != sum_edge_counts (bb->succs) &&
          ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
	     && block_ends_with_call_p (bb)))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
		       "%"PRId64" should be %"PRId64,
		       bb->index,
		       bb->count,
		       sum_edge_counts (bb->succs));
	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
	    }
	  inconsistent = true;
	}
      if (!dump_file && inconsistent)
	return true;
    }

  return inconsistent;
}
411
412 /* Set each basic block count to the sum of its outgoing edge counts */
413 static void
414 set_bb_counts (void)
415 {
416 basic_block bb;
417 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
418 {
419 bb->count = sum_edge_counts (bb->succs);
420 gcc_assert (bb->count >= 0);
421 }
422 }
423
/* Reads profile data into the non-spanning-tree edges and returns the
   total number of edge counts read.  EXEC_COUNTS may be NULL (profile
   unreadable); then every instrumented edge count is set to zero.  */
static int
read_profile_edge_counts (gcov_type *exec_counts)
{
  basic_block bb;
  int num_edges = 0;
  int exec_counts_pos = 0;
  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
	  {
	    num_edges++;
	    if (exec_counts)
	      {
		e->count = exec_counts[exec_counts_pos++];
		/* No single edge can run more often than the whole
		   program's maximal counter — flag corruption.  */
		if (e->count > profile_info->sum_max)
		  {
		    if (flag_profile_correction)
		      {
			static bool informed = 0;
			if (dump_enabled_p () && !informed)
			  dump_printf_loc (MSG_NOTE, input_location,
                                           "corrupted profile info: edge count"
                                           " exceeds maximal count\n");
			informed = 1;
		      }
		    else
		      error ("corrupted profile info: edge from %i to %i exceeds maximal count",
			     bb->index, e->dest->index);
		  }
	      }
	    else
	      e->count = 0;

	    EDGE_INFO (e)->count_valid = 1;
	    BB_INFO (bb)->succ_count--;
	    BB_INFO (e->dest)->pred_count--;
	    if (dump_file)
	      {
		fprintf (dump_file, "\nRead edge from %i to %i, count:",
			 bb->index, e->dest->index);
		fprintf (dump_file, "%"PRId64,
			 (int64_t) e->count);
	      }
	  }
    }

  return num_edges;
}
482
#define OVERLAP_BASE 10000

/* Compare the static estimated profile to the actual profile, and
   return the "degree of overlap" measure between them.

   Degree of overlap is a number between 0 and OVERLAP_BASE. It is
   the sum of each basic block's minimum relative weights between
   two profiles. And overlap of OVERLAP_BASE means two profiles are
   identical.  All arithmetic is integer to avoid float.  */

static int
compute_frequency_overlap (void)
{
  gcov_type count_total = 0, freq_total = 0;
  int overlap = 0;
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      count_total += bb->count;
      freq_total += bb->frequency;
    }

  /* Overlap is undefined when either profile is empty.  */
  if (count_total == 0 || freq_total == 0)
    return 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    overlap += MIN (bb->count * OVERLAP_BASE / count_total,
		    bb->frequency * OVERLAP_BASE / freq_total);

  return overlap;
}
515
516 /* Compute the branch probabilities for the various branches.
517 Annotate them accordingly.
518
519 CFG_CHECKSUM is the precomputed checksum for the CFG. */
520
521 static void
522 compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
523 {
524 basic_block bb;
525 int i;
526 int num_edges = 0;
527 int changes;
528 int passes;
529 int hist_br_prob[20];
530 int num_branches;
531 gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
532 int inconsistent = 0;
533
534 /* Very simple sanity checks so we catch bugs in our profiling code. */
535 if (!profile_info)
536 return;
537
538 if (profile_info->sum_all < profile_info->sum_max)
539 {
540 error ("corrupted profile info: sum_all is smaller than sum_max");
541 exec_counts = NULL;
542 }
543
544 /* Attach extra info block to each bb. */
545 alloc_aux_for_blocks (sizeof (struct bb_info));
546 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
547 {
548 edge e;
549 edge_iterator ei;
550
551 FOR_EACH_EDGE (e, ei, bb->succs)
552 if (!EDGE_INFO (e)->ignore)
553 BB_INFO (bb)->succ_count++;
554 FOR_EACH_EDGE (e, ei, bb->preds)
555 if (!EDGE_INFO (e)->ignore)
556 BB_INFO (bb)->pred_count++;
557 }
558
559 /* Avoid predicting entry on exit nodes. */
560 BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
561 BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
562
563 num_edges = read_profile_edge_counts (exec_counts);
564
565 if (dump_file)
566 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
567
568 /* For every block in the file,
569 - if every exit/entrance edge has a known count, then set the block count
570 - if the block count is known, and every exit/entrance edge but one has
571 a known execution count, then set the count of the remaining edge
572
573 As edge counts are set, decrement the succ/pred count, but don't delete
574 the edge, that way we can easily tell when all edges are known, or only
575 one edge is unknown. */
576
577 /* The order that the basic blocks are iterated through is important.
578 Since the code that finds spanning trees starts with block 0, low numbered
579 edges are put on the spanning tree in preference to high numbered edges.
580 Hence, most instrumented edges are at the end. Graph solving works much
581 faster if we propagate numbers from the end to the start.
582
583 This takes an average of slightly more than 3 passes. */
584
585 changes = 1;
586 passes = 0;
587 while (changes)
588 {
589 passes++;
590 changes = 0;
591 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
592 {
593 struct bb_info *bi = BB_INFO (bb);
594 if (! bi->count_valid)
595 {
596 if (bi->succ_count == 0)
597 {
598 edge e;
599 edge_iterator ei;
600 gcov_type total = 0;
601
602 FOR_EACH_EDGE (e, ei, bb->succs)
603 total += e->count;
604 bb->count = total;
605 bi->count_valid = 1;
606 changes = 1;
607 }
608 else if (bi->pred_count == 0)
609 {
610 edge e;
611 edge_iterator ei;
612 gcov_type total = 0;
613
614 FOR_EACH_EDGE (e, ei, bb->preds)
615 total += e->count;
616 bb->count = total;
617 bi->count_valid = 1;
618 changes = 1;
619 }
620 }
621 if (bi->count_valid)
622 {
623 if (bi->succ_count == 1)
624 {
625 edge e;
626 edge_iterator ei;
627 gcov_type total = 0;
628
629 /* One of the counts will be invalid, but it is zero,
630 so adding it in also doesn't hurt. */
631 FOR_EACH_EDGE (e, ei, bb->succs)
632 total += e->count;
633
634 /* Search for the invalid edge, and set its count. */
635 FOR_EACH_EDGE (e, ei, bb->succs)
636 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
637 break;
638
639 /* Calculate count for remaining edge by conservation. */
640 total = bb->count - total;
641
642 gcc_assert (e);
643 EDGE_INFO (e)->count_valid = 1;
644 e->count = total;
645 bi->succ_count--;
646
647 BB_INFO (e->dest)->pred_count--;
648 changes = 1;
649 }
650 if (bi->pred_count == 1)
651 {
652 edge e;
653 edge_iterator ei;
654 gcov_type total = 0;
655
656 /* One of the counts will be invalid, but it is zero,
657 so adding it in also doesn't hurt. */
658 FOR_EACH_EDGE (e, ei, bb->preds)
659 total += e->count;
660
661 /* Search for the invalid edge, and set its count. */
662 FOR_EACH_EDGE (e, ei, bb->preds)
663 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
664 break;
665
666 /* Calculate count for remaining edge by conservation. */
667 total = bb->count - total + e->count;
668
669 gcc_assert (e);
670 EDGE_INFO (e)->count_valid = 1;
671 e->count = total;
672 bi->pred_count--;
673
674 BB_INFO (e->src)->succ_count--;
675 changes = 1;
676 }
677 }
678 }
679 }
680 if (dump_file)
681 {
682 int overlap = compute_frequency_overlap ();
683 gimple_dump_cfg (dump_file, dump_flags);
684 fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
685 overlap / (OVERLAP_BASE / 100),
686 overlap % (OVERLAP_BASE / 100));
687 }
688
689 total_num_passes += passes;
690 if (dump_file)
691 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
692
693 /* If the graph has been correctly solved, every block will have a
694 succ and pred count of zero. */
695 FOR_EACH_BB_FN (bb, cfun)
696 {
697 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
698 }
699
700 /* Check for inconsistent basic block counts */
701 inconsistent = is_inconsistent ();
702
703 if (inconsistent)
704 {
705 if (flag_profile_correction)
706 {
707 /* Inconsistency detected. Make it flow-consistent. */
708 static int informed = 0;
709 if (dump_enabled_p () && informed == 0)
710 {
711 informed = 1;
712 dump_printf_loc (MSG_NOTE, input_location,
713 "correcting inconsistent profile data\n");
714 }
715 correct_negative_edge_counts ();
716 /* Set bb counts to the sum of the outgoing edge counts */
717 set_bb_counts ();
718 if (dump_file)
719 fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
720 mcf_smooth_cfg ();
721 }
722 else
723 error ("corrupted profile info: profile data is not flow-consistent");
724 }
725
726 /* For every edge, calculate its branch probability and add a reg_note
727 to the branch insn to indicate this. */
728
729 for (i = 0; i < 20; i++)
730 hist_br_prob[i] = 0;
731 num_branches = 0;
732
733 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
734 {
735 edge e;
736 edge_iterator ei;
737
738 if (bb->count < 0)
739 {
740 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
741 bb->index, (int)bb->count);
742 bb->count = 0;
743 }
744 FOR_EACH_EDGE (e, ei, bb->succs)
745 {
746 /* Function may return twice in the cased the called function is
747 setjmp or calls fork, but we can't represent this by extra
748 edge from the entry, since extra edge from the exit is
749 already present. We get negative frequency from the entry
750 point. */
751 if ((e->count < 0
752 && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
753 || (e->count > bb->count
754 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
755 {
756 if (block_ends_with_call_p (bb))
757 e->count = e->count < 0 ? 0 : bb->count;
758 }
759 if (e->count < 0 || e->count > bb->count)
760 {
761 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
762 e->src->index, e->dest->index,
763 (int)e->count);
764 e->count = bb->count / 2;
765 }
766 }
767 if (bb->count)
768 {
769 FOR_EACH_EDGE (e, ei, bb->succs)
770 e->probability = GCOV_COMPUTE_SCALE (e->count, bb->count);
771 if (bb->index >= NUM_FIXED_BLOCKS
772 && block_ends_with_condjump_p (bb)
773 && EDGE_COUNT (bb->succs) >= 2)
774 {
775 int prob;
776 edge e;
777 int index;
778
779 /* Find the branch edge. It is possible that we do have fake
780 edges here. */
781 FOR_EACH_EDGE (e, ei, bb->succs)
782 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
783 break;
784
785 prob = e->probability;
786 index = prob * 20 / REG_BR_PROB_BASE;
787
788 if (index == 20)
789 index = 19;
790 hist_br_prob[index]++;
791
792 num_branches++;
793 }
794 }
795 /* As a last resort, distribute the probabilities evenly.
796 Use simple heuristics that if there are normal edges,
797 give all abnormals frequency of 0, otherwise distribute the
798 frequency over abnormals (this is the case of noreturn
799 calls). */
800 else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
801 {
802 int total = 0;
803
804 FOR_EACH_EDGE (e, ei, bb->succs)
805 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
806 total ++;
807 if (total)
808 {
809 FOR_EACH_EDGE (e, ei, bb->succs)
810 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
811 e->probability = REG_BR_PROB_BASE / total;
812 else
813 e->probability = 0;
814 }
815 else
816 {
817 total += EDGE_COUNT (bb->succs);
818 FOR_EACH_EDGE (e, ei, bb->succs)
819 e->probability = REG_BR_PROB_BASE / total;
820 }
821 if (bb->index >= NUM_FIXED_BLOCKS
822 && block_ends_with_condjump_p (bb)
823 && EDGE_COUNT (bb->succs) >= 2)
824 num_branches++;
825 }
826 }
827 counts_to_freqs ();
828 profile_status_for_fn (cfun) = PROFILE_READ;
829 compute_function_frequency ();
830
831 if (dump_file)
832 {
833 fprintf (dump_file, "%d branches\n", num_branches);
834 if (num_branches)
835 for (i = 0; i < 10; i++)
836 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
837 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
838 5 * i, 5 * i + 5);
839
840 total_num_branches += num_branches;
841 for (i = 0; i < 20; i++)
842 total_hist_br_prob[i] += hist_br_prob[i];
843
844 fputc ('\n', dump_file);
845 fputc ('\n', dump_file);
846 }
847
848 free_aux_for_blocks ();
849 }
850
/* Load value histograms values whose description is stored in VALUES array
   from .gcda file.

   CFG_CHECKSUM is the precomputed checksum for the CFG.  */

static void
compute_value_histograms (histogram_values values, unsigned cfg_checksum,
                          unsigned lineno_checksum)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;
  struct cgraph_node *node;

  /* Total up the number of counters needed per histogram type.  */
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  /* Read one counter array per histogram type actually in use.  */
  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
	{
	  histogram_counts[t] = NULL;
	  continue;
	}

      histogram_counts[t] =
	get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
			     n_histogram_counters[t], cfg_checksum,
			     lineno_checksum, NULL);
      if (histogram_counts[t])
	any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      gimple stmt = hist->hvalue.stmt;

      t = (int) hist->type;

      /* ACT_COUNT[t] walks through the type's counter array; remember
	 this histogram's slice before advancing past it.  */
      aact_count = act_count[t];

      if (act_count[t])
        act_count[t] += hist->n_counters;

      gimple_add_histogram_value (cfun, stmt, hist);
      hist->hvalue.counters =  XNEWVEC (gcov_type, hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        if (aact_count)
          hist->hvalue.counters[j] = aact_count[j];
        else
          hist->hvalue.counters[j] = 0;

      /* Time profiler counter is not related to any statement,
         so that we have to read the counter and set the value to
         the corresponding call graph node.  */
      if (hist->type == HIST_TYPE_TIME_PROFILE)
        {
	  node = cgraph_get_node (hist->fun->decl);

	  node->tp_first_run = hist->hvalue.counters[0];

          if (dump_file)
            fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
        }
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    free (histogram_counts[t]);
}
933
934 /* When passed NULL as file_name, initialize.
935 When passed something else, output the necessary commands to change
936 line to LINE and offset to FILE_NAME. */
937 static void
938 output_location (char const *file_name, int line,
939 gcov_position_t *offset, basic_block bb)
940 {
941 static char const *prev_file_name;
942 static int prev_line;
943 bool name_differs, line_differs;
944
945 if (!file_name)
946 {
947 prev_file_name = NULL;
948 prev_line = -1;
949 return;
950 }
951
952 name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
953 line_differs = prev_line != line;
954
955 if (name_differs || line_differs)
956 {
957 if (!*offset)
958 {
959 *offset = gcov_write_tag (GCOV_TAG_LINES);
960 gcov_write_unsigned (bb->index);
961 name_differs = line_differs=true;
962 }
963
964 /* If this is a new source file, then output the
965 file's name to the .bb file. */
966 if (name_differs)
967 {
968 prev_file_name = file_name;
969 gcov_write_unsigned (0);
970 gcov_write_string (prev_file_name);
971 }
972 if (line_differs)
973 {
974 gcov_write_unsigned (line);
975 prev_line = line;
976 }
977 }
978 }
979
980 /* Instrument and/or analyze program behavior based on program the CFG.
981
982 This function creates a representation of the control flow graph (of
983 the function being compiled) that is suitable for the instrumentation
984 of edges and/or converting measured edge counts to counts on the
985 complete CFG.
986
987 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
988 the flow graph that are needed to reconstruct the dynamic behavior of the
989 flow graph. This data is written to the gcno file for gcov.
990
991 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
992 information from the gcda file containing edge count information from
993 previous executions of the function being compiled. In this case, the
994 control flow graph is annotated with actual execution counts by
995 compute_branch_probabilities().
996
997 Main entry point of this file. */
998
999 void
1000 branch_prob (void)
1001 {
1002 basic_block bb;
1003 unsigned i;
1004 unsigned num_edges, ignored_edges;
1005 unsigned num_instrumented;
1006 struct edge_list *el;
1007 histogram_values values = histogram_values ();
1008 unsigned cfg_checksum, lineno_checksum;
1009
1010 total_num_times_called++;
1011
1012 flow_call_edges_add (NULL);
1013 add_noreturn_fake_exit_edges ();
1014
1015 /* We can't handle cyclic regions constructed using abnormal edges.
1016 To avoid these we replace every source of abnormal edge by a fake
1017 edge from entry node and every destination by fake edge to exit.
1018 This keeps graph acyclic and our calculation exact for all normal
1019 edges except for exit and entrance ones.
1020
1021 We also add fake exit edges for each call and asm statement in the
1022 basic, since it may not return. */
1023
1024 FOR_EACH_BB_FN (bb, cfun)
1025 {
1026 int need_exit_edge = 0, need_entry_edge = 0;
1027 int have_exit_edge = 0, have_entry_edge = 0;
1028 edge e;
1029 edge_iterator ei;
1030
1031 /* Functions returning multiple times are not handled by extra edges.
1032 Instead we simply allow negative counts on edges from exit to the
1033 block past call and corresponding probabilities. We can't go
1034 with the extra edges because that would result in flowgraph that
1035 needs to have fake edges outside the spanning tree. */
1036
1037 FOR_EACH_EDGE (e, ei, bb->succs)
1038 {
1039 gimple_stmt_iterator gsi;
1040 gimple last = NULL;
1041
1042 /* It may happen that there are compiler generated statements
1043 without a locus at all. Go through the basic block from the
1044 last to the first statement looking for a locus. */
1045 for (gsi = gsi_last_nondebug_bb (bb);
1046 !gsi_end_p (gsi);
1047 gsi_prev_nondebug (&gsi))
1048 {
1049 last = gsi_stmt (gsi);
1050 if (gimple_has_location (last))
1051 break;
1052 }
1053
1054 /* Edge with goto locus might get wrong coverage info unless
1055 it is the only edge out of BB.
1056 Don't do that when the locuses match, so
1057 if (blah) goto something;
1058 is not computed twice. */
1059 if (last
1060 && gimple_has_location (last)
1061 && LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
1062 && !single_succ_p (bb)
1063 && (LOCATION_FILE (e->goto_locus)
1064 != LOCATION_FILE (gimple_location (last))
1065 || (LOCATION_LINE (e->goto_locus)
1066 != LOCATION_LINE (gimple_location (last)))))
1067 {
1068 basic_block new_bb = split_edge (e);
1069 edge ne = single_succ_edge (new_bb);
1070 ne->goto_locus = e->goto_locus;
1071 }
1072 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1073 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1074 need_exit_edge = 1;
1075 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1076 have_exit_edge = 1;
1077 }
1078 FOR_EACH_EDGE (e, ei, bb->preds)
1079 {
1080 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1081 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1082 need_entry_edge = 1;
1083 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1084 have_entry_edge = 1;
1085 }
1086
1087 if (need_exit_edge && !have_exit_edge)
1088 {
1089 if (dump_file)
1090 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
1091 bb->index);
1092 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
1093 }
1094 if (need_entry_edge && !have_entry_edge)
1095 {
1096 if (dump_file)
1097 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
1098 bb->index);
1099 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
1100 /* Avoid bbs that have both fake entry edge and also some
1101 exit edge. One of those edges wouldn't be added to the
1102 spanning tree, but we can't instrument any of them. */
1103 if (have_exit_edge || need_exit_edge)
1104 {
1105 gimple_stmt_iterator gsi;
1106 gimple first;
1107
1108 gsi = gsi_start_nondebug_after_labels_bb (bb);
1109 gcc_checking_assert (!gsi_end_p (gsi));
1110 first = gsi_stmt (gsi);
1111 /* Don't split the bbs containing __builtin_setjmp_receiver
1112 or ABNORMAL_DISPATCHER calls. These are very
1113 special and don't expect anything to be inserted before
1114 them. */
1115 if (is_gimple_call (first)
1116 && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
1117 || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
1118 || (gimple_call_internal_p (first)
1119 && (gimple_call_internal_fn (first)
1120 == IFN_ABNORMAL_DISPATCHER))))
1121 continue;
1122
1123 if (dump_file)
1124 fprintf (dump_file, "Splitting bb %i after labels\n",
1125 bb->index);
1126 split_block_after_labels (bb);
1127 }
1128 }
1129 }
1130
1131 el = create_edge_list ();
1132 num_edges = NUM_EDGES (el);
1133 alloc_aux_for_edges (sizeof (struct edge_info));
1134
1135 /* The basic blocks are expected to be numbered sequentially. */
1136 compact_blocks ();
1137
1138 ignored_edges = 0;
1139 for (i = 0 ; i < num_edges ; i++)
1140 {
1141 edge e = INDEX_EDGE (el, i);
1142 e->count = 0;
1143
1144 /* Mark edges we've replaced by fake edges above as ignored. */
1145 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1146 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1147 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1148 {
1149 EDGE_INFO (e)->ignore = 1;
1150 ignored_edges++;
1151 }
1152 }
1153
1154 /* Create spanning tree from basic block graph, mark each edge that is
1155 on the spanning tree. We insert as many abnormal and critical edges
1156 as possible to minimize number of edge splits necessary. */
1157
1158 find_spanning_tree (el);
1159
1160 /* Fake edges that are not on the tree will not be instrumented, so
1161 mark them ignored. */
1162 for (num_instrumented = i = 0; i < num_edges; i++)
1163 {
1164 edge e = INDEX_EDGE (el, i);
1165 struct edge_info *inf = EDGE_INFO (e);
1166
1167 if (inf->ignore || inf->on_tree)
1168 /*NOP*/;
1169 else if (e->flags & EDGE_FAKE)
1170 {
1171 inf->ignore = 1;
1172 ignored_edges++;
1173 }
1174 else
1175 num_instrumented++;
1176 }
1177
1178 total_num_blocks += n_basic_blocks_for_fn (cfun);
1179 if (dump_file)
1180 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));
1181
1182 total_num_edges += num_edges;
1183 if (dump_file)
1184 fprintf (dump_file, "%d edges\n", num_edges);
1185
1186 total_num_edges_ignored += ignored_edges;
1187 if (dump_file)
1188 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
1189
1190 total_num_edges_instrumented += num_instrumented;
1191 if (dump_file)
1192 fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);
1193
  /* Compute two different checksums.  Note that we want to compute
     the checksum in only one place, since it depends on the shape
     of the control flow which can change during
     various transformations.  */
1198 cfg_checksum = coverage_compute_cfg_checksum (cfun);
1199 lineno_checksum = coverage_compute_lineno_checksum ();
1200
1201 /* Write the data from which gcov can reconstruct the basic block
1202 graph and function line numbers (the gcno file). */
1203 if (coverage_begin_function (lineno_checksum, cfg_checksum))
1204 {
1205 gcov_position_t offset;
1206
1207 /* Basic block flags */
1208 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
1209 for (i = 0; i != (unsigned) (n_basic_blocks_for_fn (cfun)); i++)
1210 gcov_write_unsigned (0);
1211 gcov_write_length (offset);
1212
1213 /* Arcs */
1214 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
1215 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1216 {
1217 edge e;
1218 edge_iterator ei;
1219
1220 offset = gcov_write_tag (GCOV_TAG_ARCS);
1221 gcov_write_unsigned (bb->index);
1222
1223 FOR_EACH_EDGE (e, ei, bb->succs)
1224 {
1225 struct edge_info *i = EDGE_INFO (e);
1226 if (!i->ignore)
1227 {
1228 unsigned flag_bits = 0;
1229
1230 if (i->on_tree)
1231 flag_bits |= GCOV_ARC_ON_TREE;
1232 if (e->flags & EDGE_FAKE)
1233 flag_bits |= GCOV_ARC_FAKE;
1234 if (e->flags & EDGE_FALLTHRU)
1235 flag_bits |= GCOV_ARC_FALLTHROUGH;
1236 /* On trees we don't have fallthru flags, but we can
1237 recompute them from CFG shape. */
1238 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
1239 && e->src->next_bb == e->dest)
1240 flag_bits |= GCOV_ARC_FALLTHROUGH;
1241
1242 gcov_write_unsigned (e->dest->index);
1243 gcov_write_unsigned (flag_bits);
1244 }
1245 }
1246
1247 gcov_write_length (offset);
1248 }
1249
1250 /* Line numbers. */
1251 /* Initialize the output. */
1252 output_location (NULL, 0, NULL, NULL);
1253
1254 FOR_EACH_BB_FN (bb, cfun)
1255 {
1256 gimple_stmt_iterator gsi;
1257 gcov_position_t offset = 0;
1258
1259 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
1260 {
1261 expanded_location curr_location =
1262 expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1263 output_location (curr_location.file, curr_location.line,
1264 &offset, bb);
1265 }
1266
1267 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1268 {
1269 gimple stmt = gsi_stmt (gsi);
1270 if (gimple_has_location (stmt))
1271 output_location (gimple_filename (stmt), gimple_lineno (stmt),
1272 &offset, bb);
1273 }
1274
1275 /* Notice GOTO expressions eliminated while constructing the CFG. */
1276 if (single_succ_p (bb)
1277 && LOCATION_LOCUS (single_succ_edge (bb)->goto_locus)
1278 != UNKNOWN_LOCATION)
1279 {
1280 expanded_location curr_location
1281 = expand_location (single_succ_edge (bb)->goto_locus);
1282 output_location (curr_location.file, curr_location.line,
1283 &offset, bb);
1284 }
1285
1286 if (offset)
1287 {
1288 /* A file of NULL indicates the end of run. */
1289 gcov_write_unsigned (0);
1290 gcov_write_string (NULL);
1291 gcov_write_length (offset);
1292 }
1293 }
1294 }
1295
1296 if (flag_profile_values)
1297 gimple_find_values_to_profile (&values);
1298
1299 if (flag_branch_probabilities)
1300 {
1301 compute_branch_probabilities (cfg_checksum, lineno_checksum);
1302 if (flag_profile_values)
1303 compute_value_histograms (values, cfg_checksum, lineno_checksum);
1304 }
1305
1306 remove_fake_edges ();
1307
1308 /* For each edge not on the spanning tree, add counting code. */
1309 if (profile_arc_flag
1310 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1311 {
1312 unsigned n_instrumented;
1313
1314 gimple_init_edge_profiler ();
1315
1316 n_instrumented = instrument_edges (el);
1317
1318 gcc_assert (n_instrumented == num_instrumented);
1319
1320 if (flag_profile_values)
1321 instrument_values (values);
1322
1323 /* Commit changes done by instrumentation. */
1324 gsi_commit_edge_inserts ();
1325 }
1326
1327 free_aux_for_edges ();
1328
1329 values.release ();
1330 free_edge_list (el);
1331 coverage_end_function (lineno_checksum, cfg_checksum);
1332 }
1333 \f
1334 /* Union find algorithm implementation for the basic blocks using
1335 aux fields. */
1336
1337 static basic_block
1338 find_group (basic_block bb)
1339 {
1340 basic_block group = bb, bb1;
1341
1342 while ((basic_block) group->aux != group)
1343 group = (basic_block) group->aux;
1344
1345 /* Compress path. */
1346 while ((basic_block) bb->aux != group)
1347 {
1348 bb1 = (basic_block) bb->aux;
1349 bb->aux = (void *) group;
1350 bb = bb1;
1351 }
1352 return group;
1353 }
1354
1355 static void
1356 union_groups (basic_block bb1, basic_block bb2)
1357 {
1358 basic_block bb1g = find_group (bb1);
1359 basic_block bb2g = find_group (bb2);
1360
1361 /* ??? I don't have a place for the rank field. OK. Lets go w/o it,
1362 this code is unlikely going to be performance problem anyway. */
1363 gcc_assert (bb1g != bb2g);
1364
1365 bb1g->aux = bb2g;
1366 }
1367 \f
1368 /* This function searches all of the edges in the program flow graph, and puts
1369 as many bad edges as possible onto the spanning tree. Bad edges include
1370 abnormals edges, which can't be instrumented at the moment. Since it is
1371 possible for fake edges to form a cycle, we will have to develop some
1372 better way in the future. Also put critical edges to the tree, since they
1373 are more expensive to instrument. */
1374
1375 static void
1376 find_spanning_tree (struct edge_list *el)
1377 {
1378 int i;
1379 int num_edges = NUM_EDGES (el);
1380 basic_block bb;
1381
1382 /* We use aux field for standard union-find algorithm. */
1383 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
1384 bb->aux = bb;
1385
1386 /* Add fake edge exit to entry we can't instrument. */
1387 union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
1388
1389 /* First add all abnormal edges to the tree unless they form a cycle. Also
1390 add all edges to the exit block to avoid inserting profiling code behind
1391 setting return value from function. */
1392 for (i = 0; i < num_edges; i++)
1393 {
1394 edge e = INDEX_EDGE (el, i);
1395 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1396 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1397 && !EDGE_INFO (e)->ignore
1398 && (find_group (e->src) != find_group (e->dest)))
1399 {
1400 if (dump_file)
1401 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1402 e->src->index, e->dest->index);
1403 EDGE_INFO (e)->on_tree = 1;
1404 union_groups (e->src, e->dest);
1405 }
1406 }
1407
1408 /* Now insert all critical edges to the tree unless they form a cycle. */
1409 for (i = 0; i < num_edges; i++)
1410 {
1411 edge e = INDEX_EDGE (el, i);
1412 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1413 && find_group (e->src) != find_group (e->dest))
1414 {
1415 if (dump_file)
1416 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1417 e->src->index, e->dest->index);
1418 EDGE_INFO (e)->on_tree = 1;
1419 union_groups (e->src, e->dest);
1420 }
1421 }
1422
1423 /* And now the rest. */
1424 for (i = 0; i < num_edges; i++)
1425 {
1426 edge e = INDEX_EDGE (el, i);
1427 if (!EDGE_INFO (e)->ignore
1428 && find_group (e->src) != find_group (e->dest))
1429 {
1430 if (dump_file)
1431 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1432 e->src->index, e->dest->index);
1433 EDGE_INFO (e)->on_tree = 1;
1434 union_groups (e->src, e->dest);
1435 }
1436 }
1437
1438 clear_aux_for_blocks ();
1439 }
1440 \f
1441 /* Perform file-level initialization for branch-prob processing. */
1442
1443 void
1444 init_branch_prob (void)
1445 {
1446 int i;
1447
1448 total_num_blocks = 0;
1449 total_num_edges = 0;
1450 total_num_edges_ignored = 0;
1451 total_num_edges_instrumented = 0;
1452 total_num_blocks_created = 0;
1453 total_num_passes = 0;
1454 total_num_times_called = 0;
1455 total_num_branches = 0;
1456 for (i = 0; i < 20; i++)
1457 total_hist_br_prob[i] = 0;
1458 }
1459
1460 /* Performs file-level cleanup after branch-prob processing
1461 is completed. */
1462
1463 void
1464 end_branch_prob (void)
1465 {
1466 if (dump_file)
1467 {
1468 fprintf (dump_file, "\n");
1469 fprintf (dump_file, "Total number of blocks: %d\n",
1470 total_num_blocks);
1471 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1472 fprintf (dump_file, "Total number of ignored edges: %d\n",
1473 total_num_edges_ignored);
1474 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1475 total_num_edges_instrumented);
1476 fprintf (dump_file, "Total number of blocks created: %d\n",
1477 total_num_blocks_created);
1478 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1479 total_num_passes);
1480 if (total_num_times_called != 0)
1481 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1482 (total_num_passes + (total_num_times_called >> 1))
1483 / total_num_times_called);
1484 fprintf (dump_file, "Total number of branches: %d\n",
1485 total_num_branches);
1486 if (total_num_branches)
1487 {
1488 int i;
1489
1490 for (i = 0; i < 10; i++)
1491 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1492 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1493 / total_num_branches, 5*i, 5*i+5);
1494 }
1495 }
1496 }