Automated conversion of passes to C++ classes
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "cgraph.h"
25 #include "tree-pass.h"
26 #include "gimple.h"
27 #include "ggc.h"
28 #include "flags.h"
29 #include "pointer-set.h"
30 #include "target.h"
31 #include "tree-iterator.h"
32 #include "ipa-utils.h"
33 #include "pointer-set.h"
34 #include "ipa-inline.h"
35 #include "hash-table.h"
36 #include "tree-inline.h"
37 #include "profile.h"
38 #include "params.h"
39 #include "lto-streamer.h"
40 #include "data-streamer.h"
41
42 /* Return true when NODE can not be local. Worker for cgraph_local_node_p. */
43
44 static bool
45 cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
46 {
47 /* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
48 return !(cgraph_only_called_directly_or_aliased_p (node)
49 && !ipa_ref_has_aliases_p (&node->symbol.ref_list)
50 && node->symbol.definition
51 && !DECL_EXTERNAL (node->symbol.decl)
52 && !node->symbol.externally_visible
53 && !node->symbol.used_from_other_partition
54 && !node->symbol.in_other_partition);
55 }
56
57 /* Return true when function can be marked local. */
58
59 static bool
60 cgraph_local_node_p (struct cgraph_node *node)
61 {
62 struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);
63
64 /* FIXME: thunks can be considered local, but we need prevent i386
65 from attempting to change calling convention of them. */
66 if (n->thunk.thunk_p)
67 return false;
68 return !cgraph_for_node_and_aliases (n,
69 cgraph_non_local_node_p_1, NULL, true);
70
71 }
72
73 /* Return true when NODE has ADDR reference. */
74
75 static bool
76 has_addr_references_p (struct cgraph_node *node,
77 void *data ATTRIBUTE_UNUSED)
78 {
79 int i;
80 struct ipa_ref *ref;
81
82 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
83 i, ref); i++)
84 if (ref->use == IPA_REF_ADDR)
85 return true;
86 return false;
87 }
88
89 /* Look for all functions inlined to NODE and update their inlined_to pointers
90 to INLINED_TO. */
91
92 static void
93 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
94 {
95 struct cgraph_edge *e;
96 for (e = node->callees; e; e = e->next_callee)
97 if (e->callee->global.inlined_to)
98 {
99 e->callee->global.inlined_to = inlined_to;
100 update_inlined_to_pointer (e->callee, inlined_to);
101 }
102 }
103
104 /* Add symtab NODE to queue starting at FIRST.
105
106 The queue is linked via AUX pointers and terminated by pointer to 1.
107 We enqueue nodes at two occasions: when we find them reachable or when we find
108 their bodies needed for further clonning. In the second case we mark them
109 by pointer to 2 after processing so they are re-queue when they become
110 reachable. */
111
112 static void
113 enqueue_node (symtab_node node, symtab_node *first,
114 struct pointer_set_t *reachable)
115 {
116 /* Node is still in queue; do nothing. */
117 if (node->symbol.aux && node->symbol.aux != (void *) 2)
118 return;
119 /* Node was already processed as unreachable, re-enqueue
120 only if it became reachable now. */
121 if (node->symbol.aux == (void *)2 && !pointer_set_contains (reachable, node))
122 return;
123 node->symbol.aux = *first;
124 *first = node;
125 }
126
/* Walk every reference in LIST and push each referred symbol onto the work
   queue *FIRST.  Symbols with a local definition are additionally inserted
   into the REACHABLE set; the rest only enter the boundary.
   BEFORE_INLINING_P is true when this runs prior to inlining.  */

static void
process_references (struct ipa_ref_list *list,
		    symtab_node *first,
		    bool before_inlining_p,
		    struct pointer_set_t *reachable)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    {
      symtab_node node = ref->referred;

      /* Definitions in this partition become reachable; aliases count as
	 definitions even when DECL_EXTERNAL.  */
      if (node->symbol.definition && !node->symbol.in_other_partition
	  && ((!DECL_EXTERNAL (node->symbol.decl) || node->symbol.alias)
	      || (before_inlining_p
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->symbol.decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->symbol.decl)
			 != error_mark_node))))
	pointer_set_insert (reachable, node);
      /* NOTE: enqueue is deliberately unconditional — symbols kept out of
	 REACHABLE must still be visited once as part of the boundary.  */
      enqueue_node ((symtab_node) node, first, reachable);
    }
}
155
156
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

     BEFORE_INLINING_P specifies whether we are before or after inlining.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain the set of function declarations where the body needs to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when the offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that need
   to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list by the AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Queue head; the value 1 terminates the AUX-linked list.  */
  symtab_node first = (symtab_node) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;
  struct pointer_set_t *reachable = pointer_set_create ();
  struct pointer_set_t *body_needed_for_clonning = pointer_set_create ();

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* The pass relies on AUX being clear on entry.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->symbol.aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->symbol.aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->symbol.definition
	  && !node->global.inlined_to
	  && !node->symbol.in_other_partition
	  && (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	      /* Keep around virtual functions for possible devirtualization.  */
	      || (before_inlining_p
		  && DECL_VIRTUAL_P (node->symbol.decl))))
	{
	  gcc_assert (!node->global.inlined_to);
	  pointer_set_insert (reachable, node);
	  enqueue_node ((symtab_node)node, &first, reachable);
	}
      else
	gcc_assert (!node->symbol.aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!varpool_can_remove_if_no_refs (vnode)
	&& !vnode->symbol.in_other_partition)
      {
	pointer_set_insert (reachable, vnode);
	enqueue_node ((symtab_node)vnode, &first, reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node) (void *) 1)
    {
      bool in_boundary_p = !pointer_set_contains (reachable, first);
      symtab_node node = first;

      first = (symtab_node)first->symbol.aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	node->symbol.aux = (void *)2;
      else
	{
	  /* The abstract origin of a reachable node must be kept so debug
	     info can refer to it.  */
	  if (DECL_ABSTRACT_ORIGIN (node->symbol.decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_get_create_real_symbol_node (DECL_ABSTRACT_ORIGIN (node->symbol.decl));
	      origin_node->used_as_abstract_origin = true;
	      enqueue_node ((symtab_node) origin_node, &first, reachable);
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all others in the same comdat group to be also reachable.  */
	  if (node->symbol.same_comdat_group)
	    {
	      symtab_node next;
	      for (next = node->symbol.same_comdat_group;
		   next != node;
		   next = next->symbol.same_comdat_group)
		if (!pointer_set_insert (reachable, next))
		  enqueue_node ((symtab_node) next, &first, reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (&node->symbol.ref_list, &first,
			      before_inlining_p, reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  if (e->callee->symbol.definition
		      && !e->callee->symbol.in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->symbol.decl)
			  || e->callee->symbol.alias
			  || before_inlining_p))
		    pointer_set_insert (reachable, e->callee);
		  /* Unconditional: even non-reachable callees enter the
		     boundary.  */
		  enqueue_node ((symtab_node) e->callee, &first, reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->global.inlined_to)
		pointer_set_insert (body_needed_for_clonning, cnode->symbol.decl);

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  /* An inline clone shares its decl with the origin; only
		     clones with their own decl need the origin body kept.  */
		  bool noninline = cnode->clone_of->symbol.decl != cnode->symbol.decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      pointer_set_insert (body_needed_for_clonning, cnode->symbol.decl);
		      enqueue_node ((symtab_node)cnode, &first, reachable);
		    }
		}
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	 boundary.  This will also hold initializers of the external vars NODE
	 refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node> (node);
      if (vnode
	  && DECL_EXTERNAL (node->symbol.decl)
	  && !vnode->symbol.alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref;
	  for (int i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
	    enqueue_node (ref->referred, &first, reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = cgraph_first_function (); node; node = next)
    {
      next = cgraph_next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->symbol.aux)
	{
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  changed = true;
	}
      /* If node is in the boundary (enqueued but not reachable),
	 strip it down to a declaration.  */
      else if (!pointer_set_contains (reachable, node))
	{
	  if (!pointer_set_contains (body_needed_for_clonning, node->symbol.decl))
	    cgraph_release_function_body (node);
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->symbol.decl));
	  if (node->symbol.definition)
	    {
	      if (file)
		fprintf (file, " %s", cgraph_node_name (node));
	      node->symbol.analyzed = false;
	      node->symbol.definition = false;
	      node->symbol.cpp_implicit_alias = false;
	      node->symbol.alias = false;
	      node->symbol.weakref = false;
	      if (!node->symbol.in_other_partition)
		node->local.local = false;
	      cgraph_node_remove_callees (node);
	      ipa_remove_all_references (&node->symbol.ref_list);
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node)
		    || in_lto_p || DECL_RESULT (node->symbol.decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      /* Clear AUX for the next run of this pass.  */
      node->symbol.aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = varpool_first_variable (); vnode; vnode = vnext)
    {
      vnext = varpool_next_variable (vnode);
      if (!vnode->symbol.aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->symbol.decl)))
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
      else if (!pointer_set_contains (reachable, vnode))
	{
	  tree init;
	  if (vnode->symbol.definition)
	    {
	      if (file)
		fprintf (file, " %s", varpool_node_name (vnode));
	      changed = true;
	    }
	  vnode->symbol.definition = false;
	  vnode->symbol.analyzed = false;
	  vnode->symbol.aux = NULL;

	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->symbol.decl)) == error_mark_node)
	    varpool_remove_initializer (vnode);
	  else
	    DECL_INITIAL (vnode->symbol.decl) = init;
	  ipa_remove_all_references (&vnode->symbol.ref_list);
	}
      else
	vnode->symbol.aux = NULL;
    }

  pointer_set_destroy (reachable);
  pointer_set_destroy (body_needed_for_clonning);

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->symbol.address_taken
	&& !node->symbol.used_from_other_partition)
      {
	if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->symbol.address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      cgraph_propagate_frequency (node);

  return changed;
}
490
/* Discover variables that no longer have their address taken or that are
   read only and update their flags.

   FIXME: This cannot be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    /* Only defined variables with all references visible, and which
       currently carry at least one flag we might be able to clear.  */
    if (vnode->symbol.definition && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->symbol.decl)
	    || !TREE_READONLY (vnode->symbol.decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Classify all referring uses; bail out early once both a store
	   and an address-taking use have been seen.  */
	for (i = 0; ipa_ref_list_referring_iterate (&vnode->symbol.ref_list,
						    i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->symbol.decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->symbol.decl) = 0;
	  }
	if (!TREE_READONLY (vnode->symbol.decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->symbol.decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->symbol.decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
549
550 /* Return true when there is a reference to node and it is not vtable. */
551 static bool
552 address_taken_from_non_vtable_p (symtab_node node)
553 {
554 int i;
555 struct ipa_ref *ref;
556 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
557 i, ref); i++)
558 if (ref->use == IPA_REF_ADDR)
559 {
560 struct varpool_node *node;
561 if (is_a <cgraph_node> (ref->referring))
562 return true;
563 node = ipa_ref_referring_varpool_node (ref);
564 if (!DECL_VIRTUAL_P (node->symbol.decl))
565 return true;
566 }
567 return false;
568 }
569
570 /* A helper for comdat_can_be_unshared_p. */
571
572 static bool
573 comdat_can_be_unshared_p_1 (symtab_node node)
574 {
575 /* When address is taken, we don't know if equality comparison won't
576 break eventually. Exception are virutal functions and vtables,
577 where this is not possible by language standard. */
578 if (!DECL_VIRTUAL_P (node->symbol.decl)
579 && address_taken_from_non_vtable_p (node))
580 return false;
581
582 /* If the symbol is used in some weird way, better to not touch it. */
583 if (node->symbol.force_output)
584 return false;
585
586 /* Explicit instantiations needs to be output when possibly
587 used externally. */
588 if (node->symbol.forced_by_abi
589 && TREE_PUBLIC (node->symbol.decl)
590 && (node->symbol.resolution != LDPR_PREVAILING_DEF_IRONLY
591 && !flag_whole_program))
592 return false;
593
594 /* Non-readonly and volatile variables can not be duplicated. */
595 if (is_a <varpool_node> (node)
596 && (!TREE_READONLY (node->symbol.decl)
597 || TREE_THIS_VOLATILE (node->symbol.decl)))
598 return false;
599 return true;
600 }
601
602 /* COMDAT functions must be shared only if they have address taken,
603 otherwise we can produce our own private implementation with
604 -fwhole-program.
605 Return true when turning COMDAT functoin static can not lead to wrong
606 code when the resulting object links with a library defining same COMDAT.
607
608 Virtual functions do have their addresses taken from the vtables,
609 but in C++ there is no way to compare their addresses for equality. */
610
611 static bool
612 comdat_can_be_unshared_p (symtab_node node)
613 {
614 if (!comdat_can_be_unshared_p_1 (node))
615 return false;
616 if (node->symbol.same_comdat_group)
617 {
618 symtab_node next;
619
620 /* If more than one function is in the same COMDAT group, it must
621 be shared even if just one function in the comdat group has
622 address taken. */
623 for (next = node->symbol.same_comdat_group;
624 next != node; next = next->symbol.same_comdat_group)
625 if (!comdat_can_be_unshared_p_1 (next))
626 return false;
627 }
628 return true;
629 }
630
/* Return true when function NODE should be considered externally visible.
   The order of checks below encodes precedence: must-keep reasons first,
   then localization opportunities.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node,
			     bool whole_program)
{
  if (!node->symbol.definition)
    return false;
  if (!TREE_PUBLIC (node->symbol.decl)
      || DECL_EXTERNAL (node->symbol.decl))
    return false;

  /* Do not try to localize built-in functions yet.  One of problems is that we
     end up mangling their asm for WHOPR that makes it impossible to call them
     using the implicit built-in declarations anymore.  Similarly this enables
     us to remove them as unreachable before actual calls may appear during
     expansion or folding.  */
  if (DECL_BUILT_IN (node->symbol.decl))
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (symtab_used_from_object_file_p ((symtab_node) node))
    return true;
  if (DECL_PRESERVE_P (node->symbol.decl))
    return true;
  if (lookup_attribute ("externally_visible",
			DECL_ATTRIBUTES (node->symbol.decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport",
			   DECL_ATTRIBUTES (node->symbol.decl)))
    return true;
  /* Linker resolution says nothing outside this unit uses the symbol.  */
  if (node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;
  /* When doing LTO or whole program, we can bring COMDAT functions static.
     This improves code quality and we know we will duplicate them at most twice
     (in the case that we are not using plugin and link with object file
      implementing same COMDAT)  */
  if ((in_lto_p || whole_program)
      && DECL_COMDAT (node->symbol.decl)
      && comdat_can_be_unshared_p ((symtab_node) node))
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->symbol.definition)
    ;
  else if (!whole_program)
    return true;

  /* main() is kept visible even under -fwhole-program.  */
  if (MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
    return true;

  return false;
}
690
691 /* Return true when variable VNODE should be considered externally visible. */
692
693 bool
694 varpool_externally_visible_p (struct varpool_node *vnode)
695 {
696 if (DECL_EXTERNAL (vnode->symbol.decl))
697 return true;
698
699 if (!TREE_PUBLIC (vnode->symbol.decl))
700 return false;
701
702 /* If linker counts on us, we must preserve the function. */
703 if (symtab_used_from_object_file_p ((symtab_node) vnode))
704 return true;
705
706 if (DECL_HARD_REGISTER (vnode->symbol.decl))
707 return true;
708 if (DECL_PRESERVE_P (vnode->symbol.decl))
709 return true;
710 if (lookup_attribute ("externally_visible",
711 DECL_ATTRIBUTES (vnode->symbol.decl)))
712 return true;
713 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
714 && lookup_attribute ("dllexport",
715 DECL_ATTRIBUTES (vnode->symbol.decl)))
716 return true;
717
718 /* See if we have linker information about symbol not being used or
719 if we need to make guess based on the declaration.
720
721 Even if the linker clams the symbol is unused, never bring internal
722 symbols that are declared by user as used or externally visible.
723 This is needed for i.e. references from asm statements. */
724 if (symtab_used_from_object_file_p ((symtab_node) vnode))
725 return true;
726 if (vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY)
727 return false;
728
729 /* As a special case, the COMDAT virtual tables can be unshared.
730 In LTO mode turn vtables into static variables. The variable is readonly,
731 so this does not enable more optimization, but referring static var
732 is faster for dynamic linking. Also this match logic hidding vtables
733 from LTO symbol tables. */
734 if ((in_lto_p || flag_whole_program)
735 && DECL_COMDAT (vnode->symbol.decl)
736 && comdat_can_be_unshared_p ((symtab_node) vnode))
737 return false;
738
739 /* When doing link time optimizations, hidden symbols become local. */
740 if (in_lto_p
741 && (DECL_VISIBILITY (vnode->symbol.decl) == VISIBILITY_HIDDEN
742 || DECL_VISIBILITY (vnode->symbol.decl) == VISIBILITY_INTERNAL)
743 /* Be sure that node is defined in IR file, not in other object
744 file. In that case we don't set used_from_other_object_file. */
745 && vnode->symbol.definition)
746 ;
747 else if (!flag_whole_program)
748 return true;
749
750 /* Do not attempt to privatize COMDATS by default.
751 This would break linking with C++ libraries sharing
752 inline definitions.
753
754 FIXME: We can do so for readonly vars with no address taken and
755 possibly also for vtables since no direct pointer comparsion is done.
756 It might be interesting to do so to reduce linking overhead. */
757 if (DECL_COMDAT (vnode->symbol.decl) || DECL_WEAK (vnode->symbol.decl))
758 return true;
759 return false;
760 }
761
762 /* Return true if reference to NODE can be replaced by a local alias.
763 Local aliases save dynamic linking overhead and enable more optimizations.
764 */
765
766 bool
767 can_replace_by_local_alias (symtab_node node)
768 {
769 return (symtab_node_availability (node) > AVAIL_OVERWRITABLE
770 && !DECL_EXTERNAL (node->symbol.decl)
771 && (!DECL_ONE_ONLY (node->symbol.decl)
772 || node->symbol.resolution == LDPR_PREVAILING_DEF
773 || node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY
774 || node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY_EXP));
775 }
776
777 /* Mark visibility of all functions.
778
779 A local function is one whose calls can occur only in the current
780 compilation unit and all its calls are explicit, so we can change
781 its calling convention. We simply mark all static functions whose
782 address is not taken as local.
783
784 We also change the TREE_PUBLIC flag of all declarations that are public
785 in language point of view but we want to overwrite this default
786 via visibilities for the backend point of view. */
787
788 static unsigned int
789 function_and_variable_visibility (bool whole_program)
790 {
791 struct cgraph_node *node;
792 struct varpool_node *vnode;
793
794 /* All aliases should be procssed at this point. */
795 gcc_checking_assert (!alias_pairs || !alias_pairs->length());
796
797 FOR_EACH_FUNCTION (node)
798 {
799 int flags = flags_from_decl_or_type (node->symbol.decl);
800
801 /* Optimize away PURE and CONST constructors and destructors. */
802 if (optimize
803 && (flags & (ECF_CONST | ECF_PURE))
804 && !(flags & ECF_LOOPING_CONST_OR_PURE))
805 {
806 DECL_STATIC_CONSTRUCTOR (node->symbol.decl) = 0;
807 DECL_STATIC_DESTRUCTOR (node->symbol.decl) = 0;
808 }
809
810 /* Frontends and alias code marks nodes as needed before parsing is finished.
811 We may end up marking as node external nodes where this flag is meaningless
812 strip it. */
813 if (DECL_EXTERNAL (node->symbol.decl) || !node->symbol.definition)
814 {
815 node->symbol.force_output = 0;
816 node->symbol.forced_by_abi = 0;
817 }
818
819 /* C++ FE on lack of COMDAT support create local COMDAT functions
820 (that ought to be shared but can not due to object format
821 limitations). It is necessary to keep the flag to make rest of C++ FE
822 happy. Clear the flag here to avoid confusion in middle-end. */
823 if (DECL_COMDAT (node->symbol.decl) && !TREE_PUBLIC (node->symbol.decl))
824 DECL_COMDAT (node->symbol.decl) = 0;
825
826 /* For external decls stop tracking same_comdat_group. It doesn't matter
827 what comdat group they are in when they won't be emitted in this TU. */
828 if (node->symbol.same_comdat_group && DECL_EXTERNAL (node->symbol.decl))
829 {
830 #ifdef ENABLE_CHECKING
831 symtab_node n;
832
833 for (n = node->symbol.same_comdat_group;
834 n != (symtab_node)node;
835 n = n->symbol.same_comdat_group)
836 /* If at least one of same comdat group functions is external,
837 all of them have to be, otherwise it is a front-end bug. */
838 gcc_assert (DECL_EXTERNAL (n->symbol.decl));
839 #endif
840 symtab_dissolve_same_comdat_group_list ((symtab_node) node);
841 }
842 gcc_assert ((!DECL_WEAK (node->symbol.decl)
843 && !DECL_COMDAT (node->symbol.decl))
844 || TREE_PUBLIC (node->symbol.decl)
845 || node->symbol.weakref
846 || DECL_EXTERNAL (node->symbol.decl));
847 if (cgraph_externally_visible_p (node, whole_program))
848 {
849 gcc_assert (!node->global.inlined_to);
850 node->symbol.externally_visible = true;
851 }
852 else
853 {
854 node->symbol.externally_visible = false;
855 node->symbol.forced_by_abi = false;
856 }
857 if (!node->symbol.externally_visible
858 && node->symbol.definition && !node->symbol.weakref
859 && !DECL_EXTERNAL (node->symbol.decl))
860 {
861 gcc_assert (whole_program || in_lto_p
862 || !TREE_PUBLIC (node->symbol.decl));
863 node->symbol.unique_name = ((node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY
864 || node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
865 && TREE_PUBLIC (node->symbol.decl));
866 symtab_make_decl_local (node->symbol.decl);
867 node->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
868 if (node->symbol.same_comdat_group)
869 /* cgraph_externally_visible_p has already checked all other nodes
870 in the group and they will all be made local. We need to
871 dissolve the group at once so that the predicate does not
872 segfault though. */
873 symtab_dissolve_same_comdat_group_list ((symtab_node) node);
874 }
875
876 if (node->thunk.thunk_p
877 && TREE_PUBLIC (node->symbol.decl))
878 {
879 struct cgraph_node *decl_node = node;
880
881 decl_node = cgraph_function_node (decl_node->callees->callee, NULL);
882
883 /* Thunks have the same visibility as function they are attached to.
884 Make sure the C++ front end set this up properly. */
885 if (DECL_ONE_ONLY (decl_node->symbol.decl))
886 {
887 gcc_checking_assert (DECL_COMDAT (node->symbol.decl)
888 == DECL_COMDAT (decl_node->symbol.decl));
889 gcc_checking_assert (DECL_COMDAT_GROUP (node->symbol.decl)
890 == DECL_COMDAT_GROUP (decl_node->symbol.decl));
891 gcc_checking_assert (node->symbol.same_comdat_group);
892 }
893 if (DECL_EXTERNAL (decl_node->symbol.decl))
894 DECL_EXTERNAL (node->symbol.decl) = 1;
895 }
896 }
897 FOR_EACH_DEFINED_FUNCTION (node)
898 {
899 node->local.local |= cgraph_local_node_p (node);
900
901 /* If we know that function can not be overwritten by a different semantics
902 and moreover its section can not be discarded, replace all direct calls
903 by calls to an nonoverwritable alias. This make dynamic linking
904 cheaper and enable more optimization.
905
906 TODO: We can also update virtual tables. */
907 if (node->callers && can_replace_by_local_alias ((symtab_node)node))
908 {
909 struct cgraph_node *alias = cgraph (symtab_nonoverwritable_alias ((symtab_node) node));
910
911 if (alias != node)
912 {
913 while (node->callers)
914 {
915 struct cgraph_edge *e = node->callers;
916
917 cgraph_redirect_edge_callee (e, alias);
918 if (!flag_wpa)
919 {
920 push_cfun (DECL_STRUCT_FUNCTION (e->caller->symbol.decl));
921 cgraph_redirect_edge_call_stmt_to_callee (e);
922 pop_cfun ();
923 }
924 }
925 }
926 }
927 }
928 FOR_EACH_VARIABLE (vnode)
929 {
930 /* weak flag makes no sense on local variables. */
931 gcc_assert (!DECL_WEAK (vnode->symbol.decl)
932 || vnode->symbol.weakref
933 || TREE_PUBLIC (vnode->symbol.decl)
934 || DECL_EXTERNAL (vnode->symbol.decl));
935 /* In several cases declarations can not be common:
936
937 - when declaration has initializer
938 - when it is in weak
939 - when it has specific section
940 - when it resides in non-generic address space.
941 - if declaration is local, it will get into .local common section
942 so common flag is not needed. Frontends still produce these in
943 certain cases, such as for:
944
945 static int a __attribute__ ((common))
946
947 Canonicalize things here and clear the redundant flag. */
948 if (DECL_COMMON (vnode->symbol.decl)
949 && (!(TREE_PUBLIC (vnode->symbol.decl)
950 || DECL_EXTERNAL (vnode->symbol.decl))
951 || (DECL_INITIAL (vnode->symbol.decl)
952 && DECL_INITIAL (vnode->symbol.decl) != error_mark_node)
953 || DECL_WEAK (vnode->symbol.decl)
954 || DECL_SECTION_NAME (vnode->symbol.decl) != NULL
955 || ! (ADDR_SPACE_GENERIC_P
956 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->symbol.decl))))))
957 DECL_COMMON (vnode->symbol.decl) = 0;
958 }
959 FOR_EACH_DEFINED_VARIABLE (vnode)
960 {
961 if (!vnode->symbol.definition)
962 continue;
963 if (varpool_externally_visible_p (vnode))
964 vnode->symbol.externally_visible = true;
965 else
966 {
967 vnode->symbol.externally_visible = false;
968 vnode->symbol.forced_by_abi = false;
969 }
970 if (!vnode->symbol.externally_visible
971 && !vnode->symbol.weakref)
972 {
973 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->symbol.decl));
974 symtab_make_decl_local (vnode->symbol.decl);
975 vnode->symbol.unique_name = ((vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY
976 || vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
977 && TREE_PUBLIC (vnode->symbol.decl));
978 if (vnode->symbol.same_comdat_group)
979 symtab_dissolve_same_comdat_group_list ((symtab_node) vnode);
980 vnode->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
981 }
982 }
983
984 if (dump_file)
985 {
986 fprintf (dump_file, "\nMarking local functions:");
987 FOR_EACH_DEFINED_FUNCTION (node)
988 if (node->local.local)
989 fprintf (dump_file, " %s", cgraph_node_name (node));
990 fprintf (dump_file, "\n\n");
991 fprintf (dump_file, "\nMarking externally visible functions:");
992 FOR_EACH_DEFINED_FUNCTION (node)
993 if (node->symbol.externally_visible)
994 fprintf (dump_file, " %s", cgraph_node_name (node));
995 fprintf (dump_file, "\n\n");
996 fprintf (dump_file, "\nMarking externally visible variables:");
997 FOR_EACH_DEFINED_VARIABLE (vnode)
998 if (vnode->symbol.externally_visible)
999 fprintf (dump_file, " %s", varpool_node_name (vnode));
1000 fprintf (dump_file, "\n\n");
1001 }
1002 cgraph_function_flags_ready = true;
1003 return 0;
1004 }
1005
1006 /* Local function pass handling visibilities. This happens before LTO streaming
1007 so in particular -fwhole-program should be ignored at this level. */
1008
1009 static unsigned int
1010 local_function_and_variable_visibility (void)
1011 {
1012 return function_and_variable_visibility (flag_whole_program && !flag_lto);
1013 }
1014
namespace {

/* Descriptor for the early "visibility" pass.  Field order is fixed by
   the pass_data aggregate; keep the trailing /* ... */ tags in sync.  */
const pass_data pass_data_ipa_function_and_variable_visibility =
{
  SIMPLE_IPA_PASS, /* type */
  "visibility", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

/* Pass wrapper: defers all work to
   local_function_and_variable_visibility.  */

class pass_ipa_function_and_variable_visibility : public simple_ipa_opt_pass
{
public:
  pass_ipa_function_and_variable_visibility(gcc::context *ctxt)
    : simple_ipa_opt_pass(pass_data_ipa_function_and_variable_visibility, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return local_function_and_variable_visibility ();
  }

}; // class pass_ipa_function_and_variable_visibility

} // anon namespace
1047
1048 simple_ipa_opt_pass *
1049 make_pass_ipa_function_and_variable_visibility (gcc::context *ctxt)
1050 {
1051 return new pass_ipa_function_and_variable_visibility (ctxt);
1052 }
1053
1054 /* Free inline summary. */
1055
static unsigned
free_inline_summary (void)
{
  /* Worker for pass_ipa_free_inline_summary below: discard the inline
     summary data once no later pass needs it.  */
  inline_free_summary ();
  return 0;
}
1062
namespace {

/* Descriptor for the hidden ("*"-prefixed, so not user-schedulable)
   pass releasing inline summaries.  */
const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_inline_summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper: defers to free_inline_summary.  */

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary(gcc::context *ctxt)
    : simple_ipa_opt_pass(pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return free_inline_summary (); }

}; // class pass_ipa_free_inline_summary

} // anon namespace
1093
1094 simple_ipa_opt_pass *
1095 make_pass_ipa_free_inline_summary (gcc::context *ctxt)
1096 {
1097 return new pass_ipa_free_inline_summary (ctxt);
1098 }
1099
1100 /* Do not re-run on ltrans stage. */
1101
1102 static bool
1103 gate_whole_program_function_and_variable_visibility (void)
1104 {
1105 return !flag_ltrans;
1106 }
1107
/* Bring functions local at LTO time with -fwhole-program.  */
1109
static unsigned int
whole_program_function_and_variable_visibility (void)
{
  /* Unlike the local pass above, -fwhole-program is honored here: at
     this point the whole program really is visible.  */
  function_and_variable_visibility (flag_whole_program);
  /* Readonly/nonaddressable discovery is only performed when
     optimizing.  */
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
1118
namespace {

/* Descriptor for the LTO-time "whole-program" visibility pass.  */
const pass_data pass_data_ipa_whole_program_visibility =
{
  IPA_PASS, /* type */
  "whole-program", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

/* IPA pass wrapper.  No summary streaming hooks are needed: the pass
   works directly on the merged symbol table.  */

class pass_ipa_whole_program_visibility : public ipa_opt_pass_d
{
public:
  pass_ipa_whole_program_visibility(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_whole_program_visibility, ctxt,
		     NULL, /* generate_summary */
		     NULL, /* write_summary */
		     NULL, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () {
    return gate_whole_program_function_and_variable_visibility ();
  }
  unsigned int execute () {
    return whole_program_function_and_variable_visibility ();
  }

}; // class pass_ipa_whole_program_visibility

} // anon namespace
1163
1164 ipa_opt_pass_d *
1165 make_pass_ipa_whole_program_visibility (gcc::context *ctxt)
1166 {
1167 return new pass_ipa_whole_program_visibility (ctxt);
1168 }
1169
1170 /* Entry in the histogram. */
1171
struct histogram_entry
{
  /* Execution count shared by all basic blocks accounted here.  */
  gcov_type count;
  /* Accumulated estimated time of those blocks.  */
  int time;
  /* Accumulated estimated size of those blocks.  */
  int size;
};
1178
1179 /* Histogram of profile values.
1180 The histogram is represented as an ordered vector of entries allocated via
1181 histogram_pool. During construction a separate hashtable is kept to lookup
1182 duplicate entries. */
1183
/* After qsort (cmp_counts) entries are ordered by count, descending.  */
vec<histogram_entry *> histogram;
/* Pool from which the entries of HISTOGRAM are allocated.  */
static alloc_pool histogram_pool;
1186
/* Hashtable support for looking up histogram entries keyed by their
   execution count.  */
1188
/* Hasher traits: entries are keyed purely by COUNT; removal is a no-op
   because entries live in histogram_pool.  */
struct histogram_hash : typed_noop_remove <histogram_entry>
{
  typedef histogram_entry value_type;
  typedef histogram_entry compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
};
1196
1197 inline hashval_t
1198 histogram_hash::hash (const histogram_entry *val)
1199 {
1200 return val->count;
1201 }
1202
1203 inline int
1204 histogram_hash::equal (const histogram_entry *val, const histogram_entry *val2)
1205 {
1206 return val->count == val2->count;
1207 }
1208
1209 /* Account TIME and SIZE executed COUNT times into HISTOGRAM.
1210 HASHTABLE is the on-side hash kept to avoid duplicates. */
1211
1212 static void
1213 account_time_size (hash_table <histogram_hash> hashtable,
1214 vec<histogram_entry *> &histogram,
1215 gcov_type count, int time, int size)
1216 {
1217 histogram_entry key = {count, 0, 0};
1218 histogram_entry **val = hashtable.find_slot (&key, INSERT);
1219
1220 if (!*val)
1221 {
1222 *val = (histogram_entry *) pool_alloc (histogram_pool);
1223 **val = key;
1224 histogram.safe_push (*val);
1225 }
1226 (*val)->time += time;
1227 (*val)->size += size;
1228 }
1229
1230 int
1231 cmp_counts (const void *v1, const void *v2)
1232 {
1233 const histogram_entry *h1 = *(const histogram_entry * const *)v1;
1234 const histogram_entry *h2 = *(const histogram_entry * const *)v2;
1235 if (h1->count < h2->count)
1236 return 1;
1237 if (h1->count > h2->count)
1238 return -1;
1239 return 0;
1240 }
1241
1242 /* Dump HISTOGRAM to FILE. */
1243
1244 static void
1245 dump_histogram (FILE *file, vec<histogram_entry *> histogram)
1246 {
1247 unsigned int i;
1248 gcov_type overall_time = 0, cumulated_time = 0, cumulated_size = 0, overall_size = 0;
1249
1250 fprintf (dump_file, "Histogram:\n");
1251 for (i = 0; i < histogram.length (); i++)
1252 {
1253 overall_time += histogram[i]->count * histogram[i]->time;
1254 overall_size += histogram[i]->size;
1255 }
1256 if (!overall_time)
1257 overall_time = 1;
1258 if (!overall_size)
1259 overall_size = 1;
1260 for (i = 0; i < histogram.length (); i++)
1261 {
1262 cumulated_time += histogram[i]->count * histogram[i]->time;
1263 cumulated_size += histogram[i]->size;
1264 fprintf (file, " "HOST_WIDEST_INT_PRINT_DEC": time:%i (%2.2f) size:%i (%2.2f)\n",
1265 (HOST_WIDEST_INT) histogram[i]->count,
1266 histogram[i]->time,
1267 cumulated_time * 100.0 / overall_time,
1268 histogram[i]->size,
1269 cumulated_size * 100.0 / overall_size);
1270 }
1271 }
1272
1273 /* Collect histogram from CFG profiles. */
1274
static void
ipa_profile_generate_summary (void)
{
  struct cgraph_node *node;
  gimple_stmt_iterator gsi;
  hash_table <histogram_hash> hashtable;
  basic_block bb;

  /* HASHTABLE deduplicates entries by count during construction;
     HISTOGRAM_POOL owns the entries themselves.  */
  hashtable.create (10);
  histogram_pool = create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry),
				      10);

  /* For every basic block of every function with a gimple body,
     accumulate its estimated time and size at the block's profile
     count.  */
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->symbol.decl))
      {
	int time = 0;
	int size = 0;
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    time += estimate_num_insns (gsi_stmt (gsi), &eni_time_weights);
	    size += estimate_num_insns (gsi_stmt (gsi), &eni_size_weights);
	  }
	account_time_size (hashtable, histogram, bb->count, time, size);
      }
  hashtable.dispose ();
  /* Consumers expect entries sorted by count, descending.  */
  histogram.qsort (cmp_counts);
}
1302
1303 /* Serialize the ipa info for lto. */
1304
1305 static void
1306 ipa_profile_write_summary (void)
1307 {
1308 struct lto_simple_output_block *ob
1309 = lto_create_simple_output_block (LTO_section_ipa_profile);
1310 unsigned int i;
1311
1312 streamer_write_uhwi_stream (ob->main_stream, histogram.length());
1313 for (i = 0; i < histogram.length (); i++)
1314 {
1315 streamer_write_gcov_count_stream (ob->main_stream, histogram[i]->count);
1316 streamer_write_uhwi_stream (ob->main_stream, histogram[i]->time);
1317 streamer_write_uhwi_stream (ob->main_stream, histogram[i]->size);
1318 }
1319 lto_destroy_simple_output_block (ob);
1320 }
1321
1322 /* Deserialize the ipa info for lto. */
1323
static void
ipa_profile_read_summary (void)
{
  struct lto_file_decl_data ** file_data_vec
    = lto_get_file_decl_data ();
  struct lto_file_decl_data * file_data;
  hash_table <histogram_hash> hashtable;
  int j = 0;

  /* The hashtable merges entries with equal counts coming from
     different LTO files; the pool owns the merged entries.  */
  hashtable.create (10);
  histogram_pool = create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry),
				      10);

  /* Fold the histogram streamed by every LTO file into the global one.  */
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_profile,
					 &data, &len);
      /* A file may lack the section; skip it then.  */
      if (ib)
	{
	  unsigned int num = streamer_read_uhwi (ib);
	  unsigned int n;
	  for (n = 0; n < num; n++)
	    {
	      gcov_type count = streamer_read_gcov_count (ib);
	      int time = streamer_read_uhwi (ib);
	      int size = streamer_read_uhwi (ib);
	      account_time_size (hashtable, histogram,
				 count, time, size);
	    }
	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_profile,
					  ib, data, len);
	}
    }
  hashtable.dispose ();
  /* Restore the sorted-by-descending-count invariant.  */
  histogram.qsort (cmp_counts);
}
1365
1366 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1367
static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;
  gcov_type overall_time = 0, cutoff = 0, cumulated = 0, overall_size = 0;

  if (dump_file)
    dump_histogram (dump_file, histogram);
  /* Totals across the whole histogram; denominators for the working-set
     computation below.  */
  for (i = 0; i < (int)histogram.length (); i++)
    {
      overall_time += histogram[i]->count * histogram[i]->time;
      overall_size += histogram[i]->size;
    }
  if (overall_time)
    {
      gcov_type threshold;

      gcc_assert (overall_size);
      if (dump_file)
	{
	  gcov_type min, cumulated_time = 0, cumulated_size = 0;

	  fprintf (dump_file, "Overall time: "HOST_WIDEST_INT_PRINT_DEC"\n",
		   (HOST_WIDEST_INT)overall_time);
	  /* Report what the current hot-BB threshold covers; relies on
	     HISTOGRAM being sorted by descending count.  */
	  min = get_hot_bb_threshold ();
	  for (i = 0; i < (int)histogram.length () && histogram[i]->count >= min;
	       i++)
	    {
	      cumulated_time += histogram[i]->count * histogram[i]->time;
	      cumulated_size += histogram[i]->size;
	    }
	  fprintf (dump_file, "GCOV min count: "HOST_WIDEST_INT_PRINT_DEC
		   " Time:%3.2f%% Size:%3.2f%%\n",
		   (HOST_WIDEST_INT)min,
		   cumulated_time * 100.0 / overall_time,
		   cumulated_size * 100.0 / overall_size);
	}
      /* Pick the smallest count whose blocks cover
	 HOT_BB_COUNT_WS_PERMILLE permille of OVERALL_TIME
	 (+500 rounds to nearest).  Terminates because CUMULATED reaches
	 OVERALL_TIME >= CUTOFF by the end of the histogram.  */
      cutoff = (overall_time * PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE) + 500) / 1000;
      threshold = 0;
      for (i = 0; cumulated < cutoff; i++)
	{
	  cumulated += histogram[i]->count * histogram[i]->time;
	  threshold = histogram[i]->count;
	}
      if (!threshold)
	threshold = 1;
      if (dump_file)
	{
	  gcov_type cumulated_time = 0, cumulated_size = 0;

	  for (i = 0;
	       i < (int)histogram.length () && histogram[i]->count >= threshold;
	       i++)
	    {
	      cumulated_time += histogram[i]->count * histogram[i]->time;
	      cumulated_size += histogram[i]->size;
	    }
	  fprintf (dump_file, "Determined min count: "HOST_WIDEST_INT_PRINT_DEC
		   " Time:%3.2f%% Size:%3.2f%%\n",
		   (HOST_WIDEST_INT)threshold,
		   cumulated_time * 100.0 / overall_time,
		   cumulated_size * 100.0 / overall_size);
	}
      /* Never lower the threshold outside LTO; at LTO time the streamed
	 histogram is authoritative, so always install it then.  */
      if (threshold > get_hot_bb_threshold ()
	  || in_lto_p)
	{
	  if (dump_file)
	    fprintf (dump_file, "Threshold updated.\n");
	  set_hot_bb_threshold (threshold);
	}
    }
  histogram.release();
  free_alloc_pool (histogram_pool);

  /* Propagate frequency information across the callgraph in reverse
     postorder.  When a node changed, its local callees are queued for
     re-examination via the AUX pointer.  */
  order_pos = ipa_reverse_postorder (order);
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
	{
	  for (e = order[i]->callees; e; e = e->next_callee)
	    if (e->callee->local.local && !e->callee->symbol.aux)
	      {
		something_changed = true;
		e->callee->symbol.aux = (void *)1;
	      }
	}
      /* Clear AUX so it can serve as the worklist mark next round.  */
      order[i]->symbol.aux = NULL;
    }

  /* Iterate to a fixed point.  */
  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
	{
	  if (order[i]->symbol.aux && cgraph_propagate_frequency (order[i]))
	    {
	      for (e = order[i]->callees; e; e = e->next_callee)
		if (e->callee->local.local && !e->callee->symbol.aux)
		  {
		    something_changed = true;
		    e->callee->symbol.aux = (void *)1;
		  }
	    }
	  order[i]->symbol.aux = NULL;
	}
    }
  free (order);
  return 0;
}
1481
1482 static bool
1483 gate_ipa_profile (void)
1484 {
1485 return flag_ipa_profile;
1486 }
1487
namespace {

/* Descriptor for the IPA profile propagation pass.  */
const pass_data pass_data_ipa_profile =
{
  IPA_PASS, /* type */
  "profile_estimate", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PROFILE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper with LTO summary streaming: the histogram is generated
   from CFG profiles, written/read across LTO, and consumed by
   ipa_profile.  */

class pass_ipa_profile : public ipa_opt_pass_d
{
public:
  pass_ipa_profile(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_profile, ctxt,
		     ipa_profile_generate_summary, /* generate_summary */
		     ipa_profile_write_summary, /* write_summary */
		     ipa_profile_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_profile (); }
  unsigned int execute () { return ipa_profile (); }

}; // class pass_ipa_profile

} // anon namespace
1528
1529 ipa_opt_pass_d *
1530 make_pass_ipa_profile (gcc::context *ctxt)
1531 {
1532 return new pass_ipa_profile (ctxt);
1533 }
1534
1535 /* Generate and emit a static constructor or destructor. WHICH must
1536 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1537 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1538 initialization priority for this constructor or destructor.
1539
1540 FINAL specify whether the externally visible name for collect2 should
1541 be produced. */
1542
1543 static void
1544 cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
1545 {
1546 static int counter = 0;
1547 char which_buf[16];
1548 tree decl, name, resdecl;
1549
1550 /* The priority is encoded in the constructor or destructor name.
1551 collect2 will sort the names and arrange that they are called at
1552 program startup. */
1553 if (final)
1554 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1555 else
1556 /* Proudce sane name but one not recognizable by collect2, just for the
1557 case we fail to inline the function. */
1558 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
1559 name = get_file_function_name (which_buf);
1560
1561 decl = build_decl (input_location, FUNCTION_DECL, name,
1562 build_function_type_list (void_type_node, NULL_TREE));
1563 current_function_decl = decl;
1564
1565 resdecl = build_decl (input_location,
1566 RESULT_DECL, NULL_TREE, void_type_node);
1567 DECL_ARTIFICIAL (resdecl) = 1;
1568 DECL_RESULT (decl) = resdecl;
1569 DECL_CONTEXT (resdecl) = decl;
1570
1571 allocate_struct_function (decl, false);
1572
1573 TREE_STATIC (decl) = 1;
1574 TREE_USED (decl) = 1;
1575 DECL_ARTIFICIAL (decl) = 1;
1576 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1577 DECL_SAVED_TREE (decl) = body;
1578 if (!targetm.have_ctors_dtors && final)
1579 {
1580 TREE_PUBLIC (decl) = 1;
1581 DECL_PRESERVE_P (decl) = 1;
1582 }
1583 DECL_UNINLINABLE (decl) = 1;
1584
1585 DECL_INITIAL (decl) = make_node (BLOCK);
1586 TREE_USED (DECL_INITIAL (decl)) = 1;
1587
1588 DECL_SOURCE_LOCATION (decl) = input_location;
1589 cfun->function_end_locus = input_location;
1590
1591 switch (which)
1592 {
1593 case 'I':
1594 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1595 decl_init_priority_insert (decl, priority);
1596 break;
1597 case 'D':
1598 DECL_STATIC_DESTRUCTOR (decl) = 1;
1599 decl_fini_priority_insert (decl, priority);
1600 break;
1601 default:
1602 gcc_unreachable ();
1603 }
1604
1605 gimplify_function_tree (decl);
1606
1607 cgraph_add_new_function (decl, false);
1608
1609 set_cfun (NULL);
1610 current_function_decl = NULL;
1611 }
1612
1613 /* Generate and emit a static constructor or destructor. WHICH must
1614 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1615 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1616 initialization priority for this constructor or destructor. */
1617
void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* FINAL is false: the generated function gets a name collect2 does
     not recognize (see cgraph_build_static_cdtor_1).  */
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
1623
/* A vector of FUNCTION_DECLs declared as static constructors.
   Populated by record_cdtor_fn, consumed by build_cdtor_fns.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;
1628
1629 /* When target does not have ctors and dtors, we call all constructor
1630 and destructor by special initialization/destruction function
1631 recognized by collect2.
1632
1633 When we are going to build this function, collect all constructors and
1634 destructors and turn them into normal functions. */
1635
static void
record_cdtor_fn (struct cgraph_node *node)
{
  /* A decl can be both constructor and destructor; queue it on every
     matching worklist.  */
  if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl))
    static_ctors.safe_push (node->symbol.decl);
  if (DECL_STATIC_DESTRUCTOR (node->symbol.decl))
    static_dtors.safe_push (node->symbol.decl);
  node = cgraph_get_node (node->symbol.decl);
  /* Lift inline limits — presumably so the call emitted by build_cdtor
     can always be inlined (cgraph_build_static_cdtor_1 mentions the
     "case we fail to inline" fallback).  */
  DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl) = 1;
}
1646
1647 /* Define global constructors/destructor functions for the CDTORS, of
1648 which they are LEN. The CDTORS are sorted by initialization
1649 priority. If CTOR_P is true, these are constructors; otherwise,
1650 they are destructors. */
1651
static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Advance J past the run [I, J) of cdtors sharing one
	 initialization priority; PRIORITY is the run's priority.  */
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority. */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  /* The original decls stop being cdtors themselves; only the
	     merged function (built below) remains one.  */
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them. */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority. */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
1711
1712 /* Comparison function for qsort. P1 and P2 are actually of type
1713 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1714 used to determine the sort order. */
1715
1716 static int
1717 compare_ctor (const void *p1, const void *p2)
1718 {
1719 tree f1;
1720 tree f2;
1721 int priority1;
1722 int priority2;
1723
1724 f1 = *(const tree *)p1;
1725 f2 = *(const tree *)p2;
1726 priority1 = DECL_INIT_PRIORITY (f1);
1727 priority2 = DECL_INIT_PRIORITY (f2);
1728
1729 if (priority1 < priority2)
1730 return -1;
1731 else if (priority1 > priority2)
1732 return 1;
1733 else
1734 /* Ensure a stable sort. Constructors are executed in backwarding
1735 order to make LTO initialize braries first. */
1736 return DECL_UID (f2) - DECL_UID (f1);
1737 }
1738
1739 /* Comparison function for qsort. P1 and P2 are actually of type
1740 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1741 used to determine the sort order. */
1742
1743 static int
1744 compare_dtor (const void *p1, const void *p2)
1745 {
1746 tree f1;
1747 tree f2;
1748 int priority1;
1749 int priority2;
1750
1751 f1 = *(const tree *)p1;
1752 f2 = *(const tree *)p2;
1753 priority1 = DECL_FINI_PRIORITY (f1);
1754 priority2 = DECL_FINI_PRIORITY (f2);
1755
1756 if (priority1 < priority2)
1757 return -1;
1758 else if (priority1 > priority2)
1759 return 1;
1760 else
1761 /* Ensure a stable sort. */
1762 return DECL_UID (f1) - DECL_UID (f2);
1763 }
1764
1765 /* Generate functions to call static constructors and destructors
1766 for targets that do not support .ctors/.dtors sections. These
1767 functions have magic names which are detected by collect2. */
1768
static void
build_cdtor_fns (void)
{
  /* Constructors first, then destructors; each set is sorted by its
     respective priority before merging.  */
  if (!static_ctors.is_empty ())
    {
      /* Merging only happens when the target lacks ctor/dtor support
	 or at LTO time (see gate_ipa_cdtor_merge).  */
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
1786
1787 /* Look for constructors and destructors and produce function calling them.
1788 This is needed for targets not supporting ctors or dtors, but we perform the
1789 transformation also at linktime to merge possibly numerous
1790 constructors/destructors into single function to improve code locality and
1791 reduce size. */
1792
static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  /* Collect every defined static constructor/destructor, then merge
     them per priority into collect2-visible functions.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl)
	|| DECL_STATIC_DESTRUCTOR (node->symbol.decl))
       record_cdtor_fn (node);
  build_cdtor_fns ();
  /* Release the temporary worklists.  */
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}
1806
1807 /* Perform the pass when we have no ctors/dtors support
1808 or at LTO time to merge multiple constructors into single
1809 function. */
1810
1811 static bool
1812 gate_ipa_cdtor_merge (void)
1813 {
1814 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1815 }
1816
namespace {

/* Descriptor for the cdtor merging pass.  */
const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper; no summary streaming is needed, the pass works on
   the in-memory callgraph only.  */

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_cdtor_merge, ctxt,
		     NULL, /* generate_summary */
		     NULL, /* write_summary */
		     NULL, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_cdtor_merge (); }
  unsigned int execute () { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

} // anon namespace
1857
1858 ipa_opt_pass_d *
1859 make_pass_ipa_cdtor_merge (gcc::context *ctxt)
1860 {
1861 return new pass_ipa_cdtor_merge (ctxt);
1862 }