lto-cgraph.c (get_alias_symbol): Remove weakref sanity check.
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "cgraph.h"
25 #include "tree-pass.h"
26 #include "gimple.h"
27 #include "ggc.h"
28 #include "flags.h"
29 #include "pointer-set.h"
30 #include "target.h"
31 #include "tree-iterator.h"
32 #include "ipa-utils.h"
33 #include "pointer-set.h"
34 #include "ipa-inline.h"
35 #include "hash-table.h"
36 #include "tree-inline.h"
37 #include "profile.h"
38 #include "params.h"
39 #include "lto-streamer.h"
40 #include "data-streamer.h"
41
42 /* Return true when NODE can not be local. Worker for cgraph_local_node_p. */
43
44 static bool
45 cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
46 {
47 /* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
48 return !(cgraph_only_called_directly_or_aliased_p (node)
49 && !ipa_ref_has_aliases_p (&node->symbol.ref_list)
50 && node->symbol.definition
51 && !DECL_EXTERNAL (node->symbol.decl)
52 && !node->symbol.externally_visible
53 && !node->symbol.used_from_other_partition
54 && !node->symbol.in_other_partition);
55 }
56
57 /* Return true when function can be marked local. */
58
59 static bool
60 cgraph_local_node_p (struct cgraph_node *node)
61 {
62 struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);
63
64 /* FIXME: thunks can be considered local, but we need prevent i386
65 from attempting to change calling convention of them. */
66 if (n->thunk.thunk_p)
67 return false;
68 return !cgraph_for_node_and_aliases (n,
69 cgraph_non_local_node_p_1, NULL, true);
70
71 }
72
73 /* Return true when NODE has ADDR reference. */
74
75 static bool
76 has_addr_references_p (struct cgraph_node *node,
77 void *data ATTRIBUTE_UNUSED)
78 {
79 int i;
80 struct ipa_ref *ref;
81
82 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
83 i, ref); i++)
84 if (ref->use == IPA_REF_ADDR)
85 return true;
86 return false;
87 }
88
89 /* Look for all functions inlined to NODE and update their inlined_to pointers
90 to INLINED_TO. */
91
92 static void
93 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
94 {
95 struct cgraph_edge *e;
96 for (e = node->callees; e; e = e->next_callee)
97 if (e->callee->global.inlined_to)
98 {
99 e->callee->global.inlined_to = inlined_to;
100 update_inlined_to_pointer (e->callee, inlined_to);
101 }
102 }
103
104 /* Add symtab NODE to queue starting at FIRST.
105
106 The queue is linked via AUX pointers and terminated by pointer to 1.
107 We enqueue nodes at two occasions: when we find them reachable or when we find
108 their bodies needed for further clonning. In the second case we mark them
109 by pointer to 2 after processing so they are re-queue when they become
110 reachable. */
111
112 static void
113 enqueue_node (symtab_node node, symtab_node *first,
114 struct pointer_set_t *reachable)
115 {
116 /* Node is still in queue; do nothing. */
117 if (node->symbol.aux && node->symbol.aux != (void *) 2)
118 return;
119 /* Node was already processed as unreachable, re-enqueue
120 only if it became reachable now. */
121 if (node->symbol.aux == (void *)2 && !pointer_set_contains (reachable, node))
122 return;
123 node->symbol.aux = *first;
124 *first = node;
125 }
126
127 /* Process references. */
128
129 static void
130 process_references (struct ipa_ref_list *list,
131 symtab_node *first,
132 bool before_inlining_p,
133 struct pointer_set_t *reachable)
134 {
135 int i;
136 struct ipa_ref *ref;
137 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
138 {
139 symtab_node node = ref->referred;
140
141 if (node->symbol.definition
142 && ((!DECL_EXTERNAL (node->symbol.decl) || node->symbol.alias)
143 || (before_inlining_p
144 /* We use variable constructors during late complation for
145 constant folding. Keep references alive so partitioning
146 knows about potential references. */
147 || (TREE_CODE (node->symbol.decl) == VAR_DECL
148 && flag_wpa && const_value_known_p (node->symbol.decl)))))
149 pointer_set_insert (reachable, node);
150 enqueue_node ((symtab_node) node, first, reachable);
151 }
152 }
153
154
155 /* Perform reachability analysis and reclaim all unreachable nodes.
156
157 The algorithm is basically mark&sweep but with some extra refinements:
158
159 - reachable extern inline functions needs special handling; the bodies needs
160 to stay in memory until inlining in hope that they will be inlined.
161 After inlining we release their bodies and turn them into unanalyzed
162 nodes even when they are reachable.
163
164 BEFORE_INLINING_P specify whether we are before or after inlining.
165
166 - virtual functions are kept in callgraph even if they seem unreachable in
167 hope calls to them will be devirtualized.
168
169 Again we remove them after inlining. In late optimization some
170 devirtualization may happen, but it is not important since we won't inline
171 the call. In theory early opts and IPA should work out all important cases.
172
173 - virtual clones needs bodies of their origins for later materialization;
174 this means that we want to keep the body even if the origin is unreachable
175 otherwise. To avoid origin from sitting in the callgraph and being
176 walked by IPA passes, we turn them into unanalyzed nodes with body
177 defined.
178
179 We maintain set of function declaration where body needs to stay in
180 body_needed_for_clonning
181
182 Inline clones represent special case: their declaration match the
183 declaration of origin and cgraph_remove_node already knows how to
184 reshape callgraph and preserve body when offline copy of function or
185 inline clone is being removed.
186
187 - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
188 variables with DECL_INITIAL set. We finalize these and keep reachable
189 ones around for constant folding purposes. After inlining we however
190 stop walking their references to let everything static referenced by them
191 to be removed when it is otherwise unreachable.
192
193 We maintain queue of both reachable symbols (i.e. defined symbols that needs
194 to stay) and symbols that are in boundary (i.e. external symbols referenced
195 by reachable symbols or origins of clones). The queue is represented
196 as linked list by AUX pointer terminated by 1.
197
198 At the end we keep all reachable symbols. For symbols in boundary we always
199 turn definition into a declaration, but we may keep function body around
200 based on body_needed_for_clonning
201
202 All symbols that enter the queue have AUX pointer non-zero and are in the
203 boundary. Pointer set REACHABLE is used to track reachable symbols.
204
205 Every symbol can be visited twice - once as part of boundary and once
206 as real reachable symbol. enqueue_node needs to decide whether the
207 node needs to be re-queued for second processing. For this purpose
208 we set AUX pointer of processed symbols in the boundary to constant 2. */
209
/* See the large comment above for the full algorithm description.
   Returns true when some node was removed or reset (i.e. the symbol
   table changed).  */

bool
symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Queue terminator is the constant 1; see enqueue_node.  */
  symtab_node first = (symtab_node) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;
  struct pointer_set_t *reachable = pointer_set_create ();
  struct pointer_set_t *body_needed_for_clonning = pointer_set_create ();

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* AUX pointers must be clean before we start using them as queue links.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->symbol.aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->symbol.aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (!node->global.inlined_to
	&& (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	    /* Keep around virtual functions for possible devirtualization.  */
	    || (before_inlining_p
		&& DECL_VIRTUAL_P (node->symbol.decl))))
      {
	gcc_assert (!node->global.inlined_to);
	pointer_set_insert (reachable, node);
	enqueue_node ((symtab_node)node, &first, reachable);
      }
    else
      gcc_assert (!node->symbol.aux);

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!varpool_can_remove_if_no_refs (vnode))
      {
	pointer_set_insert (reachable, vnode);
	enqueue_node ((symtab_node)vnode, &first, reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node) (void *) 1)
    {
      /* Nodes on the queue but not in REACHABLE are boundary symbols.  */
      bool in_boundary_p = !pointer_set_contains (reachable, first);
      symtab_node node = first;

      first = (symtab_node)first->symbol.aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	node->symbol.aux = (void *)2;
      else
	{
	  /* If any symbol in a comdat group is reachable, force
	     all other in the same comdat group to be also reachable.  */
	  if (node->symbol.same_comdat_group)
	    {
	      symtab_node next;
	      for (next = node->symbol.same_comdat_group;
		   next != node;
		   next = next->symbol.same_comdat_group)
		if (!pointer_set_insert (reachable, next))
		  enqueue_node ((symtab_node) next, &first, reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (&node->symbol.ref_list, &first,
			      before_inlining_p, reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  if (e->callee->symbol.definition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->symbol.decl)
			  || e->callee->symbol.alias
			  || before_inlining_p))
		    pointer_set_insert (reachable, e->callee);
		  enqueue_node ((symtab_node) e->callee, &first, reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (!cnode->symbol.alias && cnode->global.inlined_to)
		pointer_set_insert (body_needed_for_clonning, cnode->symbol.decl);
	    }

	  /* For non-inline clones, force their origins to the boundary and ensure
	     that body is not removed.  */
	  while (cnode->clone_of
		 && !gimple_has_body_p (cnode->symbol.decl))
	    {
	      bool noninline = cnode->clone_of->symbol.decl != cnode->symbol.decl;
	      cnode = cnode->clone_of;
	      if (noninline)
		{
		  pointer_set_insert (body_needed_for_clonning, cnode->symbol.decl);
		  enqueue_node ((symtab_node)cnode, &first, reachable);
		  break;
		}
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	 boundary.  This will also hold initializers of the external vars NODE
	 refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node> (node);
      if (vnode
	  && DECL_EXTERNAL (node->symbol.decl)
	  && !vnode->symbol.alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref;
	  for (int i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
	    enqueue_node (ref->referred, &first, reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = cgraph_first_function (); node; node = next)
    {
      next = cgraph_next_function (node);

      /* If node is not needed at all (never queued), remove it.  */
      if (!node->symbol.aux)
	{
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  changed = true;
	}
      /* If node is in the boundary (queued but unreachable), remove its
	 body but keep the declaration.  */
      else if (!pointer_set_contains (reachable, node))
	{
	  if (!pointer_set_contains (body_needed_for_clonning, node->symbol.decl))
	    cgraph_release_function_body (node);
	  if (node->symbol.definition)
	    {
	      if (file)
		fprintf (file, " %s", cgraph_node_name (node));
	      cgraph_reset_node (node);
	      changed = true;
	    }
	}
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      /* Clear the queue links for the next invocation.  */
      node->symbol.aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = varpool_first_variable (); vnode; vnode = vnext)
    {
      vnext = varpool_next_variable (vnode);
      if (!vnode->symbol.aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->symbol.decl)))
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
      else if (!pointer_set_contains (reachable, vnode))
	{
	  /* Boundary variable: turn the definition into a declaration.  */
	  if (vnode->symbol.definition)
	    {
	      if (file)
		fprintf (file, " %s", varpool_node_name (vnode));
	      changed = true;
	    }
	  vnode->symbol.definition = false;
	  vnode->symbol.analyzed = false;
	  vnode->symbol.aux = NULL;

	  /* Keep body if it may be useful for constant folding.  */
	  if (!const_value_known_p (vnode->symbol.decl))
	    varpool_remove_initializer (vnode);
	  ipa_remove_all_references (&vnode->symbol.ref_list);
	}
      else
	vnode->symbol.aux = NULL;
    }

  pointer_set_destroy (reachable);
  pointer_set_destroy (body_needed_for_clonning);

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->symbol.address_taken
	&& !node->symbol.used_from_other_partition)
      {
	if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->symbol.address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      cgraph_propagate_frequency (node);

  return changed;
}
460
/* Discover variables that have no longer address taken or that are read only
   and update their flags (TREE_ADDRESSABLE / TREE_READONLY).

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Only consider defined variables for which all references are visible
     and that still carry one of the flags we might be able to clear.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->symbol.definition && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->symbol.decl)
	    || !TREE_READONLY (vnode->symbol.decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Scan all references; stop early once both flags are proven.  */
	for (i = 0; ipa_ref_list_referring_iterate (&vnode->symbol.ref_list,
						    i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->symbol.decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->symbol.decl) = 0;
	  }
	if (!TREE_READONLY (vnode->symbol.decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->symbol.decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->symbol.decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
519
520 /* Return true when there is a reference to node and it is not vtable. */
521 static bool
522 cgraph_address_taken_from_non_vtable_p (struct cgraph_node *node)
523 {
524 int i;
525 struct ipa_ref *ref;
526 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
527 i, ref); i++)
528 if (ref->use == IPA_REF_ADDR)
529 {
530 struct varpool_node *node;
531 if (is_a <cgraph_node> (ref->referring))
532 return true;
533 node = ipa_ref_referring_varpool_node (ref);
534 if (!DECL_VIRTUAL_P (node->symbol.decl))
535 return true;
536 }
537 return false;
538 }
539
/* COMDAT functions must be shared only if they have address taken,
   otherwise we can produce our own private implementation with
   -fwhole-program.
   Return true when turning COMDAT function static can not lead to wrong
   code when the resulting object links with a library defining same COMDAT.

   Virtual functions do have their addresses taken from the vtables,
   but in C++ there is no way to compare their addresses for equality.  */

static bool
cgraph_comdat_can_be_unshared_p (struct cgraph_node *node)
{
  /* Non-virtual address-taken functions and declarations without a
     definition must stay shared.  */
  if ((cgraph_address_taken_from_non_vtable_p (node)
       && !DECL_VIRTUAL_P (node->symbol.decl))
      || !node->symbol.definition)
    return false;
  if (node->symbol.same_comdat_group)
    {
      struct cgraph_node *next;

      /* If more than one function is in the same COMDAT group, it must
	 be shared even if just one function in the comdat group has
	 address taken.  */
      for (next = cgraph (node->symbol.same_comdat_group);
	   next != node; next = cgraph (next->symbol.same_comdat_group))
	if (cgraph_address_taken_from_non_vtable_p (next)
	    && !DECL_VIRTUAL_P (next->symbol.decl))
	  return false;
    }
  return true;
}
571
/* Return true when function NODE should be considered externally visible.
   The order of the checks below is significant: early positive answers
   (builtins, linker-used symbols, attributes) must win over the later
   localization logic.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node,
			     bool whole_program)
{
  if (!node->symbol.definition)
    return false;
  /* Plain static or external-only declarations are never visible.  */
  if (!DECL_COMDAT (node->symbol.decl)
      && (!TREE_PUBLIC (node->symbol.decl)
	  || DECL_EXTERNAL (node->symbol.decl)))
    return false;

  /* Do not try to localize built-in functions yet.  One of problems is that we
     end up mangling their asm for WHOPR that makes it impossible to call them
     using the implicit built-in declarations anymore.  Similarly this enables
     us to remove them as unreachable before actual calls may appear during
     expansion or folding.  */
  if (DECL_BUILT_IN (node->symbol.decl))
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (symtab_used_from_object_file_p ((symtab_node) node))
    return true;
  if (DECL_PRESERVE_P (node->symbol.decl))
    return true;
  if (lookup_attribute ("externally_visible",
			DECL_ATTRIBUTES (node->symbol.decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport",
			   DECL_ATTRIBUTES (node->symbol.decl)))
    return true;
  /* Linker resolution says only this unit uses the symbol.  */
  if (node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;
  /* When doing LTO or whole program, we can bring COMDAT functions static.
     This improves code quality and we know we will duplicate them at most twice
     (in the case that we are not using plugin and link with object file
      implementing same COMDAT)  */
  if ((in_lto_p || whole_program)
      && DECL_COMDAT (node->symbol.decl)
      && cgraph_comdat_can_be_unshared_p (node))
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->symbol.definition)
    ;
  else if (!whole_program)
    return true;

  /* main is always preserved even with -fwhole-program.  */
  if (MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
    return true;

  return false;
}
632
633 /* Return true when variable VNODE should be considered externally visible. */
634
635 bool
636 varpool_externally_visible_p (struct varpool_node *vnode)
637 {
638 /* Do not touch weakrefs; while they are not externally visible,
639 dropping their DECL_EXTERNAL flags confuse most
640 of code handling them. */
641 if (vnode->symbol.alias && DECL_EXTERNAL (vnode->symbol.decl))
642 return true;
643
644 if (DECL_EXTERNAL (vnode->symbol.decl))
645 return true;
646
647 if (!DECL_COMDAT (vnode->symbol.decl) && !TREE_PUBLIC (vnode->symbol.decl))
648 return false;
649
650 /* If linker counts on us, we must preserve the function. */
651 if (symtab_used_from_object_file_p ((symtab_node) vnode))
652 return true;
653
654 if (DECL_HARD_REGISTER (vnode->symbol.decl))
655 return true;
656 if (DECL_PRESERVE_P (vnode->symbol.decl))
657 return true;
658 if (lookup_attribute ("externally_visible",
659 DECL_ATTRIBUTES (vnode->symbol.decl)))
660 return true;
661 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
662 && lookup_attribute ("dllexport",
663 DECL_ATTRIBUTES (vnode->symbol.decl)))
664 return true;
665
666 /* See if we have linker information about symbol not being used or
667 if we need to make guess based on the declaration.
668
669 Even if the linker clams the symbol is unused, never bring internal
670 symbols that are declared by user as used or externally visible.
671 This is needed for i.e. references from asm statements. */
672 if (symtab_used_from_object_file_p ((symtab_node) vnode))
673 return true;
674 if (vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY)
675 return false;
676
677 /* As a special case, the COMDAT virtual tables can be unshared.
678 In LTO mode turn vtables into static variables. The variable is readonly,
679 so this does not enable more optimization, but referring static var
680 is faster for dynamic linking. Also this match logic hidding vtables
681 from LTO symbol tables. */
682 if ((in_lto_p || flag_whole_program)
683 && !vnode->symbol.force_output
684 && DECL_COMDAT (vnode->symbol.decl) && DECL_VIRTUAL_P (vnode->symbol.decl))
685 return false;
686
687 /* When doing link time optimizations, hidden symbols become local. */
688 if (in_lto_p
689 && (DECL_VISIBILITY (vnode->symbol.decl) == VISIBILITY_HIDDEN
690 || DECL_VISIBILITY (vnode->symbol.decl) == VISIBILITY_INTERNAL)
691 /* Be sure that node is defined in IR file, not in other object
692 file. In that case we don't set used_from_other_object_file. */
693 && vnode->symbol.definition)
694 ;
695 else if (!flag_whole_program)
696 return true;
697
698 /* Do not attempt to privatize COMDATS by default.
699 This would break linking with C++ libraries sharing
700 inline definitions.
701
702 FIXME: We can do so for readonly vars with no address taken and
703 possibly also for vtables since no direct pointer comparsion is done.
704 It might be interesting to do so to reduce linking overhead. */
705 if (DECL_COMDAT (vnode->symbol.decl) || DECL_WEAK (vnode->symbol.decl))
706 return true;
707 return false;
708 }
709
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* All aliases should be processed at this point.  */
  gcc_checking_assert (!alias_pairs || !alias_pairs->length());

  FOR_EACH_FUNCTION (node)
    {
      int flags = flags_from_decl_or_type (node->symbol.decl);

      /* Optimize away PURE and CONST constructors and destructors.  */
      if (optimize
	  && (flags & (ECF_CONST | ECF_PURE))
	  && !(flags & ECF_LOOPING_CONST_OR_PURE))
	{
	  DECL_STATIC_CONSTRUCTOR (node->symbol.decl) = 0;
	  DECL_STATIC_DESTRUCTOR (node->symbol.decl) = 0;
	}

      /* Frontends and alias code mark nodes as needed before parsing is
	 finished.  We may end up marking a node external where this flag is
	 meaningless; strip it.  */
      if (node->symbol.force_output
	  && (DECL_EXTERNAL (node->symbol.decl) || !node->symbol.definition))
	node->symbol.force_output = 0;

      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->symbol.decl) && !TREE_PUBLIC (node->symbol.decl))
	DECL_COMDAT (node->symbol.decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->symbol.same_comdat_group && DECL_EXTERNAL (node->symbol.decl))
	{
#ifdef ENABLE_CHECKING
	  symtab_node n;

	  for (n = node->symbol.same_comdat_group;
	       n != (symtab_node)node;
	       n = n->symbol.same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->symbol.decl));
#endif
	  symtab_dissolve_same_comdat_group_list ((symtab_node) node);
	}
      gcc_assert ((!DECL_WEAK (node->symbol.decl)
		   && !DECL_COMDAT (node->symbol.decl))
		  || TREE_PUBLIC (node->symbol.decl)
		  || DECL_EXTERNAL (node->symbol.decl));
      if (cgraph_externally_visible_p (node, whole_program))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->symbol.externally_visible = true;
	}
      else
	node->symbol.externally_visible = false;
      /* Localize symbols that are defined here and not visible outside.  */
      if (!node->symbol.externally_visible && node->symbol.definition
	  && !DECL_EXTERNAL (node->symbol.decl))
	{
	  gcc_assert (whole_program || in_lto_p
		      || !TREE_PUBLIC (node->symbol.decl));
	  node->symbol.unique_name = ((node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY
				       || node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
				      && TREE_PUBLIC (node->symbol.decl));
	  symtab_make_decl_local (node->symbol.decl);
	  node->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
	  if (node->symbol.same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    symtab_dissolve_same_comdat_group_list ((symtab_node) node);
	}

      if (node->thunk.thunk_p
	  && TREE_PUBLIC (node->symbol.decl))
	{
	  struct cgraph_node *decl_node = node;

	  decl_node = cgraph_function_node (decl_node->callees->callee, NULL);

	  /* Thunks have the same visibility as function they are attached to.
	     Make sure the C++ front end set this up properly.  */
	  if (DECL_ONE_ONLY (decl_node->symbol.decl))
	    {
	      gcc_checking_assert (DECL_COMDAT (node->symbol.decl)
				   == DECL_COMDAT (decl_node->symbol.decl));
	      gcc_checking_assert (DECL_COMDAT_GROUP (node->symbol.decl)
				   == DECL_COMDAT_GROUP (decl_node->symbol.decl));
	      gcc_checking_assert (node->symbol.same_comdat_group);
	    }
	  if (DECL_EXTERNAL (decl_node->symbol.decl))
	    DECL_EXTERNAL (node->symbol.decl) = 1;
	}
    }
  FOR_EACH_DEFINED_FUNCTION (node)
    node->local.local = cgraph_local_node_p (node);
  FOR_EACH_VARIABLE (vnode)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->symbol.decl)
		  || TREE_PUBLIC (vnode->symbol.decl)
		  || DECL_EXTERNAL (vnode->symbol.decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->symbol.decl)
	  && (!(TREE_PUBLIC (vnode->symbol.decl)
		|| DECL_EXTERNAL (vnode->symbol.decl))
	      || (DECL_INITIAL (vnode->symbol.decl)
		  && DECL_INITIAL (vnode->symbol.decl) != error_mark_node)
	      || DECL_WEAK (vnode->symbol.decl)
	      || DECL_SECTION_NAME (vnode->symbol.decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->symbol.decl))))))
	DECL_COMMON (vnode->symbol.decl) = 0;
    }
  FOR_EACH_DEFINED_VARIABLE (vnode)
    {
      if (!vnode->symbol.definition)
	continue;
      if (varpool_externally_visible_p (vnode))
	vnode->symbol.externally_visible = true;
      else
	vnode->symbol.externally_visible = false;
      /* Localize variables not visible outside, same as functions above.  */
      if (!vnode->symbol.externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->symbol.decl));
	  symtab_make_decl_local (vnode->symbol.decl);
	  vnode->symbol.unique_name = ((vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY
				       || vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
				      && TREE_PUBLIC (vnode->symbol.decl));
	  if (vnode->symbol.same_comdat_group)
	    symtab_dissolve_same_comdat_group_list ((symtab_node) vnode);
	  vnode->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->symbol.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (vnode->symbol.externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
897
898 /* Local function pass handling visibilities. This happens before LTO streaming
899 so in particular -fwhole-program should be ignored at this level. */
900
901 static unsigned int
902 local_function_and_variable_visibility (void)
903 {
904 return function_and_variable_visibility (flag_whole_program && !flag_lto);
905 }
906
/* Pass descriptor for the local visibility pass.  Runs unconditionally
   (NULL gate) before LTO streaming; removes now-unreachable functions
   and dumps the symbol table when done.  */

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_symtab /* todo_flags_finish */
 }
};
926
/* Free inline summary.  */

static unsigned
free_inline_summary (void)
{
  /* Release the per-function inline summaries; this pass exists only to
     drop them once inlining-related decisions are complete.  */
  inline_free_summary ();
  return 0;
}
935
/* Pass descriptor for freeing the inline summaries.  The leading '*' in
   the name marks it as an internal pass not selectable for dumping.  */

struct simple_ipa_opt_pass pass_ipa_free_inline_summary =
{
 {
  SIMPLE_IPA_PASS,
  "*free_inline_summary",		/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  free_inline_summary,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_FREE_INLINE_SUMMARY,		/* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
955
956 /* Do not re-run on ltrans stage. */
957
958 static bool
959 gate_whole_program_function_and_variable_visibility (void)
960 {
961 return !flag_ltrans;
962 }
963
/* Bring functions local at LTO time with -fwhole-program.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  function_and_variable_visibility (flag_whole_program);
  /* Discovering read-only and non-addressable variables only pays off
     when optimizing.  */
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
974
/* Pass descriptor for the LTO-time whole-program visibility pass.
   Gated to run only at WPA stage (not re-run at ltrans); no summary
   streaming hooks are needed.  */

struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_symtab /* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
1003
/* Entry in the histogram.  */

struct histogram_entry
{
  gcov_type count;	/* Basic block execution count this bucket stands for.  */
  int time;		/* Accumulated estimated execution time.  */
  int size;		/* Accumulated estimated code size.  */
};
1012
/* Histogram of profile values.
   The histogram is represented as an ordered vector of entries allocated via
   histogram_pool. During construction a separate hashtable is kept to lookup
   duplicate entries.  */

vec<histogram_entry *> histogram;
/* Pool backing the histogram entries; released in ipa_profile.  */
static alloc_pool histogram_pool;
1020
/* Hashtable support for histogram entries.  Entries are hashed and
   compared by their execution COUNT alone; TIME and SIZE of duplicate
   counts are accumulated into the existing entry (see account_time_size).  */

struct histogram_hash : typed_noop_remove <histogram_entry>
{
  typedef histogram_entry value_type;
  typedef histogram_entry compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
};
1030
1031 inline hashval_t
1032 histogram_hash::hash (const histogram_entry *val)
1033 {
1034 return val->count;
1035 }
1036
1037 inline int
1038 histogram_hash::equal (const histogram_entry *val, const histogram_entry *val2)
1039 {
1040 return val->count == val2->count;
1041 }
1042
1043 /* Account TIME and SIZE executed COUNT times into HISTOGRAM.
1044 HASHTABLE is the on-side hash kept to avoid duplicates. */
1045
1046 static void
1047 account_time_size (hash_table <histogram_hash> hashtable,
1048 vec<histogram_entry *> &histogram,
1049 gcov_type count, int time, int size)
1050 {
1051 histogram_entry key = {count, 0, 0};
1052 histogram_entry **val = hashtable.find_slot (&key, INSERT);
1053
1054 if (!*val)
1055 {
1056 *val = (histogram_entry *) pool_alloc (histogram_pool);
1057 **val = key;
1058 histogram.safe_push (*val);
1059 }
1060 (*val)->time += time;
1061 (*val)->size += size;
1062 }
1063
1064 int
1065 cmp_counts (const void *v1, const void *v2)
1066 {
1067 const histogram_entry *h1 = *(const histogram_entry * const *)v1;
1068 const histogram_entry *h2 = *(const histogram_entry * const *)v2;
1069 if (h1->count < h2->count)
1070 return 1;
1071 if (h1->count > h2->count)
1072 return -1;
1073 return 0;
1074 }
1075
1076 /* Dump HISTOGRAM to FILE. */
1077
1078 static void
1079 dump_histogram (FILE *file, vec<histogram_entry *> histogram)
1080 {
1081 unsigned int i;
1082 gcov_type overall_time = 0, cumulated_time = 0, cumulated_size = 0, overall_size = 0;
1083
1084 fprintf (dump_file, "Histogram:\n");
1085 for (i = 0; i < histogram.length (); i++)
1086 {
1087 overall_time += histogram[i]->count * histogram[i]->time;
1088 overall_size += histogram[i]->size;
1089 }
1090 if (!overall_time)
1091 overall_time = 1;
1092 if (!overall_size)
1093 overall_size = 1;
1094 for (i = 0; i < histogram.length (); i++)
1095 {
1096 cumulated_time += histogram[i]->count * histogram[i]->time;
1097 cumulated_size += histogram[i]->size;
1098 fprintf (file, " "HOST_WIDEST_INT_PRINT_DEC": time:%i (%2.2f) size:%i (%2.2f)\n",
1099 (HOST_WIDEST_INT) histogram[i]->count,
1100 histogram[i]->time,
1101 cumulated_time * 100.0 / overall_time,
1102 histogram[i]->size,
1103 cumulated_size * 100.0 / overall_size);
1104 }
1105 }
1106
/* Collect histogram from CFG profiles.  */

static void
ipa_profile_generate_summary (void)
{
  struct cgraph_node *node;
  gimple_stmt_iterator gsi;
  hash_table <histogram_hash> hashtable;
  basic_block bb;

  hashtable.create (10);
  histogram_pool = create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry),
				      10);

  /* For every basic block of every function with a gimple body, account
     its execution count together with the estimated time and size of
     its statements into the global histogram.  */
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->symbol.decl))
      {
	int time = 0;
	int size = 0;
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    time += estimate_num_insns (gsi_stmt (gsi), &eni_time_weights);
	    size += estimate_num_insns (gsi_stmt (gsi), &eni_size_weights);
	  }
	account_time_size (hashtable, histogram, bb->count, time, size);
      }
  /* The hashtable was only needed to merge duplicate counts.  */
  hashtable.dispose ();
  /* Keep the histogram ordered by decreasing execution count.  */
  histogram.qsort (cmp_counts);
}
1136
/* Serialize the ipa info for lto.  The format is the number of entries
   followed by a (count, time, size) triplet per histogram entry; it must
   stay in sync with ipa_profile_read_summary.  */

static void
ipa_profile_write_summary (void)
{
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_profile);
  unsigned int i;

  streamer_write_uhwi_stream (ob->main_stream, histogram.length());
  for (i = 0; i < histogram.length (); i++)
    {
      streamer_write_gcov_count_stream (ob->main_stream, histogram[i]->count);
      streamer_write_uhwi_stream (ob->main_stream, histogram[i]->time);
      streamer_write_uhwi_stream (ob->main_stream, histogram[i]->size);
    }
  lto_destroy_simple_output_block (ob);
}
1155
/* Deserialize the ipa info for lto.  Merges the histograms streamed by
   ipa_profile_write_summary from all input files into the single global
   histogram, deduplicating counts via the on-side hashtable.  */

static void
ipa_profile_read_summary (void)
{
  struct lto_file_decl_data ** file_data_vec
    = lto_get_file_decl_data ();
  struct lto_file_decl_data * file_data;
  hash_table <histogram_hash> hashtable;
  int j = 0;

  hashtable.create (10);
  histogram_pool = create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry),
				      10);

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_profile,
					 &data, &len);
      /* A file may legitimately lack the section; skip it then.  */
      if (ib)
	{
	  unsigned int num = streamer_read_uhwi (ib);
	  unsigned int n;
	  for (n = 0; n < num; n++)
	    {
	      gcov_type count = streamer_read_gcov_count (ib);
	      int time = streamer_read_uhwi (ib);
	      int size = streamer_read_uhwi (ib);
	      account_time_size (hashtable, histogram,
				 count, time, size);
	    }
	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_profile,
					  ib, data, len);
	}
    }
  hashtable.dispose ();
  /* Restore the decreasing-count order expected by ipa_profile.  */
  histogram.qsort (cmp_counts);
}
1199
/* Simple ipa profile pass propagating frequencies across the callgraph.
   First derives a hot-BB count threshold from the histogram (hottest
   blocks covering HOT_BB_COUNT_WS_PERMILLE permille of total time),
   then iterates cgraph_propagate_frequency over the callgraph until a
   fixed point is reached.  */

static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;
  gcov_type overall_time = 0, cutoff = 0, cumulated = 0, overall_size = 0;

  if (dump_file)
    dump_histogram (dump_file, histogram);
  /* Total profile-weighted time and total size of the program.  */
  for (i = 0; i < (int)histogram.length (); i++)
    {
      overall_time += histogram[i]->count * histogram[i]->time;
      overall_size += histogram[i]->size;
    }
  if (overall_time)
    {
      gcov_type threshold;

      gcc_assert (overall_size);
      if (dump_file)
	{
	  gcov_type min, cumulated_time = 0, cumulated_size = 0;

	  fprintf (dump_file, "Overall time: "HOST_WIDEST_INT_PRINT_DEC"\n",
		   (HOST_WIDEST_INT)overall_time);
	  min = get_hot_bb_threshold ();
          for (i = 0; i < (int)histogram.length () && histogram[i]->count >= min;
	       i++)
	    {
	      cumulated_time += histogram[i]->count * histogram[i]->time;
	      cumulated_size += histogram[i]->size;
	    }
	  fprintf (dump_file, "GCOV min count: "HOST_WIDEST_INT_PRINT_DEC
		   " Time:%3.2f%% Size:%3.2f%%\n",
		   (HOST_WIDEST_INT)min,
		   cumulated_time * 100.0 / overall_time,
		   cumulated_size * 100.0 / overall_size);
	}
      /* Cutoff is the permille share of total time, rounded to nearest
	 by the +500 before the division.  */
      cutoff = (overall_time * PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE) + 500) / 1000;
      threshold = 0;
      /* NOTE(review): this loop relies on CUTOFF <= OVERALL_TIME (i.e. the
	 permille param being at most 1000) to stay within histogram
	 bounds — confirm the param's declared range.  */
      for (i = 0; cumulated < cutoff; i++)
	{
	  cumulated += histogram[i]->count * histogram[i]->time;
          threshold = histogram[i]->count;
	}
      if (!threshold)
	threshold = 1;
      if (dump_file)
	{
	  gcov_type cumulated_time = 0, cumulated_size = 0;

          for (i = 0;
	       i < (int)histogram.length () && histogram[i]->count >= threshold;
	       i++)
	    {
	      cumulated_time += histogram[i]->count * histogram[i]->time;
	      cumulated_size += histogram[i]->size;
	    }
	  fprintf (dump_file, "Determined min count: "HOST_WIDEST_INT_PRINT_DEC
		   " Time:%3.2f%% Size:%3.2f%%\n",
		   (HOST_WIDEST_INT)threshold,
		   cumulated_time * 100.0 / overall_time,
		   cumulated_size * 100.0 / overall_size);
	}
      /* Never lower an existing threshold except at LTO time, where the
	 merged histogram is authoritative.  */
      if (threshold > get_hot_bb_threshold ()
	  || in_lto_p)
	{
	  if (dump_file)
	    fprintf (dump_file, "Threshold updated.\n");
          set_hot_bb_threshold (threshold);
	}
    }
  /* The histogram is no longer needed; release it and its pool.  */
  histogram.release();
  free_alloc_pool (histogram_pool);

  /* Initial propagation sweep in reverse postorder.  When a node's
     frequency changes, its local callees are flagged via the AUX
     pointer for reprocessing.  */
  order_pos = ipa_reverse_postorder (order);
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
	{
	  for (e = order[i]->callees; e; e = e->next_callee)
	    if (e->callee->local.local && !e->callee->symbol.aux)
	      {
		something_changed = true;
		e->callee->symbol.aux = (void *)1;
	      }
	}
      order[i]->symbol.aux = NULL;
    }

  /* Iterate until no node's frequency changes any more.  */
  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
	{
	  if (order[i]->symbol.aux && cgraph_propagate_frequency (order[i]))
	    {
	      for (e = order[i]->callees; e; e = e->next_callee)
		if (e->callee->local.local && !e->callee->symbol.aux)
		  {
		    something_changed = true;
		    e->callee->symbol.aux = (void *)1;
		  }
	    }
	  order[i]->symbol.aux = NULL;
	}
    }
  free (order);
  return 0;
}
1315
1316 static bool
1317 gate_ipa_profile (void)
1318 {
1319 return flag_ipa_profile;
1320 }
1321
/* Pass descriptor for profile propagation.  Uses the summary hooks to
   collect, stream out and stream in the execution-count histogram.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "profile_estimate",			/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_ipa_profile,			/* gate */
  ipa_profile,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 ipa_profile_generate_summary,	        /* generate_summary */
 ipa_profile_write_summary,		/* write_summary */
 ipa_profile_read_summary,		/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1350
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced. */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
  /* Produce a sane name but one not recognizable by collect2, just for the
     case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a void(void) FUNCTION_DECL with an artificial RESULT_DECL.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      /* Without native ctor/dtor support the wrapper must stay visible
	 and alive so collect2 can find it by name.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the decl as ctor or dtor and record its priority.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
1428
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* FINAL is false: the name is not made recognizable to collect2.  */
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
1439
/* Work lists populated by record_cdtor_fn and released by ipa_cdtor_merge.  */
/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;
1444
/* When target does not have ctors and dtors, we call all constructor
   and destructor by special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl))
    static_ctors.safe_push (node->symbol.decl);
  if (DECL_STATIC_DESTRUCTOR (node->symbol.decl))
    static_dtors.safe_push (node->symbol.decl);
  /* NOTE(review): the node is re-fetched from its decl before the flag is
     set — presumably to get the canonical cgraph node; confirm whether
     the re-fetch is still needed.  */
  node = cgraph_get_node (node->symbol.decl);
  /* Encourage inlining the cdtor into the merged wrapper regardless of
     size limits.  */
  DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl) = 1;
}
1462
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J past the batch [I, J) of cdtors sharing the same
	 initialization priority (CDTORS is sorted by priority).  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Append a call to each cdtor of the batch to BODY, clearing its
	 static cdtor mark so it is not also emitted individually.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
1527
/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in reverse
       (descending UID) order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}
1554
1555 /* Comparison function for qsort. P1 and P2 are actually of type
1556 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1557 used to determine the sort order. */
1558
1559 static int
1560 compare_dtor (const void *p1, const void *p2)
1561 {
1562 tree f1;
1563 tree f2;
1564 int priority1;
1565 int priority2;
1566
1567 f1 = *(const tree *)p1;
1568 f2 = *(const tree *)p2;
1569 priority1 = DECL_FINI_PRIORITY (f1);
1570 priority2 = DECL_FINI_PRIORITY (f2);
1571
1572 if (priority1 < priority2)
1573 return -1;
1574 else if (priority1 > priority2)
1575 return 1;
1576 else
1577 /* Ensure a stable sort. */
1578 return DECL_UID (f1) - DECL_UID (f2);
1579 }
1580
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      /* With native ctor/dtor support this is only reached at LTO time
	 (merging is still worthwhile there).  */
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
1602
1603 /* Look for constructors and destructors and produce function calling them.
1604 This is needed for targets not supporting ctors or dtors, but we perform the
1605 transformation also at linktime to merge possibly numerous
1606 constructors/destructors into single function to improve code locality and
1607 reduce size. */
1608
1609 static unsigned int
1610 ipa_cdtor_merge (void)
1611 {
1612 struct cgraph_node *node;
1613 FOR_EACH_DEFINED_FUNCTION (node)
1614 if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl)
1615 || DECL_STATIC_DESTRUCTOR (node->symbol.decl))
1616 record_cdtor_fn (node);
1617 build_cdtor_fns ();
1618 static_ctors.release ();
1619 static_dtors.release ();
1620 return 0;
1621 }
1622
1623 /* Perform the pass when we have no ctors/dtors support
1624 or at LTO time to merge multiple constructors into single
1625 function. */
1626
1627 static bool
1628 gate_ipa_cdtor_merge (void)
1629 {
1630 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1631 }
1632
/* Pass descriptor for constructor/destructor merging.  Pure in-memory
   transformation: no summary streaming hooks are needed.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,			        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};