re PR tree-optimization/46469 (ICE: verify_cgraph_node failed: inline clone is needed...
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order.  Return the number of nodes stored; the caller must
   provide room for at least cgraph_n_nodes entries.

   Iteration state is kept in each node's AUX field:
     - NULL      : node not visited yet,
     - &last     : all caller edges of the node already scanned,
     - otherwise : AUX points to the next caller edge to process.
   AUX is cleared on every node before returning.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  /* LAST is never dereferenced; its address merely serves as the
     "all callers processed" sentinel stored in node->aux.  */
  struct cgraph_edge *edge, last;
  int pass;

  /* Explicit DFS stack, big enough to hold every callgraph node.  */
  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 roots the DFS only at nodes that can head a component (address
     not taken, not inlined, not only called directly); pass 1 then sweeps
     up whatever is still unvisited, e.g. nodes reachable only via cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!node->address_taken
		  && !node->global.inlined_to
		  && !cgraph_only_called_directly_p (node))))
	{
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  /* Advance NODE2's caller cursor before possibly
		     descending into the caller.  */
		  edge = (struct cgraph_edge *) node2->aux;
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      /* First visit of the caller: initialize its cursor,
			 push the current node and descend.  */
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers of NODE2 done: emit it and pop the stack.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
110
111 /* Look for all functions inlined to NODE and update their inlined_to pointers
112 to INLINED_TO. */
113
114 static void
115 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
116 {
117 struct cgraph_edge *e;
118 for (e = node->callees; e; e = e->next_callee)
119 if (e->callee->global.inlined_to)
120 {
121 e->callee->global.inlined_to = inlined_to;
122 update_inlined_to_pointer (e->callee, inlined_to);
123 }
124 }
125
126 /* Add cgraph NODE to queue starting at FIRST.
127
128 The queue is linked via AUX pointers and terminated by pointer to 1.
129 We enqueue nodes at two occasions: when we find them reachable or when we find
130 their bodies needed for further clonning. In the second case we mark them
131 by pointer to 2 after processing so they are re-queue when they become
132 reachable. */
133
134 static void
135 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
136 {
137 /* Node is still in queue; do nothing. */
138 if (node->aux && node->aux != (void *) 2)
139 return;
140 /* Node was already processed as unreachable, re-enqueue
141 only if it became reachable now. */
142 if (node->aux == (void *)2 && !node->reachable)
143 return;
144 node->aux = *first;
145 *first = node;
146 }
147
148 /* Add varpool NODE to queue starting at FIRST. */
149
150 static void
151 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
152 {
153 node->aux = *first;
154 *first = node;
155 }
156
157 /* Process references. */
158
159 static void
160 process_references (struct ipa_ref_list *list,
161 struct cgraph_node **first,
162 struct varpool_node **first_varpool,
163 bool before_inlining_p)
164 {
165 int i;
166 struct ipa_ref *ref;
167 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
168 {
169 if (ref->refered_type == IPA_REF_CGRAPH)
170 {
171 struct cgraph_node *node = ipa_ref_node (ref);
172 if (!node->reachable
173 && node->analyzed
174 && (!DECL_EXTERNAL (node->decl)
175 || before_inlining_p))
176 node->reachable = true;
177 enqueue_cgraph_node (node, first);
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189 }
190
191 /* Return true when function can be marked local. */
192
193 static bool
194 cgraph_local_node_p (struct cgraph_node *node)
195 {
196 return (cgraph_only_called_directly_p (node)
197 && node->analyzed
198 && !DECL_EXTERNAL (node->decl)
199 && !node->local.externally_visible
200 && !node->reachable_from_other_partition
201 && !node->in_other_partition);
202 }
203
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  FILE, when non-NULL,
   receives a dump of what was reclaimed.  Returns true if anything
   changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Worklist heads.  The value 1 is the empty-list sentinel, so that a
     NULL aux pointer can still mean "not queued" (see
     enqueue_cgraph_node for the full aux encoding).  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* The aux fields double as worklist links, so they must start clean.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && !node->global.inlined_to
	&& (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	    /* Keep around virtual functions for possible devirtualization.  */
	    || (before_inlining_p
		&& DECL_VIRTUAL_P (node->decl)
		&& (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))
	    /* Also external functions with address taken are better to stay
	       for indirect inlining.  */
	    || (before_inlining_p
		&& DECL_EXTERNAL (node->decl)
		&& node->address_taken)))
      {
        gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
        gcc_assert (!node->aux);
	node->reachable = false;
      }

  /* Mark variables that are obviously needed.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* Reset the needed-queue links; varpool_mark_needed_node relinks.  */
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (vnode->analyzed
	  && !varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklists, one for cgraph nodes and another for
     varpool nodes, and are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
  	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Mark "processed while unreachable" so the node can be
	     re-queued if it later becomes reachable.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		{
		  if (!e->callee->reachable
		      && node->analyzed
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || before_inlining_p))
		    e->callee->reachable = true;
		  enqueue_cgraph_node (e->callee, &first);
		}
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
	         && !gimple_has_body_p (node->decl))
	    {
	      /* NONINLINE: the clone-of link crosses to a different decl,
		 i.e. this is a clone of a real (non-inline) clone.  */
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
	      	{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* aux set but not reachable: body kept only for cloning — drop
	 analysis results but keep the node.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      /* aux clear: node was never queued, i.e. fully unreachable.  */
      if (!node->aux)
	{
	  struct cgraph_edge *e;
	  bool found = false;
	  int i;
	  struct ipa_ref *ref;

	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  /* See if there is reachable caller.  */
	  for (e = node->callers; e && !found; e = e->next_caller)
	    if (e->caller->reachable)
	      found = true;
	  for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		       && !found); i++)
	    if (ref->refering_type == IPA_REF_CGRAPH
		&& ipa_ref_refering_node (ref)->reachable)
	      found = true;
	    else if (ref->refering_type == IPA_REF_VARPOOL
		     && ipa_ref_refering_varpool_node (ref)->needed)
	      found = true;

	  /* If so, we need to keep node in the callgraph.  */
	  if (found)
	    {
	      if (node->analyzed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->local.inlinable = false;
		      /* Unlink NODE from its clone-tree siblings.  */
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  changed = true;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	    }
	  else
	    {
	      cgraph_remove_node (node);
	      changed = true;
	    }
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
         cloning.  If the function the clone is inlined into is removed, we need
         to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* Any remaining referring ref keeps the flag; the assert documents
	   that only address references can point at a function here.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
531
532 /* Discover variables that have no longer address taken or that are read only
533 and update their flags.
534
535 FIXME: This can not be done in between gimplify and omp_expand since
536 readonly flag plays role on what is shared and what is not. Currently we do
537 this transformation as part of whole program visibility and re-do at
538 ipa-reference pass (to take into account clonning), but it would
539 make sense to do it before early optimizations. */
540
541 void
542 ipa_discover_readonly_nonaddressable_vars (void)
543 {
544 struct varpool_node *vnode;
545 if (dump_file)
546 fprintf (dump_file, "Clearing variable flags:");
547 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
548 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
549 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
550 {
551 bool written = false;
552 bool address_taken = false;
553 int i;
554 struct ipa_ref *ref;
555 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
556 && (!written || !address_taken); i++)
557 switch (ref->use)
558 {
559 case IPA_REF_ADDR:
560 address_taken = true;
561 break;
562 case IPA_REF_LOAD:
563 break;
564 case IPA_REF_STORE:
565 written = true;
566 break;
567 }
568 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
569 {
570 if (dump_file)
571 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
572 TREE_ADDRESSABLE (vnode->decl) = 0;
573 }
574 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
575 /* Making variable in explicit section readonly can cause section
576 type conflict.
577 See e.g. gcc.c-torture/compile/pr23237.c */
578 && DECL_SECTION_NAME (vnode->decl) == NULL)
579 {
580 if (dump_file)
581 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
582 TREE_READONLY (vnode->decl) = 1;
583 }
584 }
585 if (dump_file)
586 fprintf (dump_file, "\n");
587 }
588
589 /* Return true when there is a reference to node and it is not vtable. */
590 static bool
591 cgraph_address_taken_from_non_vtable_p (struct cgraph_node *node)
592 {
593 int i;
594 struct ipa_ref *ref;
595 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
596 {
597 struct varpool_node *node;
598 if (ref->refered_type == IPA_REF_CGRAPH)
599 return true;
600 node = ipa_ref_varpool_node (ref);
601 if (!DECL_VIRTUAL_P (node->decl))
602 return true;
603 }
604 return false;
605 }
606
607 /* COMDAT functions must be shared only if they have address taken,
608 otherwise we can produce our own private implementation with
609 -fwhole-program.
610 Return true when turning COMDAT functoin static can not lead to wrong
611 code when the resulting object links with a library defining same COMDAT.
612
613 Virtual functions do have their addresses taken from the vtables,
614 but in C++ there is no way to compare their addresses for equality. */
615
616 bool
617 cgraph_comdat_can_be_unshared_p (struct cgraph_node *node)
618 {
619 if ((cgraph_address_taken_from_non_vtable_p (node)
620 && !DECL_VIRTUAL_P (node->decl))
621 || !node->analyzed)
622 return false;
623 if (node->same_comdat_group)
624 {
625 struct cgraph_node *next;
626
627 /* If more than one function is in the same COMDAT group, it must
628 be shared even if just one function in the comdat group has
629 address taken. */
630 for (next = node->same_comdat_group;
631 next != node; next = next->same_comdat_group)
632 if (cgraph_address_taken_from_non_vtable_p (node)
633 && !DECL_VIRTUAL_P (next->decl))
634 return false;
635 }
636 return true;
637 }
638
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is the -fwhole-program setting; ALIASED is true when some
   alias pair targets NODE, in which case no localization is attempted.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  struct cgraph_node *alias;
  if (!node->local.finalized)
    return false;
  /* Non-COMDAT functions that are either non-public or external-only
     declarations are never externally visible from this TU.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* Do not try to localize built-in functions yet.  One of problems is that we
     end up mangling their asm for WHOPR that makes it impossible to call them
     using the implicit built-in declarations anymore.  Similarly this enables
     us to remove them as unreachable before actual calls may appear during
     expansion or folding.  */
  if (DECL_BUILT_IN (node->decl))
    return true;

  /* FIXME: We get wrong symbols with asm aliases in callgraph and LTO.
     This is because very little of code knows that assembler name needs to
     mangled.  Avoid touching declarations with user asm name set to mask
     some of the problems.  */
  if (DECL_ASSEMBLER_NAME_SET_P (node->decl)
      && IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl))[0]=='*')
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport", DECL_ATTRIBUTES (node->decl)))
    return true;
  /* When doing LTO or whole program, we can bring COMDAT functions static.
     This improves code quality and we know we will duplicate them at most twice
     (in the case that we are not using plugin and link with object file
      implementing same COMDAT)  */
  if ((in_lto_p || whole_program)
      && DECL_COMDAT (node->decl)
      && cgraph_comdat_can_be_unshared_p (node))
    return false;

  /* See if we have linker information about symbol not being used or
     if we need to make guess based on the declaration.

     Even if the linker claims the symbol is unused, never bring internal
     symbols that are declared by user as used or externally visible.
     This is needed for i.e. references from asm statements.  */
  for (alias = node->same_body; alias; alias = alias->next)
    if (alias->resolution != LDPR_PREVAILING_DEF_IRONLY)
      break;
  if (!alias && node->resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;
  else if (!whole_program)
    return true;

  /* Keep main externally visible.  */
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;

  return false;
}
719
720 /* Return true when variable VNODE should be considered externally visible. */
721
722 static bool
723 varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
724 {
725 struct varpool_node *alias;
726 if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
727 return false;
728
729 /* Do not even try to be smart about aliased nodes. Until we properly
730 represent everything by same body alias, these are just evil. */
731 if (aliased)
732 return true;
733
734 /* If linker counts on us, we must preserve the function. */
735 if (varpool_used_from_object_file_p (vnode))
736 return true;
737
738 /* FIXME: We get wrong symbols with asm aliases in callgraph and LTO.
739 This is because very little of code knows that assembler name needs to
740 mangled. Avoid touching declarations with user asm name set to mask
741 some of the problems. */
742 if (DECL_ASSEMBLER_NAME_SET_P (vnode->decl)
743 && IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (vnode->decl))[0]=='*')
744 return true;
745
746 if (DECL_PRESERVE_P (vnode->decl))
747 return true;
748 if (lookup_attribute ("externally_visible",
749 DECL_ATTRIBUTES (vnode->decl)))
750 return true;
751 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
752 && lookup_attribute ("dllexport",
753 DECL_ATTRIBUTES (vnode->decl)))
754 return true;
755
756 /* See if we have linker information about symbol not being used or
757 if we need to make guess based on the declaration.
758
759 Even if the linker clams the symbol is unused, never bring internal
760 symbols that are declared by user as used or externally visible.
761 This is needed for i.e. references from asm statements. */
762 if (varpool_used_from_object_file_p (vnode))
763 return true;
764 for (alias = vnode->extra_name; alias; alias = alias->next)
765 if (alias->resolution != LDPR_PREVAILING_DEF_IRONLY)
766 break;
767 if (!alias && vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
768 return false;
769
770 /* As a special case, the COMDAT virutal tables can be unshared.
771 In LTO mode turn vtables into static variables. The variable is readonly,
772 so this does not enable more optimization, but referring static var
773 is faster for dynamic linking. Also this match logic hidding vtables
774 from LTO symbol tables. */
775 if ((in_lto_p || flag_whole_program)
776 && !vnode->force_output
777 && DECL_COMDAT (vnode->decl) && DECL_VIRTUAL_P (vnode->decl))
778 return false;
779
780 /* When doing link time optimizations, hidden symbols become local. */
781 if (in_lto_p
782 && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
783 || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
784 /* Be sure that node is defined in IR file, not in other object
785 file. In that case we don't set used_from_other_object_file. */
786 && vnode->finalized)
787 ;
788 else if (!flag_whole_program)
789 return true;
790
791 /* Do not attempt to privatize COMDATS by default.
792 This would break linking with C++ libraries sharing
793 inline definitions.
794
795 FIXME: We can do so for readonly vars with no address taken and
796 possibly also for vtables since no direct pointer comparsion is done.
797 It might be interesting to do so to reduce linking overhead. */
798 if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
799 return true;
800 return false;
801 }
802
803 /* Dissolve the same_comdat_group list in which NODE resides. */
804
805 static void
806 dissolve_same_comdat_group_list (struct cgraph_node *node)
807 {
808 struct cgraph_node *n = node, *next;
809 do
810 {
811 next = n->same_comdat_group;
812 n->same_comdat_group = NULL;
813 n = next;
814 }
815 while (n != node);
816 }
817
818 /* Mark visibility of all functions.
819
820 A local function is one whose calls can occur only in the current
821 compilation unit and all its calls are explicit, so we can change
822 its calling convention. We simply mark all static functions whose
823 address is not taken as local.
824
825 We also change the TREE_PUBLIC flag of all declarations that are public
826 in language point of view but we want to overwrite this default
827 via visibilities for the backend point of view. */
828
829 static unsigned int
830 function_and_variable_visibility (bool whole_program)
831 {
832 struct cgraph_node *node;
833 struct varpool_node *vnode;
834 struct pointer_set_t *aliased_nodes = pointer_set_create ();
835 struct pointer_set_t *aliased_vnodes = pointer_set_create ();
836 unsigned i;
837 alias_pair *p;
838
839 /* Discover aliased nodes. */
840 FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
841 {
842 if (dump_file)
843 fprintf (dump_file, "Alias %s->%s",
844 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
845 IDENTIFIER_POINTER (p->target));
846
847 if ((node = cgraph_node_for_asm (p->target)) != NULL
848 && !DECL_EXTERNAL (node->decl))
849 {
850 /* Weakrefs alias symbols from other compilation unit. In the case
851 the destination of weakref became available because of LTO, we must
852 mark it as needed. */
853 if (in_lto_p
854 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl))
855 && !node->needed)
856 cgraph_mark_needed_node (node);
857 gcc_assert (node->needed);
858 pointer_set_insert (aliased_nodes, node);
859 if (dump_file)
860 fprintf (dump_file, " node %s/%i",
861 cgraph_node_name (node), node->uid);
862 }
863 else if ((vnode = varpool_node_for_asm (p->target)) != NULL
864 && !DECL_EXTERNAL (vnode->decl))
865 {
866 /* Weakrefs alias symbols from other compilation unit. In the case
867 the destination of weakref became available because of LTO, we must
868 mark it as needed. */
869 if (in_lto_p
870 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl))
871 && !vnode->needed)
872 varpool_mark_needed_node (vnode);
873 gcc_assert (vnode->needed);
874 pointer_set_insert (aliased_vnodes, vnode);
875 if (dump_file)
876 fprintf (dump_file, " varpool node %s",
877 varpool_node_name (vnode));
878 }
879 if (dump_file)
880 fprintf (dump_file, "\n");
881 }
882
883 for (node = cgraph_nodes; node; node = node->next)
884 {
885 int flags = flags_from_decl_or_type (node->decl);
886
887 /* Optimize away PURE and CONST constructors and destructors. */
888 if (optimize
889 && (flags & (ECF_CONST | ECF_PURE))
890 && !(flags & ECF_LOOPING_CONST_OR_PURE))
891 {
892 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
893 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
894 }
895
896 /* Frontends and alias code marks nodes as needed before parsing is finished.
897 We may end up marking as node external nodes where this flag is meaningless
898 strip it. */
899 if (node->needed
900 && (DECL_EXTERNAL (node->decl) || !node->analyzed))
901 node->needed = 0;
902
903 /* C++ FE on lack of COMDAT support create local COMDAT functions
904 (that ought to be shared but can not due to object format
905 limitations). It is neccesary to keep the flag to make rest of C++ FE
906 happy. Clear the flag here to avoid confusion in middle-end. */
907 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
908 DECL_COMDAT (node->decl) = 0;
909 /* For external decls stop tracking same_comdat_group, it doesn't matter
910 what comdat group they are in when they won't be emitted in this TU,
911 and simplifies later passes. */
912 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
913 {
914 #ifdef ENABLE_CHECKING
915 struct cgraph_node *n;
916
917 for (n = node->same_comdat_group;
918 n != node;
919 n = n->same_comdat_group)
920 /* If at least one of same comdat group functions is external,
921 all of them have to be, otherwise it is a front-end bug. */
922 gcc_assert (DECL_EXTERNAL (n->decl));
923 #endif
924 dissolve_same_comdat_group_list (node);
925 }
926 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
927 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
928 if (cgraph_externally_visible_p (node, whole_program,
929 pointer_set_contains (aliased_nodes,
930 node)))
931 {
932 gcc_assert (!node->global.inlined_to);
933 node->local.externally_visible = true;
934 }
935 else
936 node->local.externally_visible = false;
937 if (!node->local.externally_visible && node->analyzed
938 && !DECL_EXTERNAL (node->decl))
939 {
940 struct cgraph_node *alias;
941 gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
942 cgraph_make_decl_local (node->decl);
943 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
944 for (alias = node->same_body; alias; alias = alias->next)
945 cgraph_make_decl_local (alias->decl);
946 if (node->same_comdat_group)
947 /* cgraph_externally_visible_p has already checked all other nodes
948 in the group and they will all be made local. We need to
949 dissolve the group at once so that the predicate does not
950 segfault though. */
951 dissolve_same_comdat_group_list (node);
952 }
953 node->local.local = cgraph_local_node_p (node);
954 }
955 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
956 {
957 /* weak flag makes no sense on local variables. */
958 gcc_assert (!DECL_WEAK (vnode->decl)
959 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
960 /* In several cases declarations can not be common:
961
962 - when declaration has initializer
963 - when it is in weak
964 - when it has specific section
965 - when it resides in non-generic address space.
966 - if declaration is local, it will get into .local common section
967 so common flag is not needed. Frontends still produce these in
968 certain cases, such as for:
969
970 static int a __attribute__ ((common))
971
972 Canonicalize things here and clear the redundant flag. */
973 if (DECL_COMMON (vnode->decl)
974 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
975 || (DECL_INITIAL (vnode->decl)
976 && DECL_INITIAL (vnode->decl) != error_mark_node)
977 || DECL_WEAK (vnode->decl)
978 || DECL_SECTION_NAME (vnode->decl) != NULL
979 || ! (ADDR_SPACE_GENERIC_P
980 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
981 DECL_COMMON (vnode->decl) = 0;
982 }
983 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
984 {
985 if (!vnode->finalized)
986 continue;
987 if (vnode->needed
988 && varpool_externally_visible_p
989 (vnode,
990 pointer_set_contains (aliased_vnodes, vnode)))
991 vnode->externally_visible = true;
992 else
993 vnode->externally_visible = false;
994 if (!vnode->externally_visible)
995 {
996 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
997 cgraph_make_decl_local (vnode->decl);
998 vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
999 }
1000 gcc_assert (TREE_STATIC (vnode->decl));
1001 }
1002 pointer_set_destroy (aliased_nodes);
1003 pointer_set_destroy (aliased_vnodes);
1004
1005 if (dump_file)
1006 {
1007 fprintf (dump_file, "\nMarking local functions:");
1008 for (node = cgraph_nodes; node; node = node->next)
1009 if (node->local.local)
1010 fprintf (dump_file, " %s", cgraph_node_name (node));
1011 fprintf (dump_file, "\n\n");
1012 fprintf (dump_file, "\nMarking externally visible functions:");
1013 for (node = cgraph_nodes; node; node = node->next)
1014 if (node->local.externally_visible)
1015 fprintf (dump_file, " %s", cgraph_node_name (node));
1016 fprintf (dump_file, "\n\n");
1017 fprintf (dump_file, "\nMarking externally visible variables:");
1018 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1019 if (vnode->externally_visible)
1020 fprintf (dump_file, " %s", varpool_node_name (vnode));
1021 fprintf (dump_file, "\n\n");
1022 }
1023 cgraph_function_flags_ready = true;
1024 return 0;
1025 }
1026
1027 /* Local function pass handling visibilities. This happens before LTO streaming
1028 so in particular -fwhole-program should be ignored at this level. */
1029
1030 static unsigned int
1031 local_function_and_variable_visibility (void)
1032 {
1033 return function_and_variable_visibility (flag_whole_program && !flag_lto);
1034 }
1035
/* Pass descriptor for the early, per-translation-unit visibility pass
   (executed via local_function_and_variable_visibility above).  */
struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
1055
1056 /* Do not re-run on ltrans stage. */
1057
1058 static bool
1059 gate_whole_program_function_and_variable_visibility (void)
1060 {
1061 return !flag_ltrans;
1062 }
1063
/* Bring functions local at LTO time with -fwhole-program.  */
1065
static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Recompute visibilities, this time honoring -fwhole-program.  */
  function_and_variable_visibility (flag_whole_program);

  /* Functions and variables that stay externally visible (and are not
     comdat) must be output; mark them needed so unreachable-node removal
     keeps them.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  /* Discovering readonly/nonaddressable variables is an optimization;
     only do it when optimizing.  */
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
1093
/* Pass descriptor for the link-time (-fwhole-program) visibility pass;
   gated off during the LTRANS stage.  */
struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
1122
1123 /* Hash a cgraph node set element. */
1124
1125 static hashval_t
1126 hash_cgraph_node_set_element (const void *p)
1127 {
1128 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
1129 return htab_hash_pointer (element->node);
1130 }
1131
1132 /* Compare two cgraph node set elements. */
1133
1134 static int
1135 eq_cgraph_node_set_element (const void *p1, const void *p2)
1136 {
1137 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
1138 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
1139
1140 return e1->node == e2->node;
1141 }
1142
1143 /* Create a new cgraph node set. */
1144
1145 cgraph_node_set
1146 cgraph_node_set_new (void)
1147 {
1148 cgraph_node_set new_node_set;
1149
1150 new_node_set = ggc_alloc_cgraph_node_set_def ();
1151 new_node_set->hashtab = htab_create_ggc (10,
1152 hash_cgraph_node_set_element,
1153 eq_cgraph_node_set_element,
1154 NULL);
1155 new_node_set->nodes = NULL;
1156 return new_node_set;
1157 }
1158
1159 /* Add cgraph_node NODE to cgraph_node_set SET. */
1160
void
cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;
  cgraph_node_set_element element;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  /* Adding a node already in SET is a no-op; just verify the hash table
     and the node vector agree.  */
  if (*slot != HTAB_EMPTY_ENTRY)
    {
      element = (cgraph_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  */
  element = ggc_alloc_cgraph_node_set_element_def ();
  element->node = node;
  /* The element records its position in SET->nodes so removal can swap it
     with the last vector element in constant time.  */
  element->index = VEC_length (cgraph_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
}
1189
1190 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1191
void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* Removing a node that is not in SET is a no-op.  */
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1229
1230 /* Find NODE in SET and return an iterator to it if found. A null iterator
1231 is returned if NODE is not in SET. */
1232
1233 cgraph_node_set_iterator
1234 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1235 {
1236 void **slot;
1237 struct cgraph_node_set_element_def dummy;
1238 cgraph_node_set_element element;
1239 cgraph_node_set_iterator csi;
1240
1241 dummy.node = node;
1242 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1243 if (slot == NULL)
1244 csi.index = (unsigned) ~0;
1245 else
1246 {
1247 element = (cgraph_node_set_element) *slot;
1248 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1249 == node);
1250 csi.index = element->index;
1251 }
1252 csi.set = set;
1253
1254 return csi;
1255 }
1256
1257 /* Dump content of SET to file F. */
1258
1259 void
1260 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1261 {
1262 cgraph_node_set_iterator iter;
1263
1264 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1265 {
1266 struct cgraph_node *node = csi_node (iter);
1267 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1268 }
1269 fprintf (f, "\n");
1270 }
1271
1272 /* Dump content of SET to stderr. */
1273
1274 DEBUG_FUNCTION void
1275 debug_cgraph_node_set (cgraph_node_set set)
1276 {
1277 dump_cgraph_node_set (stderr, set);
1278 }
1279
1280 /* Hash a varpool node set element. */
1281
1282 static hashval_t
1283 hash_varpool_node_set_element (const void *p)
1284 {
1285 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1286 return htab_hash_pointer (element->node);
1287 }
1288
1289 /* Compare two varpool node set elements. */
1290
1291 static int
1292 eq_varpool_node_set_element (const void *p1, const void *p2)
1293 {
1294 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1295 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1296
1297 return e1->node == e2->node;
1298 }
1299
1300 /* Create a new varpool node set. */
1301
1302 varpool_node_set
1303 varpool_node_set_new (void)
1304 {
1305 varpool_node_set new_node_set;
1306
1307 new_node_set = ggc_alloc_varpool_node_set_def ();
1308 new_node_set->hashtab = htab_create_ggc (10,
1309 hash_varpool_node_set_element,
1310 eq_varpool_node_set_element,
1311 NULL);
1312 new_node_set->nodes = NULL;
1313 return new_node_set;
1314 }
1315
1316 /* Add varpool_node NODE to varpool_node_set SET. */
1317
void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_element element;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  /* Adding a node already in SET is a no-op; just verify the hash table
     and the node vector agree.  */
  if (*slot != HTAB_EMPTY_ENTRY)
    {
      element = (varpool_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (varpool_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  */
  element = ggc_alloc_varpool_node_set_element_def ();
  element->node = node;
  /* The element records its position in SET->nodes so removal can swap it
     with the last vector element in constant time.  */
  element->index = VEC_length (varpool_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
}
1346
1347 /* Remove varpool_node NODE from varpool_node_set SET. */
1348
void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* Removing a node that is not in SET is a no-op.  */
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1386
1387 /* Find NODE in SET and return an iterator to it if found. A null iterator
1388 is returned if NODE is not in SET. */
1389
1390 varpool_node_set_iterator
1391 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1392 {
1393 void **slot;
1394 struct varpool_node_set_element_def dummy;
1395 varpool_node_set_element element;
1396 varpool_node_set_iterator vsi;
1397
1398 dummy.node = node;
1399 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1400 if (slot == NULL)
1401 vsi.index = (unsigned) ~0;
1402 else
1403 {
1404 element = (varpool_node_set_element) *slot;
1405 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1406 == node);
1407 vsi.index = element->index;
1408 }
1409 vsi.set = set;
1410
1411 return vsi;
1412 }
1413
1414 /* Dump content of SET to file F. */
1415
1416 void
1417 dump_varpool_node_set (FILE *f, varpool_node_set set)
1418 {
1419 varpool_node_set_iterator iter;
1420
1421 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1422 {
1423 struct varpool_node *node = vsi_node (iter);
1424 fprintf (f, " %s", varpool_node_name (node));
1425 }
1426 fprintf (f, "\n");
1427 }
1428
1429 /* Dump content of SET to stderr. */
1430
1431 DEBUG_FUNCTION void
1432 debug_varpool_node_set (varpool_node_set set)
1433 {
1434 dump_varpool_node_set (stderr, set);
1435 }
1436
1437
1438 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1439
static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;

  /* First sweep: walk the callgraph in reverse postorder (callers before
     callees) and propagate frequencies for local functions.  When a node's
     frequency changed, flag its local callees via the AUX pointer so a
     later sweep revisits them.  AUX is cleared as each node is passed.  */
  order_pos = cgraph_postorder (order);
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
	{
	  for (e = order[i]->callees; e; e = e->next_callee)
	    if (e->callee->local.local && !e->callee->aux)
	      {
		something_changed = true;
		e->callee->aux = (void *)1;
	      }
	}
      order[i]->aux = NULL;
    }

  /* Iterate to a fixed point: re-propagate only nodes flagged during the
     previous sweep until no further change is seen.  */
  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
	{
	  if (order[i]->aux && cgraph_propagate_frequency (order[i]))
	    {
	      for (e = order[i]->callees; e; e = e->next_callee)
		if (e->callee->local.local && !e->callee->aux)
		  {
		    something_changed = true;
		    e->callee->aux = (void *)1;
		  }
	    }
	  order[i]->aux = NULL;
	}
    }
  free (order);
  return 0;
}
1484
1485 static bool
1486 gate_ipa_profile (void)
1487 {
1488 return flag_ipa_profile;
1489 }
1490
/* Pass descriptor for the simple IPA profile-propagation pass above.  */
struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",			/* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};
1518
1519 /* Generate and emit a static constructor or destructor. WHICH must
1520 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1521 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1522 initialization priority for this constructor or destructor.
1523
   FINAL specifies whether the externally visible name for collect2 should
   be produced. */
1526
1527 static void
1528 cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
1529 {
1530 static int counter = 0;
1531 char which_buf[16];
1532 tree decl, name, resdecl;
1533
1534 /* The priority is encoded in the constructor or destructor name.
1535 collect2 will sort the names and arrange that they are called at
1536 program startup. */
1537 if (final)
1538 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1539 else
1540 /* Proudce sane name but one not recognizable by collect2, just for the
1541 case we fail to inline the function. */
1542 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
1543 name = get_file_function_name (which_buf);
1544
1545 decl = build_decl (input_location, FUNCTION_DECL, name,
1546 build_function_type_list (void_type_node, NULL_TREE));
1547 current_function_decl = decl;
1548
1549 resdecl = build_decl (input_location,
1550 RESULT_DECL, NULL_TREE, void_type_node);
1551 DECL_ARTIFICIAL (resdecl) = 1;
1552 DECL_RESULT (decl) = resdecl;
1553 DECL_CONTEXT (resdecl) = decl;
1554
1555 allocate_struct_function (decl, false);
1556
1557 TREE_STATIC (decl) = 1;
1558 TREE_USED (decl) = 1;
1559 DECL_ARTIFICIAL (decl) = 1;
1560 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1561 DECL_SAVED_TREE (decl) = body;
1562 if (!targetm.have_ctors_dtors && final)
1563 {
1564 TREE_PUBLIC (decl) = 1;
1565 DECL_PRESERVE_P (decl) = 1;
1566 }
1567 DECL_UNINLINABLE (decl) = 1;
1568
1569 DECL_INITIAL (decl) = make_node (BLOCK);
1570 TREE_USED (DECL_INITIAL (decl)) = 1;
1571
1572 DECL_SOURCE_LOCATION (decl) = input_location;
1573 cfun->function_end_locus = input_location;
1574
1575 switch (which)
1576 {
1577 case 'I':
1578 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1579 decl_init_priority_insert (decl, priority);
1580 break;
1581 case 'D':
1582 DECL_STATIC_DESTRUCTOR (decl) = 1;
1583 decl_fini_priority_insert (decl, priority);
1584 break;
1585 default:
1586 gcc_unreachable ();
1587 }
1588
1589 gimplify_function_tree (decl);
1590
1591 cgraph_add_new_function (decl, false);
1592
1593 set_cfun (NULL);
1594 current_function_decl = NULL;
1595 }
1596
1597 /* Generate and emit a static constructor or destructor. WHICH must
1598 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1599 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1600 initialization priority for this constructor or destructor. */
1601
1602 void
1603 cgraph_build_static_cdtor (char which, tree body, int priority)
1604 {
1605 cgraph_build_static_cdtor_1 (which, body, priority, false);
1606 }
1607
/* A vector of FUNCTION_DECLs declared as static constructors.
   Collected by record_cdtor_fn and consumed/freed by ipa_cdtor_merge.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.
   Collected by record_cdtor_fn and consumed/freed by ipa_cdtor_merge.  */
static VEC(tree, heap) *static_dtors;
1612
1613 /* When target does not have ctors and dtors, we call all constructor
1614 and destructor by special initialization/destruction function
1615 recognized by collect2.
1616
1617 When we are going to build this function, collect all constructors and
1618 destructors and turn them into normal functions. */
1619
static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    VEC_safe_push (tree, heap, static_ctors, node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    VEC_safe_push (tree, heap, static_dtors, node->decl);
  /* The recorded function will be called from a generated wrapper; make
     sure the inliner is allowed to inline it there regardless of size
     limits.  */
  node = cgraph_node (node->decl);
  node->local.disregard_inline_limits = 1;
}
1630
1631 /* Define global constructors/destructor functions for the CDTORS, of
1632 which they are LEN. The CDTORS are sorted by initialization
1633 priority. If CTOR_P is true, these are constructors; otherwise,
1634 they are destructors. */
1635
static void
build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
{
  size_t i,j;
  size_t len = VEC_length (tree, cdtors);

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Advance J past the batch of cdtors sharing the priority of the
	 one at index I (CDTORS is sorted by priority by the caller).  */
      j = i;
      do
	{
	  priority_type p;
	  fn = VEC_index (tree, cdtors, j);
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = VEC_index (tree, cdtors, i);
	  call = build_call_expr (fn, 0);
	  /* Clear the cdtor flag: the call is made explicitly from the
	     merged wrapper built below instead.  */
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
1695
1696 /* Comparison function for qsort. P1 and P2 are actually of type
1697 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1698 used to determine the sort order. */
1699
static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Within one priority, constructors are
       executed in backwards DECL_UID order so that LTO initializes
       libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}
1722
1723 /* Comparison function for qsort. P1 and P2 are actually of type
1724 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1725 used to determine the sort order. */
1726
1727 static int
1728 compare_dtor (const void *p1, const void *p2)
1729 {
1730 tree f1;
1731 tree f2;
1732 int priority1;
1733 int priority2;
1734
1735 f1 = *(const tree *)p1;
1736 f2 = *(const tree *)p2;
1737 priority1 = DECL_FINI_PRIORITY (f1);
1738 priority2 = DECL_FINI_PRIORITY (f2);
1739
1740 if (priority1 < priority2)
1741 return -1;
1742 else if (priority1 > priority2)
1743 return 1;
1744 else
1745 /* Ensure a stable sort. */
1746 return DECL_UID (f1) - DECL_UID (f2);
1747 }
1748
1749 /* Generate functions to call static constructors and destructors
1750 for targets that do not support .ctors/.dtors sections. These
1751 functions have magic names which are detected by collect2. */
1752
static void
build_cdtor_fns (void)
{
  /* Constructors first, then destructors; each list is sorted by priority
     before the wrapper functions are generated.  */
  if (!VEC_empty (tree, static_ctors))
    {
      /* When the target supports ctors/dtors natively, merging is only
	 done at LTO time.  */
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      VEC_qsort (tree, static_ctors, compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      VEC_qsort (tree, static_dtors, compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
1770
1771 /* Look for constructors and destructors and produce function calling them.
1772 This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
1774 constructors/destructors into single function to improve code locality and
1775 reduce size. */
1776
1777 static unsigned int
1778 ipa_cdtor_merge (void)
1779 {
1780 struct cgraph_node *node;
1781 for (node = cgraph_nodes; node; node = node->next)
1782 if (node->analyzed
1783 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1784 || DECL_STATIC_DESTRUCTOR (node->decl)))
1785 record_cdtor_fn (node);
1786 build_cdtor_fns ();
1787 VEC_free (tree, heap, static_ctors);
1788 VEC_free (tree, heap, static_dtors);
1789 return 0;
1790 }
1791
1792 /* Perform the pass when we have no ctors/dtors support
1793 or at LTO time to merge multiple constructors into single
1794 function. */
1795
1796 static bool
1797 gate_ipa_cdtor_merge (void)
1798 {
1799 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1800 }
1801
/* Pass descriptor for the constructor/destructor merging pass above.  */
struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};