/* gcc/ipa.c  [gcc.git]
   Associated changelog fragment: cgraphunit.c
   (process_function_and_variable_attributes): Mark dllexport-ed function.  */
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order and return the number of nodes stored.

   ORDER must have room for every callgraph node (cgraph_n_nodes entries).
   The walk is an iterative depth-first search over *caller* edges, so a
   function is emitted only after all of its callers were emitted.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  /* The address of LAST serves as a sentinel stored in ->aux meaning
     "caller list fully scanned"; otherwise ->aux holds the next caller
     edge still to be visited.  */
  struct cgraph_edge *edge, last;
  int pass;

  /* Explicit DFS stack; at most one slot per callgraph node.  */
  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 starts DFS walks only from nodes that cannot be reached via
     call edges (address not taken, not inlined, not only directly
     called); pass 1 then sweeps up whatever remains, i.e. nodes that
     sit on caller cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!node->address_taken
		  && !node->global.inlined_to
		  && !cgraph_only_called_directly_p (node))))
	{
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  /* Advance the cursor before possibly descending.  */
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      /* Unvisited caller: push the current node and
			 descend into the caller.  */
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers were visited: emit the node into ORDER and
		 resume the walk of its most recent descendant.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  /* Leave ->aux clear for other callgraph users.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
110
111 /* Look for all functions inlined to NODE and update their inlined_to pointers
112 to INLINED_TO. */
113
114 static void
115 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
116 {
117 struct cgraph_edge *e;
118 for (e = node->callees; e; e = e->next_callee)
119 if (e->callee->global.inlined_to)
120 {
121 e->callee->global.inlined_to = inlined_to;
122 update_inlined_to_pointer (e->callee, inlined_to);
123 }
124 }
125
126 /* Add cgraph NODE to queue starting at FIRST.
127
128 The queue is linked via AUX pointers and terminated by pointer to 1.
129 We enqueue nodes at two occasions: when we find them reachable or when we find
130 their bodies needed for further clonning. In the second case we mark them
131 by pointer to 2 after processing so they are re-queue when they become
132 reachable. */
133
134 static void
135 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
136 {
137 /* Node is still in queue; do nothing. */
138 if (node->aux && node->aux != (void *) 2)
139 return;
140 /* Node was already processed as unreachable, re-enqueue
141 only if it became reachable now. */
142 if (node->aux == (void *)2 && !node->reachable)
143 return;
144 node->aux = *first;
145 *first = node;
146 }
147
148 /* Add varpool NODE to queue starting at FIRST. */
149
150 static void
151 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
152 {
153 node->aux = *first;
154 *first = node;
155 }
156
157 /* Process references. */
158
159 static void
160 process_references (struct ipa_ref_list *list,
161 struct cgraph_node **first,
162 struct varpool_node **first_varpool,
163 bool before_inlining_p)
164 {
165 int i;
166 struct ipa_ref *ref;
167 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
168 {
169 if (ref->refered_type == IPA_REF_CGRAPH)
170 {
171 struct cgraph_node *node = ipa_ref_node (ref);
172 if (!node->reachable
173 && node->analyzed
174 && (!DECL_EXTERNAL (node->decl)
175 || before_inlining_p))
176 node->reachable = true;
177 enqueue_cgraph_node (node, first);
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189 }
190
191 /* Return true when function can be marked local. */
192
193 static bool
194 cgraph_local_node_p (struct cgraph_node *node)
195 {
196 return (cgraph_only_called_directly_p (node)
197 && node->analyzed
198 && !DECL_EXTERNAL (node->decl)
199 && !node->local.externally_visible
200 && !node->reachable_from_other_partition
201 && !node->in_other_partition);
202 }
203
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions have been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.
   Return true if anything was removed or changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Worklist heads.  The value 1 terminates the list because NULL in
     ->aux means "not enqueued" (see enqueue_cgraph_node).  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* ->aux must be free so it can carry the worklist links.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && !node->global.inlined_to
	&& (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	    /* Keep around virtual functions for possible devirtualization.  */
	    || (before_inlining_p
		&& DECL_VIRTUAL_P (node->decl)
		&& (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))
	    /* Also external functions with address taken are better to stay
	       for indirect inlining.  */
	    || (before_inlining_p
		&& DECL_EXTERNAL (node->decl)
		&& node->address_taken)))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }

  /* Mark variables that are obviously needed.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (vnode->analyzed
	  && !varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklist, one for cgraph nodes other for varpools and
     are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Mark "processed while unreachable" so enqueue_cgraph_node can
	     re-queue the node should it later become reachable.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		{
		  if (!e->callee->reachable
		      && node->analyzed
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || before_inlining_p))
		    e->callee->reachable = true;
		  enqueue_cgraph_node (e->callee, &first);
		}
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
		 && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* Enqueued but never marked reachable: body kept only for clone
	 materialization; drop edges and analysis, retain the body.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      if (!node->aux)
	{
	  struct cgraph_edge *e;
	  bool found = false;
	  int i;
	  struct ipa_ref *ref;

	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  /* See if there is reachable caller.  */
	  for (e = node->callers; e && !found; e = e->next_caller)
	    if (e->caller->reachable)
	      found = true;
	  for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		       && !found); i++)
	    if (ref->refering_type == IPA_REF_CGRAPH
		&& ipa_ref_refering_node (ref)->reachable)
	      found = true;
	    else if (ref->refering_type == IPA_REF_VARPOOL
		     && ipa_ref_refering_varpool_node (ref)->needed)
	      found = true;

	  /* If so, we need to keep node in the callgraph.  */
	  if (found)
	    {
	      if (node->analyzed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->local.inlinable = false;
		      /* Unlink NODE from the clone tree.  */
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
#ifdef ENABLE_CHECKING
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
#endif
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  changed = true;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	    }
	  else
	    {
	      cgraph_remove_node (node);
	      changed = true;
	    }
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* NOTE(review): this loop stops at the very first reference and
	   asserts it is IPA_REF_ADDR, i.e. it presumes every remaining
	   reference to an address-taken function is an address use —
	   confirm against the ipa-ref invariants before relying on it.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
533
534 /* Discover variables that have no longer address taken or that are read only
535 and update their flags.
536
537 FIXME: This can not be done in between gimplify and omp_expand since
538 readonly flag plays role on what is shared and what is not. Currently we do
539 this transformation as part of whole program visibility and re-do at
540 ipa-reference pass (to take into account clonning), but it would
541 make sense to do it before early optimizations. */
542
543 void
544 ipa_discover_readonly_nonaddressable_vars (void)
545 {
546 struct varpool_node *vnode;
547 if (dump_file)
548 fprintf (dump_file, "Clearing variable flags:");
549 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
550 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
551 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
552 {
553 bool written = false;
554 bool address_taken = false;
555 int i;
556 struct ipa_ref *ref;
557 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
558 && (!written || !address_taken); i++)
559 switch (ref->use)
560 {
561 case IPA_REF_ADDR:
562 address_taken = true;
563 break;
564 case IPA_REF_LOAD:
565 break;
566 case IPA_REF_STORE:
567 written = true;
568 break;
569 }
570 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
571 {
572 if (dump_file)
573 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
574 TREE_ADDRESSABLE (vnode->decl) = 0;
575 }
576 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
577 /* Making variable in explicit section readonly can cause section
578 type conflict.
579 See e.g. gcc.c-torture/compile/pr23237.c */
580 && DECL_SECTION_NAME (vnode->decl) == NULL)
581 {
582 if (dump_file)
583 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
584 TREE_READONLY (vnode->decl) = 1;
585 }
586 }
587 if (dump_file)
588 fprintf (dump_file, "\n");
589 }
590
591 /* Return true when there is a reference to node and it is not vtable. */
592 static bool
593 cgraph_address_taken_from_non_vtable_p (struct cgraph_node *node)
594 {
595 int i;
596 struct ipa_ref *ref;
597 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
598 {
599 struct varpool_node *node;
600 if (ref->refered_type == IPA_REF_CGRAPH)
601 return true;
602 node = ipa_ref_varpool_node (ref);
603 if (!DECL_VIRTUAL_P (node->decl))
604 return true;
605 }
606 return false;
607 }
608
609 /* COMDAT functions must be shared only if they have address taken,
610 otherwise we can produce our own private implementation with
611 -fwhole-program.
612 Return true when turning COMDAT functoin static can not lead to wrong
613 code when the resulting object links with a library defining same COMDAT.
614
615 Virtual functions do have their addresses taken from the vtables,
616 but in C++ there is no way to compare their addresses for equality. */
617
618 bool
619 cgraph_comdat_can_be_unshared_p (struct cgraph_node *node)
620 {
621 if ((cgraph_address_taken_from_non_vtable_p (node)
622 && !DECL_VIRTUAL_P (node->decl))
623 || !node->analyzed)
624 return false;
625 if (node->same_comdat_group)
626 {
627 struct cgraph_node *next;
628
629 /* If more than one function is in the same COMDAT group, it must
630 be shared even if just one function in the comdat group has
631 address taken. */
632 for (next = node->same_comdat_group;
633 next != node; next = next->same_comdat_group)
634 if (cgraph_address_taken_from_non_vtable_p (node)
635 && !DECL_VIRTUAL_P (next->decl))
636 return false;
637 }
638 return true;
639 }
640
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is the effective -fwhole-program setting; ALIASED is true
   when NODE is the target of an alias pair.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  struct cgraph_node *alias;
  if (!node->local.finalized)
    return false;
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport", DECL_ATTRIBUTES (node->decl)))
    return true;
  /* When doing LTO or whole program, we can bring COMDAT functions static.
     This improves code quality and we know we will duplicate them at most twice
     (in the case that we are not using plugin and link with object file
     implementing same COMDAT)  */
  if ((in_lto_p || whole_program)
      && DECL_COMDAT (node->decl)
      && cgraph_comdat_can_be_unshared_p (node))
    return false;

  /* See if we have linker information about symbol not being used or
     if we need to make guess based on the declaration.

     Even if the linker claims the symbol is unused, never bring internal
     symbols that are declared by user as used or externally visible.
     This is needed for i.e. references from asm statements.  */
  for (alias = node->same_body; alias; alias = alias->next)
    if (alias->resolution != LDPR_PREVAILING_DEF_IRONLY)
      break;
  if (!alias && node->resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;
  else if (!whole_program)
    return true;

  /* main is always output, keep it visible.  */
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;

  return false;
}
705
706 /* Return true when variable VNODE should be considered externally visible. */
707
708 static bool
709 varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
710 {
711 struct varpool_node *alias;
712 if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
713 return false;
714
715 /* Do not even try to be smart about aliased nodes. Until we properly
716 represent everything by same body alias, these are just evil. */
717 if (aliased)
718 return true;
719
720 /* If linker counts on us, we must preserve the function. */
721 if (varpool_used_from_object_file_p (vnode))
722 return true;
723
724 if (DECL_PRESERVE_P (vnode->decl))
725 return true;
726 if (lookup_attribute ("externally_visible",
727 DECL_ATTRIBUTES (vnode->decl)))
728 return true;
729 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
730 && lookup_attribute ("dllexport",
731 DECL_ATTRIBUTES (vnode->decl)))
732 return true;
733
734 /* See if we have linker information about symbol not being used or
735 if we need to make guess based on the declaration.
736
737 Even if the linker clams the symbol is unused, never bring internal
738 symbols that are declared by user as used or externally visible.
739 This is needed for i.e. references from asm statements. */
740 if (varpool_used_from_object_file_p (vnode))
741 return true;
742 for (alias = vnode->extra_name; alias; alias = alias->next)
743 if (alias->resolution != LDPR_PREVAILING_DEF_IRONLY)
744 break;
745 if (!alias && vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
746 return false;
747
748 /* As a special case, the COMDAT virutal tables can be unshared.
749 In LTO mode turn vtables into static variables. The variable is readonly,
750 so this does not enable more optimization, but referring static var
751 is faster for dynamic linking. Also this match logic hidding vtables
752 from LTO symbol tables. */
753 if ((in_lto_p || flag_whole_program)
754 && !vnode->force_output
755 && DECL_COMDAT (vnode->decl) && DECL_VIRTUAL_P (vnode->decl))
756 return false;
757
758 /* When doing link time optimizations, hidden symbols become local. */
759 if (in_lto_p
760 && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
761 || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
762 /* Be sure that node is defined in IR file, not in other object
763 file. In that case we don't set used_from_other_object_file. */
764 && vnode->finalized)
765 ;
766 else if (!flag_whole_program)
767 return true;
768
769 /* Do not attempt to privatize COMDATS by default.
770 This would break linking with C++ libraries sharing
771 inline definitions.
772
773 FIXME: We can do so for readonly vars with no address taken and
774 possibly also for vtables since no direct pointer comparsion is done.
775 It might be interesting to do so to reduce linking overhead. */
776 if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
777 return true;
778 return false;
779 }
780
781 /* Dissolve the same_comdat_group list in which NODE resides. */
782
783 static void
784 dissolve_same_comdat_group_list (struct cgraph_node *node)
785 {
786 struct cgraph_node *n = node, *next;
787 do
788 {
789 next = n->same_comdat_group;
790 n->same_comdat_group = NULL;
791 n = next;
792 }
793 while (n != node);
794 }
795
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM is the -fwhole-program setting to honor.  Always
   returns 0 (no TODO flags).  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  /* Nodes/variables that are the target of an alias pair; visibility
     decisions for those are conservative.  */
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      int flags = flags_from_decl_or_type (node->decl);
      /* Non-looping const/pure functions cannot do anything useful as
	 static constructors/destructors; drop the flags.  */
      if (optimize
	  && (flags & (ECF_CONST | ECF_PURE))
	  && !(flags & ECF_LOOPING_CONST_OR_PURE))
	{
	  DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
	  DECL_STATIC_DESTRUCTOR (node->decl) = 0;
	}

      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
	  /* Same-body aliases follow the visibility of the main node.  */
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && varpool_externally_visible_p
	       (vnode,
		pointer_set_contains (aliased_vnodes, vnode)))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	  vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
979
980 /* Local function pass handling visibilities. This happens before LTO streaming
981 so in particular -fwhole-program should be ignored at this level. */
982
983 static unsigned int
984 local_function_and_variable_visibility (void)
985 {
986 return function_and_variable_visibility (flag_whole_program && !flag_lto);
987 }
988
/* Pass descriptor for the compile-time visibility pass; it always runs
   (NULL gate) and executes local_function_and_variable_visibility.  */
struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
1008
1009 /* Do not re-run on ltrans stage. */
1010
1011 static bool
1012 gate_whole_program_function_and_variable_visibility (void)
1013 {
1014 return !flag_ltrans;
1015 }
1016
/* Bring functions local at LTO time with -fwhole-program.  */
1018
static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Compute visibility with whole-program assumptions enabled (when
     -fwhole-program is given).  */
  function_and_variable_visibility (flag_whole_program);

  /* Externally visible, non-COMDAT, finalized functions may be referenced
     from outside the unit, so they must be kept.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  /* Likewise for externally visible, non-COMDAT variables.  */
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  /* With the extra visibility information, try to promote variables to
     readonly/non-addressable; only useful when optimizing.  */
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
1046
/* Pass descriptor for the LTO-time whole-program visibility pass; gated
   off at the ltrans stage, no summary streaming hooks needed.  */
struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
1075
1076 /* Hash a cgraph node set element. */
1077
1078 static hashval_t
1079 hash_cgraph_node_set_element (const void *p)
1080 {
1081 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
1082 return htab_hash_pointer (element->node);
1083 }
1084
1085 /* Compare two cgraph node set elements. */
1086
1087 static int
1088 eq_cgraph_node_set_element (const void *p1, const void *p2)
1089 {
1090 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
1091 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
1092
1093 return e1->node == e2->node;
1094 }
1095
1096 /* Create a new cgraph node set. */
1097
1098 cgraph_node_set
1099 cgraph_node_set_new (void)
1100 {
1101 cgraph_node_set new_node_set;
1102
1103 new_node_set = ggc_alloc_cgraph_node_set_def ();
1104 new_node_set->hashtab = htab_create_ggc (10,
1105 hash_cgraph_node_set_element,
1106 eq_cgraph_node_set_element,
1107 NULL);
1108 new_node_set->nodes = NULL;
1109 return new_node_set;
1110 }
1111
1112 /* Add cgraph_node NODE to cgraph_node_set SET. */
1113
void
cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;
  cgraph_node_set_element element;
  struct cgraph_node_set_element_def dummy;

  /* Probe the hash table with a stack dummy keyed on NODE.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already a member: check that the hash table and the node vector
	 agree, then leave the set unchanged.  */
      element = (cgraph_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  The element records the node's index
     in the vector so removal/lookup can cross-check both structures.  */
  element = ggc_alloc_cgraph_node_set_element_def ();
  element->node = node;
  element->index = VEC_length (cgraph_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector. */
  VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
}
1142
1143 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1144
void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* Removing a node that is not in the set is a no-op.  */
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  /* Hash table and vector must agree on NODE's position.  */
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector. */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  Its hash
	 table entry is updated to the new index as well.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table. */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1182
1183 /* Find NODE in SET and return an iterator to it if found. A null iterator
1184 is returned if NODE is not in SET. */
1185
1186 cgraph_node_set_iterator
1187 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1188 {
1189 void **slot;
1190 struct cgraph_node_set_element_def dummy;
1191 cgraph_node_set_element element;
1192 cgraph_node_set_iterator csi;
1193
1194 dummy.node = node;
1195 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1196 if (slot == NULL)
1197 csi.index = (unsigned) ~0;
1198 else
1199 {
1200 element = (cgraph_node_set_element) *slot;
1201 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1202 == node);
1203 csi.index = element->index;
1204 }
1205 csi.set = set;
1206
1207 return csi;
1208 }
1209
1210 /* Dump content of SET to file F. */
1211
1212 void
1213 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1214 {
1215 cgraph_node_set_iterator iter;
1216
1217 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1218 {
1219 struct cgraph_node *node = csi_node (iter);
1220 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1221 }
1222 fprintf (f, "\n");
1223 }
1224
1225 /* Dump content of SET to stderr. */
1226
DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  /* Convenience wrapper for calling from the debugger.  */
  dump_cgraph_node_set (stderr, set);
}
1232
1233 /* Hash a varpool node set element. */
1234
1235 static hashval_t
1236 hash_varpool_node_set_element (const void *p)
1237 {
1238 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1239 return htab_hash_pointer (element->node);
1240 }
1241
1242 /* Compare two varpool node set elements. */
1243
1244 static int
1245 eq_varpool_node_set_element (const void *p1, const void *p2)
1246 {
1247 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1248 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1249
1250 return e1->node == e2->node;
1251 }
1252
1253 /* Create a new varpool node set. */
1254
1255 varpool_node_set
1256 varpool_node_set_new (void)
1257 {
1258 varpool_node_set new_node_set;
1259
1260 new_node_set = ggc_alloc_varpool_node_set_def ();
1261 new_node_set->hashtab = htab_create_ggc (10,
1262 hash_varpool_node_set_element,
1263 eq_varpool_node_set_element,
1264 NULL);
1265 new_node_set->nodes = NULL;
1266 return new_node_set;
1267 }
1268
1269 /* Add varpool_node NODE to varpool_node_set SET. */
1270
void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_element element;
  struct varpool_node_set_element_def dummy;

  /* Probe the hash table with a stack dummy keyed on NODE.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already a member: check that the hash table and the node vector
	 agree, then leave the set unchanged.  */
      element = (varpool_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (varpool_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  The element records the node's index
     in the vector so removal/lookup can cross-check both structures.  */
  element = ggc_alloc_varpool_node_set_element_def ();
  element->node = node;
  element->index = VEC_length (varpool_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector. */
  VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
}
1299
1300 /* Remove varpool_node NODE from varpool_node_set SET. */
1301
void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* Removing a node that is not in the set is a no-op.  */
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  /* Hash table and vector must agree on NODE's position.  */
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector. */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  Its hash
	 table entry is updated to the new index as well.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table. */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1339
1340 /* Find NODE in SET and return an iterator to it if found. A null iterator
1341 is returned if NODE is not in SET. */
1342
1343 varpool_node_set_iterator
1344 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1345 {
1346 void **slot;
1347 struct varpool_node_set_element_def dummy;
1348 varpool_node_set_element element;
1349 varpool_node_set_iterator vsi;
1350
1351 dummy.node = node;
1352 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1353 if (slot == NULL)
1354 vsi.index = (unsigned) ~0;
1355 else
1356 {
1357 element = (varpool_node_set_element) *slot;
1358 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1359 == node);
1360 vsi.index = element->index;
1361 }
1362 vsi.set = set;
1363
1364 return vsi;
1365 }
1366
1367 /* Dump content of SET to file F. */
1368
1369 void
1370 dump_varpool_node_set (FILE *f, varpool_node_set set)
1371 {
1372 varpool_node_set_iterator iter;
1373
1374 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1375 {
1376 struct varpool_node *node = vsi_node (iter);
1377 fprintf (f, " %s", varpool_node_name (node));
1378 }
1379 fprintf (f, "\n");
1380 }
1381
1382 /* Dump content of SET to stderr. */
1383
DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  /* Convenience wrapper for calling from the debugger.  */
  dump_varpool_node_set (stderr, set);
}
1389
1390
1391 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1392
1393 static unsigned int
1394 ipa_profile (void)
1395 {
1396 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1397 struct cgraph_edge *e;
1398 int order_pos;
1399 bool something_changed = false;
1400 int i;
1401
1402 order_pos = cgraph_postorder (order);
1403 for (i = order_pos - 1; i >= 0; i--)
1404 {
1405 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1406 {
1407 for (e = order[i]->callees; e; e = e->next_callee)
1408 if (e->callee->local.local && !e->callee->aux)
1409 {
1410 something_changed = true;
1411 e->callee->aux = (void *)1;
1412 }
1413 }
1414 order[i]->aux = NULL;
1415 }
1416
1417 while (something_changed)
1418 {
1419 something_changed = false;
1420 for (i = order_pos - 1; i >= 0; i--)
1421 {
1422 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1423 {
1424 for (e = order[i]->callees; e; e = e->next_callee)
1425 if (e->callee->local.local && !e->callee->aux)
1426 {
1427 something_changed = true;
1428 e->callee->aux = (void *)1;
1429 }
1430 }
1431 order[i]->aux = NULL;
1432 }
1433 }
1434 free (order);
1435 return 0;
1436 }
1437
1438 static bool
1439 gate_ipa_profile (void)
1440 {
1441 return flag_ipa_profile;
1442 }
1443
/* Pass descriptor for the frequency-propagation pass; no summary
   streaming hooks are needed.  */
struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",			/* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};
1471
1472 /* Generate and emit a static constructor or destructor. WHICH must
1473 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1474 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1475 initialization priority for this constructor or destructor. */
1476
void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* Counter makes each generated function name unique within the unit.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup. */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a FUNCTION_DECL of type void(void).  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  /* A function needs a RESULT_DECL even when it returns void.  */
  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors)
    {
      /* Without native ctor/dtor support the function is found by
	 collect2 via its magic name, so it must be public and must not
	 be optimized away.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark as constructor or destructor and record the priority.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  /* Leave no dangling function context behind.  */
  set_cfun (NULL);
  current_function_decl = NULL;
}
1541
1542
1543 /* A vector of FUNCTION_DECLs declared as static constructors. */
1544 static VEC(tree, heap) *static_ctors;
1545 /* A vector of FUNCTION_DECLs declared as static destructors. */
1546 static VEC(tree, heap) *static_dtors;
1547
1548 /* When target does not have ctors and dtors, we call all constructor
1549 and destructor by special initialization/destruction function
1550 recognized by collect2.
1551
1552 When we are going to build this function, collect all constructors and
1553 destructors and turn them into normal functions. */
1554
static void
record_cdtor_fn (struct cgraph_node *node)
{
  /* A decl may be both constructor and destructor; record it in each
     applicable vector.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    VEC_safe_push (tree, heap, static_ctors, node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    VEC_safe_push (tree, heap, static_dtors, node->decl);
  /* Re-fetch the cgraph node for the decl and lift inline limits so the
     call from the generated wrapper can always be inlined.  */
  node = cgraph_node (node->decl);
  node->local.disregard_inline_limits = 1;
}
1565
1566 /* Define global constructors/destructor functions for the CDTORS, of
1567 which they are LEN. The CDTORS are sorted by initialization
1568 priority. If CTOR_P is true, these are constructors; otherwise,
1569 they are destructors. */
1570
static void
build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
{
  size_t i,j;
  size_t len = VEC_length (tree, cdtors);

  /* Process CDTORS in batches of equal priority; I marks the start of
     the current batch, J its end (exclusive).  */
  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J past all entries sharing the batch's priority.  */
      do
	{
	  priority_type p;
	  fn = VEC_index (tree, cdtors, j);
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority. */
      for (;i < j; i++)
	{
	  tree call;
	  fn = VEC_index (tree, cdtors, i);
	  call = build_call_expr (fn, 0);
	  /* The original is demoted to a plain function; the generated
	     wrapper takes over the ctor/dtor role.  */
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them. */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority. */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
1630
1631 /* Comparison function for qsort. P1 and P2 are actually of type
1632 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1633 used to determine the sort order. */
1634
1635 static int
1636 compare_ctor (const void *p1, const void *p2)
1637 {
1638 tree f1;
1639 tree f2;
1640 int priority1;
1641 int priority2;
1642
1643 f1 = *(const tree *)p1;
1644 f2 = *(const tree *)p2;
1645 priority1 = DECL_INIT_PRIORITY (f1);
1646 priority2 = DECL_INIT_PRIORITY (f2);
1647
1648 if (priority1 < priority2)
1649 return -1;
1650 else if (priority1 > priority2)
1651 return 1;
1652 else
1653 /* Ensure a stable sort. Constructors are executed in backwarding
1654 order to make LTO initialize braries first. */
1655 return DECL_UID (f2) - DECL_UID (f1);
1656 }
1657
1658 /* Comparison function for qsort. P1 and P2 are actually of type
1659 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1660 used to determine the sort order. */
1661
1662 static int
1663 compare_dtor (const void *p1, const void *p2)
1664 {
1665 tree f1;
1666 tree f2;
1667 int priority1;
1668 int priority2;
1669
1670 f1 = *(const tree *)p1;
1671 f2 = *(const tree *)p2;
1672 priority1 = DECL_FINI_PRIORITY (f1);
1673 priority2 = DECL_FINI_PRIORITY (f2);
1674
1675 if (priority1 < priority2)
1676 return -1;
1677 else if (priority1 > priority2)
1678 return 1;
1679 else
1680 /* Ensure a stable sort. */
1681 return DECL_UID (f1) - DECL_UID (f2);
1682 }
1683
1684 /* Generate functions to call static constructors and destructors
1685 for targets that do not support .ctors/.dtors sections. These
1686 functions have magic names which are detected by collect2. */
1687
1688 static void
1689 build_cdtor_fns (void)
1690 {
1691 if (!VEC_empty (tree, static_ctors))
1692 {
1693 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1694 VEC_qsort (tree, static_ctors, compare_ctor);
1695 build_cdtor (/*ctor_p=*/true, static_ctors);
1696 }
1697
1698 if (!VEC_empty (tree, static_dtors))
1699 {
1700 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1701 VEC_qsort (tree, static_dtors, compare_dtor);
1702 build_cdtor (/*ctor_p=*/false, static_dtors);
1703 }
1704 }
1705
/* Look for constructors and destructors and produce a function calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at link time to merge possibly numerous
   constructors/destructors into a single function to improve code locality and
   reduce size.  */
1711
static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  /* Collect every analyzed static constructor/destructor ...  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed
	&& (DECL_STATIC_CONSTRUCTOR (node->decl)
	    || DECL_STATIC_DESTRUCTOR (node->decl)))
       record_cdtor_fn (node);
  /* ... then build the merged wrapper functions and release the
     scratch vectors.  */
  build_cdtor_fns ();
  VEC_free (tree, heap, static_ctors);
  VEC_free (tree, heap, static_dtors);
  return 0;
}
1726
1727 /* Perform the pass when we have no ctors/dtors support
1728 or at LTO time to merge multiple constructors into single
1729 function. */
1730
1731 static bool
1732 gate_ipa_cdtor_merge (void)
1733 {
1734 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1735 }
1736
/* Pass descriptor for the cdtor merging pass; no summary streaming
   hooks are needed.  */
struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};