/* Basic IPA optimizations and utilities.
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "gimple.h"
#include "ggc.h"
#include "flags.h"
#include "pointer-set.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
35 /* Fill array order with all nodes with output flag set in the reverse
39 cgraph_postorder (struct cgraph_node
**order
)
41 struct cgraph_node
*node
, *node2
;
44 struct cgraph_edge
*edge
, last
;
47 struct cgraph_node
**stack
=
48 XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
50 /* We have to deal with cycles nicely, so use a depth first traversal
51 output algorithm. Ignore the fact that some functions won't need
52 to be output and put them into order as well, so we get dependencies
53 right through inline functions. */
54 for (node
= cgraph_nodes
; node
; node
= node
->next
)
56 for (pass
= 0; pass
< 2; pass
++)
57 for (node
= cgraph_nodes
; node
; node
= node
->next
)
60 || (!node
->address_taken
61 && !node
->global
.inlined_to
62 && !cgraph_only_called_directly_p (node
))))
68 node
->aux
= node
->callers
;
71 while (node2
->aux
!= &last
)
73 edge
= (struct cgraph_edge
*) node2
->aux
;
74 if (edge
->next_caller
)
75 node2
->aux
= edge
->next_caller
;
78 /* Break possible cycles involving always-inline
79 functions by ignoring edges from always-inline
80 functions to non-always-inline functions. */
81 if (edge
->caller
->local
.disregard_inline_limits
82 && !edge
->callee
->local
.disregard_inline_limits
)
84 if (!edge
->caller
->aux
)
86 if (!edge
->caller
->callers
)
87 edge
->caller
->aux
= &last
;
89 edge
->caller
->aux
= edge
->caller
->callers
;
90 stack
[stack_size
++] = node2
;
95 if (node2
->aux
== &last
)
97 order
[order_pos
++] = node2
;
99 node2
= stack
[--stack_size
];
106 for (node
= cgraph_nodes
; node
; node
= node
->next
)
111 /* Look for all functions inlined to NODE and update their inlined_to pointers
115 update_inlined_to_pointer (struct cgraph_node
*node
, struct cgraph_node
*inlined_to
)
117 struct cgraph_edge
*e
;
118 for (e
= node
->callees
; e
; e
= e
->next_callee
)
119 if (e
->callee
->global
.inlined_to
)
121 e
->callee
->global
.inlined_to
= inlined_to
;
122 update_inlined_to_pointer (e
->callee
, inlined_to
);
126 /* Add cgraph NODE to queue starting at FIRST.
128 The queue is linked via AUX pointers and terminated by pointer to 1.
129 We enqueue nodes at two occasions: when we find them reachable or when we find
130 their bodies needed for further clonning. In the second case we mark them
131 by pointer to 2 after processing so they are re-queue when they become
135 enqueue_cgraph_node (struct cgraph_node
*node
, struct cgraph_node
**first
)
137 /* Node is still in queue; do nothing. */
138 if (node
->aux
&& node
->aux
!= (void *) 2)
140 /* Node was already processed as unreachable, re-enqueue
141 only if it became reachable now. */
142 if (node
->aux
== (void *)2 && !node
->reachable
)
148 /* Add varpool NODE to queue starting at FIRST. */
151 enqueue_varpool_node (struct varpool_node
*node
, struct varpool_node
**first
)
157 /* Process references. */
160 process_references (struct ipa_ref_list
*list
,
161 struct cgraph_node
**first
,
162 struct varpool_node
**first_varpool
,
163 bool before_inlining_p
)
167 for (i
= 0; ipa_ref_list_reference_iterate (list
, i
, ref
); i
++)
169 if (ref
->refered_type
== IPA_REF_CGRAPH
)
171 struct cgraph_node
*node
= ipa_ref_node (ref
);
173 && (!DECL_EXTERNAL (node
->decl
)
174 || before_inlining_p
))
176 node
->reachable
= true;
177 enqueue_cgraph_node (node
, first
);
182 struct varpool_node
*node
= ipa_ref_varpool_node (ref
);
185 varpool_mark_needed_node (node
);
186 enqueue_varpool_node (node
, first_varpool
);
192 /* Return true when function NODE can be removed from callgraph
193 if all direct calls are eliminated. */
196 varpool_can_remove_if_no_refs (struct varpool_node
*node
)
198 return (!node
->force_output
&& !node
->used_from_other_partition
199 && (DECL_COMDAT (node
->decl
) || !node
->externally_visible
));
202 /* Return true when function can be marked local. */
205 cgraph_local_node_p (struct cgraph_node
*node
)
207 return (cgraph_only_called_directly_p (node
)
209 && !DECL_EXTERNAL (node
->decl
)
210 && !node
->local
.externally_visible
211 && !node
->reachable_from_other_partition
212 && !node
->in_other_partition
);
215 /* Perform reachability analysis and reclaim all unreachable nodes.
216 If BEFORE_INLINING_P is true this function is called before inlining
217 decisions has been made. If BEFORE_INLINING_P is false this function also
218 removes unneeded bodies of extern inline functions. */
221 cgraph_remove_unreachable_nodes (bool before_inlining_p
, FILE *file
)
223 struct cgraph_node
*first
= (struct cgraph_node
*) (void *) 1;
224 struct varpool_node
*first_varpool
= (struct varpool_node
*) (void *) 1;
225 struct cgraph_node
*node
, *next
;
226 struct varpool_node
*vnode
, *vnext
;
227 bool changed
= false;
229 #ifdef ENABLE_CHECKING
233 fprintf (file
, "\nReclaiming functions:");
234 #ifdef ENABLE_CHECKING
235 for (node
= cgraph_nodes
; node
; node
= node
->next
)
236 gcc_assert (!node
->aux
);
237 for (vnode
= varpool_nodes
; vnode
; vnode
= vnode
->next
)
238 gcc_assert (!vnode
->aux
);
240 varpool_reset_queue ();
241 /* Mark functions whose bodies are obviously needed.
242 This is mostly when they can be referenced externally. Inline clones
243 are special since their declarations are shared with master clone and thus
244 cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */
245 for (node
= cgraph_nodes
; node
; node
= node
->next
)
246 if (node
->analyzed
&& !node
->global
.inlined_to
247 && (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node
)
248 /* Keep around virtual functions for possible devirtualization. */
249 || (before_inlining_p
250 && DECL_VIRTUAL_P (node
->decl
)
251 && (DECL_COMDAT (node
->decl
) || DECL_EXTERNAL (node
->decl
)))
252 /* Also external functions with address taken are better to stay
253 for indirect inlining. */
254 || (before_inlining_p
255 && DECL_EXTERNAL (node
->decl
)
256 && node
->address_taken
)))
258 gcc_assert (!node
->global
.inlined_to
);
259 enqueue_cgraph_node (node
, &first
);
260 node
->reachable
= true;
264 gcc_assert (!node
->aux
);
265 node
->reachable
= false;
268 /* Mark variables that are obviously needed. */
269 for (vnode
= varpool_nodes
; vnode
; vnode
= vnode
->next
)
271 vnode
->next_needed
= NULL
;
272 vnode
->prev_needed
= NULL
;
273 if (!varpool_can_remove_if_no_refs (vnode
))
275 vnode
->needed
= false;
276 varpool_mark_needed_node (vnode
);
277 enqueue_varpool_node (vnode
, &first_varpool
);
280 vnode
->needed
= false;
283 /* Perform reachability analysis. As a special case do not consider
284 extern inline functions not inlined as live because we won't output
287 We maintain two worklist, one for cgraph nodes other for varpools and
288 are finished once both are empty. */
290 while (first
!= (struct cgraph_node
*) (void *) 1
291 || first_varpool
!= (struct varpool_node
*) (void *) 1)
293 if (first
!= (struct cgraph_node
*) (void *) 1)
295 struct cgraph_edge
*e
;
297 first
= (struct cgraph_node
*) first
->aux
;
298 if (!node
->reachable
)
299 node
->aux
= (void *)2;
301 /* If we found this node reachable, first mark on the callees
302 reachable too, unless they are direct calls to extern inline functions
303 we decided to not inline. */
306 for (e
= node
->callees
; e
; e
= e
->next_callee
)
307 if (!e
->callee
->reachable
309 && (!e
->inline_failed
|| !e
->callee
->analyzed
310 || (!DECL_EXTERNAL (e
->callee
->decl
))
311 || before_inlining_p
))
313 e
->callee
->reachable
= true;
314 enqueue_cgraph_node (e
->callee
, &first
);
316 process_references (&node
->ref_list
, &first
, &first_varpool
, before_inlining_p
);
319 /* If any function in a comdat group is reachable, force
320 all other functions in the same comdat group to be
322 if (node
->same_comdat_group
324 && !node
->global
.inlined_to
)
326 for (next
= node
->same_comdat_group
;
328 next
= next
->same_comdat_group
)
329 if (!next
->reachable
)
331 next
->reachable
= true;
332 enqueue_cgraph_node (next
, &first
);
336 /* We can freely remove inline clones even if they are cloned, however if
337 function is clone of real clone, we must keep it around in order to
338 make materialize_clones produce function body with the changes
340 while (node
->clone_of
&& !node
->clone_of
->aux
341 && !gimple_has_body_p (node
->decl
))
343 bool noninline
= node
->clone_of
->decl
!= node
->decl
;
344 node
= node
->clone_of
;
345 if (noninline
&& !node
->reachable
&& !node
->aux
)
347 enqueue_cgraph_node (node
, &first
);
352 if (first_varpool
!= (struct varpool_node
*) (void *) 1)
354 vnode
= first_varpool
;
355 first_varpool
= (struct varpool_node
*)first_varpool
->aux
;
357 process_references (&vnode
->ref_list
, &first
, &first_varpool
, before_inlining_p
);
358 /* If any function in a comdat group is reachable, force
359 all other functions in the same comdat group to be
361 if (vnode
->same_comdat_group
)
363 struct varpool_node
*next
;
364 for (next
= vnode
->same_comdat_group
;
366 next
= next
->same_comdat_group
)
369 varpool_mark_needed_node (next
);
370 enqueue_varpool_node (next
, &first_varpool
);
376 /* Remove unreachable nodes.
378 Completely unreachable functions can be fully removed from the callgraph.
379 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
380 callgraph (so we still have edges to them). We remove function body then.
382 Also we need to care functions that are unreachable but we need to keep them around
383 for later clonning. In this case we also turn them to unanalyzed nodes, but
384 keep the body around. */
385 for (node
= cgraph_nodes
; node
; node
= next
)
388 if (node
->aux
&& !node
->reachable
)
390 cgraph_node_remove_callees (node
);
391 ipa_remove_all_references (&node
->ref_list
);
392 node
->analyzed
= false;
393 node
->local
.inlinable
= false;
397 struct cgraph_edge
*e
;
402 node
->global
.inlined_to
= NULL
;
404 fprintf (file
, " %s", cgraph_node_name (node
));
405 /* See if there is reachable caller. */
406 for (e
= node
->callers
; e
&& !found
; e
= e
->next_caller
)
407 if (e
->caller
->reachable
)
409 for (i
= 0; (ipa_ref_list_refering_iterate (&node
->ref_list
, i
, ref
)
411 if (ref
->refering_type
== IPA_REF_CGRAPH
412 && ipa_ref_refering_node (ref
)->reachable
)
414 else if (ref
->refering_type
== IPA_REF_VARPOOL
415 && ipa_ref_refering_varpool_node (ref
)->needed
)
418 /* If so, we need to keep node in the callgraph. */
419 if (found
|| node
->needed
)
423 struct cgraph_node
*clone
;
425 /* If there are still clones, we must keep body around.
426 Otherwise we can just remove the body but keep the clone. */
427 for (clone
= node
->clones
; clone
;
428 clone
= clone
->next_sibling_clone
)
433 cgraph_release_function_body (node
);
434 node
->local
.inlinable
= false;
435 if (node
->prev_sibling_clone
)
436 node
->prev_sibling_clone
->next_sibling_clone
= node
->next_sibling_clone
;
437 else if (node
->clone_of
)
438 node
->clone_of
->clones
= node
->next_sibling_clone
;
439 if (node
->next_sibling_clone
)
440 node
->next_sibling_clone
->prev_sibling_clone
= node
->prev_sibling_clone
;
441 #ifdef ENABLE_CHECKING
443 node
->former_clone_of
= node
->clone_of
->decl
;
445 node
->clone_of
= NULL
;
446 node
->next_sibling_clone
= NULL
;
447 node
->prev_sibling_clone
= NULL
;
450 gcc_assert (!clone
->in_other_partition
);
451 node
->analyzed
= false;
453 cgraph_node_remove_callees (node
);
454 ipa_remove_all_references (&node
->ref_list
);
459 cgraph_remove_node (node
);
464 for (node
= cgraph_nodes
; node
; node
= node
->next
)
466 /* Inline clones might be kept around so their materializing allows further
467 cloning. If the function the clone is inlined into is removed, we need
468 to turn it into normal cone. */
469 if (node
->global
.inlined_to
472 gcc_assert (node
->clones
);
473 node
->global
.inlined_to
= NULL
;
474 update_inlined_to_pointer (node
, node
);
480 fprintf (file
, "\n");
482 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
483 are undesirable at -O0 since we do not want to remove anything. */
488 fprintf (file
, "Reclaiming variables:");
489 for (vnode
= varpool_nodes
; vnode
; vnode
= vnext
)
495 fprintf (file
, " %s", varpool_node_name (vnode
));
496 varpool_remove_node (vnode
);
501 /* Now update address_taken flags and try to promote functions to be local. */
504 fprintf (file
, "\nClearing address taken flags:");
505 for (node
= cgraph_nodes
; node
; node
= node
->next
)
506 if (node
->address_taken
507 && !node
->reachable_from_other_partition
)
512 for (i
= 0; ipa_ref_list_refering_iterate (&node
->ref_list
, i
, ref
)
515 gcc_assert (ref
->use
== IPA_REF_ADDR
);
521 fprintf (file
, " %s", cgraph_node_name (node
));
522 node
->address_taken
= false;
524 if (cgraph_local_node_p (node
))
526 node
->local
.local
= true;
528 fprintf (file
, " (local)");
533 #ifdef ENABLE_CHECKING
537 /* Reclaim alias pairs for functions that have disappeared from the
539 remove_unreachable_alias_pairs ();
544 /* Discover variables that have no longer address taken or that are read only
545 and update their flags.
547 FIXME: This can not be done in between gimplify and omp_expand since
548 readonly flag plays role on what is shared and what is not. Currently we do
549 this transformation as part of whole program visibility and re-do at
550 ipa-reference pass (to take into account clonning), but it would
551 make sense to do it before early optimizations. */
554 ipa_discover_readonly_nonaddressable_vars (void)
556 struct varpool_node
*vnode
;
558 fprintf (dump_file
, "Clearing variable flags:");
559 for (vnode
= varpool_nodes
; vnode
; vnode
= vnode
->next
)
560 if (vnode
->finalized
&& varpool_all_refs_explicit_p (vnode
)
561 && (TREE_ADDRESSABLE (vnode
->decl
) || !TREE_READONLY (vnode
->decl
)))
563 bool written
= false;
564 bool address_taken
= false;
567 for (i
= 0; ipa_ref_list_refering_iterate (&vnode
->ref_list
, i
, ref
)
568 && (!written
|| !address_taken
); i
++)
572 address_taken
= true;
580 if (TREE_ADDRESSABLE (vnode
->decl
) && !address_taken
)
583 fprintf (dump_file
, " %s (addressable)", varpool_node_name (vnode
));
584 TREE_ADDRESSABLE (vnode
->decl
) = 0;
586 if (!TREE_READONLY (vnode
->decl
) && !address_taken
&& !written
587 /* Making variable in explicit section readonly can cause section
589 See e.g. gcc.c-torture/compile/pr23237.c */
590 && DECL_SECTION_NAME (vnode
->decl
) == NULL
)
593 fprintf (dump_file
, " %s (read-only)", varpool_node_name (vnode
));
594 TREE_READONLY (vnode
->decl
) = 1;
598 fprintf (dump_file
, "\n");
601 /* Return true when function NODE should be considered externally visible. */
604 cgraph_externally_visible_p (struct cgraph_node
*node
, bool whole_program
, bool aliased
)
606 if (!node
->local
.finalized
)
608 if (!DECL_COMDAT (node
->decl
)
609 && (!TREE_PUBLIC (node
->decl
) || DECL_EXTERNAL (node
->decl
)))
612 /* Do not even try to be smart about aliased nodes. Until we properly
613 represent everything by same body alias, these are just evil. */
617 /* If linker counts on us, we must preserve the function. */
618 if (cgraph_used_from_object_file_p (node
))
620 if (DECL_PRESERVE_P (node
->decl
))
622 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node
->decl
)))
625 /* When doing link time optimizations, hidden symbols become local. */
627 && (DECL_VISIBILITY (node
->decl
) == VISIBILITY_HIDDEN
628 || DECL_VISIBILITY (node
->decl
) == VISIBILITY_INTERNAL
)
629 /* Be sure that node is defined in IR file, not in other object
630 file. In that case we don't set used_from_other_object_file. */
633 else if (!whole_program
)
635 /* COMDAT functions must be shared only if they have address taken,
636 otherwise we can produce our own private implementation with
638 else if (DECL_COMDAT (node
->decl
))
640 if (node
->address_taken
|| !node
->analyzed
)
642 if (node
->same_comdat_group
)
644 struct cgraph_node
*next
;
646 /* If more than one function is in the same COMDAT group, it must
647 be shared even if just one function in the comdat group has
649 for (next
= node
->same_comdat_group
;
651 next
= next
->same_comdat_group
)
652 if (next
->address_taken
|| !next
->analyzed
)
657 if (MAIN_NAME_P (DECL_NAME (node
->decl
)))
663 /* Return true when variable VNODE should be considered externally visible. */
666 varpool_externally_visible_p (struct varpool_node
*vnode
, bool aliased
)
668 if (!DECL_COMDAT (vnode
->decl
) && !TREE_PUBLIC (vnode
->decl
))
671 /* Do not even try to be smart about aliased nodes. Until we properly
672 represent everything by same body alias, these are just evil. */
676 /* If linker counts on us, we must preserve the function. */
677 if (varpool_used_from_object_file_p (vnode
))
680 if (DECL_PRESERVE_P (vnode
->decl
))
682 if (lookup_attribute ("externally_visible",
683 DECL_ATTRIBUTES (vnode
->decl
)))
686 /* See if we have linker information about symbol not being used or
687 if we need to make guess based on the declaration.
689 Even if the linker clams the symbol is unused, never bring internal
690 symbols that are declared by user as used or externally visible.
691 This is needed for i.e. references from asm statements. */
692 if (varpool_used_from_object_file_p (vnode
))
695 /* When doing link time optimizations, hidden symbols become local. */
697 && (DECL_VISIBILITY (vnode
->decl
) == VISIBILITY_HIDDEN
698 || DECL_VISIBILITY (vnode
->decl
) == VISIBILITY_INTERNAL
)
699 /* Be sure that node is defined in IR file, not in other object
700 file. In that case we don't set used_from_other_object_file. */
703 else if (!flag_whole_program
)
706 /* Do not attempt to privatize COMDATS by default.
707 This would break linking with C++ libraries sharing
710 FIXME: We can do so for readonly vars with no address taken and
711 possibly also for vtables since no direct pointer comparsion is done.
712 It might be interesting to do so to reduce linking overhead. */
713 if (DECL_COMDAT (vnode
->decl
) || DECL_WEAK (vnode
->decl
))
718 /* Dissolve the same_comdat_group list in which NODE resides. */
721 dissolve_same_comdat_group_list (struct cgraph_node
*node
)
723 struct cgraph_node
*n
= node
, *next
;
726 next
= n
->same_comdat_group
;
727 n
->same_comdat_group
= NULL
;
733 /* Mark visibility of all functions.
735 A local function is one whose calls can occur only in the current
736 compilation unit and all its calls are explicit, so we can change
737 its calling convention. We simply mark all static functions whose
738 address is not taken as local.
740 We also change the TREE_PUBLIC flag of all declarations that are public
741 in language point of view but we want to overwrite this default
742 via visibilities for the backend point of view. */
745 function_and_variable_visibility (bool whole_program
)
747 struct cgraph_node
*node
;
748 struct varpool_node
*vnode
;
749 struct pointer_set_t
*aliased_nodes
= pointer_set_create ();
750 struct pointer_set_t
*aliased_vnodes
= pointer_set_create ();
754 /* Discover aliased nodes. */
755 FOR_EACH_VEC_ELT (alias_pair
, alias_pairs
, i
, p
)
758 fprintf (dump_file
, "Alias %s->%s",
759 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p
->decl
)),
760 IDENTIFIER_POINTER (p
->target
));
762 if ((node
= cgraph_node_for_asm (p
->target
)) != NULL
)
764 gcc_assert (node
->needed
);
765 pointer_set_insert (aliased_nodes
, node
);
767 fprintf (dump_file
, " node %s/%i",
768 cgraph_node_name (node
), node
->uid
);
770 else if ((vnode
= varpool_node_for_asm (p
->target
)) != NULL
)
772 gcc_assert (vnode
->needed
);
773 pointer_set_insert (aliased_vnodes
, vnode
);
775 fprintf (dump_file
, " varpool node %s",
776 varpool_node_name (vnode
));
779 fprintf (dump_file
, "\n");
782 for (node
= cgraph_nodes
; node
; node
= node
->next
)
784 int flags
= flags_from_decl_or_type (node
->decl
);
786 && (flags
& (ECF_CONST
| ECF_PURE
))
787 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
789 DECL_STATIC_CONSTRUCTOR (node
->decl
) = 0;
790 DECL_STATIC_DESTRUCTOR (node
->decl
) = 0;
793 /* C++ FE on lack of COMDAT support create local COMDAT functions
794 (that ought to be shared but can not due to object format
795 limitations). It is neccesary to keep the flag to make rest of C++ FE
796 happy. Clear the flag here to avoid confusion in middle-end. */
797 if (DECL_COMDAT (node
->decl
) && !TREE_PUBLIC (node
->decl
))
798 DECL_COMDAT (node
->decl
) = 0;
799 /* For external decls stop tracking same_comdat_group, it doesn't matter
800 what comdat group they are in when they won't be emitted in this TU,
801 and simplifies later passes. */
802 if (node
->same_comdat_group
&& DECL_EXTERNAL (node
->decl
))
804 #ifdef ENABLE_CHECKING
805 struct cgraph_node
*n
;
807 for (n
= node
->same_comdat_group
;
809 n
= n
->same_comdat_group
)
810 /* If at least one of same comdat group functions is external,
811 all of them have to be, otherwise it is a front-end bug. */
812 gcc_assert (DECL_EXTERNAL (n
->decl
));
814 dissolve_same_comdat_group_list (node
);
816 gcc_assert ((!DECL_WEAK (node
->decl
) && !DECL_COMDAT (node
->decl
))
817 || TREE_PUBLIC (node
->decl
) || DECL_EXTERNAL (node
->decl
));
818 if (cgraph_externally_visible_p (node
, whole_program
,
819 pointer_set_contains (aliased_nodes
,
822 gcc_assert (!node
->global
.inlined_to
);
823 node
->local
.externally_visible
= true;
826 node
->local
.externally_visible
= false;
827 if (!node
->local
.externally_visible
&& node
->analyzed
828 && !DECL_EXTERNAL (node
->decl
))
830 struct cgraph_node
*alias
;
831 gcc_assert (whole_program
|| in_lto_p
|| !TREE_PUBLIC (node
->decl
));
832 cgraph_make_decl_local (node
->decl
);
833 node
->resolution
= LDPR_PREVAILING_DEF_IRONLY
;
834 for (alias
= node
->same_body
; alias
; alias
= alias
->next
)
835 cgraph_make_decl_local (alias
->decl
);
836 if (node
->same_comdat_group
)
837 /* cgraph_externally_visible_p has already checked all other nodes
838 in the group and they will all be made local. We need to
839 dissolve the group at once so that the predicate does not
841 dissolve_same_comdat_group_list (node
);
843 node
->local
.local
= cgraph_local_node_p (node
);
845 for (vnode
= varpool_nodes
; vnode
; vnode
= vnode
->next
)
847 /* weak flag makes no sense on local variables. */
848 gcc_assert (!DECL_WEAK (vnode
->decl
)
849 || TREE_PUBLIC (vnode
->decl
) || DECL_EXTERNAL (vnode
->decl
));
850 /* In several cases declarations can not be common:
852 - when declaration has initializer
854 - when it has specific section
855 - when it resides in non-generic address space.
856 - if declaration is local, it will get into .local common section
857 so common flag is not needed. Frontends still produce these in
858 certain cases, such as for:
860 static int a __attribute__ ((common))
862 Canonicalize things here and clear the redundant flag. */
863 if (DECL_COMMON (vnode
->decl
)
864 && (!(TREE_PUBLIC (vnode
->decl
) || DECL_EXTERNAL (vnode
->decl
))
865 || (DECL_INITIAL (vnode
->decl
)
866 && DECL_INITIAL (vnode
->decl
) != error_mark_node
)
867 || DECL_WEAK (vnode
->decl
)
868 || DECL_SECTION_NAME (vnode
->decl
) != NULL
869 || ! (ADDR_SPACE_GENERIC_P
870 (TYPE_ADDR_SPACE (TREE_TYPE (vnode
->decl
))))))
871 DECL_COMMON (vnode
->decl
) = 0;
873 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
875 if (!vnode
->finalized
)
878 && varpool_externally_visible_p
880 pointer_set_contains (aliased_vnodes
, vnode
)))
881 vnode
->externally_visible
= true;
883 vnode
->externally_visible
= false;
884 if (!vnode
->externally_visible
)
886 gcc_assert (in_lto_p
|| whole_program
|| !TREE_PUBLIC (vnode
->decl
));
887 cgraph_make_decl_local (vnode
->decl
);
888 vnode
->resolution
= LDPR_PREVAILING_DEF_IRONLY
;
890 gcc_assert (TREE_STATIC (vnode
->decl
));
892 pointer_set_destroy (aliased_nodes
);
893 pointer_set_destroy (aliased_vnodes
);
897 fprintf (dump_file
, "\nMarking local functions:");
898 for (node
= cgraph_nodes
; node
; node
= node
->next
)
899 if (node
->local
.local
)
900 fprintf (dump_file
, " %s", cgraph_node_name (node
));
901 fprintf (dump_file
, "\n\n");
902 fprintf (dump_file
, "\nMarking externally visible functions:");
903 for (node
= cgraph_nodes
; node
; node
= node
->next
)
904 if (node
->local
.externally_visible
)
905 fprintf (dump_file
, " %s", cgraph_node_name (node
));
906 fprintf (dump_file
, "\n\n");
907 fprintf (dump_file
, "\nMarking externally visible variables:");
908 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
909 if (vnode
->externally_visible
)
910 fprintf (dump_file
, " %s", varpool_node_name (vnode
));
911 fprintf (dump_file
, "\n\n");
913 cgraph_function_flags_ready
= true;
917 /* Local function pass handling visibilities. This happens before LTO streaming
918 so in particular -fwhole-program should be ignored at this level. */
921 local_function_and_variable_visibility (void)
923 return function_and_variable_visibility (flag_whole_program
&& !flag_lto
&& !flag_whopr
);
926 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility
=
930 "visibility", /* name */
932 local_function_and_variable_visibility
,/* execute */
935 0, /* static_pass_number */
936 TV_CGRAPHOPT
, /* tv_id */
937 0, /* properties_required */
938 0, /* properties_provided */
939 0, /* properties_destroyed */
940 0, /* todo_flags_start */
941 TODO_remove_functions
| TODO_dump_cgraph
942 | TODO_ggc_collect
/* todo_flags_finish */
946 /* Do not re-run on ltrans stage. */
949 gate_whole_program_function_and_variable_visibility (void)
954 /* Bring functionss local at LTO time whith -fwhole-program. */
957 whole_program_function_and_variable_visibility (void)
959 struct cgraph_node
*node
;
960 struct varpool_node
*vnode
;
962 function_and_variable_visibility (flag_whole_program
);
964 for (node
= cgraph_nodes
; node
; node
= node
->next
)
965 if ((node
->local
.externally_visible
&& !DECL_COMDAT (node
->decl
))
966 && node
->local
.finalized
)
967 cgraph_mark_needed_node (node
);
968 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
969 if (vnode
->externally_visible
&& !DECL_COMDAT (vnode
->decl
))
970 varpool_mark_needed_node (vnode
);
973 fprintf (dump_file
, "\nNeeded variables:");
974 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
976 fprintf (dump_file
, " %s", varpool_node_name (vnode
));
977 fprintf (dump_file
, "\n\n");
980 ipa_discover_readonly_nonaddressable_vars ();
984 struct ipa_opt_pass_d pass_ipa_whole_program_visibility
=
988 "whole-program", /* name */
989 gate_whole_program_function_and_variable_visibility
,/* gate */
990 whole_program_function_and_variable_visibility
,/* execute */
993 0, /* static_pass_number */
994 TV_CGRAPHOPT
, /* tv_id */
995 0, /* properties_required */
996 0, /* properties_provided */
997 0, /* properties_destroyed */
998 0, /* todo_flags_start */
999 TODO_remove_functions
| TODO_dump_cgraph
1000 | TODO_ggc_collect
/* todo_flags_finish */
1002 NULL
, /* generate_summary */
1003 NULL
, /* write_summary */
1004 NULL
, /* read_summary */
1005 NULL
, /* write_optimization_summary */
1006 NULL
, /* read_optimization_summary */
1007 NULL
, /* stmt_fixup */
1009 NULL
, /* function_transform */
1010 NULL
, /* variable_transform */
1013 /* Hash a cgraph node set element. */
1016 hash_cgraph_node_set_element (const void *p
)
1018 const_cgraph_node_set_element element
= (const_cgraph_node_set_element
) p
;
1019 return htab_hash_pointer (element
->node
);
1022 /* Compare two cgraph node set elements. */
1025 eq_cgraph_node_set_element (const void *p1
, const void *p2
)
1027 const_cgraph_node_set_element e1
= (const_cgraph_node_set_element
) p1
;
1028 const_cgraph_node_set_element e2
= (const_cgraph_node_set_element
) p2
;
1030 return e1
->node
== e2
->node
;
1033 /* Create a new cgraph node set. */
1036 cgraph_node_set_new (void)
1038 cgraph_node_set new_node_set
;
1040 new_node_set
= ggc_alloc_cgraph_node_set_def ();
1041 new_node_set
->hashtab
= htab_create_ggc (10,
1042 hash_cgraph_node_set_element
,
1043 eq_cgraph_node_set_element
,
1045 new_node_set
->nodes
= NULL
;
1046 return new_node_set
;
1049 /* Add cgraph_node NODE to cgraph_node_set SET. */
1052 cgraph_node_set_add (cgraph_node_set set
, struct cgraph_node
*node
)
1055 cgraph_node_set_element element
;
1056 struct cgraph_node_set_element_def dummy
;
1059 slot
= htab_find_slot (set
->hashtab
, &dummy
, INSERT
);
1061 if (*slot
!= HTAB_EMPTY_ENTRY
)
1063 element
= (cgraph_node_set_element
) *slot
;
1064 gcc_assert (node
== element
->node
1065 && (VEC_index (cgraph_node_ptr
, set
->nodes
, element
->index
)
1070 /* Insert node into hash table. */
1071 element
= ggc_alloc_cgraph_node_set_element_def ();
1072 element
->node
= node
;
1073 element
->index
= VEC_length (cgraph_node_ptr
, set
->nodes
);
1076 /* Insert into node vector. */
1077 VEC_safe_push (cgraph_node_ptr
, gc
, set
->nodes
, node
);
1080 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1083 cgraph_node_set_remove (cgraph_node_set set
, struct cgraph_node
*node
)
1085 void **slot
, **last_slot
;
1086 cgraph_node_set_element element
, last_element
;
1087 struct cgraph_node
*last_node
;
1088 struct cgraph_node_set_element_def dummy
;
1091 slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1095 element
= (cgraph_node_set_element
) *slot
;
1096 gcc_assert (VEC_index (cgraph_node_ptr
, set
->nodes
, element
->index
)
1099 /* Remove from vector. We do this by swapping node with the last element
1101 last_node
= VEC_pop (cgraph_node_ptr
, set
->nodes
);
1102 if (last_node
!= node
)
1104 dummy
.node
= last_node
;
1105 last_slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1106 last_element
= (cgraph_node_set_element
) *last_slot
;
1107 gcc_assert (last_element
);
1109 /* Move the last element to the original spot of NODE. */
1110 last_element
->index
= element
->index
;
1111 VEC_replace (cgraph_node_ptr
, set
->nodes
, last_element
->index
,
1115 /* Remove element from hash table. */
1116 htab_clear_slot (set
->hashtab
, slot
);
1120 /* Find NODE in SET and return an iterator to it if found. A null iterator
1121 is returned if NODE is not in SET. */
1123 cgraph_node_set_iterator
1124 cgraph_node_set_find (cgraph_node_set set
, struct cgraph_node
*node
)
1127 struct cgraph_node_set_element_def dummy
;
1128 cgraph_node_set_element element
;
1129 cgraph_node_set_iterator csi
;
1132 slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1134 csi
.index
= (unsigned) ~0;
1137 element
= (cgraph_node_set_element
) *slot
;
1138 gcc_assert (VEC_index (cgraph_node_ptr
, set
->nodes
, element
->index
)
1140 csi
.index
= element
->index
;
1147 /* Dump content of SET to file F. */
1150 dump_cgraph_node_set (FILE *f
, cgraph_node_set set
)
1152 cgraph_node_set_iterator iter
;
1154 for (iter
= csi_start (set
); !csi_end_p (iter
); csi_next (&iter
))
1156 struct cgraph_node
*node
= csi_node (iter
);
1157 fprintf (f
, " %s/%i", cgraph_node_name (node
), node
->uid
);
1162 /* Dump content of SET to stderr. */
1165 debug_cgraph_node_set (cgraph_node_set set
)
1167 dump_cgraph_node_set (stderr
, set
);
1170 /* Hash a varpool node set element. */
/* Hashes the element by its node pointer value — consistent with
   eq_varpool_node_set_element below, which compares node pointers.
   NOTE(review): return type and braces missing from this extraction.  */
1173 hash_varpool_node_set_element (const void *p
)
1175 const_varpool_node_set_element element
= (const_varpool_node_set_element
) p
;
1176 return htab_hash_pointer (element
->node
);
1179 /* Compare two varpool node set elements. */
/* Equality is identity of the wrapped node pointers — the pairing for the
   pointer hash above.  NOTE(review): return type and braces missing from
   this extraction.  */
1182 eq_varpool_node_set_element (const void *p1
, const void *p2
)
1184 const_varpool_node_set_element e1
= (const_varpool_node_set_element
) p1
;
1185 const_varpool_node_set_element e2
= (const_varpool_node_set_element
) p2
;
1187 return e1
->node
== e2
->node
;
1190 /* Create a new varpool node set. */
/* Allocates a GC'd set backed by a pointer hash table (initial size 10)
   plus a node vector (starts NULL, grown on first add).
   NOTE(review): embedded line numbers jump (1190->1193, 1200->1202), so the
   return type, braces and the final htab_create_ggc argument (original
   line 1201) are missing from this extraction.  */
1193 varpool_node_set_new (void)
1195 varpool_node_set new_node_set
;
1197 new_node_set
= ggc_alloc_varpool_node_set_def ();
1198 new_node_set
->hashtab
= htab_create_ggc (10,
1199 hash_varpool_node_set_element
,
1200 eq_varpool_node_set_element
,
1202 new_node_set
->nodes
= NULL
;
1203 return new_node_set
;
1206 /* Add varpool_node NODE to varpool_node_set SET. */
/* Membership is tracked twice: in the hash table (node -> index) and in the
   parallel node vector; element->index is the node's vector position.
   NOTE(review): embedded line numbers jump (1209->1212, 1213->1216,
   1222->1227, 1230->1233), so braces, the dummy.node setup before the
   INSERT lookup, the duplicate-entry early return and the slot store are
   missing from this extraction.  */
1209 varpool_node_set_add (varpool_node_set set
, struct varpool_node
*node
)
1212 varpool_node_set_element element
;
1213 struct varpool_node_set_element_def dummy
;
1216 slot
= htab_find_slot (set
->hashtab
, &dummy
, INSERT
);
/* Non-empty slot: NODE is already in SET; verify consistency.  */
1218 if (*slot
!= HTAB_EMPTY_ENTRY
)
1220 element
= (varpool_node_set_element
) *slot
;
1221 gcc_assert (node
== element
->node
1222 && (VEC_index (varpool_node_ptr
, set
->nodes
, element
->index
)
1227 /* Insert node into hash table. */
1228 element
= ggc_alloc_varpool_node_set_element_def ();
1229 element
->node
= node
;
/* New element's index is the current vector length, i.e. the slot the
   push below will fill.  */
1230 element
->index
= VEC_length (varpool_node_ptr
, set
->nodes
);
1233 /* Insert into node vector. */
1234 VEC_safe_push (varpool_node_ptr
, gc
, set
->nodes
, node
);
1237 /* Remove varpool_node NODE from varpool_node_set SET. */
/* O(1) removal: pop the vector's last node and move it into NODE's old
   vector slot, updating its hash-table element's index to match; finally
   clear NODE's own hash slot.
   NOTE(review): embedded line numbers jump (1245->1248, 1248->1252,
   1253->1256, 1268->1272), so braces, the dummy.node setup, the
   gcc_assert's "== node" operand and the VEC_replace value argument are
   missing from this extraction.  */
1240 varpool_node_set_remove (varpool_node_set set
, struct varpool_node
*node
)
1242 void **slot
, **last_slot
;
1243 varpool_node_set_element element
, last_element
;
1244 struct varpool_node
*last_node
;
1245 struct varpool_node_set_element_def dummy
;
1248 slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1252 element
= (varpool_node_set_element
) *slot
;
1253 gcc_assert (VEC_index (varpool_node_ptr
, set
->nodes
, element
->index
)
1256 /* Remove from vector. We do this by swapping node with the last element
1258 last_node
= VEC_pop (varpool_node_ptr
, set
->nodes
);
/* If NODE itself was the last element the pop already removed it.  */
1259 if (last_node
!= node
)
1261 dummy
.node
= last_node
;
1262 last_slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1263 last_element
= (varpool_node_set_element
) *last_slot
;
1264 gcc_assert (last_element
);
1266 /* Move the last element to the original spot of NODE. */
1267 last_element
->index
= element
->index
;
1268 VEC_replace (varpool_node_ptr
, set
->nodes
, last_element
->index
,
1272 /* Remove element from hash table. */
1273 htab_clear_slot (set
->hashtab
, slot
);
1277 /* Find NODE in SET and return an iterator to it if found. A null iterator
1278 is returned if NODE is not in SET. */
/* Varpool twin of cgraph_node_set_find above.
   NOTE(review): extraction gaps — embedded numbers jump (1281->1284,
   1286->1289, 1289->1291, 1291->1294, 1297->end); braces, the dummy/slot
   setup, the NULL-slot test and the final return are missing.  */
1280 varpool_node_set_iterator
1281 varpool_node_set_find (varpool_node_set set
, struct varpool_node
*node
)
1284 struct varpool_node_set_element_def dummy
;
1285 varpool_node_set_element element
;
1286 varpool_node_set_iterator vsi
;
1289 slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
/* (unsigned) ~0 is the null-iterator index, per the function comment.  */
1291 vsi
.index
= (unsigned) ~0;
1294 element
= (varpool_node_set_element
) *slot
;
1295 gcc_assert (VEC_index (varpool_node_ptr
, set
->nodes
, element
->index
)
1297 vsi
.index
= element
->index
;
1304 /* Dump content of SET to file F. */
/* Prints each varpool node's name; unlike the cgraph dumper above, no uid
   is printed.  NOTE(review): return type and braces missing from this
   extraction (embedded numbering jumps 1304->1307, 1309->1311).  */
1307 dump_varpool_node_set (FILE *f
, varpool_node_set set
)
1309 varpool_node_set_iterator iter
;
1311 for (iter
= vsi_start (set
); !vsi_end_p (iter
); vsi_next (&iter
))
1313 struct varpool_node
*node
= vsi_node (iter
);
1314 fprintf (f
, " %s", varpool_node_name (node
));
1319 /* Dump content of SET to stderr. */
/* Debugger convenience wrapper around dump_varpool_node_set.
   NOTE(review): return type and braces dropped by the extraction.  */
1322 debug_varpool_node_set (varpool_node_set set
)
1324 dump_varpool_node_set (stderr
, set
);
1328 /* Simple ipa profile pass propagating frequencies across the callgraph. */
/* NOTE(review): the function header itself (presumably the `ipa_profile'
   execute hook referenced by pass_ipa_profile below — confirm against the
   original file) was dropped by the extraction, along with braces and
   several declarations (embedded numbering jumps 1328->1333, 1334->1336,
   1336->1339, 1348->1351, 1351->1354, 1368->end, including the final
   cleanup/return).
   Visible algorithm: walk cgraph nodes in reverse postorder; whenever
   cgraph_propagate_frequency changes a local node, mark its local callees
   via aux so they get revisited; then iterate to a fixed point, clearing
   aux as nodes are processed.  */
1333 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
1334 struct cgraph_edge
*e
;
1336 bool something_changed
= false;
1339 order_pos
= cgraph_postorder (order
);
/* First sweep: propagate once over every node in reverse postorder.  */
1340 for (i
= order_pos
- 1; i
>= 0; i
--)
1342 if (order
[i
]->local
.local
&& cgraph_propagate_frequency (order
[i
]))
/* Frequency changed: queue local callees (aux = 1) for reprocessing.  */
1344 for (e
= order
[i
]->callees
; e
; e
= e
->next_callee
)
1345 if (e
->callee
->local
.local
&& !e
->callee
->aux
)
1347 something_changed
= true;
1348 e
->callee
->aux
= (void *)1;
1351 order
[i
]->aux
= NULL
;
/* Iterate until no node's frequency changes any more.  */
1354 while (something_changed
)
1356 something_changed
= false;
1357 for (i
= order_pos
- 1; i
>= 0; i
--)
1359 if (order
[i
]->aux
&& cgraph_propagate_frequency (order
[i
]))
1361 for (e
= order
[i
]->callees
; e
; e
= e
->next_callee
)
1362 if (e
->callee
->local
.local
&& !e
->callee
->aux
)
1364 something_changed
= true;
1365 e
->callee
->aux
= (void *)1;
1368 order
[i
]->aux
= NULL
;
/* Gate for the ipa-profile pass: run only when -fipa-profile is enabled.
   NOTE(review): return type (presumably static bool — confirm) and braces
   were dropped by the extraction.  */
1376 gate_ipa_profile (void)
1378 return flag_ipa_profile
;
/* Pass descriptor for the simple IPA profile-propagation pass above.
   NOTE(review): extraction dropped lines of this initializer (embedded
   numbering jumps 1381->1385, 1387->1390, 1396->1398, 1403->1405, and the
   closing brace after 1406 is absent); recover from the original file.  */
1381 struct ipa_opt_pass_d pass_ipa_profile
=
1385 "ipa-profile", /* name */
1386 gate_ipa_profile
, /* gate */
1387 ipa_profile
, /* execute */
1390 0, /* static_pass_number */
1391 TV_IPA_PROFILE
, /* tv_id */
1392 0, /* properties_required */
1393 0, /* properties_provided */
1394 0, /* properties_destroyed */
1395 0, /* todo_flags_start */
1396 0 /* todo_flags_finish */
1398 NULL
, /* generate_summary */
1399 NULL
, /* write_summary */
1400 NULL
, /* read_summary */
1401 NULL
, /* write_optimization_summary */
1402 NULL
, /* read_optimization_summary */
1403 NULL
, /* stmt_fixup */
1405 NULL
, /* function_transform */
1406 NULL
/* variable_transform */
1409 /* Generate and emit a static constructor or destructor. WHICH must
1410 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1411 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1412 initialization priority for this constructor or destructor. */
/* NOTE(review): extraction gaps — embedded numbering jumps (1412->1415,
   1417->1419, 1422->1424, 1444->1446, 1455->1460, 1461->1464, 1465->1471,
   1473->1476), so the return type, braces, the which_buf declaration, the
   'I'/'D' dispatch conditions around the CONSTRUCTOR/DESTRUCTOR settings
   and other lines are missing.  */
1415 cgraph_build_static_cdtor (char which
, tree body
, int priority
)
/* counter makes each generated cdtor name unique within this TU.  */
1417 static int counter
= 0;
1419 tree decl
, name
, resdecl
;
1421 /* The priority is encoded in the constructor or destructor name.
1422 collect2 will sort the names and arrange that they are called at
1424 sprintf (which_buf
, "%c_%.5d_%d", which
, priority
, counter
++);
1425 name
= get_file_function_name (which_buf
);
/* Build a FUNCTION_DECL of type void(void) for the generated cdtor.  */
1427 decl
= build_decl (input_location
, FUNCTION_DECL
, name
,
1428 build_function_type_list (void_type_node
, NULL_TREE
));
1429 current_function_decl
= decl
;
1431 resdecl
= build_decl (input_location
,
1432 RESULT_DECL
, NULL_TREE
, void_type_node
);
1433 DECL_ARTIFICIAL (resdecl
) = 1;
1434 DECL_RESULT (decl
) = resdecl
;
1435 DECL_CONTEXT (resdecl
) = decl
;
1437 allocate_struct_function (decl
, false);
1439 TREE_STATIC (decl
) = 1;
1440 TREE_USED (decl
) = 1;
1441 DECL_ARTIFICIAL (decl
) = 1;
1442 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl
) = 1;
1443 DECL_SAVED_TREE (decl
) = body
;
/* Without .ctors/.dtors support, collect2 must see the symbol: keep it
   public and prevent it from being discarded.  */
1444 if (!targetm
.have_ctors_dtors
)
1446 TREE_PUBLIC (decl
) = 1;
1447 DECL_PRESERVE_P (decl
) = 1;
1449 DECL_UNINLINABLE (decl
) = 1;
1451 DECL_INITIAL (decl
) = make_node (BLOCK
);
1452 TREE_USED (DECL_INITIAL (decl
)) = 1;
1454 DECL_SOURCE_LOCATION (decl
) = input_location
;
1455 cfun
->function_end_locus
= input_location
;
/* Register as constructor with its init priority ... */
1460 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
1461 decl_init_priority_insert (decl
, priority
);
/* ... or as destructor with its fini priority (dispatch condition on
   WHICH was dropped by the extraction).  */
1464 DECL_STATIC_DESTRUCTOR (decl
) = 1;
1465 decl_fini_priority_insert (decl
, priority
);
1471 gimplify_function_tree (decl
);
1473 cgraph_add_new_function (decl
, false);
1476 current_function_decl
= NULL
;
1480 /* A vector of FUNCTION_DECLs declared as static constructors. */
/* Heap-allocated; freed at the end of ipa_cdtor_merge below.  */
1481 static VEC(tree
, heap
) *static_ctors
;
1482 /* A vector of FUNCTION_DECLs declared as static destructors. */
/* Heap-allocated; freed at the end of ipa_cdtor_merge below.  */
1483 static VEC(tree
, heap
) *static_dtors
;
1485 /* When target does not have ctors and dtors, we call all constructor
1486 and destructor by special initialization/destruction function
1487 recognized by collect2.
1489 When we are going to build this function, collect all constructors and
1490 destructors and turn them into normal functions. */
/* NOTE(review): return type and braces were dropped by the extraction
   (embedded numbering jumps 1490->1493, 1493->1495).  */
1493 record_cdtor_fn (struct cgraph_node
*node
)
1495 if (DECL_STATIC_CONSTRUCTOR (node
->decl
))
1496 VEC_safe_push (tree
, heap
, static_ctors
, node
->decl
);
1497 if (DECL_STATIC_DESTRUCTOR (node
->decl
))
1498 VEC_safe_push (tree
, heap
, static_dtors
, node
->decl
);
/* The recorded function will be called from the merged cdtor; make sure
   the inliner is allowed to inline it there regardless of limits.  */
1499 node
= cgraph_node (node
->decl
);
1500 node
->local
.disregard_inline_limits
= 1;
1503 /* Define global constructors/destructor functions for the CDTORS, of
1504 which they are LEN. The CDTORS are sorted by initialization
1505 priority. If CTOR_P is true, these are constructors; otherwise,
1506 they are destructors. */
/* NOTE(review): this block is the most heavily damaged by the extraction —
   embedded numbering jumps 1506->1509, 1512->1519, 1519->1527, 1528->1531,
   1531->1537, 1539->1544, 1545->1549, 1559->1561, 1562->1564.  The outer
   loop structure, index variables (i, j), body statement list creation and
   several conditions are missing; only the batching-by-priority skeleton
   is visible.  Recover from the original file before compiling.  */
1509 build_cdtor (bool ctor_p
, VEC (tree
, heap
) *cdtors
)
1512 size_t len
= VEC_length (tree
, cdtors
);
1519 priority_type priority
;
/* Scan forward while the priority stays the same, to find one batch.  */
1527 fn
= VEC_index (tree
, cdtors
, j
);
1528 p
= ctor_p
? DECL_INIT_PRIORITY (fn
) : DECL_FINI_PRIORITY (fn
);
1531 else if (p
!= priority
)
1537 /* When there is only one cdtor and target supports them, do nothing. */
1539 && targetm
.have_ctors_dtors
)
1544 /* Find the next batch of constructors/destructors with the same
1545 initialization priority. */
1549 fn
= VEC_index (tree
, cdtors
, i
);
1550 call
= build_call_expr (fn
, 0);
/* The original cdtor becomes a plain function called from the merged one;
   clear its constructor/destructor marking.  */
1552 DECL_STATIC_CONSTRUCTOR (fn
) = 0;
1554 DECL_STATIC_DESTRUCTOR (fn
) = 0;
1555 /* We do not want to optimize away pure/const calls here.
1556 When optimizing, these should be already removed, when not
1557 optimizing, we want user to be able to breakpoint in them. */
1558 TREE_SIDE_EFFECTS (call
) = 1;
1559 append_to_statement_list (call
, &body
);
1561 gcc_assert (body
!= NULL_TREE
);
1562 /* Generate a function to call all the function of like
1564 cgraph_build_static_cdtor (ctor_p
? 'I' : 'D', body
, priority
);
1568 /* Comparison function for qsort. P1 and P2 are actually of type
1569 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1570 used to determine the sort order. */
/* NOTE(review): return type, braces, local declarations (f1, f2,
   priority1, priority2) and the return statements of the two priority
   comparisons are missing (embedded numbering jumps 1570->1573,
   1573->1580, 1585->1587, 1587->1590).  */
1573 compare_ctor (const void *p1
, const void *p2
)
1580 f1
= *(const tree
*)p1
;
1581 f2
= *(const tree
*)p2
;
1582 priority1
= DECL_INIT_PRIORITY (f1
);
1583 priority2
= DECL_INIT_PRIORITY (f2
);
1585 if (priority1
< priority2
)
1587 else if (priority1
> priority2
)
1590 /* Ensure a stable sort. Constructors are executed in backwarding
1591 order to make LTO initialize braries first. */
/* DECL_UIDs are unique, so equal-priority entries get a deterministic
   order; note f2 - f1 (reversed) here, unlike compare_dtor below.  */
1592 return DECL_UID (f2
) - DECL_UID (f1
);
1595 /* Comparison function for qsort. P1 and P2 are actually of type
1596 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1597 used to determine the sort order. */
/* NOTE(review): same extraction damage as compare_ctor above — return
   type, braces, local declarations and the two comparison returns are
   missing (embedded numbering jumps 1597->1600, 1600->1607, 1612->1614,
   1614->1617).  */
1600 compare_dtor (const void *p1
, const void *p2
)
1607 f1
= *(const tree
*)p1
;
1608 f2
= *(const tree
*)p2
;
1609 priority1
= DECL_FINI_PRIORITY (f1
);
1610 priority2
= DECL_FINI_PRIORITY (f2
);
1612 if (priority1
< priority2
)
1614 else if (priority1
> priority2
)
1617 /* Ensure a stable sort. */
/* Equal priorities: break the tie by unique DECL_UID, ascending (the
   mirror of compare_ctor's descending tie-break).  */
1618 return DECL_UID (f1
) - DECL_UID (f2
);
1621 /* Generate functions to call static constructors and destructors
1622 for targets that do not support .ctors/.dtors sections. These
1623 functions have magic names which are detected by collect2. */
/* Sorts the recorded ctors/dtors by priority, then emits the merged
   caller(s) via build_cdtor.  NOTE(review): return type and braces
   missing from this extraction (embedded numbering jumps 1623->1626,
   1626->1628, 1632->1635, 1639->end).  */
1626 build_cdtor_fns (void)
1628 if (!VEC_empty (tree
, static_ctors
))
/* Only reachable when the target lacks native ctor support, or at LTO
   time — asserted here.  */
1630 gcc_assert (!targetm
.have_ctors_dtors
|| in_lto_p
);
1631 VEC_qsort (tree
, static_ctors
, compare_ctor
);
1632 build_cdtor (/*ctor_p=*/true, static_ctors
);
1635 if (!VEC_empty (tree
, static_dtors
))
1637 gcc_assert (!targetm
.have_ctors_dtors
|| in_lto_p
);
1638 VEC_qsort (tree
, static_dtors
, compare_dtor
);
1639 build_cdtor (/*ctor_p=*/false, static_dtors
);
1643 /* Look for constructors and destructors and produce function calling them.
1644 This is needed for targets not supporting ctors or dtors, but we perform the
1645 transformation also at linktime to merge possibly numberous
1646 constructors/destructors into single function to improve code locality and
1650 ipa_cdtor_merge (void)
/* NOTE(review): the tail of the comment, the return type, braces, part of
   the for-body condition (original line 1654, the clause before the "&&"),
   the build_cdtor_fns call and the final return are missing from this
   extraction (embedded numbering jumps 1646->1650, 1653->1655, 1657->1659,
   1660->end).  */
1652 struct cgraph_node
*node
;
/* Record every analyzed cdtor function in the static_ctors/static_dtors
   vectors (via record_cdtor_fn).  */
1653 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1655 && (DECL_STATIC_CONSTRUCTOR (node
->decl
)
1656 || DECL_STATIC_DESTRUCTOR (node
->decl
)))
1657 record_cdtor_fn (node
);
/* Release the heap-allocated recording vectors.  */
1659 VEC_free (tree
, heap
, static_ctors
);
1660 VEC_free (tree
, heap
, static_dtors
);
1664 /* Perform the pass when we have no ctors/dtors support
1665 or at LTO time to merge multiple constructors into single
1669 gate_ipa_cdtor_merge (void)
/* NOTE(review): the end of the comment, the return type and braces were
   dropped by the extraction (embedded numbering jumps 1665->1669,
   1669->1671).  */
1671 return !targetm
.have_ctors_dtors
|| (optimize
&& in_lto_p
);
1674 struct ipa_opt_pass_d pass_ipa_cdtor_merge
=
1679 gate_ipa_cdtor_merge
, /* gate */
1680 ipa_cdtor_merge
, /* execute */
1683 0, /* static_pass_number */
1684 TV_CGRAPHOPT
, /* tv_id */
1685 0, /* properties_required */
1686 0, /* properties_provided */
1687 0, /* properties_destroyed */
1688 0, /* todo_flags_start */
1689 0 /* todo_flags_finish */
1691 NULL
, /* generate_summary */
1692 NULL
, /* write_summary */
1693 NULL
, /* read_summary */
1694 NULL
, /* write_optimization_summary */
1695 NULL
, /* read_optimization_summary */
1696 NULL
, /* stmt_fixup */
1698 NULL
, /* function_transform */
1699 NULL
/* variable_transform */