re PR lto/47247 (Linker plugin specification makes it difficult to handle COMDATs)
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34 #include "ipa-utils.h"
35
36 /* Look for all functions inlined to NODE and update their inlined_to pointers
37 to INLINED_TO. */
38
39 static void
40 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
41 {
42 struct cgraph_edge *e;
43 for (e = node->callees; e; e = e->next_callee)
44 if (e->callee->global.inlined_to)
45 {
46 e->callee->global.inlined_to = inlined_to;
47 update_inlined_to_pointer (e->callee, inlined_to);
48 }
49 }
50
51 /* Add cgraph NODE to queue starting at FIRST.
52
53 The queue is linked via AUX pointers and terminated by pointer to 1.
54 We enqueue nodes at two occasions: when we find them reachable or when we find
55 their bodies needed for further clonning. In the second case we mark them
56 by pointer to 2 after processing so they are re-queue when they become
57 reachable. */
58
59 static void
60 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
61 {
62 /* Node is still in queue; do nothing. */
63 if (node->aux && node->aux != (void *) 2)
64 return;
65 /* Node was already processed as unreachable, re-enqueue
66 only if it became reachable now. */
67 if (node->aux == (void *)2 && !node->reachable)
68 return;
69 node->aux = *first;
70 *first = node;
71 }
72
73 /* Add varpool NODE to queue starting at FIRST. */
74
static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  /* Unconditionally prepend NODE to the worklist chained through the
     AUX pointers; unlike the cgraph variant there is no re-queue
     marker protocol for variables.  */
  node->aux = *first;
  *first = node;
}
81
82 /* Process references. */
83
84 static void
85 process_references (struct ipa_ref_list *list,
86 struct cgraph_node **first,
87 struct varpool_node **first_varpool,
88 bool before_inlining_p)
89 {
90 int i;
91 struct ipa_ref *ref;
92 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
93 {
94 if (ref->refered_type == IPA_REF_CGRAPH)
95 {
96 struct cgraph_node *node = ipa_ref_node (ref);
97 if (!node->reachable
98 && node->analyzed
99 && (!DECL_EXTERNAL (node->decl)
100 || before_inlining_p))
101 node->reachable = true;
102 enqueue_cgraph_node (node, first);
103 }
104 else
105 {
106 struct varpool_node *node = ipa_ref_varpool_node (ref);
107 if (!node->needed)
108 {
109 varpool_mark_needed_node (node);
110 enqueue_varpool_node (node, first_varpool);
111 }
112 }
113 }
114 }
115
116
117 /* Return true when NODE can not be local. Worker for cgraph_local_node_p. */
118
119 static bool
120 cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
121 {
122 /* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
123 return !(cgraph_only_called_directly_or_aliased_p (node)
124 && !ipa_ref_has_aliases_p (&node->ref_list)
125 && node->analyzed
126 && !DECL_EXTERNAL (node->decl)
127 && !node->local.externally_visible
128 && !node->reachable_from_other_partition
129 && !node->in_other_partition);
130 }
131
132 /* Return true when function can be marked local. */
133
134 static bool
135 cgraph_local_node_p (struct cgraph_node *node)
136 {
137 struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);
138
139 /* FIXME: thunks can be considered local, but we need prevent i386
140 from attempting to change calling convention of them. */
141 if (n->thunk.thunk_p)
142 return false;
143 return !cgraph_for_node_and_aliases (n,
144 cgraph_non_local_node_p_1, NULL, true);
145
146 }
147
148 /* Return true when NODE has ADDR reference. */
149
150 static bool
151 has_addr_references_p (struct cgraph_node *node,
152 void *data ATTRIBUTE_UNUSED)
153 {
154 int i;
155 struct ipa_ref *ref;
156
157 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
158 if (ref->use == IPA_REF_ADDR)
159 return true;
160 return false;
161 }
162
163 /* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions have been made.  If BEFORE_INLINING_P is false this function also
166 removes unneeded bodies of extern inline functions. */
167
bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Both worklists are chained through the AUX pointers and terminated
     by the sentinel pointer (void *) 1.  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* All AUX pointers must be free so they can serve as worklist links.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && !node->global.inlined_to
	&& (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	    /* Keep around virtual functions for possible devirtualization.  */
	    || (before_inlining_p
		&& DECL_VIRTUAL_P (node->decl)
		&& (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))
	    /* Also external functions with address taken are better to stay
	       for indirect inlining.  */
	    || (before_inlining_p
		&& DECL_EXTERNAL (node->decl)
		&& node->address_taken)))
      {
        gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }

  /* Mark variables that are obviously needed.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if ((vnode->analyzed || vnode->force_output)
	  && !varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklists, one for cgraph nodes, the other for varpool
     nodes, and are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Nodes popped while still unreachable keep the token 2 so that
	     enqueue_cgraph_node can re-queue them if they become
	     reachable later.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		{
		  if (!e->callee->reachable
		      && node->analyzed
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || before_inlining_p))
		    e->callee->reachable = true;
		  enqueue_cgraph_node (e->callee, &first);
		}
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
	         && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any variable in a comdat group is needed, force
	     all other variables in the same comdat group to be
	     also needed.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later clonning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* Queued (for cloning) but never proven reachable: the body is kept,
	 but edges and references go and the node becomes unanalyzed.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	}
      if (!node->aux)
	{
	  struct cgraph_edge *e;
	  bool found = false;
	  int i;
	  struct ipa_ref *ref;

	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  /* See if there is reachable caller.  */
	  for (e = node->callers; e && !found; e = e->next_caller)
	    if (e->caller->reachable)
	      found = true;
	  /* Also check for a reachable/needed referrer.  */
	  for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		       && !found); i++)
	    if (ref->refering_type == IPA_REF_CGRAPH
		&& ipa_ref_refering_node (ref)->reachable)
	      found = true;
	    else if (ref->refering_type == IPA_REF_VARPOOL
		     && ipa_ref_refering_varpool_node (ref)->needed)
	      found = true;

	  /* If so, we need to keep node in the callgraph.  */
	  if (found)
	    {
	      if (node->analyzed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      /* Unlink NODE from the sibling-clone lists before
			 dropping its body.  */
		      cgraph_release_function_body (node);
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  changed = true;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	    }
	  else
	    {
	      cgraph_remove_node (node);
	      changed = true;
	    }
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      /* Clear the worklist markers for the next invocation.  */
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
481
482 /* Discover variables that have no longer address taken or that are read only
483 and update their flags.
484
485 FIXME: This can not be done in between gimplify and omp_expand since
486 readonly flag plays role on what is shared and what is not. Currently we do
487 this transformation as part of whole program visibility and re-do at
488 ipa-reference pass (to take into account clonning), but it would
489 make sense to do it before early optimizations. */
490
void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    /* Only finalized variables whose references are all visible and
       that still have a flag left to improve are worth scanning.  */
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
        struct ipa_ref *ref;
	/* Classify every reference; stop early once both facts are
	   established.  */
	for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	/* No address references: the variable need not be addressable.  */
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	/* Never written nor address-taken: it can be promoted to readonly.  */
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
538
539 /* Return true when there is a reference to node and it is not vtable. */
540 static bool
541 cgraph_address_taken_from_non_vtable_p (struct cgraph_node *node)
542 {
543 int i;
544 struct ipa_ref *ref;
545 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
546 if (ref->use == IPA_REF_ADDR)
547 {
548 struct varpool_node *node;
549 if (ref->refering_type == IPA_REF_CGRAPH)
550 return true;
551 node = ipa_ref_refering_varpool_node (ref);
552 if (!DECL_VIRTUAL_P (node->decl))
553 return true;
554 }
555 return false;
556 }
557
558 /* COMDAT functions must be shared only if they have address taken,
559 otherwise we can produce our own private implementation with
560 -fwhole-program.
   Return true when turning a COMDAT function static cannot lead to wrong
   code when the resulting object links with a library defining the same COMDAT.
563
564 Virtual functions do have their addresses taken from the vtables,
565 but in C++ there is no way to compare their addresses for equality. */
566
567 bool
568 cgraph_comdat_can_be_unshared_p (struct cgraph_node *node)
569 {
570 if ((cgraph_address_taken_from_non_vtable_p (node)
571 && !DECL_VIRTUAL_P (node->decl))
572 || !node->analyzed)
573 return false;
574 if (node->same_comdat_group)
575 {
576 struct cgraph_node *next;
577
578 /* If more than one function is in the same COMDAT group, it must
579 be shared even if just one function in the comdat group has
580 address taken. */
581 for (next = node->same_comdat_group;
582 next != node; next = next->same_comdat_group)
583 if (cgraph_address_taken_from_non_vtable_p (next)
584 && !DECL_VIRTUAL_P (next->decl))
585 return false;
586 }
587 return true;
588 }
589
590 /* Return true when function NODE should be considered externally visible. */
591
static bool
cgraph_externally_visible_p (struct cgraph_node *node,
			     bool whole_program, bool aliased)
{
  /* Functions without a finalized body here are never visible.  */
  if (!node->local.finalized)
    return false;
  /* Non-COMDAT statics and external declarations are not visible.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* Do not try to localize built-in functions yet.  One of problems is that we
     end up mangling their asm for WHOPR that makes it impossible to call them
     using the implicit built-in declarations anymore.  Similarly this enables
     us to remove them as unreachable before actual calls may appear during
     expansion or folding.  */
  if (DECL_BUILT_IN (node->decl))
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport", DECL_ATTRIBUTES (node->decl)))
    return true;
  /* Linker plugin resolution says no other object refers to this symbol.  */
  if (node->resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;
  /* When doing LTO or whole program, we can bring COMDAT functions static.
     This improves code quality and we know we will duplicate them at most twice
     (in the case that we are not using plugin and link with object file
     implementing same COMDAT)  */
  if ((in_lto_p || whole_program)
      && DECL_COMDAT (node->decl)
      && cgraph_comdat_can_be_unshared_p (node))
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;
  else if (!whole_program)
    return true;

  /* main is always kept visible.  */
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;

  return false;
}
652
653 /* Return true when variable VNODE should be considered externally visible. */
654
655 static bool
656 varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
657 {
658 if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
659 return false;
660
661 /* Do not even try to be smart about aliased nodes. Until we properly
662 represent everything by same body alias, these are just evil. */
663 if (aliased)
664 return true;
665
666 /* If linker counts on us, we must preserve the function. */
667 if (varpool_used_from_object_file_p (vnode))
668 return true;
669
670 if (DECL_PRESERVE_P (vnode->decl))
671 return true;
672 if (lookup_attribute ("externally_visible",
673 DECL_ATTRIBUTES (vnode->decl)))
674 return true;
675 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
676 && lookup_attribute ("dllexport",
677 DECL_ATTRIBUTES (vnode->decl)))
678 return true;
679
680 /* See if we have linker information about symbol not being used or
681 if we need to make guess based on the declaration.
682
683 Even if the linker clams the symbol is unused, never bring internal
684 symbols that are declared by user as used or externally visible.
685 This is needed for i.e. references from asm statements. */
686 if (varpool_used_from_object_file_p (vnode))
687 return true;
688 if (vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
689 return false;
690
691 /* As a special case, the COMDAT virutal tables can be unshared.
692 In LTO mode turn vtables into static variables. The variable is readonly,
693 so this does not enable more optimization, but referring static var
694 is faster for dynamic linking. Also this match logic hidding vtables
695 from LTO symbol tables. */
696 if ((in_lto_p || flag_whole_program)
697 && !vnode->force_output
698 && DECL_COMDAT (vnode->decl) && DECL_VIRTUAL_P (vnode->decl))
699 return false;
700
701 /* When doing link time optimizations, hidden symbols become local. */
702 if (in_lto_p
703 && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
704 || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
705 /* Be sure that node is defined in IR file, not in other object
706 file. In that case we don't set used_from_other_object_file. */
707 && vnode->finalized)
708 ;
709 else if (!flag_whole_program)
710 return true;
711
712 /* Do not attempt to privatize COMDATS by default.
713 This would break linking with C++ libraries sharing
714 inline definitions.
715
716 FIXME: We can do so for readonly vars with no address taken and
717 possibly also for vtables since no direct pointer comparsion is done.
718 It might be interesting to do so to reduce linking overhead. */
719 if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
720 return true;
721 return false;
722 }
723
724 /* Dissolve the same_comdat_group list in which NODE resides. */
725
726 static void
727 dissolve_same_comdat_group_list (struct cgraph_node *node)
728 {
729 struct cgraph_node *n = node, *next;
730 do
731 {
732 next = n->same_comdat_group;
733 n->same_comdat_group = NULL;
734 n = next;
735 }
736 while (n != node);
737 }
738
739 /* Mark visibility of all functions.
740
741 A local function is one whose calls can occur only in the current
742 compilation unit and all its calls are explicit, so we can change
743 its calling convention. We simply mark all static functions whose
744 address is not taken as local.
745
746 We also change the TREE_PUBLIC flag of all declarations that are public
747 in language point of view but we want to overwrite this default
748 via visibilities for the backend point of view. */
749
static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  Alias targets must stay around, so they are
     marked needed and remembered in the pointer sets so the visibility
     predicates can special-case them below.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL
	  && !DECL_EXTERNAL (node->decl))
	{
	  if (!node->analyzed)
	    continue;
	  cgraph_mark_needed_node (node);
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL
	       && !DECL_EXTERNAL (vnode->decl))
	{
	  varpool_mark_needed_node (vnode);
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      int flags = flags_from_decl_or_type (node->decl);

      /* Optimize away PURE and CONST constructors and destructors.  */
      if (optimize
	  && (flags & (ECF_CONST | ECF_PURE))
	  && !(flags & ECF_LOOPING_CONST_OR_PURE))
	{
	  DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
	  DECL_STATIC_DESTRUCTOR (node->decl) = 0;
	}

      /* Frontends and alias code marks nodes as needed before parsing is finished.
	 We may end up marking as node external nodes where this flag is meaningless
	 strip it.  */
      if (node->needed
	  && (DECL_EXTERNAL (node->decl) || !node->analyzed))
	node->needed = 0;

      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      /* A function that was public in the language but is not externally
	 visible gets demoted to a local symbol.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    dissolve_same_comdat_group_list (node);
	}

      if (node->thunk.thunk_p
	  && TREE_PUBLIC (node->decl))
	{
	  struct cgraph_node *decl_node = node;

	  decl_node = cgraph_function_node (decl_node->callees->callee, NULL);

	  /* Thunks have the same visibility as function they are attached to.
	     Make sure the C++ front end set this up properly.  */
	  if (DECL_ONE_ONLY (decl_node->decl))
	    {
	      gcc_checking_assert (DECL_COMDAT (node->decl)
				   == DECL_COMDAT (decl_node->decl));
	      gcc_checking_assert (DECL_COMDAT_GROUP (node->decl)
				   == DECL_COMDAT_GROUP (decl_node->decl));
	      gcc_checking_assert (node->same_comdat_group);
	    }
	  if (DECL_EXTERNAL (decl_node->decl))
	    DECL_EXTERNAL (node->decl) = 1;
	}
    }
  /* Recompute the local flag now that visibility is settled.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->local.local = cgraph_local_node_p (node);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  /* Now settle visibility of the needed variables and localize the rest.  */
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && varpool_externally_visible_p
	       (vnode,
		pointer_set_contains (aliased_vnodes, vnode)))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	  vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
956
957 /* Local function pass handling visibilities. This happens before LTO streaming
958 so in particular -fwhole-program should be ignored at this level. */
959
960 static unsigned int
961 local_function_and_variable_visibility (void)
962 {
963 return function_and_variable_visibility (flag_whole_program && !flag_lto);
964 }
965
/* Pass descriptor: decide function and variable visibilities locally,
   once per translation unit, before any LTO streaming (see
   local_function_and_variable_visibility).  */
struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
985
986 /* Do not re-run on ltrans stage. */
987
988 static bool
989 gate_whole_program_function_and_variable_visibility (void)
990 {
991 return !flag_ltrans;
992 }
993
/* Bring functions local at LTO time with -fwhole-program.  */
995
996 static unsigned int
997 whole_program_function_and_variable_visibility (void)
998 {
999 struct cgraph_node *node;
1000 struct varpool_node *vnode;
1001
1002 function_and_variable_visibility (flag_whole_program);
1003
1004 for (node = cgraph_nodes; node; node = node->next)
1005 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
1006 && node->local.finalized)
1007 cgraph_mark_needed_node (node);
1008 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1009 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
1010 varpool_mark_needed_node (vnode);
1011 if (dump_file)
1012 {
1013 fprintf (dump_file, "\nNeeded variables:");
1014 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1015 if (vnode->needed)
1016 fprintf (dump_file, " %s", varpool_node_name (vnode));
1017 fprintf (dump_file, "\n\n");
1018 }
1019 if (optimize)
1020 ipa_discover_readonly_nonaddressable_vars ();
1021 return 0;
1022 }
1023
/* Pass descriptor: whole-program visibility propagation, run at WPA time
   (gated out during ltrans).  Carries no summaries or transforms.  */
struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
1052
1053
1054 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1055
1056 static unsigned int
1057 ipa_profile (void)
1058 {
1059 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1060 struct cgraph_edge *e;
1061 int order_pos;
1062 bool something_changed = false;
1063 int i;
1064
1065 order_pos = ipa_reverse_postorder (order);
1066 for (i = order_pos - 1; i >= 0; i--)
1067 {
1068 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1069 {
1070 for (e = order[i]->callees; e; e = e->next_callee)
1071 if (e->callee->local.local && !e->callee->aux)
1072 {
1073 something_changed = true;
1074 e->callee->aux = (void *)1;
1075 }
1076 }
1077 order[i]->aux = NULL;
1078 }
1079
1080 while (something_changed)
1081 {
1082 something_changed = false;
1083 for (i = order_pos - 1; i >= 0; i--)
1084 {
1085 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1086 {
1087 for (e = order[i]->callees; e; e = e->next_callee)
1088 if (e->callee->local.local && !e->callee->aux)
1089 {
1090 something_changed = true;
1091 e->callee->aux = (void *)1;
1092 }
1093 }
1094 order[i]->aux = NULL;
1095 }
1096 }
1097 free (order);
1098 return 0;
1099 }
1100
1101 static bool
1102 gate_ipa_profile (void)
1103 {
1104 return flag_ipa_profile;
1105 }
1106
/* Pass descriptor: simple IPA profile/frequency propagation, gated on
   -fipa-profile.  Carries no summaries or transforms.  */
struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "profile_estimate",			/* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};
1134
1135 /* Generate and emit a static constructor or destructor. WHICH must
1136 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1137 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1138 initialization priority for this constructor or destructor.
1139
1140 FINAL specify whether the externally visible name for collect2 should
1141 be produced. */
1142
1143 static void
1144 cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
1145 {
1146 static int counter = 0;
1147 char which_buf[16];
1148 tree decl, name, resdecl;
1149
1150 /* The priority is encoded in the constructor or destructor name.
1151 collect2 will sort the names and arrange that they are called at
1152 program startup. */
1153 if (final)
1154 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1155 else
1156 /* Proudce sane name but one not recognizable by collect2, just for the
1157 case we fail to inline the function. */
1158 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
1159 name = get_file_function_name (which_buf);
1160
1161 decl = build_decl (input_location, FUNCTION_DECL, name,
1162 build_function_type_list (void_type_node, NULL_TREE));
1163 current_function_decl = decl;
1164
1165 resdecl = build_decl (input_location,
1166 RESULT_DECL, NULL_TREE, void_type_node);
1167 DECL_ARTIFICIAL (resdecl) = 1;
1168 DECL_RESULT (decl) = resdecl;
1169 DECL_CONTEXT (resdecl) = decl;
1170
1171 allocate_struct_function (decl, false);
1172
1173 TREE_STATIC (decl) = 1;
1174 TREE_USED (decl) = 1;
1175 DECL_ARTIFICIAL (decl) = 1;
1176 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1177 DECL_SAVED_TREE (decl) = body;
1178 if (!targetm.have_ctors_dtors && final)
1179 {
1180 TREE_PUBLIC (decl) = 1;
1181 DECL_PRESERVE_P (decl) = 1;
1182 }
1183 DECL_UNINLINABLE (decl) = 1;
1184
1185 DECL_INITIAL (decl) = make_node (BLOCK);
1186 TREE_USED (DECL_INITIAL (decl)) = 1;
1187
1188 DECL_SOURCE_LOCATION (decl) = input_location;
1189 cfun->function_end_locus = input_location;
1190
1191 switch (which)
1192 {
1193 case 'I':
1194 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1195 decl_init_priority_insert (decl, priority);
1196 break;
1197 case 'D':
1198 DECL_STATIC_DESTRUCTOR (decl) = 1;
1199 decl_fini_priority_insert (decl, priority);
1200 break;
1201 default:
1202 gcc_unreachable ();
1203 }
1204
1205 gimplify_function_tree (decl);
1206
1207 cgraph_add_new_function (decl, false);
1208
1209 set_cfun (NULL);
1210 current_function_decl = NULL;
1211 }
1212
1213 /* Generate and emit a static constructor or destructor. WHICH must
1214 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1215 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1216 initialization priority for this constructor or destructor. */
1217
void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* FINAL is false: the function gets a "sub_"-prefixed name that
     collect2 does not recognize as a cdtor entry point; the cdtor
     merging pass may later emit the final collect2-visible symbol.  */
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
1223
/* FUNCTION_DECLs declared as static constructors, collected by
   record_cdtor_fn and merged by build_cdtor_fns.  */
static VEC(tree, heap) *static_ctors;
/* Likewise, FUNCTION_DECLs declared as static destructors.  */
static VEC(tree, heap) *static_dtors;
1228
1229 /* When target does not have ctors and dtors, we call all constructor
1230 and destructor by special initialization/destruction function
1231 recognized by collect2.
1232
1233 When we are going to build this function, collect all constructors and
1234 destructors and turn them into normal functions. */
1235
1236 static void
1237 record_cdtor_fn (struct cgraph_node *node)
1238 {
1239 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1240 VEC_safe_push (tree, heap, static_ctors, node->decl);
1241 if (DECL_STATIC_DESTRUCTOR (node->decl))
1242 VEC_safe_push (tree, heap, static_dtors, node->decl);
1243 node = cgraph_get_node (node->decl);
1244 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
1245 }
1246
1247 /* Define global constructors/destructor functions for the CDTORS, of
1248 which they are LEN. The CDTORS are sorted by initialization
1249 priority. If CTOR_P is true, these are constructors; otherwise,
1250 they are destructors. */
1251
static void
build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
{
  size_t i,j;
  size_t len = VEC_length (tree, cdtors);

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Find the batch [i, j) of constructors/destructors sharing the
	 same initialization priority (CDTORS is sorted by priority).  */
      do
	{
	  priority_type p;
	  fn = VEC_index (tree, cdtors, j);
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Emit a call to each cdtor in the batch and clear its static
	 cdtor flag, since the merged function takes over its role.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = VEC_index (tree, cdtors, i);
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them. */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
1311
1312 /* Comparison function for qsort. P1 and P2 are actually of type
1313 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1314 used to determine the sort order. */
1315
1316 static int
1317 compare_ctor (const void *p1, const void *p2)
1318 {
1319 tree f1;
1320 tree f2;
1321 int priority1;
1322 int priority2;
1323
1324 f1 = *(const tree *)p1;
1325 f2 = *(const tree *)p2;
1326 priority1 = DECL_INIT_PRIORITY (f1);
1327 priority2 = DECL_INIT_PRIORITY (f2);
1328
1329 if (priority1 < priority2)
1330 return -1;
1331 else if (priority1 > priority2)
1332 return 1;
1333 else
1334 /* Ensure a stable sort. Constructors are executed in backwarding
1335 order to make LTO initialize braries first. */
1336 return DECL_UID (f2) - DECL_UID (f1);
1337 }
1338
1339 /* Comparison function for qsort. P1 and P2 are actually of type
1340 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1341 used to determine the sort order. */
1342
1343 static int
1344 compare_dtor (const void *p1, const void *p2)
1345 {
1346 tree f1;
1347 tree f2;
1348 int priority1;
1349 int priority2;
1350
1351 f1 = *(const tree *)p1;
1352 f2 = *(const tree *)p2;
1353 priority1 = DECL_FINI_PRIORITY (f1);
1354 priority2 = DECL_FINI_PRIORITY (f2);
1355
1356 if (priority1 < priority2)
1357 return -1;
1358 else if (priority1 > priority2)
1359 return 1;
1360 else
1361 /* Ensure a stable sort. */
1362 return DECL_UID (f1) - DECL_UID (f2);
1363 }
1364
1365 /* Generate functions to call static constructors and destructors
1366 for targets that do not support .ctors/.dtors sections. These
1367 functions have magic names which are detected by collect2. */
1368
static void
build_cdtor_fns (void)
{
  /* Sort each list by priority (with a DECL_UID tie-break for
     stability) before merging runs of equal priority into single
     functions.  Outside of LTO this may only happen on targets
     without native ctor/dtor support.  */
  if (!VEC_empty (tree, static_ctors))
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      VEC_qsort (tree, static_ctors, compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      VEC_qsort (tree, static_dtors, compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
1386
1387 /* Look for constructors and destructors and produce function calling them.
1388 This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
1390 constructors/destructors into single function to improve code locality and
1391 reduce size. */
1392
1393 static unsigned int
1394 ipa_cdtor_merge (void)
1395 {
1396 struct cgraph_node *node;
1397 for (node = cgraph_nodes; node; node = node->next)
1398 if (node->analyzed
1399 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1400 || DECL_STATIC_DESTRUCTOR (node->decl)))
1401 record_cdtor_fn (node);
1402 build_cdtor_fns ();
1403 VEC_free (tree, heap, static_ctors);
1404 VEC_free (tree, heap, static_dtors);
1405 return 0;
1406 }
1407
1408 /* Perform the pass when we have no ctors/dtors support
1409 or at LTO time to merge multiple constructors into single
1410 function. */
1411
1412 static bool
1413 gate_ipa_cdtor_merge (void)
1414 {
1415 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1416 }
1417
/* Pass descriptor: merge static constructors/destructors, run either
   when the target lacks ctor/dtor support or at LTO time (see
   gate_ipa_cdtor_merge).  Carries no summaries or transforms.  */
struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};