ipa.c (cgraph_externally_visible_p, [...]): Use info provided by linker plugin.
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Returns the number of nodes stored into ORDER;
   the caller must supply an array of at least cgraph_n_nodes slots.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge, last;  /* &last is used as a sentinel meaning
				      "all caller edges processed".  */
  int pass;

  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 starts walks only from nodes that are not inlined, do not have
     their address taken and may be called externally; pass 1 sweeps up
     whatever remains unvisited (e.g. nodes only reachable via cycles).  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!node->address_taken
		  && !node->global.inlined_to
		  && !cgraph_only_called_directly_p (node))))
	{
	  /* node->aux doubles as per-node DFS state: NULL = unvisited,
	     a caller edge = next edge to examine, &last = finished.  */
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      /* Descend into the unvisited caller; remember where
			 to resume NODE2.  */
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers of NODE2 are done: emit it and pop back to the
		 node we interrupted, if any.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  /* Clear the aux fields we abused as traversal state.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
110
111 /* Look for all functions inlined to NODE and update their inlined_to pointers
112 to INLINED_TO. */
113
114 static void
115 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
116 {
117 struct cgraph_edge *e;
118 for (e = node->callees; e; e = e->next_callee)
119 if (e->callee->global.inlined_to)
120 {
121 e->callee->global.inlined_to = inlined_to;
122 update_inlined_to_pointer (e->callee, inlined_to);
123 }
124 }
125
126 /* Add cgraph NODE to queue starting at FIRST.
127
128 The queue is linked via AUX pointers and terminated by pointer to 1.
129 We enqueue nodes at two occasions: when we find them reachable or when we find
130 their bodies needed for further clonning. In the second case we mark them
131 by pointer to 2 after processing so they are re-queue when they become
132 reachable. */
133
134 static void
135 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
136 {
137 /* Node is still in queue; do nothing. */
138 if (node->aux && node->aux != (void *) 2)
139 return;
140 /* Node was already processed as unreachable, re-enqueue
141 only if it became reachable now. */
142 if (node->aux == (void *)2 && !node->reachable)
143 return;
144 node->aux = *first;
145 *first = node;
146 }
147
/* Add varpool NODE to queue starting at FIRST.
   The queue is linked through the nodes' AUX pointers and NODE becomes
   the new head.  Unlike enqueue_cgraph_node there is no duplicate check;
   callers appear to guard with the NEEDED flag before enqueueing —
   NOTE(review): confirm every call site does so.  */

static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  node->aux = *first;
  *first = node;
}
156
157 /* Process references. */
158
159 static void
160 process_references (struct ipa_ref_list *list,
161 struct cgraph_node **first,
162 struct varpool_node **first_varpool,
163 bool before_inlining_p)
164 {
165 int i;
166 struct ipa_ref *ref;
167 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
168 {
169 if (ref->refered_type == IPA_REF_CGRAPH)
170 {
171 struct cgraph_node *node = ipa_ref_node (ref);
172 if (!node->reachable
173 && (!DECL_EXTERNAL (node->decl)
174 || before_inlining_p))
175 {
176 node->reachable = true;
177 enqueue_cgraph_node (node, first);
178 }
179 }
180 else
181 {
182 struct varpool_node *node = ipa_ref_varpool_node (ref);
183 if (!node->needed)
184 {
185 varpool_mark_needed_node (node);
186 enqueue_varpool_node (node, first_varpool);
187 }
188 }
189 }
190 }
191
/* Return true when variable NODE can be removed from the varpool if all
   references to it are eliminated: it is not forced to be output, not
   used from another LTO partition, and either not externally visible or
   COMDAT (a COMDAT definition can be re-instantiated elsewhere).  */

static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
  return (!node->force_output && !node->used_from_other_partition
  	  && (DECL_COMDAT (node->decl) || !node->externally_visible));
}
201
/* Return true when function NODE can be marked local: it is defined in
   this unit (analyzed, not DECL_EXTERNAL), it is not externally visible,
   it is neither reachable from nor placed in another LTO partition, and
   all of its calls are direct (address never taken).  */

static bool
cgraph_local_node_p (struct cgraph_node *node)
{
  return (cgraph_only_called_directly_p (node)
	  && node->analyzed
	  && !DECL_EXTERNAL (node->decl)
	  && !node->local.externally_visible
	  && !node->reachable_from_other_partition
	  && !node->in_other_partition);
}
214
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  FILE, when non-NULL,
   receives a dump of what was reclaimed.  Returns true if the callgraph
   or varpool changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Both worklists are terminated by the sentinel pointer (void *) 1;
     see enqueue_cgraph_node for the meaning of the AUX values.  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && !node->global.inlined_to
	&& (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	    /* Keep around virtual functions for possible devirtualization.  */
	    || (before_inlining_p
		&& DECL_VIRTUAL_P (node->decl)
		&& (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))
	    /* Also external functions with address taken are better to stay
	       for indirect inlining.  */
	    || (before_inlining_p
		&& DECL_EXTERNAL (node->decl)
		&& node->address_taken)))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }

  /* Mark variables that are obviously needed.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklist, one for cgraph nodes other for varpools and
     are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Processed but (so far) unreachable nodes are marked with
	     AUX == 2 so enqueue_cgraph_node can re-queue them should they
	     become reachable later.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
		 && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later clonning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* AUX set but not reachable: the body was kept only for cloning —
	 drop the edges and references but keep the node itself.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      if (!node->aux)
	{
	  struct cgraph_edge *e;
	  bool found = false;
	  int i;
	  struct ipa_ref *ref;

	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  /* See if there is reachable caller.  */
	  for (e = node->callers; e && !found; e = e->next_caller)
	    if (e->caller->reachable)
	      found = true;
	  for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		       && !found); i++)
	    if (ref->refering_type == IPA_REF_CGRAPH
		&& ipa_ref_refering_node (ref)->reachable)
	      found = true;
	    else if (ref->refering_type == IPA_REF_VARPOOL
		     && ipa_ref_refering_varpool_node (ref)->needed)
	      found = true;

	  /* If so, we need to keep node in the callgraph.  */
	  if (found || node->needed)
	    {
	      if (node->analyzed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->local.inlinable = false;
		      /* Unlink NODE from the clone tree.  */
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
#ifdef ENABLE_CHECKING
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
#endif
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  changed = true;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	    }
	  else
	    {
	      cgraph_remove_node (node);
	      changed = true;
	    }
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal cone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* Any remaining reference must be an address reference for the
	   flag to stay; the loop stops at the first one found.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
543
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account clonning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Only finalized variables whose every reference is recorded in the IPA
     ref lists can be analyzed; skip those already both non-addressable
     and read-only.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Classify every reference; stop early once both flags are set.  */
	for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
600
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is true when -fwhole-program semantics may be assumed;
   ALIASED is true when NODE is the target of an alias pair.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  struct cgraph_node *alias;
  if (!node->local.finalized)
    return false;
  /* Non-COMDAT functions that are not public (or are mere external
     declarations) are never externally visible.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;

  /* See if we have linker information about symbol not being used or
     if we need to make guess based on the declaration.

     Even if the linker claims the symbol is unused, never bring internal
     symbols that are declared by user as used or externally visible.
     This is needed for i.e. references from asm statements.  */
  for (alias = node->same_body; alias; alias = alias->next)
    if (alias->resolution != LDPR_PREVAILING_DEF_IRONLY)
      break;
  if (!alias && node->resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;
  else if (!whole_program)
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  else if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
	return true;
      if (node->same_comdat_group)
	{
	  struct cgraph_node *next;

	  /* If more than one function is in the same COMDAT group, it must
	     be shared even if just one function in the comdat group has
	     address taken.  */
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (next->address_taken || !next->analyzed)
	      return true;
	}
    }

  /* main must stay visible to the startup code.  */
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;

  return false;
}
675
676 /* Return true when variable VNODE should be considered externally visible. */
677
678 static bool
679 varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
680 {
681 struct varpool_node *alias;
682 if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
683 return false;
684
685 /* Do not even try to be smart about aliased nodes. Until we properly
686 represent everything by same body alias, these are just evil. */
687 if (aliased)
688 return true;
689
690 /* If linker counts on us, we must preserve the function. */
691 if (varpool_used_from_object_file_p (vnode))
692 return true;
693
694 if (DECL_PRESERVE_P (vnode->decl))
695 return true;
696 if (lookup_attribute ("externally_visible",
697 DECL_ATTRIBUTES (vnode->decl)))
698 return true;
699
700 /* See if we have linker information about symbol not being used or
701 if we need to make guess based on the declaration.
702
703 Even if the linker clams the symbol is unused, never bring internal
704 symbols that are declared by user as used or externally visible.
705 This is needed for i.e. references from asm statements. */
706 if (varpool_used_from_object_file_p (vnode))
707 return true;
708 for (alias = vnode->extra_name; alias; alias = alias->next)
709 if (alias->resolution != LDPR_PREVAILING_DEF_IRONLY)
710 break;
711 if (!alias && vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
712 return false;
713
714 /* When doing link time optimizations, hidden symbols become local. */
715 if (in_lto_p
716 && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
717 || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
718 /* Be sure that node is defined in IR file, not in other object
719 file. In that case we don't set used_from_other_object_file. */
720 && vnode->finalized)
721 ;
722 else if (!flag_whole_program)
723 return true;
724
725 /* Do not attempt to privatize COMDATS by default.
726 This would break linking with C++ libraries sharing
727 inline definitions.
728
729 FIXME: We can do so for readonly vars with no address taken and
730 possibly also for vtables since no direct pointer comparsion is done.
731 It might be interesting to do so to reduce linking overhead. */
732 if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
733 return true;
734 return false;
735 }
736
737 /* Dissolve the same_comdat_group list in which NODE resides. */
738
739 static void
740 dissolve_same_comdat_group_list (struct cgraph_node *node)
741 {
742 struct cgraph_node *n = node, *next;
743 do
744 {
745 next = n->same_comdat_group;
746 n->same_comdat_group = NULL;
747 n = next;
748 }
749 while (n != node);
750 }
751
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM is true when the -fwhole-program assumption may be
   applied (see the callers local_function_and_variable_visibility and
   whole_program_function_and_variable_visibility).  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      int flags = flags_from_decl_or_type (node->decl);
      /* Const/pure (non-looping) static constructors and destructors have
	 no observable effect, so drop their registration when
	 optimizing.  */
      if (optimize
	  && (flags & (ECF_CONST | ECF_PURE))
	  && !(flags & ECF_LOOPING_CONST_OR_PURE))
	{
	  DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
	  DECL_STATIC_DESTRUCTOR (node->decl) = 0;
	}

      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is neccesary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
      	          || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
        {
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      /* Functions defined here but not externally visible get localized
	 decls (static linkage), together with all same-body aliases.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though. */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
      		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	   static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
        continue;
      if (vnode->needed
	  && varpool_externally_visible_p
	      (vnode,
	       pointer_set_contains (aliased_vnodes, vnode)))
	vnode->externally_visible = true;
      else
        vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	  vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
     gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
935
936 /* Local function pass handling visibilities. This happens before LTO streaming
937 so in particular -fwhole-program should be ignored at this level. */
938
939 static unsigned int
940 local_function_and_variable_visibility (void)
941 {
942 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
943 }
944
/* Pass descriptor for the early (pre-LTO-streaming) visibility pass.  */

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility", /* name */
  NULL, /* gate */
  local_function_and_variable_visibility,/* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect /* todo_flags_finish */
 }
};
964
/* Do not re-run on ltrans stage.  */

static bool
gate_whole_program_function_and_variable_visibility (void)
{
  /* Visibility was already decided at WPA time; the ltrans units must
     not redo it.  */
  return !flag_ltrans;
}
972
/* Bring functions local at LTO time with -fwhole-program.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  function_and_variable_visibility (flag_whole_program);

  /* Whatever remains externally visible (and is not a COMDAT that could
     be re-instantiated elsewhere) must be marked needed so it is not
     reclaimed.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
1002
/* Pass descriptor for the whole-program visibility pass run at LTO/WPA
   time.  Gated off in ltrans units (see the gate above); streams no
   summaries of its own.  */
struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
1031
1032 /* Hash a cgraph node set element. */
1033
1034 static hashval_t
1035 hash_cgraph_node_set_element (const void *p)
1036 {
1037 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
1038 return htab_hash_pointer (element->node);
1039 }
1040
1041 /* Compare two cgraph node set elements. */
1042
1043 static int
1044 eq_cgraph_node_set_element (const void *p1, const void *p2)
1045 {
1046 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
1047 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
1048
1049 return e1->node == e2->node;
1050 }
1051
1052 /* Create a new cgraph node set. */
1053
1054 cgraph_node_set
1055 cgraph_node_set_new (void)
1056 {
1057 cgraph_node_set new_node_set;
1058
1059 new_node_set = ggc_alloc_cgraph_node_set_def ();
1060 new_node_set->hashtab = htab_create_ggc (10,
1061 hash_cgraph_node_set_element,
1062 eq_cgraph_node_set_element,
1063 NULL);
1064 new_node_set->nodes = NULL;
1065 return new_node_set;
1066 }
1067
1068 /* Add cgraph_node NODE to cgraph_node_set SET. */
1069
1070 void
1071 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
1072 {
1073 void **slot;
1074 cgraph_node_set_element element;
1075 struct cgraph_node_set_element_def dummy;
1076
1077 dummy.node = node;
1078 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1079
1080 if (*slot != HTAB_EMPTY_ENTRY)
1081 {
1082 element = (cgraph_node_set_element) *slot;
1083 gcc_assert (node == element->node
1084 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1085 == node));
1086 return;
1087 }
1088
1089 /* Insert node into hash table. */
1090 element = ggc_alloc_cgraph_node_set_element_def ();
1091 element->node = node;
1092 element->index = VEC_length (cgraph_node_ptr, set->nodes);
1093 *slot = element;
1094
1095 /* Insert into node vector. */
1096 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
1097 }
1098
1099 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1100
void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* NODE is not a member; nothing to remove.  */
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  /* Hash table and index vector must agree on NODE's position.  */
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE, updating its
	 cached index so the hash table stays consistent.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1138
1139 /* Find NODE in SET and return an iterator to it if found. A null iterator
1140 is returned if NODE is not in SET. */
1141
1142 cgraph_node_set_iterator
1143 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1144 {
1145 void **slot;
1146 struct cgraph_node_set_element_def dummy;
1147 cgraph_node_set_element element;
1148 cgraph_node_set_iterator csi;
1149
1150 dummy.node = node;
1151 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1152 if (slot == NULL)
1153 csi.index = (unsigned) ~0;
1154 else
1155 {
1156 element = (cgraph_node_set_element) *slot;
1157 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1158 == node);
1159 csi.index = element->index;
1160 }
1161 csi.set = set;
1162
1163 return csi;
1164 }
1165
1166 /* Dump content of SET to file F. */
1167
1168 void
1169 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1170 {
1171 cgraph_node_set_iterator iter;
1172
1173 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1174 {
1175 struct cgraph_node *node = csi_node (iter);
1176 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1177 }
1178 fprintf (f, "\n");
1179 }
1180
1181 /* Dump content of SET to stderr. */
1182
DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  /* Convenience wrapper intended to be called from the debugger.  */
  dump_cgraph_node_set (stderr, set);
}
1188
1189 /* Hash a varpool node set element. */
1190
1191 static hashval_t
1192 hash_varpool_node_set_element (const void *p)
1193 {
1194 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1195 return htab_hash_pointer (element->node);
1196 }
1197
1198 /* Compare two varpool node set elements. */
1199
1200 static int
1201 eq_varpool_node_set_element (const void *p1, const void *p2)
1202 {
1203 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1204 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1205
1206 return e1->node == e2->node;
1207 }
1208
1209 /* Create a new varpool node set. */
1210
1211 varpool_node_set
1212 varpool_node_set_new (void)
1213 {
1214 varpool_node_set new_node_set;
1215
1216 new_node_set = ggc_alloc_varpool_node_set_def ();
1217 new_node_set->hashtab = htab_create_ggc (10,
1218 hash_varpool_node_set_element,
1219 eq_varpool_node_set_element,
1220 NULL);
1221 new_node_set->nodes = NULL;
1222 return new_node_set;
1223 }
1224
1225 /* Add varpool_node NODE to varpool_node_set SET. */
1226
1227 void
1228 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1229 {
1230 void **slot;
1231 varpool_node_set_element element;
1232 struct varpool_node_set_element_def dummy;
1233
1234 dummy.node = node;
1235 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1236
1237 if (*slot != HTAB_EMPTY_ENTRY)
1238 {
1239 element = (varpool_node_set_element) *slot;
1240 gcc_assert (node == element->node
1241 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1242 == node));
1243 return;
1244 }
1245
1246 /* Insert node into hash table. */
1247 element = ggc_alloc_varpool_node_set_element_def ();
1248 element->node = node;
1249 element->index = VEC_length (varpool_node_ptr, set->nodes);
1250 *slot = element;
1251
1252 /* Insert into node vector. */
1253 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1254 }
1255
1256 /* Remove varpool_node NODE from varpool_node_set SET. */
1257
void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* NODE is not a member; nothing to remove.  */
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  /* Hash table and index vector must agree on NODE's position.  */
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE, updating its
	 cached index so the hash table stays consistent.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1295
1296 /* Find NODE in SET and return an iterator to it if found. A null iterator
1297 is returned if NODE is not in SET. */
1298
1299 varpool_node_set_iterator
1300 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1301 {
1302 void **slot;
1303 struct varpool_node_set_element_def dummy;
1304 varpool_node_set_element element;
1305 varpool_node_set_iterator vsi;
1306
1307 dummy.node = node;
1308 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1309 if (slot == NULL)
1310 vsi.index = (unsigned) ~0;
1311 else
1312 {
1313 element = (varpool_node_set_element) *slot;
1314 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1315 == node);
1316 vsi.index = element->index;
1317 }
1318 vsi.set = set;
1319
1320 return vsi;
1321 }
1322
1323 /* Dump content of SET to file F. */
1324
1325 void
1326 dump_varpool_node_set (FILE *f, varpool_node_set set)
1327 {
1328 varpool_node_set_iterator iter;
1329
1330 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1331 {
1332 struct varpool_node *node = vsi_node (iter);
1333 fprintf (f, " %s", varpool_node_name (node));
1334 }
1335 fprintf (f, "\n");
1336 }
1337
1338 /* Dump content of SET to stderr. */
1339
DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  /* Convenience wrapper intended to be called from the debugger.  */
  dump_varpool_node_set (stderr, set);
}
1345
1346
1347 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1348
1349 static unsigned int
1350 ipa_profile (void)
1351 {
1352 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1353 struct cgraph_edge *e;
1354 int order_pos;
1355 bool something_changed = false;
1356 int i;
1357
1358 order_pos = cgraph_postorder (order);
1359 for (i = order_pos - 1; i >= 0; i--)
1360 {
1361 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1362 {
1363 for (e = order[i]->callees; e; e = e->next_callee)
1364 if (e->callee->local.local && !e->callee->aux)
1365 {
1366 something_changed = true;
1367 e->callee->aux = (void *)1;
1368 }
1369 }
1370 order[i]->aux = NULL;
1371 }
1372
1373 while (something_changed)
1374 {
1375 something_changed = false;
1376 for (i = order_pos - 1; i >= 0; i--)
1377 {
1378 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1379 {
1380 for (e = order[i]->callees; e; e = e->next_callee)
1381 if (e->callee->local.local && !e->callee->aux)
1382 {
1383 something_changed = true;
1384 e->callee->aux = (void *)1;
1385 }
1386 }
1387 order[i]->aux = NULL;
1388 }
1389 }
1390 free (order);
1391 return 0;
1392 }
1393
1394 static bool
1395 gate_ipa_profile (void)
1396 {
1397 return flag_ipa_profile;
1398 }
1399
/* Pass descriptor for the IPA profile propagation pass.  Gated on
   -fipa-profile; streams no summaries and requests no TODOs.  */
struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",			/* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};
1427
1428 /* Generate and emit a static constructor or destructor. WHICH must
1429 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1430 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1431 initialization priority for this constructor or destructor. */
1432
void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a void (*)(void) FUNCTION_DECL with a void RESULT_DECL.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without native ctor/dtor support the function must keep a public,
     preserved name so collect2 can find and call it.  */
  if (!targetm.have_ctors_dtors)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the decl as a constructor or destructor and record its
     priority.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* Hand the new function over to the cgraph and reset global state.  */
  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
1497
1498
/* A vector of FUNCTION_DECLs declared as static constructors;
   collected by record_cdtor_fn, consumed and freed by ipa_cdtor_merge.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors;
   collected by record_cdtor_fn, consumed and freed by ipa_cdtor_merge.  */
static VEC(tree, heap) *static_dtors;
1503
1504 /* When target does not have ctors and dtors, we call all constructor
1505 and destructor by special initialization/destruction function
1506 recognized by collect2.
1507
1508 When we are going to build this function, collect all constructors and
1509 destructors and turn them into normal functions. */
1510
1511 static void
1512 record_cdtor_fn (struct cgraph_node *node)
1513 {
1514 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1515 VEC_safe_push (tree, heap, static_ctors, node->decl);
1516 if (DECL_STATIC_DESTRUCTOR (node->decl))
1517 VEC_safe_push (tree, heap, static_dtors, node->decl);
1518 node = cgraph_node (node->decl);
1519 node->local.disregard_inline_limits = 1;
1520 }
1521
1522 /* Define global constructors/destructor functions for the CDTORS, of
1523 which they are LEN. The CDTORS are sorted by initialization
1524 priority. If CTOR_P is true, these are constructors; otherwise,
1525 they are destructors. */
1526
static void
build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
{
  size_t i,j;
  size_t len = VEC_length (tree, cdtors);

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Scan forward from I to find J, the end of the run of cdtors
	 sharing the same initialization priority (CDTORS is sorted by
	 priority).  */
      j = i;
      do
	{
	  priority_type p;
	  fn = VEC_index (tree, cdtors, j);
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Emit a call to each cdtor in the batch [I, J) and clear its
	 constructor/destructor bit, since the generated wrapper takes
	 over that role.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = VEC_index (tree, cdtors, i);
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a single function calling all the cdtors of this
	 priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
1586
1587 /* Comparison function for qsort. P1 and P2 are actually of type
1588 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1589 used to determine the sort order. */
1590
1591 static int
1592 compare_ctor (const void *p1, const void *p2)
1593 {
1594 tree f1;
1595 tree f2;
1596 int priority1;
1597 int priority2;
1598
1599 f1 = *(const tree *)p1;
1600 f2 = *(const tree *)p2;
1601 priority1 = DECL_INIT_PRIORITY (f1);
1602 priority2 = DECL_INIT_PRIORITY (f2);
1603
1604 if (priority1 < priority2)
1605 return -1;
1606 else if (priority1 > priority2)
1607 return 1;
1608 else
1609 /* Ensure a stable sort. Constructors are executed in backwarding
1610 order to make LTO initialize braries first. */
1611 return DECL_UID (f2) - DECL_UID (f1);
1612 }
1613
1614 /* Comparison function for qsort. P1 and P2 are actually of type
1615 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1616 used to determine the sort order. */
1617
1618 static int
1619 compare_dtor (const void *p1, const void *p2)
1620 {
1621 tree f1;
1622 tree f2;
1623 int priority1;
1624 int priority2;
1625
1626 f1 = *(const tree *)p1;
1627 f2 = *(const tree *)p2;
1628 priority1 = DECL_FINI_PRIORITY (f1);
1629 priority2 = DECL_FINI_PRIORITY (f2);
1630
1631 if (priority1 < priority2)
1632 return -1;
1633 else if (priority1 > priority2)
1634 return 1;
1635 else
1636 /* Ensure a stable sort. */
1637 return DECL_UID (f1) - DECL_UID (f2);
1638 }
1639
1640 /* Generate functions to call static constructors and destructors
1641 for targets that do not support .ctors/.dtors sections. These
1642 functions have magic names which are detected by collect2. */
1643
1644 static void
1645 build_cdtor_fns (void)
1646 {
1647 if (!VEC_empty (tree, static_ctors))
1648 {
1649 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1650 VEC_qsort (tree, static_ctors, compare_ctor);
1651 build_cdtor (/*ctor_p=*/true, static_ctors);
1652 }
1653
1654 if (!VEC_empty (tree, static_dtors))
1655 {
1656 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1657 VEC_qsort (tree, static_dtors, compare_dtor);
1658 build_cdtor (/*ctor_p=*/false, static_dtors);
1659 }
1660 }
1661
1662 /* Look for constructors and destructors and produce function calling them.
1663 This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
1665 constructors/destructors into single function to improve code locality and
1666 reduce size. */
1667
1668 static unsigned int
1669 ipa_cdtor_merge (void)
1670 {
1671 struct cgraph_node *node;
1672 for (node = cgraph_nodes; node; node = node->next)
1673 if (node->analyzed
1674 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1675 || DECL_STATIC_DESTRUCTOR (node->decl)))
1676 record_cdtor_fn (node);
1677 build_cdtor_fns ();
1678 VEC_free (tree, heap, static_ctors);
1679 VEC_free (tree, heap, static_dtors);
1680 return 0;
1681 }
1682
1683 /* Perform the pass when we have no ctors/dtors support
1684 or at LTO time to merge multiple constructors into single
1685 function. */
1686
1687 static bool
1688 gate_ipa_cdtor_merge (void)
1689 {
1690 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1691 }
1692
/* Pass descriptor for the cdtor merging pass.  Gated by
   gate_ipa_cdtor_merge; streams no summaries and requests no TODOs.  */
struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};