ipa.c (cgraph_remove_unreachable_nodes): External references can always be removed.
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
/* Fill array ORDER with all nodes in reverse topological order of the
   callgraph (callees before callers).  ORDER must have room for at
   least cgraph_n_nodes entries.  Returns the number of nodes stored.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  /* LAST is never read; its address is stored in node->aux as a
     sentinel meaning "all caller edges of this node were visited".  */
  struct cgraph_edge *edge, last;
  int pass;

  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 starts DFS walks only from "root" candidates (nodes not
     only called directly and without their address taken); pass 1
     picks up whatever is left, i.e. nodes reachable only via cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!cgraph_only_called_directly_p (node)
		  && !node->address_taken)))
	{
	  /* node->aux doubles as the DFS iterator: it holds the next
	     caller edge still to visit, or &last when exhausted.  */
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      /* Caller not visited yet; descend into it and
			 remember where to resume via the stack.  */
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers of NODE2 done: emit it and pop the stack.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  /* Clear aux so other passes can reuse it.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
109
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
111 to INLINED_TO. */
112
113 static void
114 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
115 {
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
119 {
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
122 }
123 }
124
125 /* Add cgraph NODE to queue starting at FIRST.
126
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
131 reachable. */
132
133 static void
134 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
135 {
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
138 return;
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
142 return;
143 node->aux = *first;
144 *first = node;
145 }
146
/* Add varpool NODE to the queue starting at FIRST.  The queue is
   threaded through the aux pointers; NODE is pushed on the front.
   Unlike enqueue_cgraph_node, no duplicate check is done here --
   callers guard with the needed flag.  */

static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  node->aux = *first;
  *first = node;
}
155
156 /* Process references. */
157
158 static void
159 process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
163 {
164 int i;
165 struct ipa_ref *ref;
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
167 {
168 if (ref->refered_type == IPA_REF_CGRAPH)
169 {
170 struct cgraph_node *node = ipa_ref_node (ref);
171 if (!node->reachable
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
174 {
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
177 }
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189 }
190
/* Return true when variable NODE can be removed from the varpool
   if all references to it are eliminated.  (Not a function predicate,
   despite sitting next to the cgraph helpers.)  */

static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
  /* Forced-output variables and variables used by another LTO
     partition must stay.  Non-COMDAT externally visible variables may
     be referenced from outside this translation unit, so only COMDAT
     or non-visible ones are removable.  */
  return (!node->force_output && !node->used_from_other_partition
	  && (DECL_COMDAT (node->decl) || !node->externally_visible));
}
200
201 /* Return true when function can be marked local. */
202
203 static bool
204 cgraph_local_node_p (struct cgraph_node *node)
205 {
206 return (cgraph_only_called_directly_p (node)
207 && node->analyzed
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
212 }
213
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  Returns true when
   anything was removed or changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Both worklists are threaded through aux pointers and terminated by
     a pointer to 1, so NULL stays free to mean "not queued".  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Seed the cgraph worklist with the roots: analyzed nodes that may
     not be removed even with no direct calls or references, plus
     (after inlining) virtual functions kept for devirtualization.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (!node->analyzed)
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }
    else if ((!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	      /* Keep around virtual functions for possible devirtualization.  */
	      || (!before_inlining_p
		  && !node->global.inlined_to
		  && DECL_VIRTUAL_P (node->decl)
		  && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))))
	     /* External definitions stop being roots once inlining
		decisions are done; they will not be output anyway.  */
	     && ((!DECL_EXTERNAL (node->decl))
		 || before_inlining_p))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }
  /* Seed the varpool worklist likewise; the needed queue is rebuilt
     from scratch.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklist, one for cgraph nodes other for varpools and
     are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Mark processed-but-unreachable nodes with pointer-to-2 so
	     enqueue_cgraph_node re-queues them should they become
	     reachable later (their body is kept for cloning).  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
	         && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later clonning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* Queued but never reachable: the body is kept only for cloning.
	 Drop its edges and references and mark it unanalyzed.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      /* Never queued at all: completely unreachable.  */
      if (!node->aux)
	{
	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
	    cgraph_remove_node (node);
	  else
	    {
	      struct cgraph_edge *e;

	      /* See if there is reachable caller.  */
	      for (e = node->callers; e; e = e->next_caller)
		if (e->caller->reachable)
		  break;

	      /* If so, we need to keep node in the callgraph.  */
	      if (e || node->needed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->local.inlinable = false;
		      /* Unlink NODE from the sibling-clone list.  */
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
#ifdef ENABLE_CHECKING
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
#endif
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	      else
		cgraph_remove_node (node);
	    }
	  changed = true;
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* Look for at least one remaining incoming reference; any that
	   survives must be an address reference (asserted below).  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
526
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account clonning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    /* Only finalized variables with all references visible to IPA are
       candidates, and only when at least one flag could be improved.  */
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
        struct ipa_ref *ref;
	/* Classify every incoming reference; stop scanning as soon as
	   both flags are known to be required.  */
	for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
583
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is the effective -fwhole-program setting; ALIASED is true
   when NODE is the target of an alias pair.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  if (!node->local.finalized)
    return false;
  /* Plain static functions, and external declarations, are never
     externally visible from this TU's point of view.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  /* When doing link time optimizations, hidden symbols become local.
     The empty statement falls through to the COMDAT/attribute checks
     below instead of returning true here.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;
  else if (!whole_program)
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  else if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
	return true;
      if (node->same_comdat_group)
	{
	  struct cgraph_node *next;

	  /* If more than one function is in the same COMDAT group, it must
	     be shared even if just one function in the comdat group has
	     address taken.  */
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (next->address_taken || !next->analyzed)
	      return true;
	}
    }
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  return false;
}
642
643 /* Dissolve the same_comdat_group list in which NODE resides. */
644
645 static void
646 dissolve_same_comdat_group_list (struct cgraph_node *node)
647 {
648 struct cgraph_node *n = node, *next;
649 do
650 {
651 next = n->same_comdat_group;
652 n->same_comdat_group = NULL;
653 n = next;
654 }
655 while (n != node);
656 }
657
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  /* Nodes that are targets of an alias pair must stay externally
     visible regardless of all other analysis below.  */
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      /* Demote the decl itself when the node is defined here but not
	 externally visible.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
	  /* Same-body aliases must follow the visibility of the node.  */
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
	  && (((!whole_program
		/* We can privatize comdat readonly variables whose address is
		   not taken, but doing so is not going to bring us
		   optimization opportunities until we start reordering
		   datastructures.  */
		|| DECL_COMDAT (vnode->decl)
		|| DECL_WEAK (vnode->decl))
	       /* When doing linktime optimizations, all hidden symbols will
		  become local.  */
	       && (!in_lto_p
		   || (DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
		       && DECL_VISIBILITY (vnode->decl) != VISIBILITY_INTERNAL)
		   /* We can get prevailing decision in other object file.
		      In this case we do not set used_from_object_file.  */
		   || !vnode->finalized))
	      || DECL_PRESERVE_P (vnode->decl)
	      || varpool_used_from_object_file_p (vnode)
	      || pointer_set_contains (aliased_vnodes, vnode)
	      || lookup_attribute ("externally_visible",
				   DECL_ATTRIBUTES (vnode->decl))))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	  vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
850
851 /* Local function pass handling visibilities. This happens before LTO streaming
852 so in particular -fwhole-program should be ignored at this level. */
853
854 static unsigned int
855 local_function_and_variable_visibility (void)
856 {
857 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
858 }
859
/* Pass descriptor for the pre-LTO visibility pass; runs unconditionally
   (NULL gate) and removes functions made removable by localization.  */

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
879
880 /* Do not re-run on ltrans stage. */
881
882 static bool
883 gate_whole_program_function_and_variable_visibility (void)
884 {
885 return !flag_ltrans;
886 }
887
/* Bring functions local at LTO time with -fwhole-program.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  function_and_variable_visibility (flag_whole_program);

  /* Externally visible non-COMDAT definitions must be output, so mark
     them needed.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
917
/* Pass descriptor for the LTO-time whole-program visibility pass; gated
   off at the ltrans stage since the work was done during WPA.  */

struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
946
947 /* Hash a cgraph node set element. */
948
949 static hashval_t
950 hash_cgraph_node_set_element (const void *p)
951 {
952 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
953 return htab_hash_pointer (element->node);
954 }
955
956 /* Compare two cgraph node set elements. */
957
958 static int
959 eq_cgraph_node_set_element (const void *p1, const void *p2)
960 {
961 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
962 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
963
964 return e1->node == e2->node;
965 }
966
967 /* Create a new cgraph node set. */
968
969 cgraph_node_set
970 cgraph_node_set_new (void)
971 {
972 cgraph_node_set new_node_set;
973
974 new_node_set = ggc_alloc_cgraph_node_set_def ();
975 new_node_set->hashtab = htab_create_ggc (10,
976 hash_cgraph_node_set_element,
977 eq_cgraph_node_set_element,
978 NULL);
979 new_node_set->nodes = NULL;
980 return new_node_set;
981 }
982
/* Add cgraph_node NODE to cgraph_node_set SET.  No-op when NODE is
   already a member.  The set keeps a hash table (for membership tests)
   and a vector (for ordered iteration) in sync.  */

void
cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;
  cgraph_node_set_element element;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already present: just sanity-check that hash table and vector
	 agree on where NODE lives.  */
      element = (cgraph_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  */
  element = ggc_alloc_cgraph_node_set_element_def ();
  element->node = node;
  element->index = VEC_length (cgraph_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
}
1013
1014 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1015
1016 void
1017 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
1018 {
1019 void **slot, **last_slot;
1020 cgraph_node_set_element element, last_element;
1021 struct cgraph_node *last_node;
1022 struct cgraph_node_set_element_def dummy;
1023
1024 dummy.node = node;
1025 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1026 if (slot == NULL)
1027 return;
1028
1029 element = (cgraph_node_set_element) *slot;
1030 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1031 == node);
1032
1033 /* Remove from vector. We do this by swapping node with the last element
1034 of the vector. */
1035 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
1036 if (last_node != node)
1037 {
1038 dummy.node = last_node;
1039 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1040 last_element = (cgraph_node_set_element) *last_slot;
1041 gcc_assert (last_element);
1042
1043 /* Move the last element to the original spot of NODE. */
1044 last_element->index = element->index;
1045 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
1046 last_node);
1047 }
1048
1049 /* Remove element from hash table. */
1050 htab_clear_slot (set->hashtab, slot);
1051 ggc_free (element);
1052 }
1053
1054 /* Find NODE in SET and return an iterator to it if found. A null iterator
1055 is returned if NODE is not in SET. */
1056
1057 cgraph_node_set_iterator
1058 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1059 {
1060 void **slot;
1061 struct cgraph_node_set_element_def dummy;
1062 cgraph_node_set_element element;
1063 cgraph_node_set_iterator csi;
1064
1065 dummy.node = node;
1066 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1067 if (slot == NULL)
1068 csi.index = (unsigned) ~0;
1069 else
1070 {
1071 element = (cgraph_node_set_element) *slot;
1072 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1073 == node);
1074 csi.index = element->index;
1075 }
1076 csi.set = set;
1077
1078 return csi;
1079 }
1080
1081 /* Dump content of SET to file F. */
1082
1083 void
1084 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1085 {
1086 cgraph_node_set_iterator iter;
1087
1088 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1089 {
1090 struct cgraph_node *node = csi_node (iter);
1091 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1092 }
1093 fprintf (f, "\n");
1094 }
1095
1096 /* Dump content of SET to stderr. */
1097
1098 DEBUG_FUNCTION void
1099 debug_cgraph_node_set (cgraph_node_set set)
1100 {
1101 dump_cgraph_node_set (stderr, set);
1102 }
1103
1104 /* Hash a varpool node set element. */
1105
1106 static hashval_t
1107 hash_varpool_node_set_element (const void *p)
1108 {
1109 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1110 return htab_hash_pointer (element->node);
1111 }
1112
1113 /* Compare two varpool node set elements. */
1114
1115 static int
1116 eq_varpool_node_set_element (const void *p1, const void *p2)
1117 {
1118 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1119 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1120
1121 return e1->node == e2->node;
1122 }
1123
1124 /* Create a new varpool node set. */
1125
1126 varpool_node_set
1127 varpool_node_set_new (void)
1128 {
1129 varpool_node_set new_node_set;
1130
1131 new_node_set = ggc_alloc_varpool_node_set_def ();
1132 new_node_set->hashtab = htab_create_ggc (10,
1133 hash_varpool_node_set_element,
1134 eq_varpool_node_set_element,
1135 NULL);
1136 new_node_set->nodes = NULL;
1137 return new_node_set;
1138 }
1139
1140 /* Add varpool_node NODE to varpool_node_set SET. */
1141
1142 void
1143 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1144 {
1145 void **slot;
1146 varpool_node_set_element element;
1147 struct varpool_node_set_element_def dummy;
1148
1149 dummy.node = node;
1150 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1151
1152 if (*slot != HTAB_EMPTY_ENTRY)
1153 {
1154 element = (varpool_node_set_element) *slot;
1155 gcc_assert (node == element->node
1156 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1157 == node));
1158 return;
1159 }
1160
1161 /* Insert node into hash table. */
1162 element = ggc_alloc_varpool_node_set_element_def ();
1163 element->node = node;
1164 element->index = VEC_length (varpool_node_ptr, set->nodes);
1165 *slot = element;
1166
1167 /* Insert into node vector. */
1168 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1169 }
1170
1171 /* Remove varpool_node NODE from varpool_node_set SET. */
1172
1173 void
1174 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1175 {
1176 void **slot, **last_slot;
1177 varpool_node_set_element element, last_element;
1178 struct varpool_node *last_node;
1179 struct varpool_node_set_element_def dummy;
1180
1181 dummy.node = node;
1182 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1183 if (slot == NULL)
1184 return;
1185
1186 element = (varpool_node_set_element) *slot;
1187 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1188 == node);
1189
1190 /* Remove from vector. We do this by swapping node with the last element
1191 of the vector. */
1192 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1193 if (last_node != node)
1194 {
1195 dummy.node = last_node;
1196 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1197 last_element = (varpool_node_set_element) *last_slot;
1198 gcc_assert (last_element);
1199
1200 /* Move the last element to the original spot of NODE. */
1201 last_element->index = element->index;
1202 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1203 last_node);
1204 }
1205
1206 /* Remove element from hash table. */
1207 htab_clear_slot (set->hashtab, slot);
1208 ggc_free (element);
1209 }
1210
1211 /* Find NODE in SET and return an iterator to it if found. A null iterator
1212 is returned if NODE is not in SET. */
1213
1214 varpool_node_set_iterator
1215 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1216 {
1217 void **slot;
1218 struct varpool_node_set_element_def dummy;
1219 varpool_node_set_element element;
1220 varpool_node_set_iterator vsi;
1221
1222 dummy.node = node;
1223 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1224 if (slot == NULL)
1225 vsi.index = (unsigned) ~0;
1226 else
1227 {
1228 element = (varpool_node_set_element) *slot;
1229 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1230 == node);
1231 vsi.index = element->index;
1232 }
1233 vsi.set = set;
1234
1235 return vsi;
1236 }
1237
1238 /* Dump content of SET to file F. */
1239
1240 void
1241 dump_varpool_node_set (FILE *f, varpool_node_set set)
1242 {
1243 varpool_node_set_iterator iter;
1244
1245 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1246 {
1247 struct varpool_node *node = vsi_node (iter);
1248 fprintf (f, " %s", varpool_node_name (node));
1249 }
1250 fprintf (f, "\n");
1251 }
1252
1253 /* Dump content of SET to stderr. */
1254
1255 DEBUG_FUNCTION void
1256 debug_varpool_node_set (varpool_node_set set)
1257 {
1258 dump_varpool_node_set (stderr, set);
1259 }
1260
1261
1262 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1263
1264 static unsigned int
1265 ipa_profile (void)
1266 {
1267 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1268 struct cgraph_edge *e;
1269 int order_pos;
1270 bool something_changed = false;
1271 int i;
1272
1273 order_pos = cgraph_postorder (order);
1274 for (i = order_pos - 1; i >= 0; i--)
1275 {
1276 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1277 {
1278 for (e = order[i]->callees; e; e = e->next_callee)
1279 if (e->callee->local.local && !e->callee->aux)
1280 {
1281 something_changed = true;
1282 e->callee->aux = (void *)1;
1283 }
1284 }
1285 order[i]->aux = NULL;
1286 }
1287
1288 while (something_changed)
1289 {
1290 something_changed = false;
1291 for (i = order_pos - 1; i >= 0; i--)
1292 {
1293 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1294 {
1295 for (e = order[i]->callees; e; e = e->next_callee)
1296 if (e->callee->local.local && !e->callee->aux)
1297 {
1298 something_changed = true;
1299 e->callee->aux = (void *)1;
1300 }
1301 }
1302 order[i]->aux = NULL;
1303 }
1304 }
1305 free (order);
1306 return 0;
1307 }
1308
1309 static bool
1310 gate_ipa_profile (void)
1311 {
1312 return flag_ipa_profile;
1313 }
1314
/* Pass descriptor for the IPA profile propagation pass above.  It has
   no LTO summary streaming hooks and no per-function transforms.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,		        /* gate */
  ipa_profile,		                /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1342
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;	/* Makes each generated name unique.  */
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a void(void) FUNCTION_DECL for the generated cdtor.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without native ctor/dtor support, collect2 must be able to see the
     symbol, so make it public and keep it from being removed.  */
  if (!targetm.have_ctors_dtors)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the decl as a constructor or destructor and record its
     priority for later use.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* Hand the new function to the callgraph so it gets compiled.  */
  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
1412
1413
/* A vector of FUNCTION_DECLs declared as static constructors.
   Heap allocated; freed at the end of ipa_cdtor_merge.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.
   Heap allocated; freed at the end of ipa_cdtor_merge.  */
static VEC(tree, heap) *static_dtors;
1418
1419 /* When target does not have ctors and dtors, we call all constructor
1420 and destructor by special initialization/destruction function
1421 recognized by collect2.
1422
1423 When we are going to build this function, collect all constructors and
1424 destructors and turn them into normal functions. */
1425
1426 static void
1427 record_cdtor_fn (struct cgraph_node *node)
1428 {
1429 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1430 VEC_safe_push (tree, heap, static_ctors, node->decl);
1431 if (DECL_STATIC_DESTRUCTOR (node->decl))
1432 VEC_safe_push (tree, heap, static_dtors, node->decl);
1433 node = cgraph_node (node->decl);
1434 node->local.disregard_inline_limits = 1;
1435 }
1436
1437 /* Define global constructors/destructor functions for the CDTORS, of
1438 which they are LEN. The CDTORS are sorted by initialization
1439 priority. If CTOR_P is true, these are constructors; otherwise,
1440 they are destructors. */
1441
1442 static void
1443 build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
1444 {
1445 size_t i,j;
1446 size_t len = VEC_length (tree, cdtors);
1447
1448 i = 0;
1449 while (i < len)
1450 {
1451 tree body;
1452 tree fn;
1453 priority_type priority;
1454
1455 priority = 0;
1456 body = NULL_TREE;
1457 j = i;
1458 do
1459 {
1460 priority_type p;
1461 fn = VEC_index (tree, cdtors, j);
1462 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1463 if (j == i)
1464 priority = p;
1465 else if (p != priority)
1466 break;
1467 j++;
1468 }
1469 while (j < len);
1470
1471 /* When there is only one cdtor and target supports them, do nothing. */
1472 if (j == i + 1
1473 && targetm.have_ctors_dtors)
1474 {
1475 i++;
1476 continue;
1477 }
1478 /* Find the next batch of constructors/destructors with the same
1479 initialization priority. */
1480 for (;i < j; i++)
1481 {
1482 tree call;
1483 fn = VEC_index (tree, cdtors, i);
1484 call = build_call_expr (fn, 0);
1485 if (ctor_p)
1486 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1487 else
1488 DECL_STATIC_DESTRUCTOR (fn) = 0;
1489 /* We do not want to optimize away pure/const calls here.
1490 When optimizing, these should be already removed, when not
1491 optimizing, we want user to be able to breakpoint in them. */
1492 TREE_SIDE_EFFECTS (call) = 1;
1493 append_to_statement_list (call, &body);
1494 }
1495 while (i < len);
1496 gcc_assert (body != NULL_TREE);
1497 /* Generate a function to call all the function of like
1498 priority. */
1499 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
1500 }
1501 }
1502
1503 /* Comparison function for qsort. P1 and P2 are actually of type
1504 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1505 used to determine the sort order. */
1506
1507 static int
1508 compare_ctor (const void *p1, const void *p2)
1509 {
1510 tree f1;
1511 tree f2;
1512 int priority1;
1513 int priority2;
1514
1515 f1 = *(const tree *)p1;
1516 f2 = *(const tree *)p2;
1517 priority1 = DECL_INIT_PRIORITY (f1);
1518 priority2 = DECL_INIT_PRIORITY (f2);
1519
1520 if (priority1 < priority2)
1521 return -1;
1522 else if (priority1 > priority2)
1523 return 1;
1524 else
1525 /* Ensure a stable sort. Constructors are executed in backwarding
1526 order to make LTO initialize braries first. */
1527 return DECL_UID (f2) - DECL_UID (f1);
1528 }
1529
1530 /* Comparison function for qsort. P1 and P2 are actually of type
1531 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1532 used to determine the sort order. */
1533
1534 static int
1535 compare_dtor (const void *p1, const void *p2)
1536 {
1537 tree f1;
1538 tree f2;
1539 int priority1;
1540 int priority2;
1541
1542 f1 = *(const tree *)p1;
1543 f2 = *(const tree *)p2;
1544 priority1 = DECL_FINI_PRIORITY (f1);
1545 priority2 = DECL_FINI_PRIORITY (f2);
1546
1547 if (priority1 < priority2)
1548 return -1;
1549 else if (priority1 > priority2)
1550 return 1;
1551 else
1552 /* Ensure a stable sort. */
1553 return DECL_UID (f1) - DECL_UID (f2);
1554 }
1555
1556 /* Generate functions to call static constructors and destructors
1557 for targets that do not support .ctors/.dtors sections. These
1558 functions have magic names which are detected by collect2. */
1559
1560 static void
1561 build_cdtor_fns (void)
1562 {
1563 if (!VEC_empty (tree, static_ctors))
1564 {
1565 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1566 qsort (VEC_address (tree, static_ctors),
1567 VEC_length (tree, static_ctors),
1568 sizeof (tree),
1569 compare_ctor);
1570 build_cdtor (/*ctor_p=*/true, static_ctors);
1571 }
1572
1573 if (!VEC_empty (tree, static_dtors))
1574 {
1575 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1576 qsort (VEC_address (tree, static_dtors),
1577 VEC_length (tree, static_dtors),
1578 sizeof (tree),
1579 compare_dtor);
1580 build_cdtor (/*ctor_p=*/false, static_dtors);
1581 }
1582 }
1583
1584 /* Look for constructors and destructors and produce function calling them.
1585 This is needed for targets not supporting ctors or dtors, but we perform the
1586 transformation also at linktime to merge possibly numberous
1587 constructors/destructors into single function to improve code locality and
1588 reduce size. */
1589
1590 static unsigned int
1591 ipa_cdtor_merge (void)
1592 {
1593 struct cgraph_node *node;
1594 for (node = cgraph_nodes; node; node = node->next)
1595 if (node->analyzed
1596 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1597 || DECL_STATIC_DESTRUCTOR (node->decl)))
1598 record_cdtor_fn (node);
1599 build_cdtor_fns ();
1600 VEC_free (tree, heap, static_ctors);
1601 VEC_free (tree, heap, static_dtors);
1602 return 0;
1603 }
1604
1605 /* Perform the pass when we have no ctors/dtors support
1606 or at LTO time to merge multiple constructors into single
1607 function. */
1608
1609 static bool
1610 gate_ipa_cdtor_merge (void)
1611 {
1612 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1613 }
1614
/* Pass descriptor for the cdtor merging pass above.  It has no LTO
   summary streaming hooks and no per-function transforms.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,			        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};