re PR lto/45736 (ICE: in cgraph_remove_unreachable_nodes, at ipa.c:245 with -flto...
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order.  Returns the number of nodes stored; the caller must
   supply an array with room for all cgraph nodes.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  /* LAST is used only for its address: &last stored in node->aux marks
     "all caller edges of this node already visited".  */
  struct cgraph_edge *edge, last;
  int pass;

  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 starts DFS walks only from nodes that look like roots (no
     address taken, not inlined, not exclusively called directly); pass 1
     then picks up whatever is still unvisited, e.g. nodes in cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!node->address_taken
		  && !node->global.inlined_to
		  && !cgraph_only_called_directly_p (node))))
	{
	  /* node->aux doubles as the DFS cursor: either the next caller
	     edge to examine, or &last once all callers are exhausted.  */
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  /* Descend into an as-yet-unvisited caller, remembering
		     where we came from on the explicit stack.  */
		  if (!edge->caller->aux)
		    {
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers of NODE2 were processed: emit it in postorder
		 position and pop back to its DFS parent, if any.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  /* Clear the aux fields we borrowed for DFS state.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
110
111 /* Look for all functions inlined to NODE and update their inlined_to pointers
112 to INLINED_TO. */
113
114 static void
115 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
116 {
117 struct cgraph_edge *e;
118 for (e = node->callees; e; e = e->next_callee)
119 if (e->callee->global.inlined_to)
120 {
121 e->callee->global.inlined_to = inlined_to;
122 update_inlined_to_pointer (e->callee, inlined_to);
123 }
124 }
125
126 /* Add cgraph NODE to queue starting at FIRST.
127
128 The queue is linked via AUX pointers and terminated by pointer to 1.
129 We enqueue nodes at two occasions: when we find them reachable or when we find
130 their bodies needed for further clonning. In the second case we mark them
131 by pointer to 2 after processing so they are re-queue when they become
132 reachable. */
133
134 static void
135 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
136 {
137 /* Node is still in queue; do nothing. */
138 if (node->aux && node->aux != (void *) 2)
139 return;
140 /* Node was already processed as unreachable, re-enqueue
141 only if it became reachable now. */
142 if (node->aux == (void *)2 && !node->reachable)
143 return;
144 node->aux = *first;
145 *first = node;
146 }
147
148 /* Add varpool NODE to queue starting at FIRST. */
149
150 static void
151 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
152 {
153 node->aux = *first;
154 *first = node;
155 }
156
157 /* Process references. */
158
159 static void
160 process_references (struct ipa_ref_list *list,
161 struct cgraph_node **first,
162 struct varpool_node **first_varpool,
163 bool before_inlining_p)
164 {
165 int i;
166 struct ipa_ref *ref;
167 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
168 {
169 if (ref->refered_type == IPA_REF_CGRAPH)
170 {
171 struct cgraph_node *node = ipa_ref_node (ref);
172 if (!node->reachable
173 && (!DECL_EXTERNAL (node->decl)
174 || before_inlining_p))
175 {
176 node->reachable = true;
177 enqueue_cgraph_node (node, first);
178 }
179 }
180 else
181 {
182 struct varpool_node *node = ipa_ref_varpool_node (ref);
183 if (!node->needed)
184 {
185 varpool_mark_needed_node (node);
186 enqueue_varpool_node (node, first_varpool);
187 }
188 }
189 }
190 }
191
/* Return true when variable NODE can be removed from the varpool if all
   references to it are eliminated.  (A variable must stay if it is forced
   to be output, used from another LTO partition, or externally visible
   without being COMDAT.)  */

static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
  return (!node->force_output && !node->used_from_other_partition
	  && (DECL_COMDAT (node->decl) || !node->externally_visible));
}
201
202 /* Return true when function can be marked local. */
203
204 static bool
205 cgraph_local_node_p (struct cgraph_node *node)
206 {
207 return (cgraph_only_called_directly_p (node)
208 && node->analyzed
209 && !DECL_EXTERNAL (node->decl)
210 && !node->local.externally_visible
211 && !node->reachable_from_other_partition
212 && !node->in_other_partition);
213 }
214
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  Returns true when
   something was actually removed or changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Worklist heads; the pointer-to-1 value is the end-of-list sentinel
     (see enqueue_cgraph_node / enqueue_varpool_node).  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* AUX fields double as worklist links below; they must start clear.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && !node->global.inlined_to
	&& (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	    /* Keep around virtual functions for possible devirtualization.  */
	    || (before_inlining_p
		&& DECL_VIRTUAL_P (node->decl)
		&& (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))
	    /* Also external functions with address taken are better to stay
	       for indirect inlining.  */
	    || (before_inlining_p
		&& DECL_EXTERNAL (node->decl)
		&& node->address_taken)))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }

  /* Mark variables that are obviously needed.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklists, one for cgraph nodes and another for
     varpool nodes, and are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Leave the "processed while unreachable" marker so the node can
	     be re-queued later if it becomes reachable.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
		 && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care about functions that are unreachable but we need to keep them
     around for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* Visited (aux set) but never found reachable: keep the node as an
	 unanalyzed stub and drop its body-related data.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      if (!node->aux)
	{
	  struct cgraph_edge *e;
	  bool found = false;
	  int i;
	  struct ipa_ref *ref;

	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  /* See if there is reachable caller.  */
	  for (e = node->callers; e && !found; e = e->next_caller)
	    if (e->caller->reachable)
	      found = true;
	  for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		       && !found); i++)
	    if (ref->refering_type == IPA_REF_CGRAPH
		&& ipa_ref_refering_node (ref)->reachable)
	      found = true;
	    else if (ref->refering_type == IPA_REF_VARPOOL
		     && ipa_ref_refering_varpool_node (ref)->needed)
	      found = true;

	  /* If so, we need to keep node in the callgraph.  */
	  if (found || node->needed)
	    {
	      if (node->analyzed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      /* No live clone needs the body: release it and unlink
			 the node from the clone tree.  */
		      cgraph_release_function_body (node);
		      node->local.inlinable = false;
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
#ifdef ENABLE_CHECKING
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
#endif
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  changed = true;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	    }
	  else
	    {
	      cgraph_remove_node (node);
	      changed = true;
	    }
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* The flag can be cleared only when no IPA_REF_ADDR reference to
	   the function remains.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
543
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This cannot be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Only consider finalized variables all of whose references are visible
     in the IPA reference lists, and which still have a flag to clear.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Scan references; stop early once both flags are known true.  */
	for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
600
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is true when compiling with whole-program knowledge;
   ALIASED is true when NODE is the target of an alias pair.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  if (!node->local.finalized)
    return false;
  /* Non-public (or external) non-COMDAT symbols are never visible.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;
  else if (!whole_program)
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  else if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
	return true;
      if (node->same_comdat_group)
	{
	  struct cgraph_node *next;

	  /* If more than one function is in the same COMDAT group, it must
	     be shared even if just one function in the comdat group has
	     address taken.  */
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (next->address_taken || !next->analyzed)
	      return true;
	}
    }

  /* The program entry point must remain visible to the startup code.  */
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;

  return false;
}
662
663 /* Return true when variable VNODE should be considered externally visible. */
664
665 static bool
666 varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
667 {
668 if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
669 return false;
670
671 /* Do not even try to be smart about aliased nodes. Until we properly
672 represent everything by same body alias, these are just evil. */
673 if (aliased)
674 return true;
675
676 /* If linker counts on us, we must preserve the function. */
677 if (varpool_used_from_object_file_p (vnode))
678 return true;
679
680 if (DECL_PRESERVE_P (vnode->decl))
681 return true;
682 if (lookup_attribute ("externally_visible",
683 DECL_ATTRIBUTES (vnode->decl)))
684 return true;
685
686 /* See if we have linker information about symbol not being used or
687 if we need to make guess based on the declaration.
688
689 Even if the linker clams the symbol is unused, never bring internal
690 symbols that are declared by user as used or externally visible.
691 This is needed for i.e. references from asm statements. */
692 if (varpool_used_from_object_file_p (vnode))
693 return true;
694
695 /* When doing link time optimizations, hidden symbols become local. */
696 if (in_lto_p
697 && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
698 || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
699 /* Be sure that node is defined in IR file, not in other object
700 file. In that case we don't set used_from_other_object_file. */
701 && vnode->finalized)
702 ;
703 else if (!flag_whole_program)
704 return true;
705
706 /* Do not attempt to privatize COMDATS by default.
707 This would break linking with C++ libraries sharing
708 inline definitions.
709
710 FIXME: We can do so for readonly vars with no address taken and
711 possibly also for vtables since no direct pointer comparsion is done.
712 It might be interesting to do so to reduce linking overhead. */
713 if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
714 return true;
715 return false;
716 }
717
718 /* Dissolve the same_comdat_group list in which NODE resides. */
719
720 static void
721 dissolve_same_comdat_group_list (struct cgraph_node *node)
722 {
723 struct cgraph_node *n = node, *next;
724 do
725 {
726 next = n->same_comdat_group;
727 n->same_comdat_group = NULL;
728 n = next;
729 }
730 while (n != node);
731 }
732
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM is true when privatization based on whole-program
   knowledge is allowed.  Always returns 0 (pass todo flags).  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      int flags = flags_from_decl_or_type (node->decl);
      /* A const/pure (and non-looping) constructor or destructor has no
	 observable effect, so it need not be registered at all.  */
      if (optimize
	  && (flags & (ECF_CONST | ECF_PURE))
	  && !(flags & ECF_LOOPING_CONST_OR_PURE))
	{
	  DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
	  DECL_STATIC_DESTRUCTOR (node->decl) = 0;
	}

      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      /* Symbols no longer visible outside get their decls localized.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && varpool_externally_visible_p
	       (vnode,
		pointer_set_contains (aliased_vnodes, vnode)))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	  vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
916
917 /* Local function pass handling visibilities. This happens before LTO streaming
918 so in particular -fwhole-program should be ignored at this level. */
919
920 static unsigned int
921 local_function_and_variable_visibility (void)
922 {
923 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
924 }
925
/* Pass descriptor for the local visibility pass (runs per translation
   unit, before LTO streaming — see local_function_and_variable_visibility).  */

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
945
946 /* Do not re-run on ltrans stage. */
947
948 static bool
949 gate_whole_program_function_and_variable_visibility (void)
950 {
951 return !flag_ltrans;
952 }
953
/* Bring functions local at LTO time with -fwhole-program.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  function_and_variable_visibility (flag_whole_program);

  /* Whatever remained externally visible (and is not COMDAT) must be kept:
     mark it needed so the unreachable-node removal does not drop it.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
983
/* Pass descriptor for the whole-program visibility IPA pass; gated off
   on the ltrans stage (see gate above).  */

struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
1012
1013 /* Hash a cgraph node set element. */
1014
1015 static hashval_t
1016 hash_cgraph_node_set_element (const void *p)
1017 {
1018 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
1019 return htab_hash_pointer (element->node);
1020 }
1021
1022 /* Compare two cgraph node set elements. */
1023
1024 static int
1025 eq_cgraph_node_set_element (const void *p1, const void *p2)
1026 {
1027 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
1028 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
1029
1030 return e1->node == e2->node;
1031 }
1032
1033 /* Create a new cgraph node set. */
1034
1035 cgraph_node_set
1036 cgraph_node_set_new (void)
1037 {
1038 cgraph_node_set new_node_set;
1039
1040 new_node_set = ggc_alloc_cgraph_node_set_def ();
1041 new_node_set->hashtab = htab_create_ggc (10,
1042 hash_cgraph_node_set_element,
1043 eq_cgraph_node_set_element,
1044 NULL);
1045 new_node_set->nodes = NULL;
1046 return new_node_set;
1047 }
1048
1049 /* Add cgraph_node NODE to cgraph_node_set SET. */
1050
1051 void
1052 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
1053 {
1054 void **slot;
1055 cgraph_node_set_element element;
1056 struct cgraph_node_set_element_def dummy;
1057
1058 dummy.node = node;
1059 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1060
1061 if (*slot != HTAB_EMPTY_ENTRY)
1062 {
1063 element = (cgraph_node_set_element) *slot;
1064 gcc_assert (node == element->node
1065 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1066 == node));
1067 return;
1068 }
1069
1070 /* Insert node into hash table. */
1071 element = ggc_alloc_cgraph_node_set_element_def ();
1072 element->node = node;
1073 element->index = VEC_length (cgraph_node_ptr, set->nodes);
1074 *slot = element;
1075
1076 /* Insert into node vector. */
1077 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
1078 }
1079
/* Remove cgraph_node NODE from cgraph_node_set SET.  A no-op when NODE
   is not in SET.  Removal is O(1): NODE's vector slot is refilled by
   the vector's last element rather than shifting the tail.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  /* The hash table and the index vector must agree on NODE's slot.  */
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1119
1120 /* Find NODE in SET and return an iterator to it if found. A null iterator
1121 is returned if NODE is not in SET. */
1122
1123 cgraph_node_set_iterator
1124 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1125 {
1126 void **slot;
1127 struct cgraph_node_set_element_def dummy;
1128 cgraph_node_set_element element;
1129 cgraph_node_set_iterator csi;
1130
1131 dummy.node = node;
1132 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1133 if (slot == NULL)
1134 csi.index = (unsigned) ~0;
1135 else
1136 {
1137 element = (cgraph_node_set_element) *slot;
1138 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1139 == node);
1140 csi.index = element->index;
1141 }
1142 csi.set = set;
1143
1144 return csi;
1145 }
1146
1147 /* Dump content of SET to file F. */
1148
1149 void
1150 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1151 {
1152 cgraph_node_set_iterator iter;
1153
1154 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1155 {
1156 struct cgraph_node *node = csi_node (iter);
1157 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1158 }
1159 fprintf (f, "\n");
1160 }
1161
/* Dump content of SET to stderr.  Debugger convenience wrapper around
   dump_cgraph_node_set.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}
1169
1170 /* Hash a varpool node set element. */
1171
1172 static hashval_t
1173 hash_varpool_node_set_element (const void *p)
1174 {
1175 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1176 return htab_hash_pointer (element->node);
1177 }
1178
1179 /* Compare two varpool node set elements. */
1180
1181 static int
1182 eq_varpool_node_set_element (const void *p1, const void *p2)
1183 {
1184 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1185 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1186
1187 return e1->node == e2->node;
1188 }
1189
1190 /* Create a new varpool node set. */
1191
1192 varpool_node_set
1193 varpool_node_set_new (void)
1194 {
1195 varpool_node_set new_node_set;
1196
1197 new_node_set = ggc_alloc_varpool_node_set_def ();
1198 new_node_set->hashtab = htab_create_ggc (10,
1199 hash_varpool_node_set_element,
1200 eq_varpool_node_set_element,
1201 NULL);
1202 new_node_set->nodes = NULL;
1203 return new_node_set;
1204 }
1205
1206 /* Add varpool_node NODE to varpool_node_set SET. */
1207
1208 void
1209 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1210 {
1211 void **slot;
1212 varpool_node_set_element element;
1213 struct varpool_node_set_element_def dummy;
1214
1215 dummy.node = node;
1216 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1217
1218 if (*slot != HTAB_EMPTY_ENTRY)
1219 {
1220 element = (varpool_node_set_element) *slot;
1221 gcc_assert (node == element->node
1222 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1223 == node));
1224 return;
1225 }
1226
1227 /* Insert node into hash table. */
1228 element = ggc_alloc_varpool_node_set_element_def ();
1229 element->node = node;
1230 element->index = VEC_length (varpool_node_ptr, set->nodes);
1231 *slot = element;
1232
1233 /* Insert into node vector. */
1234 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1235 }
1236
/* Remove varpool_node NODE from varpool_node_set SET.  A no-op when
   NODE is not in SET.  Removal is O(1): NODE's vector slot is refilled
   by the vector's last element rather than shifting the tail.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  /* The hash table and the index vector must agree on NODE's slot.  */
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1276
1277 /* Find NODE in SET and return an iterator to it if found. A null iterator
1278 is returned if NODE is not in SET. */
1279
1280 varpool_node_set_iterator
1281 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1282 {
1283 void **slot;
1284 struct varpool_node_set_element_def dummy;
1285 varpool_node_set_element element;
1286 varpool_node_set_iterator vsi;
1287
1288 dummy.node = node;
1289 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1290 if (slot == NULL)
1291 vsi.index = (unsigned) ~0;
1292 else
1293 {
1294 element = (varpool_node_set_element) *slot;
1295 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1296 == node);
1297 vsi.index = element->index;
1298 }
1299 vsi.set = set;
1300
1301 return vsi;
1302 }
1303
1304 /* Dump content of SET to file F. */
1305
1306 void
1307 dump_varpool_node_set (FILE *f, varpool_node_set set)
1308 {
1309 varpool_node_set_iterator iter;
1310
1311 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1312 {
1313 struct varpool_node *node = vsi_node (iter);
1314 fprintf (f, " %s", varpool_node_name (node));
1315 }
1316 fprintf (f, "\n");
1317 }
1318
/* Dump content of SET to stderr.  Debugger convenience wrapper around
   dump_varpool_node_set.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
1326
1327
1328 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1329
1330 static unsigned int
1331 ipa_profile (void)
1332 {
1333 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1334 struct cgraph_edge *e;
1335 int order_pos;
1336 bool something_changed = false;
1337 int i;
1338
1339 order_pos = cgraph_postorder (order);
1340 for (i = order_pos - 1; i >= 0; i--)
1341 {
1342 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1343 {
1344 for (e = order[i]->callees; e; e = e->next_callee)
1345 if (e->callee->local.local && !e->callee->aux)
1346 {
1347 something_changed = true;
1348 e->callee->aux = (void *)1;
1349 }
1350 }
1351 order[i]->aux = NULL;
1352 }
1353
1354 while (something_changed)
1355 {
1356 something_changed = false;
1357 for (i = order_pos - 1; i >= 0; i--)
1358 {
1359 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1360 {
1361 for (e = order[i]->callees; e; e = e->next_callee)
1362 if (e->callee->local.local && !e->callee->aux)
1363 {
1364 something_changed = true;
1365 e->callee->aux = (void *)1;
1366 }
1367 }
1368 order[i]->aux = NULL;
1369 }
1370 }
1371 free (order);
1372 return 0;
1373 }
1374
1375 static bool
1376 gate_ipa_profile (void)
1377 {
1378 return flag_ipa_profile;
1379 }
1380
/* IPA pass descriptor for the frequency-propagation pass above.  No
   LTO summary hooks: the pass works directly on the callgraph.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,		        /* gate */
  ipa_profile,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1408
1409 /* Generate and emit a static constructor or destructor. WHICH must
1410 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1411 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1412 initialization priority for this constructor or destructor. */
1413
1414 void
1415 cgraph_build_static_cdtor (char which, tree body, int priority)
1416 {
1417 static int counter = 0;
1418 char which_buf[16];
1419 tree decl, name, resdecl;
1420
1421 /* The priority is encoded in the constructor or destructor name.
1422 collect2 will sort the names and arrange that they are called at
1423 program startup. */
1424 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1425 name = get_file_function_name (which_buf);
1426
1427 decl = build_decl (input_location, FUNCTION_DECL, name,
1428 build_function_type_list (void_type_node, NULL_TREE));
1429 current_function_decl = decl;
1430
1431 resdecl = build_decl (input_location,
1432 RESULT_DECL, NULL_TREE, void_type_node);
1433 DECL_ARTIFICIAL (resdecl) = 1;
1434 DECL_RESULT (decl) = resdecl;
1435 DECL_CONTEXT (resdecl) = decl;
1436
1437 allocate_struct_function (decl, false);
1438
1439 TREE_STATIC (decl) = 1;
1440 TREE_USED (decl) = 1;
1441 DECL_ARTIFICIAL (decl) = 1;
1442 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1443 DECL_SAVED_TREE (decl) = body;
1444 if (!targetm.have_ctors_dtors)
1445 {
1446 TREE_PUBLIC (decl) = 1;
1447 DECL_PRESERVE_P (decl) = 1;
1448 }
1449 DECL_UNINLINABLE (decl) = 1;
1450
1451 DECL_INITIAL (decl) = make_node (BLOCK);
1452 TREE_USED (DECL_INITIAL (decl)) = 1;
1453
1454 DECL_SOURCE_LOCATION (decl) = input_location;
1455 cfun->function_end_locus = input_location;
1456
1457 switch (which)
1458 {
1459 case 'I':
1460 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1461 decl_init_priority_insert (decl, priority);
1462 break;
1463 case 'D':
1464 DECL_STATIC_DESTRUCTOR (decl) = 1;
1465 decl_fini_priority_insert (decl, priority);
1466 break;
1467 default:
1468 gcc_unreachable ();
1469 }
1470
1471 gimplify_function_tree (decl);
1472
1473 cgraph_add_new_function (decl, false);
1474
1475 set_cfun (NULL);
1476 current_function_decl = NULL;
1477 }
1478
1479
/* A vector of FUNCTION_DECLs declared as static constructors;
   populated by record_cdtor_fn and consumed by build_cdtor_fns.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors;
   populated and consumed likewise.  */
static VEC(tree, heap) *static_dtors;
1484
1485 /* When target does not have ctors and dtors, we call all constructor
1486 and destructor by special initialization/destruction function
1487 recognized by collect2.
1488
1489 When we are going to build this function, collect all constructors and
1490 destructors and turn them into normal functions. */
1491
1492 static void
1493 record_cdtor_fn (struct cgraph_node *node)
1494 {
1495 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1496 VEC_safe_push (tree, heap, static_ctors, node->decl);
1497 if (DECL_STATIC_DESTRUCTOR (node->decl))
1498 VEC_safe_push (tree, heap, static_dtors, node->decl);
1499 node = cgraph_node (node->decl);
1500 node->local.disregard_inline_limits = 1;
1501 }
1502
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  For each run of equal-priority cdtors a
   single wrapper function calling them all is emitted (unless the run
   has length one and the target supports cdtors natively).  */

static void
build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
{
  size_t i,j;
  size_t len = VEC_length (tree, cdtors);

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J past the run [I, J) of cdtors sharing the priority
	 of element I.  */
      do
	{
	  priority_type p;
	  fn = VEC_index (tree, cdtors, j);
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = VEC_index (tree, cdtors, i);
	  call = build_call_expr (fn, 0);
	  /* FN is now called from the wrapper, so it must no longer be
	     registered as a cdtor itself.  */
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
1567
1568 /* Comparison function for qsort. P1 and P2 are actually of type
1569 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1570 used to determine the sort order. */
1571
1572 static int
1573 compare_ctor (const void *p1, const void *p2)
1574 {
1575 tree f1;
1576 tree f2;
1577 int priority1;
1578 int priority2;
1579
1580 f1 = *(const tree *)p1;
1581 f2 = *(const tree *)p2;
1582 priority1 = DECL_INIT_PRIORITY (f1);
1583 priority2 = DECL_INIT_PRIORITY (f2);
1584
1585 if (priority1 < priority2)
1586 return -1;
1587 else if (priority1 > priority2)
1588 return 1;
1589 else
1590 /* Ensure a stable sort. Constructors are executed in backwarding
1591 order to make LTO initialize braries first. */
1592 return DECL_UID (f2) - DECL_UID (f1);
1593 }
1594
1595 /* Comparison function for qsort. P1 and P2 are actually of type
1596 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1597 used to determine the sort order. */
1598
1599 static int
1600 compare_dtor (const void *p1, const void *p2)
1601 {
1602 tree f1;
1603 tree f2;
1604 int priority1;
1605 int priority2;
1606
1607 f1 = *(const tree *)p1;
1608 f2 = *(const tree *)p2;
1609 priority1 = DECL_FINI_PRIORITY (f1);
1610 priority2 = DECL_FINI_PRIORITY (f2);
1611
1612 if (priority1 < priority2)
1613 return -1;
1614 else if (priority1 > priority2)
1615 return 1;
1616 else
1617 /* Ensure a stable sort. */
1618 return DECL_UID (f1) - DECL_UID (f2);
1619 }
1620
1621 /* Generate functions to call static constructors and destructors
1622 for targets that do not support .ctors/.dtors sections. These
1623 functions have magic names which are detected by collect2. */
1624
1625 static void
1626 build_cdtor_fns (void)
1627 {
1628 if (!VEC_empty (tree, static_ctors))
1629 {
1630 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1631 VEC_qsort (tree, static_ctors, compare_ctor);
1632 build_cdtor (/*ctor_p=*/true, static_ctors);
1633 }
1634
1635 if (!VEC_empty (tree, static_dtors))
1636 {
1637 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1638 VEC_qsort (tree, static_dtors, compare_dtor);
1639 build_cdtor (/*ctor_p=*/false, static_dtors);
1640 }
1641 }
1642
1643 /* Look for constructors and destructors and produce function calling them.
1644 This is needed for targets not supporting ctors or dtors, but we perform the
1645 transformation also at linktime to merge possibly numberous
1646 constructors/destructors into single function to improve code locality and
1647 reduce size. */
1648
1649 static unsigned int
1650 ipa_cdtor_merge (void)
1651 {
1652 struct cgraph_node *node;
1653 for (node = cgraph_nodes; node; node = node->next)
1654 if (node->analyzed
1655 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1656 || DECL_STATIC_DESTRUCTOR (node->decl)))
1657 record_cdtor_fn (node);
1658 build_cdtor_fns ();
1659 VEC_free (tree, heap, static_ctors);
1660 VEC_free (tree, heap, static_dtors);
1661 return 0;
1662 }
1663
1664 /* Perform the pass when we have no ctors/dtors support
1665 or at LTO time to merge multiple constructors into single
1666 function. */
1667
1668 static bool
1669 gate_ipa_cdtor_merge (void)
1670 {
1671 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1672 }
1673
/* IPA pass descriptor merging static cdtors into collect2-style
   wrapper functions; gated on missing native cdtor support or LTO.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,			        /* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};