re PR tree-optimization/45453 (ICE: verify_cgraph_node failed: inlined_to pointer...
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
/* Fill array order with all nodes with output flag set in the reverse
   topological order.

   Returns the number of nodes stored into ORDER.  ORDER must have room
   for cgraph_n_nodes entries.  Uses node->aux as per-node traversal
   state and clears it again before returning.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  /* LAST is never read; its address is stored in node->aux as a sentinel
     meaning "all callers of this node have already been walked".
     Otherwise aux holds the next caller edge still to visit.  */
  struct cgraph_edge *edge, last;
  int pass;

  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 starts DFS only from roots (nodes called indirectly or with
     address taken); pass 1 sweeps up whatever is left, e.g. nodes that
     are only part of cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!cgraph_only_called_directly_p (node)
		  && !node->address_taken)))
	{
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  /* Iterative DFS over the caller edges; STACK holds the path of
	     nodes whose caller lists are partially processed.  */
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers of NODE2 done: emit it and pop the stack.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
109
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
111 to INLINED_TO. */
112
113 static void
114 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
115 {
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
119 {
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
122 }
123 }
124
125 /* Add cgraph NODE to queue starting at FIRST.
126
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
131 reachable. */
132
133 static void
134 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
135 {
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
138 return;
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
142 return;
143 node->aux = *first;
144 *first = node;
145 }
146
/* Add varpool NODE to queue starting at FIRST.

   The queue is linked through the aux pointers.  Callers are expected to
   avoid double-enqueueing (they test the needed flag first).  */

static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  node->aux = *first;
  *first = node;
}
155
156 /* Process references. */
157
158 static void
159 process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
163 {
164 int i;
165 struct ipa_ref *ref;
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
167 {
168 if (ref->refered_type == IPA_REF_CGRAPH)
169 {
170 struct cgraph_node *node = ipa_ref_node (ref);
171 if (!node->reachable
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
174 {
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
177 }
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189 }
190
191 /* Return true when function NODE can be removed from callgraph
192 if all direct calls are eliminated. */
193
194 static inline bool
195 varpool_can_remove_if_no_refs (struct varpool_node *node)
196 {
197 return (!node->force_output && !node->used_from_other_partition
198 && (DECL_COMDAT (node->decl) || !node->externally_visible));
199 }
200
201 /* Return true when function can be marked local. */
202
203 static bool
204 cgraph_local_node_p (struct cgraph_node *node)
205 {
206 return (cgraph_only_called_directly_p (node)
207 && node->analyzed
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
212 }
213
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.

   FILE, when non-NULL, receives a dump of what was reclaimed.
   Returns true if anything was removed or changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Both worklists are terminated by the sentinel (void *) 1 rather than
     NULL, so "queue empty" is distinguishable from aux == NULL.  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* aux must be free for our use in every node.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Seed the function worklist with roots: nodes that must be preserved
     even with no callers or references.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	 /* Keep around virtual functions for possible devirtualization.  */
	 || (!before_inlining_p
	     && !node->global.inlined_to
	     && DECL_VIRTUAL_P (node->decl)
	     && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))))
	&& ((!DECL_EXTERNAL (node->decl))
	    || before_inlining_p))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }
  /* Seed the variable worklist analogously.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklist, one for cgraph nodes other for varpools and
     are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* (void *) 2 marks a processed node that was needed only for
	     cloning; enqueue_cgraph_node may re-queue it later.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
		 && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* aux set but not reachable: kept only for cloning; drop the edges
	 and references but keep the body.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      /* aux unset: the node was never enqueued, hence unreachable.  */
      if (!node->aux)
	{
	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
	    cgraph_remove_node (node);
	  else
	    {
	      struct cgraph_edge *e;

	      /* See if there is reachable caller.  */
	      for (e = node->callers; e; e = e->next_caller)
		if (e->caller->reachable)
		  break;

	      /* If so, we need to keep node in the callgraph.  */
	      if (e || node->needed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->local.inlinable = false;
		      /* Unlink NODE from the clone tree.  */
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
#ifdef ENABLE_CHECKING
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
#endif
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	      else
		cgraph_remove_node (node);
	    }
	  changed = true;
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
         cloning.  If the function the clone is inlined into is removed, we need
         to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* FOUND becomes true if any address-taking reference remains;
	   the loop asserts every incoming reference is IPA_REF_ADDR.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
521
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Only finalized variables whose every reference is visible to IPA and
     that still carry a flag we might be able to clear are considered.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Classify the references; stop early once both flags are set.  */
	for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	    /* A newly read-only variable may now have a known constant
	       value.  */
	    vnode->const_value_known |= varpool_decide_const_value_known (vnode);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
579
/* Return true when function NODE should be considered externally visible.

   WHOLE_PROGRAM is true when -fwhole-program semantics apply, which lets
   us privatize public symbols.  ALIASED is true when NODE is the target
   of an alias pair; such nodes are always kept visible.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  if (!node->local.finalized)
    return false;
  /* Non-COMDAT symbols that are not public, or are mere external
     declarations, are never externally visible.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p && DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;  /* Fall through to the DECL_PRESERVE_P etc. checks below.  */
  else if (!whole_program)
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  else if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
	return true;
      if (node->same_comdat_group)
	{
	  struct cgraph_node *next;

	  /* If more than one function is in the same COMDAT group, it must
	     be shared even if just one function in the comdat group has
	     address taken.  */
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (next->address_taken || !next->analyzed)
	      return true;
	}
    }
  if (node->local.used_from_object_file)
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  return false;
}
635
636 /* Dissolve the same_comdat_group list in which NODE resides. */
637
638 static void
639 dissolve_same_comdat_group_list (struct cgraph_node *node)
640 {
641 struct cgraph_node *n = node, *next;
642 do
643 {
644 next = n->same_comdat_group;
645 n->same_comdat_group = NULL;
646 n = next;
647 }
648 while (n != node);
649 }
650
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM enables -fwhole-program privatization.  Always
   returns 0 (no TODO flags).  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      /* A finalized, locally-defined node that lost external visibility
	 gets its decl (and same-body aliases) made local.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though. */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
      /* Even extern variables might have initializers known.
	 See, for example testsuite/g++.dg/opt/static3.C  */
      vnode->const_value_known |= varpool_decide_const_value_known (vnode);
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
	  && (((!whole_program
		/* We can privatize comdat readonly variables whose address is
		   not taken, but doing so is not going to bring us
		   optimization opportunities until we start reordering
		   datastructures.  */
		|| DECL_COMDAT (vnode->decl)
		|| DECL_WEAK (vnode->decl))
	       /* When doing linktime optimizations, all hidden symbols will
		  become local.  */
	       && (!in_lto_p
		   || DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
		   /* We can get prevailing decision in other object file.
		      In this case we do not set used_from_object_file.  */
		   || !vnode->finalized))
	      || DECL_PRESERVE_P (vnode->decl)
	      || vnode->used_from_object_file
	      || pointer_set_contains (aliased_vnodes, vnode)
	      || lookup_attribute ("externally_visible",
				   DECL_ATTRIBUTES (vnode->decl))))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	}
      vnode->const_value_known |= varpool_decide_const_value_known (vnode);
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
844
845 /* Local function pass handling visibilities. This happens before LTO streaming
846 so in particular -fwhole-program should be ignored at this level. */
847
848 static unsigned int
849 local_function_and_variable_visibility (void)
850 {
851 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
852 }
853
/* Pass descriptor for the simple IPA "visibility" pass, which runs
   local_function_and_variable_visibility (before LTO streaming).  */
struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
873
/* Gate for the whole-program visibility pass: do not re-run on the
   ltrans stage of partitioned LTO.  */

static bool
gate_whole_program_function_and_variable_visibility (void)
{
  return !flag_ltrans;
}
881
/* Bring functions local at LTO time with -fwhole-program.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Recompute visibility, this time honoring -fwhole-program.  */
  function_and_variable_visibility (flag_whole_program);

  /* Whatever stayed externally visible (and is not COMDAT) must be kept;
     mark it needed so reclamation does not remove it.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
911
/* Pass descriptor for the IPA "whole-program" visibility pass; gated off
   on the ltrans stage (see the gate function above).  */
struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
940
941 /* Hash a cgraph node set element. */
942
943 static hashval_t
944 hash_cgraph_node_set_element (const void *p)
945 {
946 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
947 return htab_hash_pointer (element->node);
948 }
949
950 /* Compare two cgraph node set elements. */
951
952 static int
953 eq_cgraph_node_set_element (const void *p1, const void *p2)
954 {
955 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
956 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
957
958 return e1->node == e2->node;
959 }
960
961 /* Create a new cgraph node set. */
962
963 cgraph_node_set
964 cgraph_node_set_new (void)
965 {
966 cgraph_node_set new_node_set;
967
968 new_node_set = ggc_alloc_cgraph_node_set_def ();
969 new_node_set->hashtab = htab_create_ggc (10,
970 hash_cgraph_node_set_element,
971 eq_cgraph_node_set_element,
972 NULL);
973 new_node_set->nodes = NULL;
974 return new_node_set;
975 }
976
977 /* Add cgraph_node NODE to cgraph_node_set SET. */
978
979 void
980 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
981 {
982 void **slot;
983 cgraph_node_set_element element;
984 struct cgraph_node_set_element_def dummy;
985
986 dummy.node = node;
987 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
988
989 if (*slot != HTAB_EMPTY_ENTRY)
990 {
991 element = (cgraph_node_set_element) *slot;
992 gcc_assert (node == element->node
993 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
994 == node));
995 return;
996 }
997
998 /* Insert node into hash table. */
999 element = ggc_alloc_cgraph_node_set_element_def ();
1000 element->node = node;
1001 element->index = VEC_length (cgraph_node_ptr, set->nodes);
1002 *slot = element;
1003
1004 /* Insert into node vector. */
1005 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
1006 }
1007
/* Remove cgraph_node NODE from cgraph_node_set SET.  A no-op when NODE
   is not a member.  Runs in O(1) by swapping with the vector's last
   element.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* NODE not in SET.  */
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1047
1048 /* Find NODE in SET and return an iterator to it if found. A null iterator
1049 is returned if NODE is not in SET. */
1050
1051 cgraph_node_set_iterator
1052 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1053 {
1054 void **slot;
1055 struct cgraph_node_set_element_def dummy;
1056 cgraph_node_set_element element;
1057 cgraph_node_set_iterator csi;
1058
1059 dummy.node = node;
1060 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1061 if (slot == NULL)
1062 csi.index = (unsigned) ~0;
1063 else
1064 {
1065 element = (cgraph_node_set_element) *slot;
1066 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1067 == node);
1068 csi.index = element->index;
1069 }
1070 csi.set = set;
1071
1072 return csi;
1073 }
1074
1075 /* Dump content of SET to file F. */
1076
1077 void
1078 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1079 {
1080 cgraph_node_set_iterator iter;
1081
1082 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1083 {
1084 struct cgraph_node *node = csi_node (iter);
1085 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1086 }
1087 fprintf (f, "\n");
1088 }
1089
/* Dump content of SET to stderr.  Convenience wrapper around
   dump_cgraph_node_set for use from the debugger.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}
1097
1098 /* Hash a varpool node set element. */
1099
1100 static hashval_t
1101 hash_varpool_node_set_element (const void *p)
1102 {
1103 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1104 return htab_hash_pointer (element->node);
1105 }
1106
1107 /* Compare two varpool node set elements. */
1108
1109 static int
1110 eq_varpool_node_set_element (const void *p1, const void *p2)
1111 {
1112 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1113 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1114
1115 return e1->node == e2->node;
1116 }
1117
1118 /* Create a new varpool node set. */
1119
1120 varpool_node_set
1121 varpool_node_set_new (void)
1122 {
1123 varpool_node_set new_node_set;
1124
1125 new_node_set = ggc_alloc_varpool_node_set_def ();
1126 new_node_set->hashtab = htab_create_ggc (10,
1127 hash_varpool_node_set_element,
1128 eq_varpool_node_set_element,
1129 NULL);
1130 new_node_set->nodes = NULL;
1131 return new_node_set;
1132 }
1133
1134 /* Add varpool_node NODE to varpool_node_set SET. */
1135
1136 void
1137 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1138 {
1139 void **slot;
1140 varpool_node_set_element element;
1141 struct varpool_node_set_element_def dummy;
1142
1143 dummy.node = node;
1144 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1145
1146 if (*slot != HTAB_EMPTY_ENTRY)
1147 {
1148 element = (varpool_node_set_element) *slot;
1149 gcc_assert (node == element->node
1150 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1151 == node));
1152 return;
1153 }
1154
1155 /* Insert node into hash table. */
1156 element = ggc_alloc_varpool_node_set_element_def ();
1157 element->node = node;
1158 element->index = VEC_length (varpool_node_ptr, set->nodes);
1159 *slot = element;
1160
1161 /* Insert into node vector. */
1162 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1163 }
1164
/* Remove varpool_node NODE from varpool_node_set SET.

   Removal is O(1): NODE's slot in the node vector is backfilled with the
   vector's last element, so only that element's cached index must be
   updated.  If NODE is not in SET, this is a no-op.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  /* Hash table and vector must agree on NODE's position.  */
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* Look up the popped node's element so its index can be fixed up.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1204
1205 /* Find NODE in SET and return an iterator to it if found. A null iterator
1206 is returned if NODE is not in SET. */
1207
1208 varpool_node_set_iterator
1209 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1210 {
1211 void **slot;
1212 struct varpool_node_set_element_def dummy;
1213 varpool_node_set_element element;
1214 varpool_node_set_iterator vsi;
1215
1216 dummy.node = node;
1217 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1218 if (slot == NULL)
1219 vsi.index = (unsigned) ~0;
1220 else
1221 {
1222 element = (varpool_node_set_element) *slot;
1223 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1224 == node);
1225 vsi.index = element->index;
1226 }
1227 vsi.set = set;
1228
1229 return vsi;
1230 }
1231
1232 /* Dump content of SET to file F. */
1233
1234 void
1235 dump_varpool_node_set (FILE *f, varpool_node_set set)
1236 {
1237 varpool_node_set_iterator iter;
1238
1239 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1240 {
1241 struct varpool_node *node = vsi_node (iter);
1242 fprintf (f, " %s", varpool_node_name (node));
1243 }
1244 fprintf (f, "\n");
1245 }
1246
/* Dump content of SET to stderr.  Convenience wrapper around
   dump_varpool_node_set for use from the debugger.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
1254
1255
1256 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1257
1258 static unsigned int
1259 ipa_profile (void)
1260 {
1261 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1262 struct cgraph_edge *e;
1263 int order_pos;
1264 bool something_changed = false;
1265 int i;
1266
1267 order_pos = cgraph_postorder (order);
1268 for (i = order_pos - 1; i >= 0; i--)
1269 {
1270 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1271 {
1272 for (e = order[i]->callees; e; e = e->next_callee)
1273 if (e->callee->local.local && !e->callee->aux)
1274 {
1275 something_changed = true;
1276 e->callee->aux = (void *)1;
1277 }
1278 }
1279 order[i]->aux = NULL;
1280 }
1281
1282 while (something_changed)
1283 {
1284 something_changed = false;
1285 for (i = order_pos - 1; i >= 0; i--)
1286 {
1287 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1288 {
1289 for (e = order[i]->callees; e; e = e->next_callee)
1290 if (e->callee->local.local && !e->callee->aux)
1291 {
1292 something_changed = true;
1293 e->callee->aux = (void *)1;
1294 }
1295 }
1296 order[i]->aux = NULL;
1297 }
1298 }
1299 free (order);
1300 return 0;
1301 }
1302
/* Gate for pass_ipa_profile: run only when -fipa-profile is enabled.  */

static bool
gate_ipa_profile (void)
{
  return flag_ipa_profile;
}
1308
/* Pass descriptor for the simple IPA profile propagation pass above.
   The pass has no LTO summary hooks or transforms; it only runs
   ipa_profile at IPA time.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1336
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   The new function is registered with the callgraph via
   cgraph_add_new_function; on return, cfun and current_function_decl
   are reset to NULL.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  The counter keeps names unique within this
     translation unit.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a "void f (void)" FUNCTION_DECL with a void RESULT_DECL.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors)
    {
      /* Without target ctor/dtor support the function must survive to
	 link time so collect2 can find it by name.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the decl as a constructor or destructor and record its
     priority for the output machinery.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
1406
1407
/* A vector of FUNCTION_DECLs declared as static constructors.
   Populated by record_cdtor_fn and freed by ipa_cdtor_merge.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.
   Populated by record_cdtor_fn and freed by ipa_cdtor_merge.  */
static VEC(tree, heap) *static_dtors;
1412
1413 /* When target does not have ctors and dtors, we call all constructor
1414 and destructor by special initialization/destruction function
1415 recognized by collect2.
1416
1417 When we are going to build this function, collect all constructors and
1418 destructors and turn them into normal functions. */
1419
1420 static void
1421 record_cdtor_fn (struct cgraph_node *node)
1422 {
1423 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1424 VEC_safe_push (tree, heap, static_ctors, node->decl);
1425 if (DECL_STATIC_DESTRUCTOR (node->decl))
1426 VEC_safe_push (tree, heap, static_dtors, node->decl);
1427 node = cgraph_node (node->decl);
1428 node->local.disregard_inline_limits = 1;
1429 }
1430
1431 /* Define global constructors/destructor functions for the CDTORS, of
1432 which they are LEN. The CDTORS are sorted by initialization
1433 priority. If CTOR_P is true, these are constructors; otherwise,
1434 they are destructors. */
1435
1436 static void
1437 build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
1438 {
1439 size_t i,j;
1440 size_t len = VEC_length (tree, cdtors);
1441
1442 i = 0;
1443 while (i < len)
1444 {
1445 tree body;
1446 tree fn;
1447 priority_type priority;
1448
1449 priority = 0;
1450 body = NULL_TREE;
1451 j = i;
1452 do
1453 {
1454 priority_type p;
1455 fn = VEC_index (tree, cdtors, j);
1456 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1457 if (j == i)
1458 priority = p;
1459 else if (p != priority)
1460 break;
1461 j++;
1462 }
1463 while (j < len);
1464
1465 /* When there is only one cdtor and target supports them, do nothing. */
1466 if (j == i + 1
1467 && targetm.have_ctors_dtors)
1468 {
1469 i++;
1470 continue;
1471 }
1472 /* Find the next batch of constructors/destructors with the same
1473 initialization priority. */
1474 for (;i < j; i++)
1475 {
1476 tree call;
1477 fn = VEC_index (tree, cdtors, i);
1478 call = build_call_expr (fn, 0);
1479 if (ctor_p)
1480 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1481 else
1482 DECL_STATIC_DESTRUCTOR (fn) = 0;
1483 /* We do not want to optimize away pure/const calls here.
1484 When optimizing, these should be already removed, when not
1485 optimizing, we want user to be able to breakpoint in them. */
1486 TREE_SIDE_EFFECTS (call) = 1;
1487 append_to_statement_list (call, &body);
1488 }
1489 while (i < len);
1490 gcc_assert (body != NULL_TREE);
1491 /* Generate a function to call all the function of like
1492 priority. */
1493 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
1494 }
1495 }
1496
1497 /* Comparison function for qsort. P1 and P2 are actually of type
1498 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1499 used to determine the sort order. */
1500
1501 static int
1502 compare_ctor (const void *p1, const void *p2)
1503 {
1504 tree f1;
1505 tree f2;
1506 int priority1;
1507 int priority2;
1508
1509 f1 = *(const tree *)p1;
1510 f2 = *(const tree *)p2;
1511 priority1 = DECL_INIT_PRIORITY (f1);
1512 priority2 = DECL_INIT_PRIORITY (f2);
1513
1514 if (priority1 < priority2)
1515 return -1;
1516 else if (priority1 > priority2)
1517 return 1;
1518 else
1519 /* Ensure a stable sort. Constructors are executed in backwarding
1520 order to make LTO initialize braries first. */
1521 return DECL_UID (f2) - DECL_UID (f1);
1522 }
1523
1524 /* Comparison function for qsort. P1 and P2 are actually of type
1525 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1526 used to determine the sort order. */
1527
1528 static int
1529 compare_dtor (const void *p1, const void *p2)
1530 {
1531 tree f1;
1532 tree f2;
1533 int priority1;
1534 int priority2;
1535
1536 f1 = *(const tree *)p1;
1537 f2 = *(const tree *)p2;
1538 priority1 = DECL_FINI_PRIORITY (f1);
1539 priority2 = DECL_FINI_PRIORITY (f2);
1540
1541 if (priority1 < priority2)
1542 return -1;
1543 else if (priority1 > priority2)
1544 return 1;
1545 else
1546 /* Ensure a stable sort. */
1547 return DECL_UID (f1) - DECL_UID (f2);
1548 }
1549
1550 /* Generate functions to call static constructors and destructors
1551 for targets that do not support .ctors/.dtors sections. These
1552 functions have magic names which are detected by collect2. */
1553
1554 static void
1555 build_cdtor_fns (void)
1556 {
1557 if (!VEC_empty (tree, static_ctors))
1558 {
1559 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1560 qsort (VEC_address (tree, static_ctors),
1561 VEC_length (tree, static_ctors),
1562 sizeof (tree),
1563 compare_ctor);
1564 build_cdtor (/*ctor_p=*/true, static_ctors);
1565 }
1566
1567 if (!VEC_empty (tree, static_dtors))
1568 {
1569 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1570 qsort (VEC_address (tree, static_dtors),
1571 VEC_length (tree, static_dtors),
1572 sizeof (tree),
1573 compare_dtor);
1574 build_cdtor (/*ctor_p=*/false, static_dtors);
1575 }
1576 }
1577
1578 /* Look for constructors and destructors and produce function calling them.
1579 This is needed for targets not supporting ctors or dtors, but we perform the
1580 transformation also at linktime to merge possibly numberous
1581 constructors/destructors into single function to improve code locality and
1582 reduce size. */
1583
1584 static unsigned int
1585 ipa_cdtor_merge (void)
1586 {
1587 struct cgraph_node *node;
1588 for (node = cgraph_nodes; node; node = node->next)
1589 if (node->analyzed
1590 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1591 || DECL_STATIC_DESTRUCTOR (node->decl)))
1592 record_cdtor_fn (node);
1593 build_cdtor_fns ();
1594 VEC_free (tree, heap, static_ctors);
1595 VEC_free (tree, heap, static_dtors);
1596 return 0;
1597 }
1598
/* Perform the pass when we have no ctors/dtors support
   or at LTO time to merge multiple constructors into single
   function.  */

static bool
gate_ipa_cdtor_merge (void)
{
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}
1608
/* Pass descriptor for the cdtor merging pass above.  No LTO summary
   hooks or transforms are needed; the pass just runs ipa_cdtor_merge.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,			        /* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};