re PR middle-end/45926 (Revision 165057 breaks GOMP)
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order (callees before callers, as far as the graph is
   acyclic).  Returns the number of nodes stored; ORDER must have room
   for cgraph_n_nodes entries.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  /* The address of LAST is used as a sentinel stored in node->aux meaning
     "all caller edges of this node have been visited".  */
  struct cgraph_edge *edge, last;
  int pass;

  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 starts DFS only from "root" nodes (not only called directly and
     address not taken); pass 1 sweeps up everything left over, e.g. nodes
     that are reachable only through cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!cgraph_only_called_directly_p (node)
		  && !node->address_taken)))
	{
	  node2 = node;
	  /* node->aux holds the next caller edge to visit, or &last once
	     the caller list is exhausted.  */
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      /* Descend into the unvisited caller; remember where
			 we were on an explicit stack.  */
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers of NODE2 done: emit it and pop the stack.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  /* Clear the aux markers so other cgraph users find them NULL.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
109
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
111 to INLINED_TO. */
112
113 static void
114 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
115 {
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
119 {
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
122 }
123 }
124
125 /* Add cgraph NODE to queue starting at FIRST.
126
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
131 reachable. */
132
133 static void
134 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
135 {
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
138 return;
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
142 return;
143 node->aux = *first;
144 *first = node;
145 }
146
147 /* Add varpool NODE to queue starting at FIRST. */
148
149 static void
150 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
151 {
152 node->aux = *first;
153 *first = node;
154 }
155
156 /* Process references. */
157
158 static void
159 process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
163 {
164 int i;
165 struct ipa_ref *ref;
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
167 {
168 if (ref->refered_type == IPA_REF_CGRAPH)
169 {
170 struct cgraph_node *node = ipa_ref_node (ref);
171 if (!node->reachable
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
174 {
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
177 }
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189 }
190
191 /* Return true when function NODE can be removed from callgraph
192 if all direct calls are eliminated. */
193
194 static inline bool
195 varpool_can_remove_if_no_refs (struct varpool_node *node)
196 {
197 return (!node->force_output && !node->used_from_other_partition
198 && (DECL_COMDAT (node->decl) || !node->externally_visible));
199 }
200
201 /* Return true when function can be marked local. */
202
203 static bool
204 cgraph_local_node_p (struct cgraph_node *node)
205 {
206 return (cgraph_only_called_directly_p (node)
207 && node->analyzed
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
212 }
213
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  Returns true when
   anything was actually removed (the callgraph changed).  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Both work queues are linked through ->aux and terminated by a pointer
     to 1; see enqueue_cgraph_node.  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* All aux fields must be clear on entry; they are used as queue links
     and visit markers below.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Seed the function worklist with roots: nodes that must be kept even
     with no remaining calls or references.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	 /* Keep around virtual functions for possible devirtualization.  */
	 || (!before_inlining_p
	     && !node->global.inlined_to
	     && DECL_VIRTUAL_P (node->decl)
	     && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))))
	&& ((!DECL_EXTERNAL (node->decl))
	    || before_inlining_p))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }
  /* Likewise seed the varpool worklist with variables that cannot be
     removed regardless of references.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklist, one for cgraph nodes other for varpools and
     are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Mark processed-but-unreachable nodes with pointer to 2 so
	     enqueue_cgraph_node can re-queue them if they later become
	     reachable.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
	         && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  /* References from a variable initializer may make more functions
	     or variables live; process them on the same worklists.  */
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* aux set but not reachable: kept only for later cloning — drop the
	 analysis results but keep the node.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      if (!node->aux)
	{
	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
	    cgraph_remove_node (node);
	  else
	    {
	      struct cgraph_edge *e;

	      /* See if there is reachable caller.  */
	      for (e = node->callers; e; e = e->next_caller)
		if (e->caller->reachable)
		  break;

	      /* If so, we need to keep node in the callgraph.  */
	      if (e || node->needed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->local.inlinable = false;
		      /* Unlink NODE from the sibling-clone list of its
			 clone_of parent.  */
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
#ifdef ENABLE_CHECKING
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
#endif
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	      else
		cgraph_remove_node (node);
	    }
	  changed = true;
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
         cloning.  If the function the clone is inlined into is removed, we need
         to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* Any remaining incoming reference must be an address reference
	   (asserted below); when there is none left, the address is no
	   longer taken.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
521
522 /* Discover variables that have no longer address taken or that are read only
523 and update their flags.
524
525 FIXME: This can not be done in between gimplify and omp_expand since
526 readonly flag plays role on what is shared and what is not. Currently we do
527 this transformation as part of whole program visibility and re-do at
528 ipa-reference pass (to take into account clonning), but it would
529 make sense to do it before early optimizations. */
530
531 void
532 ipa_discover_readonly_nonaddressable_vars (void)
533 {
534 struct varpool_node *vnode;
535 if (dump_file)
536 fprintf (dump_file, "Clearing variable flags:");
537 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
538 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
539 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
540 {
541 bool written = false;
542 bool address_taken = false;
543 int i;
544 struct ipa_ref *ref;
545 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
546 && (!written || !address_taken); i++)
547 switch (ref->use)
548 {
549 case IPA_REF_ADDR:
550 address_taken = true;
551 break;
552 case IPA_REF_LOAD:
553 break;
554 case IPA_REF_STORE:
555 written = true;
556 break;
557 }
558 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
559 {
560 if (dump_file)
561 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
562 TREE_ADDRESSABLE (vnode->decl) = 0;
563 }
564 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
565 /* Making variable in explicit section readonly can cause section
566 type conflict.
567 See e.g. gcc.c-torture/compile/pr23237.c */
568 && DECL_SECTION_NAME (vnode->decl) == NULL)
569 {
570 if (dump_file)
571 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
572 TREE_READONLY (vnode->decl) = 1;
573 }
574 }
575 if (dump_file)
576 fprintf (dump_file, "\n");
577 }
578
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is the effective -fwhole-program setting; ALIASED is true
   when NODE is the target of an alias pair.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  if (!node->local.finalized)
    return false;
  /* Non-COMDAT static functions and pure declarations are never
     externally visible.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;  /* Deliberately empty: fall through to the DECL_PRESERVE_P,
	  main and attribute checks at the bottom.  */
  else if (!whole_program)
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  else if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
	return true;
      if (node->same_comdat_group)
	{
	  struct cgraph_node *next;

	  /* If more than one function is in the same COMDAT group, it must
	     be shared even if just one function in the comdat group has
	     address taken.  */
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (next->address_taken || !next->analyzed)
	      return true;
	}
    }
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  return false;
}
637
638 /* Dissolve the same_comdat_group list in which NODE resides. */
639
640 static void
641 dissolve_same_comdat_group_list (struct cgraph_node *node)
642 {
643 struct cgraph_node *n = node, *next;
644 do
645 {
646 next = n->same_comdat_group;
647 n->same_comdat_group = NULL;
648 n = next;
649 }
650 while (n != node);
651 }
652
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM is true when -fwhole-program semantics apply.  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  /* Targets of alias pairs; these must stay externally visible.  */
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
	  /* Same-body aliases must be localized together with the node.  */
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
	  && (((!whole_program
		/* We can privatize comdat readonly variables whose address is
		   not taken, but doing so is not going to bring us
		   optimization opportunities until we start reordering
		   datastructures.  */
		|| DECL_COMDAT (vnode->decl)
		|| DECL_WEAK (vnode->decl))
	       /* When doing linktime optimizations, all hidden symbols will
		  become local.  */
	       && (!in_lto_p
		   || (DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
		       && DECL_VISIBILITY (vnode->decl) != VISIBILITY_INTERNAL)
		   /* We can get prevailing decision in other object file.
		      In this case we do not set used_from_object_file.  */
		   || !vnode->finalized))
	      || DECL_PRESERVE_P (vnode->decl)
	      || varpool_used_from_object_file_p (vnode)
	      || pointer_set_contains (aliased_vnodes, vnode)
	      || lookup_attribute ("externally_visible",
				   DECL_ATTRIBUTES (vnode->decl))))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	  vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
845
846 /* Local function pass handling visibilities. This happens before LTO streaming
847 so in particular -fwhole-program should be ignored at this level. */
848
849 static unsigned int
850 local_function_and_variable_visibility (void)
851 {
852 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
853 }
854
/* Pass descriptor for the local visibility pass; executed once per
   translation unit before LTO streaming (see
   local_function_and_variable_visibility).  */
struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
874
875 /* Do not re-run on ltrans stage. */
876
877 static bool
878 gate_whole_program_function_and_variable_visibility (void)
879 {
880 return !flag_ltrans;
881 }
882
/* Bring functions local at LTO time with -fwhole-program.  Afterwards
   mark the remaining externally visible functions and variables as
   needed so they survive the LTO pipeline, and (when optimizing)
   re-discover read-only/non-addressable variables.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  function_and_variable_visibility (flag_whole_program);

  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
912
/* Pass descriptor for the whole-program visibility IPA pass; gated off
   at ltrans stage (see gate_whole_program_function_and_variable_visibility).  */
struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
941
942 /* Hash a cgraph node set element. */
943
944 static hashval_t
945 hash_cgraph_node_set_element (const void *p)
946 {
947 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
948 return htab_hash_pointer (element->node);
949 }
950
951 /* Compare two cgraph node set elements. */
952
953 static int
954 eq_cgraph_node_set_element (const void *p1, const void *p2)
955 {
956 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
957 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
958
959 return e1->node == e2->node;
960 }
961
962 /* Create a new cgraph node set. */
963
964 cgraph_node_set
965 cgraph_node_set_new (void)
966 {
967 cgraph_node_set new_node_set;
968
969 new_node_set = ggc_alloc_cgraph_node_set_def ();
970 new_node_set->hashtab = htab_create_ggc (10,
971 hash_cgraph_node_set_element,
972 eq_cgraph_node_set_element,
973 NULL);
974 new_node_set->nodes = NULL;
975 return new_node_set;
976 }
977
978 /* Add cgraph_node NODE to cgraph_node_set SET. */
979
980 void
981 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
982 {
983 void **slot;
984 cgraph_node_set_element element;
985 struct cgraph_node_set_element_def dummy;
986
987 dummy.node = node;
988 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
989
990 if (*slot != HTAB_EMPTY_ENTRY)
991 {
992 element = (cgraph_node_set_element) *slot;
993 gcc_assert (node == element->node
994 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
995 == node));
996 return;
997 }
998
999 /* Insert node into hash table. */
1000 element = ggc_alloc_cgraph_node_set_element_def ();
1001 element->node = node;
1002 element->index = VEC_length (cgraph_node_ptr, set->nodes);
1003 *slot = element;
1004
1005 /* Insert into node vector. */
1006 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
1007 }
1008
/* Remove cgraph_node NODE from cgraph_node_set SET.  A no-op when NODE
   is not a member.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      /* NOTE: this second lookup uses NO_INSERT, so the table is not
	 resized and SLOT remains valid for the htab_clear_slot below.  */
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1048
1049 /* Find NODE in SET and return an iterator to it if found. A null iterator
1050 is returned if NODE is not in SET. */
1051
1052 cgraph_node_set_iterator
1053 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1054 {
1055 void **slot;
1056 struct cgraph_node_set_element_def dummy;
1057 cgraph_node_set_element element;
1058 cgraph_node_set_iterator csi;
1059
1060 dummy.node = node;
1061 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1062 if (slot == NULL)
1063 csi.index = (unsigned) ~0;
1064 else
1065 {
1066 element = (cgraph_node_set_element) *slot;
1067 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1068 == node);
1069 csi.index = element->index;
1070 }
1071 csi.set = set;
1072
1073 return csi;
1074 }
1075
1076 /* Dump content of SET to file F. */
1077
1078 void
1079 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1080 {
1081 cgraph_node_set_iterator iter;
1082
1083 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1084 {
1085 struct cgraph_node *node = csi_node (iter);
1086 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1087 }
1088 fprintf (f, "\n");
1089 }
1090
/* Dump content of SET to stderr.  Convenience wrapper meant to be
   called from the debugger.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}
1098
1099 /* Hash a varpool node set element. */
1100
1101 static hashval_t
1102 hash_varpool_node_set_element (const void *p)
1103 {
1104 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1105 return htab_hash_pointer (element->node);
1106 }
1107
1108 /* Compare two varpool node set elements. */
1109
1110 static int
1111 eq_varpool_node_set_element (const void *p1, const void *p2)
1112 {
1113 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1114 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1115
1116 return e1->node == e2->node;
1117 }
1118
1119 /* Create a new varpool node set. */
1120
1121 varpool_node_set
1122 varpool_node_set_new (void)
1123 {
1124 varpool_node_set new_node_set;
1125
1126 new_node_set = ggc_alloc_varpool_node_set_def ();
1127 new_node_set->hashtab = htab_create_ggc (10,
1128 hash_varpool_node_set_element,
1129 eq_varpool_node_set_element,
1130 NULL);
1131 new_node_set->nodes = NULL;
1132 return new_node_set;
1133 }
1134
/* Add varpool_node NODE to varpool_node_set SET.
   Mirrors cgraph_node_set_add: the hash table gives O(1) membership,
   the vector gives cheap iteration, and the element's INDEX links the
   two.  Adding a node that is already in SET is a no-op.  */

void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_element element;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already present: just check that the hash table and the
	 vector agree on its position.  */
      element = (varpool_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (varpool_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  Its index is the slot the vector
     push below will place it in.  */
  element = ggc_alloc_varpool_node_set_element_def ();
  element->node = node;
  element->index = VEC_length (varpool_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
}
1165
/* Remove varpool_node NODE from varpool_node_set SET.
   Mirrors cgraph_node_set_remove: O(1) removal by moving the vector's
   last element into the slot vacated by NODE and updating its cached
   index.  Removing a node that is not in SET is a no-op.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector. */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* NODE was not the last element; relocate the popped element
	 into NODE's old slot and fix up its hash-table entry.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE. */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table. */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1205
1206 /* Find NODE in SET and return an iterator to it if found. A null iterator
1207 is returned if NODE is not in SET. */
1208
1209 varpool_node_set_iterator
1210 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1211 {
1212 void **slot;
1213 struct varpool_node_set_element_def dummy;
1214 varpool_node_set_element element;
1215 varpool_node_set_iterator vsi;
1216
1217 dummy.node = node;
1218 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1219 if (slot == NULL)
1220 vsi.index = (unsigned) ~0;
1221 else
1222 {
1223 element = (varpool_node_set_element) *slot;
1224 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1225 == node);
1226 vsi.index = element->index;
1227 }
1228 vsi.set = set;
1229
1230 return vsi;
1231 }
1232
1233 /* Dump content of SET to file F. */
1234
1235 void
1236 dump_varpool_node_set (FILE *f, varpool_node_set set)
1237 {
1238 varpool_node_set_iterator iter;
1239
1240 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1241 {
1242 struct varpool_node *node = vsi_node (iter);
1243 fprintf (f, " %s", varpool_node_name (node));
1244 }
1245 fprintf (f, "\n");
1246 }
1247
/* Dump content of SET to stderr.  Convenience wrapper meant to be
   called from the debugger.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
1255
1256
/* Simple ipa profile pass propagating frequencies across the callgraph.

   Walks the callgraph in reverse postorder and lets
   cgraph_propagate_frequency update each local function; when a node
   changes, its local callees are marked via AUX so they can be
   reconsidered, iterating until a fixed point.  Always returns 0 (no
   TODO flags).  */

static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;

  order_pos = cgraph_postorder (order);
  /* Initial pass: propagate into every local function; mark the
     local callees of any node that changed.  AUX doubles as the
     "needs revisiting" flag and is cleared as each node is passed.  */
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
	{
	  for (e = order[i]->callees; e; e = e->next_callee)
	    if (e->callee->local.local && !e->callee->aux)
	      {
		something_changed = true;
		e->callee->aux = (void *)1;
	      }
	}
      order[i]->aux = NULL;
    }

  /* Iterate until no propagation changes anything.  NOTE(review):
     each sweep clears AUX for every node it passes, so a mark set on
     a callee whose index is below the current scan point is erased
     before the next sweep examines it — confirm this still reaches
     the intended fixed point rather than stopping after one extra
     sweep.  */
  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
	{
	  if (order[i]->aux && cgraph_propagate_frequency (order[i]))
	    {
	      for (e = order[i]->callees; e; e = e->next_callee)
		if (e->callee->local.local && !e->callee->aux)
		  {
		    something_changed = true;
		    e->callee->aux = (void *)1;
		  }
	    }
	  order[i]->aux = NULL;
	}
    }
  free (order);
  return 0;
}
1303
1304 static bool
1305 gate_ipa_profile (void)
1306 {
1307 return flag_ipa_profile;
1308 }
1309
/* Pass descriptor for the IPA profile-propagation pass.  It streams
   no LTO summaries and performs no per-function transforms; all work
   happens in ipa_profile.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1337
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   Builds a void(void) FUNCTION_DECL around BODY, gimplifies it and
   hands it to the cgraph for compilation.  Temporarily sets
   current_function_decl/cfun and restores both to NULL on exit.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;	/* Disambiguates same-priority cdtors.  */
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup. */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build the void(void) function decl and its void RESULT_DECL.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors)
    {
      /* Without native ctor/dtor support, collect2 must be able to
	 see the symbol, so make it public and keep it alive.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Register the decl's init/fini priority according to WHICH.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
1407
1408
/* A vector of FUNCTION_DECLs declared as static constructors;
   collected by record_cdtor_fn, consumed by build_cdtor_fns.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors;
   collected by record_cdtor_fn, consumed by build_cdtor_fns.  */
static VEC(tree, heap) *static_dtors;
1413
/* When target does not have ctors and dtors, we call all constructor
   and destructor by special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  /* A decl can be both a constructor and a destructor; record it in
     each applicable vector.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    VEC_safe_push (tree, heap, static_ctors, node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    VEC_safe_push (tree, heap, static_dtors, node->decl);
  /* Force the cdtor to be inlined into the wrapper we will build.  */
  node = cgraph_node (node->decl);
  node->local.disregard_inline_limits = 1;
}
1431
1432 /* Define global constructors/destructor functions for the CDTORS, of
1433 which they are LEN. The CDTORS are sorted by initialization
1434 priority. If CTOR_P is true, these are constructors; otherwise,
1435 they are destructors. */
1436
1437 static void
1438 build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
1439 {
1440 size_t i,j;
1441 size_t len = VEC_length (tree, cdtors);
1442
1443 i = 0;
1444 while (i < len)
1445 {
1446 tree body;
1447 tree fn;
1448 priority_type priority;
1449
1450 priority = 0;
1451 body = NULL_TREE;
1452 j = i;
1453 do
1454 {
1455 priority_type p;
1456 fn = VEC_index (tree, cdtors, j);
1457 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1458 if (j == i)
1459 priority = p;
1460 else if (p != priority)
1461 break;
1462 j++;
1463 }
1464 while (j < len);
1465
1466 /* When there is only one cdtor and target supports them, do nothing. */
1467 if (j == i + 1
1468 && targetm.have_ctors_dtors)
1469 {
1470 i++;
1471 continue;
1472 }
1473 /* Find the next batch of constructors/destructors with the same
1474 initialization priority. */
1475 for (;i < j; i++)
1476 {
1477 tree call;
1478 fn = VEC_index (tree, cdtors, i);
1479 call = build_call_expr (fn, 0);
1480 if (ctor_p)
1481 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1482 else
1483 DECL_STATIC_DESTRUCTOR (fn) = 0;
1484 /* We do not want to optimize away pure/const calls here.
1485 When optimizing, these should be already removed, when not
1486 optimizing, we want user to be able to breakpoint in them. */
1487 TREE_SIDE_EFFECTS (call) = 1;
1488 append_to_statement_list (call, &body);
1489 }
1490 while (i < len);
1491 gcc_assert (body != NULL_TREE);
1492 /* Generate a function to call all the function of like
1493 priority. */
1494 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
1495 }
1496 }
1497
1498 /* Comparison function for qsort. P1 and P2 are actually of type
1499 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1500 used to determine the sort order. */
1501
1502 static int
1503 compare_ctor (const void *p1, const void *p2)
1504 {
1505 tree f1;
1506 tree f2;
1507 int priority1;
1508 int priority2;
1509
1510 f1 = *(const tree *)p1;
1511 f2 = *(const tree *)p2;
1512 priority1 = DECL_INIT_PRIORITY (f1);
1513 priority2 = DECL_INIT_PRIORITY (f2);
1514
1515 if (priority1 < priority2)
1516 return -1;
1517 else if (priority1 > priority2)
1518 return 1;
1519 else
1520 /* Ensure a stable sort. Constructors are executed in backwarding
1521 order to make LTO initialize braries first. */
1522 return DECL_UID (f2) - DECL_UID (f1);
1523 }
1524
1525 /* Comparison function for qsort. P1 and P2 are actually of type
1526 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1527 used to determine the sort order. */
1528
1529 static int
1530 compare_dtor (const void *p1, const void *p2)
1531 {
1532 tree f1;
1533 tree f2;
1534 int priority1;
1535 int priority2;
1536
1537 f1 = *(const tree *)p1;
1538 f2 = *(const tree *)p2;
1539 priority1 = DECL_FINI_PRIORITY (f1);
1540 priority2 = DECL_FINI_PRIORITY (f2);
1541
1542 if (priority1 < priority2)
1543 return -1;
1544 else if (priority1 > priority2)
1545 return 1;
1546 else
1547 /* Ensure a stable sort. */
1548 return DECL_UID (f1) - DECL_UID (f2);
1549 }
1550
1551 /* Generate functions to call static constructors and destructors
1552 for targets that do not support .ctors/.dtors sections. These
1553 functions have magic names which are detected by collect2. */
1554
1555 static void
1556 build_cdtor_fns (void)
1557 {
1558 if (!VEC_empty (tree, static_ctors))
1559 {
1560 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1561 qsort (VEC_address (tree, static_ctors),
1562 VEC_length (tree, static_ctors),
1563 sizeof (tree),
1564 compare_ctor);
1565 build_cdtor (/*ctor_p=*/true, static_ctors);
1566 }
1567
1568 if (!VEC_empty (tree, static_dtors))
1569 {
1570 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1571 qsort (VEC_address (tree, static_dtors),
1572 VEC_length (tree, static_dtors),
1573 sizeof (tree),
1574 compare_dtor);
1575 build_cdtor (/*ctor_p=*/false, static_dtors);
1576 }
1577 }
1578
/* Look for constructors and destructors and produce function calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numberous
   constructors/destructors into single function to improve code locality and
   reduce size.  Always returns 0 (no TODO flags).  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  /* Collect every analyzed cdtor into static_ctors/static_dtors.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed
	&& (DECL_STATIC_CONSTRUCTOR (node->decl)
	    || DECL_STATIC_DESTRUCTOR (node->decl)))
       record_cdtor_fn (node);
  build_cdtor_fns ();
  /* The heap vectors are only needed within this pass.  */
  VEC_free (tree, heap, static_ctors);
  VEC_free (tree, heap, static_dtors);
  return 0;
}
1599
1600 /* Perform the pass when we have no ctors/dtors support
1601 or at LTO time to merge multiple constructors into single
1602 function. */
1603
1604 static bool
1605 gate_ipa_cdtor_merge (void)
1606 {
1607 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1608 }
1609
/* Pass descriptor for the cdtor-merging pass.  It streams no LTO
   summaries and performs no per-function transforms; all work happens
   in ipa_cdtor_merge.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,			        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};