/* Source: [gcc.git] / gcc / ipa.c
   (Adjacent ChangeLog fragment from the same view:
   lto-symtab.c (lto_symtab_entry_def): Add guessed field.)  */
/* Basic IPA optimizations and utilities.
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "gimple.h"
#include "ggc.h"
#include "flags.h"
#include "pointer-set.h"
#include "target.h"
#include "tree-iterator.h"
34
35 /* Fill array order with all nodes with output flag set in the reverse
36 topological order. */
37
38 int
39 cgraph_postorder (struct cgraph_node **order)
40 {
41 struct cgraph_node *node, *node2;
42 int stack_size = 0;
43 int order_pos = 0;
44 struct cgraph_edge *edge, last;
45 int pass;
46
47 struct cgraph_node **stack =
48 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
49
50 /* We have to deal with cycles nicely, so use a depth first traversal
51 output algorithm. Ignore the fact that some functions won't need
52 to be output and put them into order as well, so we get dependencies
53 right through inline functions. */
54 for (node = cgraph_nodes; node; node = node->next)
55 node->aux = NULL;
56 for (pass = 0; pass < 2; pass++)
57 for (node = cgraph_nodes; node; node = node->next)
58 if (!node->aux
59 && (pass
60 || (!cgraph_only_called_directly_p (node)
61 && !node->address_taken)))
62 {
63 node2 = node;
64 if (!node->callers)
65 node->aux = &last;
66 else
67 node->aux = node->callers;
68 while (node2)
69 {
70 while (node2->aux != &last)
71 {
72 edge = (struct cgraph_edge *) node2->aux;
73 if (edge->next_caller)
74 node2->aux = edge->next_caller;
75 else
76 node2->aux = &last;
77 /* Break possible cycles involving always-inline
78 functions by ignoring edges from always-inline
79 functions to non-always-inline functions. */
80 if (edge->caller->local.disregard_inline_limits
81 && !edge->callee->local.disregard_inline_limits)
82 continue;
83 if (!edge->caller->aux)
84 {
85 if (!edge->caller->callers)
86 edge->caller->aux = &last;
87 else
88 edge->caller->aux = edge->caller->callers;
89 stack[stack_size++] = node2;
90 node2 = edge->caller;
91 break;
92 }
93 }
94 if (node2->aux == &last)
95 {
96 order[order_pos++] = node2;
97 if (stack_size)
98 node2 = stack[--stack_size];
99 else
100 node2 = NULL;
101 }
102 }
103 }
104 free (stack);
105 for (node = cgraph_nodes; node; node = node->next)
106 node->aux = NULL;
107 return order_pos;
108 }
109
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
111 to INLINED_TO. */
112
113 static void
114 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
115 {
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
119 {
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
122 }
123 }
124
125 /* Add cgraph NODE to queue starting at FIRST.
126
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
131 reachable. */
132
133 static void
134 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
135 {
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
138 return;
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
142 return;
143 node->aux = *first;
144 *first = node;
145 }
146
147 /* Add varpool NODE to queue starting at FIRST. */
148
149 static void
150 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
151 {
152 node->aux = *first;
153 *first = node;
154 }
155
156 /* Process references. */
157
158 static void
159 process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
163 {
164 int i;
165 struct ipa_ref *ref;
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
167 {
168 if (ref->refered_type == IPA_REF_CGRAPH)
169 {
170 struct cgraph_node *node = ipa_ref_node (ref);
171 if (!node->reachable
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
174 {
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
177 }
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189 }
190
191 /* Return true when function NODE can be removed from callgraph
192 if all direct calls are eliminated. */
193
194 static inline bool
195 varpool_can_remove_if_no_refs (struct varpool_node *node)
196 {
197 return (!node->force_output && !node->used_from_other_partition
198 && (DECL_COMDAT (node->decl) || !node->externally_visible));
199 }
200
201 /* Return true when function can be marked local. */
202
203 static bool
204 cgraph_local_node_p (struct cgraph_node *node)
205 {
206 return (cgraph_only_called_directly_p (node)
207 && node->analyzed
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
212 }
213
214 /* Perform reachability analysis and reclaim all unreachable nodes.
215 If BEFORE_INLINING_P is true this function is called before inlining
216 decisions has been made. If BEFORE_INLINING_P is false this function also
217 removes unneeded bodies of extern inline functions. */
218
219 bool
220 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
221 {
222 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
223 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
224 struct cgraph_node *node, *next;
225 struct varpool_node *vnode, *vnext;
226 bool changed = false;
227
228 #ifdef ENABLE_CHECKING
229 verify_cgraph ();
230 #endif
231 if (file)
232 fprintf (file, "\nReclaiming functions:");
233 #ifdef ENABLE_CHECKING
234 for (node = cgraph_nodes; node; node = node->next)
235 gcc_assert (!node->aux);
236 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
237 gcc_assert (!vnode->aux);
238 #endif
239 varpool_reset_queue ();
240 for (node = cgraph_nodes; node; node = node->next)
241 if ((!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
242 /* Keep around virtual functions for possible devirtualization. */
243 || (!before_inlining_p
244 && !node->global.inlined_to
245 && DECL_VIRTUAL_P (node->decl)
246 && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))))
247 && ((!DECL_EXTERNAL (node->decl))
248 || before_inlining_p))
249 {
250 gcc_assert (!node->global.inlined_to);
251 enqueue_cgraph_node (node, &first);
252 node->reachable = true;
253 }
254 else
255 {
256 gcc_assert (!node->aux);
257 node->reachable = false;
258 }
259 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
260 {
261 vnode->next_needed = NULL;
262 vnode->prev_needed = NULL;
263 if (!varpool_can_remove_if_no_refs (vnode))
264 {
265 vnode->needed = false;
266 varpool_mark_needed_node (vnode);
267 enqueue_varpool_node (vnode, &first_varpool);
268 }
269 else
270 vnode->needed = false;
271 }
272
273 /* Perform reachability analysis. As a special case do not consider
274 extern inline functions not inlined as live because we won't output
275 them at all.
276
277 We maintain two worklist, one for cgraph nodes other for varpools and
278 are finished once both are empty. */
279
280 while (first != (struct cgraph_node *) (void *) 1
281 || first_varpool != (struct varpool_node *) (void *) 1)
282 {
283 if (first != (struct cgraph_node *) (void *) 1)
284 {
285 struct cgraph_edge *e;
286 node = first;
287 first = (struct cgraph_node *) first->aux;
288 if (!node->reachable)
289 node->aux = (void *)2;
290
291 /* If we found this node reachable, first mark on the callees
292 reachable too, unless they are direct calls to extern inline functions
293 we decided to not inline. */
294 if (node->reachable)
295 {
296 for (e = node->callees; e; e = e->next_callee)
297 if (!e->callee->reachable
298 && node->analyzed
299 && (!e->inline_failed || !e->callee->analyzed
300 || (!DECL_EXTERNAL (e->callee->decl))
301 || before_inlining_p))
302 {
303 e->callee->reachable = true;
304 enqueue_cgraph_node (e->callee, &first);
305 }
306 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
307 }
308
309 /* If any function in a comdat group is reachable, force
310 all other functions in the same comdat group to be
311 also reachable. */
312 if (node->same_comdat_group
313 && node->reachable
314 && !node->global.inlined_to)
315 {
316 for (next = node->same_comdat_group;
317 next != node;
318 next = next->same_comdat_group)
319 if (!next->reachable)
320 {
321 next->reachable = true;
322 enqueue_cgraph_node (next, &first);
323 }
324 }
325
326 /* We can freely remove inline clones even if they are cloned, however if
327 function is clone of real clone, we must keep it around in order to
328 make materialize_clones produce function body with the changes
329 applied. */
330 while (node->clone_of && !node->clone_of->aux
331 && !gimple_has_body_p (node->decl))
332 {
333 bool noninline = node->clone_of->decl != node->decl;
334 node = node->clone_of;
335 if (noninline && !node->reachable && !node->aux)
336 {
337 enqueue_cgraph_node (node, &first);
338 break;
339 }
340 }
341 }
342 if (first_varpool != (struct varpool_node *) (void *) 1)
343 {
344 vnode = first_varpool;
345 first_varpool = (struct varpool_node *)first_varpool->aux;
346 vnode->aux = NULL;
347 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
348 /* If any function in a comdat group is reachable, force
349 all other functions in the same comdat group to be
350 also reachable. */
351 if (vnode->same_comdat_group)
352 {
353 struct varpool_node *next;
354 for (next = vnode->same_comdat_group;
355 next != vnode;
356 next = next->same_comdat_group)
357 if (!next->needed)
358 {
359 varpool_mark_needed_node (next);
360 enqueue_varpool_node (next, &first_varpool);
361 }
362 }
363 }
364 }
365
366 /* Remove unreachable nodes.
367
368 Completely unreachable functions can be fully removed from the callgraph.
369 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
370 callgraph (so we still have edges to them). We remove function body then.
371
372 Also we need to care functions that are unreachable but we need to keep them around
373 for later clonning. In this case we also turn them to unanalyzed nodes, but
374 keep the body around. */
375 for (node = cgraph_nodes; node; node = next)
376 {
377 next = node->next;
378 if (node->aux && !node->reachable)
379 {
380 cgraph_node_remove_callees (node);
381 ipa_remove_all_references (&node->ref_list);
382 node->analyzed = false;
383 node->local.inlinable = false;
384 }
385 if (!node->aux)
386 {
387 node->global.inlined_to = NULL;
388 if (file)
389 fprintf (file, " %s", cgraph_node_name (node));
390 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
391 cgraph_remove_node (node);
392 else
393 {
394 struct cgraph_edge *e;
395
396 /* See if there is reachable caller. */
397 for (e = node->callers; e; e = e->next_caller)
398 if (e->caller->reachable)
399 break;
400
401 /* If so, we need to keep node in the callgraph. */
402 if (e || node->needed)
403 {
404 struct cgraph_node *clone;
405
406 /* If there are still clones, we must keep body around.
407 Otherwise we can just remove the body but keep the clone. */
408 for (clone = node->clones; clone;
409 clone = clone->next_sibling_clone)
410 if (clone->aux)
411 break;
412 if (!clone)
413 {
414 cgraph_release_function_body (node);
415 node->local.inlinable = false;
416 if (node->prev_sibling_clone)
417 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
418 else if (node->clone_of)
419 node->clone_of->clones = node->next_sibling_clone;
420 if (node->next_sibling_clone)
421 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
422 #ifdef ENABLE_CHECKING
423 if (node->clone_of)
424 node->former_clone_of = node->clone_of->decl;
425 #endif
426 node->clone_of = NULL;
427 node->next_sibling_clone = NULL;
428 node->prev_sibling_clone = NULL;
429 }
430 else
431 gcc_assert (!clone->in_other_partition);
432 node->analyzed = false;
433 cgraph_node_remove_callees (node);
434 ipa_remove_all_references (&node->ref_list);
435 }
436 else
437 cgraph_remove_node (node);
438 }
439 changed = true;
440 }
441 }
442 for (node = cgraph_nodes; node; node = node->next)
443 {
444 /* Inline clones might be kept around so their materializing allows further
445 cloning. If the function the clone is inlined into is removed, we need
446 to turn it into normal cone. */
447 if (node->global.inlined_to
448 && !node->callers)
449 {
450 gcc_assert (node->clones);
451 node->global.inlined_to = NULL;
452 update_inlined_to_pointer (node, node);
453 }
454 node->aux = NULL;
455 }
456
457 if (file)
458 fprintf (file, "\n");
459
460 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
461 are undesirable at -O0 since we do not want to remove anything. */
462 if (!optimize)
463 return changed;
464
465 if (file)
466 fprintf (file, "Reclaiming variables:");
467 for (vnode = varpool_nodes; vnode; vnode = vnext)
468 {
469 vnext = vnode->next;
470 if (!vnode->needed)
471 {
472 if (file)
473 fprintf (file, " %s", varpool_node_name (vnode));
474 varpool_remove_node (vnode);
475 changed = true;
476 }
477 }
478
479 /* Now update address_taken flags and try to promote functions to be local. */
480
481 if (file)
482 fprintf (file, "\nClearing address taken flags:");
483 for (node = cgraph_nodes; node; node = node->next)
484 if (node->address_taken
485 && !node->reachable_from_other_partition)
486 {
487 int i;
488 struct ipa_ref *ref;
489 bool found = false;
490 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
491 && !found; i++)
492 {
493 gcc_assert (ref->use == IPA_REF_ADDR);
494 found = true;
495 }
496 if (!found)
497 {
498 if (file)
499 fprintf (file, " %s", cgraph_node_name (node));
500 node->address_taken = false;
501 changed = true;
502 if (cgraph_local_node_p (node))
503 {
504 node->local.local = true;
505 if (file)
506 fprintf (file, " (local)");
507 }
508 }
509 }
510
511 #ifdef ENABLE_CHECKING
512 verify_cgraph ();
513 #endif
514
515 /* Reclaim alias pairs for functions that have disappeared from the
516 call graph. */
517 remove_unreachable_alias_pairs ();
518
519 return changed;
520 }
521
522 /* Discover variables that have no longer address taken or that are read only
523 and update their flags.
524
525 FIXME: This can not be done in between gimplify and omp_expand since
526 readonly flag plays role on what is shared and what is not. Currently we do
527 this transformation as part of whole program visibility and re-do at
528 ipa-reference pass (to take into account clonning), but it would
529 make sense to do it before early optimizations. */
530
531 void
532 ipa_discover_readonly_nonaddressable_vars (void)
533 {
534 struct varpool_node *vnode;
535 if (dump_file)
536 fprintf (dump_file, "Clearing variable flags:");
537 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
538 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
539 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
540 {
541 bool written = false;
542 bool address_taken = false;
543 int i;
544 struct ipa_ref *ref;
545 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
546 && (!written || !address_taken); i++)
547 switch (ref->use)
548 {
549 case IPA_REF_ADDR:
550 address_taken = true;
551 break;
552 case IPA_REF_LOAD:
553 break;
554 case IPA_REF_STORE:
555 written = true;
556 break;
557 }
558 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
559 {
560 if (dump_file)
561 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
562 TREE_ADDRESSABLE (vnode->decl) = 0;
563 }
564 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
565 /* Making variable in explicit section readonly can cause section
566 type conflict.
567 See e.g. gcc.c-torture/compile/pr23237.c */
568 && DECL_SECTION_NAME (vnode->decl) == NULL)
569 {
570 if (dump_file)
571 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
572 TREE_READONLY (vnode->decl) = 1;
573 }
574 }
575 if (dump_file)
576 fprintf (dump_file, "\n");
577 }
578
579 /* Return true when function NODE should be considered externally visible. */
580
581 static bool
582 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
583 {
584 if (!node->local.finalized)
585 return false;
586 if (!DECL_COMDAT (node->decl)
587 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
588 return false;
589
590 /* Do not even try to be smart about aliased nodes. Until we properly
591 represent everything by same body alias, these are just evil. */
592 if (aliased)
593 return true;
594
595 /* When doing link time optimizations, hidden symbols become local. */
596 if (in_lto_p && DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
597 /* Be sure that node is defined in IR file, not in other object
598 file. In that case we don't set used_from_other_object_file. */
599 && node->analyzed)
600 ;
601 else if (!whole_program)
602 return true;
603 /* COMDAT functions must be shared only if they have address taken,
604 otherwise we can produce our own private implementation with
605 -fwhole-program. */
606 else if (DECL_COMDAT (node->decl))
607 {
608 if (node->address_taken || !node->analyzed)
609 return true;
610 if (node->same_comdat_group)
611 {
612 struct cgraph_node *next;
613
614 /* If more than one function is in the same COMDAT group, it must
615 be shared even if just one function in the comdat group has
616 address taken. */
617 for (next = node->same_comdat_group;
618 next != node;
619 next = next->same_comdat_group)
620 if (next->address_taken || !next->analyzed)
621 return true;
622 }
623 }
624 if (cgraph_used_from_object_file_p (node))
625 return true;
626 if (DECL_PRESERVE_P (node->decl))
627 return true;
628 if (MAIN_NAME_P (DECL_NAME (node->decl)))
629 return true;
630 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
631 return true;
632 return false;
633 }
634
635 /* Dissolve the same_comdat_group list in which NODE resides. */
636
637 static void
638 dissolve_same_comdat_group_list (struct cgraph_node *node)
639 {
640 struct cgraph_node *n = node, *next;
641 do
642 {
643 next = n->same_comdat_group;
644 n->same_comdat_group = NULL;
645 n = next;
646 }
647 while (n != node);
648 }
649
650 /* Mark visibility of all functions.
651
652 A local function is one whose calls can occur only in the current
653 compilation unit and all its calls are explicit, so we can change
654 its calling convention. We simply mark all static functions whose
655 address is not taken as local.
656
657 We also change the TREE_PUBLIC flag of all declarations that are public
658 in language point of view but we want to overwrite this default
659 via visibilities for the backend point of view. */
660
661 static unsigned int
662 function_and_variable_visibility (bool whole_program)
663 {
664 struct cgraph_node *node;
665 struct varpool_node *vnode;
666 struct pointer_set_t *aliased_nodes = pointer_set_create ();
667 struct pointer_set_t *aliased_vnodes = pointer_set_create ();
668 unsigned i;
669 alias_pair *p;
670
671 /* Discover aliased nodes. */
672 FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
673 {
674 if (dump_file)
675 fprintf (dump_file, "Alias %s->%s",
676 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
677 IDENTIFIER_POINTER (p->target));
678
679 if ((node = cgraph_node_for_asm (p->target)) != NULL)
680 {
681 gcc_assert (node->needed);
682 pointer_set_insert (aliased_nodes, node);
683 if (dump_file)
684 fprintf (dump_file, " node %s/%i",
685 cgraph_node_name (node), node->uid);
686 }
687 else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
688 {
689 gcc_assert (vnode->needed);
690 pointer_set_insert (aliased_vnodes, vnode);
691 if (dump_file)
692 fprintf (dump_file, " varpool node %s",
693 varpool_node_name (vnode));
694 }
695 if (dump_file)
696 fprintf (dump_file, "\n");
697 }
698
699 for (node = cgraph_nodes; node; node = node->next)
700 {
701 /* C++ FE on lack of COMDAT support create local COMDAT functions
702 (that ought to be shared but can not due to object format
703 limitations). It is neccesary to keep the flag to make rest of C++ FE
704 happy. Clear the flag here to avoid confusion in middle-end. */
705 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
706 DECL_COMDAT (node->decl) = 0;
707 /* For external decls stop tracking same_comdat_group, it doesn't matter
708 what comdat group they are in when they won't be emitted in this TU,
709 and simplifies later passes. */
710 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
711 {
712 #ifdef ENABLE_CHECKING
713 struct cgraph_node *n;
714
715 for (n = node->same_comdat_group;
716 n != node;
717 n = n->same_comdat_group)
718 /* If at least one of same comdat group functions is external,
719 all of them have to be, otherwise it is a front-end bug. */
720 gcc_assert (DECL_EXTERNAL (n->decl));
721 #endif
722 dissolve_same_comdat_group_list (node);
723 }
724 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
725 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
726 if (cgraph_externally_visible_p (node, whole_program,
727 pointer_set_contains (aliased_nodes,
728 node)))
729 {
730 gcc_assert (!node->global.inlined_to);
731 node->local.externally_visible = true;
732 }
733 else
734 node->local.externally_visible = false;
735 if (!node->local.externally_visible && node->analyzed
736 && !DECL_EXTERNAL (node->decl))
737 {
738 struct cgraph_node *alias;
739 gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
740 cgraph_make_decl_local (node->decl);
741 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
742 for (alias = node->same_body; alias; alias = alias->next)
743 cgraph_make_decl_local (alias->decl);
744 if (node->same_comdat_group)
745 /* cgraph_externally_visible_p has already checked all other nodes
746 in the group and they will all be made local. We need to
747 dissolve the group at once so that the predicate does not
748 segfault though. */
749 dissolve_same_comdat_group_list (node);
750 }
751 node->local.local = cgraph_local_node_p (node);
752 }
753 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
754 {
755 /* weak flag makes no sense on local variables. */
756 gcc_assert (!DECL_WEAK (vnode->decl)
757 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
758 /* In several cases declarations can not be common:
759
760 - when declaration has initializer
761 - when it is in weak
762 - when it has specific section
763 - when it resides in non-generic address space.
764 - if declaration is local, it will get into .local common section
765 so common flag is not needed. Frontends still produce these in
766 certain cases, such as for:
767
768 static int a __attribute__ ((common))
769
770 Canonicalize things here and clear the redundant flag. */
771 if (DECL_COMMON (vnode->decl)
772 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
773 || (DECL_INITIAL (vnode->decl)
774 && DECL_INITIAL (vnode->decl) != error_mark_node)
775 || DECL_WEAK (vnode->decl)
776 || DECL_SECTION_NAME (vnode->decl) != NULL
777 || ! (ADDR_SPACE_GENERIC_P
778 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
779 DECL_COMMON (vnode->decl) = 0;
780 }
781 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
782 {
783 if (!vnode->finalized)
784 continue;
785 if (vnode->needed
786 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
787 && (((!whole_program
788 /* We can privatize comdat readonly variables whose address is
789 not taken, but doing so is not going to bring us
790 optimization oppurtunities until we start reordering
791 datastructures. */
792 || DECL_COMDAT (vnode->decl)
793 || DECL_WEAK (vnode->decl))
794 /* When doing linktime optimizations, all hidden symbols will
795 become local. */
796 && (!in_lto_p
797 || DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
798 /* We can get prevailing decision in other object file.
799 In this case we do not sed used_from_object_file. */
800 || !vnode->finalized))
801 || DECL_PRESERVE_P (vnode->decl)
802 || varpool_used_from_object_file_p (vnode)
803 || pointer_set_contains (aliased_vnodes, vnode)
804 || lookup_attribute ("externally_visible",
805 DECL_ATTRIBUTES (vnode->decl))))
806 vnode->externally_visible = true;
807 else
808 vnode->externally_visible = false;
809 if (!vnode->externally_visible)
810 {
811 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
812 cgraph_make_decl_local (vnode->decl);
813 vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
814 }
815 gcc_assert (TREE_STATIC (vnode->decl));
816 }
817 pointer_set_destroy (aliased_nodes);
818 pointer_set_destroy (aliased_vnodes);
819
820 if (dump_file)
821 {
822 fprintf (dump_file, "\nMarking local functions:");
823 for (node = cgraph_nodes; node; node = node->next)
824 if (node->local.local)
825 fprintf (dump_file, " %s", cgraph_node_name (node));
826 fprintf (dump_file, "\n\n");
827 fprintf (dump_file, "\nMarking externally visible functions:");
828 for (node = cgraph_nodes; node; node = node->next)
829 if (node->local.externally_visible)
830 fprintf (dump_file, " %s", cgraph_node_name (node));
831 fprintf (dump_file, "\n\n");
832 fprintf (dump_file, "\nMarking externally visible variables:");
833 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
834 if (vnode->externally_visible)
835 fprintf (dump_file, " %s", varpool_node_name (vnode));
836 fprintf (dump_file, "\n\n");
837 }
838 cgraph_function_flags_ready = true;
839 return 0;
840 }
841
842 /* Local function pass handling visibilities. This happens before LTO streaming
843 so in particular -fwhole-program should be ignored at this level. */
844
845 static unsigned int
846 local_function_and_variable_visibility (void)
847 {
848 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
849 }
850
851 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
852 {
853 {
854 SIMPLE_IPA_PASS,
855 "visibility", /* name */
856 NULL, /* gate */
857 local_function_and_variable_visibility,/* execute */
858 NULL, /* sub */
859 NULL, /* next */
860 0, /* static_pass_number */
861 TV_CGRAPHOPT, /* tv_id */
862 0, /* properties_required */
863 0, /* properties_provided */
864 0, /* properties_destroyed */
865 0, /* todo_flags_start */
866 TODO_remove_functions | TODO_dump_cgraph
867 | TODO_ggc_collect /* todo_flags_finish */
868 }
869 };
870
871 /* Do not re-run on ltrans stage. */
872
873 static bool
874 gate_whole_program_function_and_variable_visibility (void)
875 {
876 return !flag_ltrans;
877 }
878
879 /* Bring functionss local at LTO time whith -fwhole-program. */
880
881 static unsigned int
882 whole_program_function_and_variable_visibility (void)
883 {
884 struct cgraph_node *node;
885 struct varpool_node *vnode;
886
887 function_and_variable_visibility (flag_whole_program);
888
889 for (node = cgraph_nodes; node; node = node->next)
890 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
891 && node->local.finalized)
892 cgraph_mark_needed_node (node);
893 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
894 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
895 varpool_mark_needed_node (vnode);
896 if (dump_file)
897 {
898 fprintf (dump_file, "\nNeeded variables:");
899 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
900 if (vnode->needed)
901 fprintf (dump_file, " %s", varpool_node_name (vnode));
902 fprintf (dump_file, "\n\n");
903 }
904 if (optimize)
905 ipa_discover_readonly_nonaddressable_vars ();
906 return 0;
907 }
908
909 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
910 {
911 {
912 IPA_PASS,
913 "whole-program", /* name */
914 gate_whole_program_function_and_variable_visibility,/* gate */
915 whole_program_function_and_variable_visibility,/* execute */
916 NULL, /* sub */
917 NULL, /* next */
918 0, /* static_pass_number */
919 TV_CGRAPHOPT, /* tv_id */
920 0, /* properties_required */
921 0, /* properties_provided */
922 0, /* properties_destroyed */
923 0, /* todo_flags_start */
924 TODO_remove_functions | TODO_dump_cgraph
925 | TODO_ggc_collect /* todo_flags_finish */
926 },
927 NULL, /* generate_summary */
928 NULL, /* write_summary */
929 NULL, /* read_summary */
930 NULL, /* write_optimization_summary */
931 NULL, /* read_optimization_summary */
932 NULL, /* stmt_fixup */
933 0, /* TODOs */
934 NULL, /* function_transform */
935 NULL, /* variable_transform */
936 };
937
938 /* Hash a cgraph node set element. */
939
940 static hashval_t
941 hash_cgraph_node_set_element (const void *p)
942 {
943 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
944 return htab_hash_pointer (element->node);
945 }
946
947 /* Compare two cgraph node set elements. */
948
949 static int
950 eq_cgraph_node_set_element (const void *p1, const void *p2)
951 {
952 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
953 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
954
955 return e1->node == e2->node;
956 }
957
958 /* Create a new cgraph node set. */
959
960 cgraph_node_set
961 cgraph_node_set_new (void)
962 {
963 cgraph_node_set new_node_set;
964
965 new_node_set = ggc_alloc_cgraph_node_set_def ();
966 new_node_set->hashtab = htab_create_ggc (10,
967 hash_cgraph_node_set_element,
968 eq_cgraph_node_set_element,
969 NULL);
970 new_node_set->nodes = NULL;
971 return new_node_set;
972 }
973
974 /* Add cgraph_node NODE to cgraph_node_set SET. */
975
976 void
977 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
978 {
979 void **slot;
980 cgraph_node_set_element element;
981 struct cgraph_node_set_element_def dummy;
982
983 dummy.node = node;
984 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
985
986 if (*slot != HTAB_EMPTY_ENTRY)
987 {
988 element = (cgraph_node_set_element) *slot;
989 gcc_assert (node == element->node
990 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
991 == node));
992 return;
993 }
994
995 /* Insert node into hash table. */
996 element = ggc_alloc_cgraph_node_set_element_def ();
997 element->node = node;
998 element->index = VEC_length (cgraph_node_ptr, set->nodes);
999 *slot = element;
1000
1001 /* Insert into node vector. */
1002 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
1003 }
1004
/* Remove cgraph_node NODE from cgraph_node_set SET.  A no-op if NODE
   is not a member.  Removal is O(1): NODE's vector slot is backfilled
   with the vector's last element rather than shifting the tail.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  /* Check the index/vector invariant before mutating anything.  */
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* NODE was not the last element; relocate the popped element into
	 NODE's old slot and update its recorded index to match.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1044
1045 /* Find NODE in SET and return an iterator to it if found. A null iterator
1046 is returned if NODE is not in SET. */
1047
1048 cgraph_node_set_iterator
1049 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1050 {
1051 void **slot;
1052 struct cgraph_node_set_element_def dummy;
1053 cgraph_node_set_element element;
1054 cgraph_node_set_iterator csi;
1055
1056 dummy.node = node;
1057 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1058 if (slot == NULL)
1059 csi.index = (unsigned) ~0;
1060 else
1061 {
1062 element = (cgraph_node_set_element) *slot;
1063 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1064 == node);
1065 csi.index = element->index;
1066 }
1067 csi.set = set;
1068
1069 return csi;
1070 }
1071
1072 /* Dump content of SET to file F. */
1073
1074 void
1075 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1076 {
1077 cgraph_node_set_iterator iter;
1078
1079 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1080 {
1081 struct cgraph_node *node = csi_node (iter);
1082 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1083 }
1084 fprintf (f, "\n");
1085 }
1086
1087 /* Dump content of SET to stderr. */
1088
1089 DEBUG_FUNCTION void
1090 debug_cgraph_node_set (cgraph_node_set set)
1091 {
1092 dump_cgraph_node_set (stderr, set);
1093 }
1094
1095 /* Hash a varpool node set element. */
1096
1097 static hashval_t
1098 hash_varpool_node_set_element (const void *p)
1099 {
1100 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1101 return htab_hash_pointer (element->node);
1102 }
1103
1104 /* Compare two varpool node set elements. */
1105
1106 static int
1107 eq_varpool_node_set_element (const void *p1, const void *p2)
1108 {
1109 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1110 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1111
1112 return e1->node == e2->node;
1113 }
1114
1115 /* Create a new varpool node set. */
1116
1117 varpool_node_set
1118 varpool_node_set_new (void)
1119 {
1120 varpool_node_set new_node_set;
1121
1122 new_node_set = ggc_alloc_varpool_node_set_def ();
1123 new_node_set->hashtab = htab_create_ggc (10,
1124 hash_varpool_node_set_element,
1125 eq_varpool_node_set_element,
1126 NULL);
1127 new_node_set->nodes = NULL;
1128 return new_node_set;
1129 }
1130
/* Add varpool_node NODE to varpool_node_set SET.  A no-op if NODE is
   already a member.  Mirrors cgraph_node_set_add: each hash element's
   INDEX is NODE's position in the SET->nodes vector.  */

void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_element element;
  struct varpool_node_set_element_def dummy;

  /* Probe with a stack dummy keyed on NODE; INSERT reserves a slot if
     NODE is not yet present.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already a member: just verify the index/vector invariant.  */
      element = (varpool_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (varpool_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  The index must be captured before the
     push below so it names the slot NODE is about to occupy.  */
  element = ggc_alloc_varpool_node_set_element_def ();
  element->node = node;
  element->index = VEC_length (varpool_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
}
1161
/* Remove varpool_node NODE from varpool_node_set SET.  A no-op if NODE
   is not a member.  Removal is O(1): NODE's vector slot is backfilled
   with the vector's last element rather than shifting the tail.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  /* Check the index/vector invariant before mutating anything.  */
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* NODE was not the last element; relocate the popped element into
	 NODE's old slot and update its recorded index to match.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1201
1202 /* Find NODE in SET and return an iterator to it if found. A null iterator
1203 is returned if NODE is not in SET. */
1204
1205 varpool_node_set_iterator
1206 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1207 {
1208 void **slot;
1209 struct varpool_node_set_element_def dummy;
1210 varpool_node_set_element element;
1211 varpool_node_set_iterator vsi;
1212
1213 dummy.node = node;
1214 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1215 if (slot == NULL)
1216 vsi.index = (unsigned) ~0;
1217 else
1218 {
1219 element = (varpool_node_set_element) *slot;
1220 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1221 == node);
1222 vsi.index = element->index;
1223 }
1224 vsi.set = set;
1225
1226 return vsi;
1227 }
1228
1229 /* Dump content of SET to file F. */
1230
1231 void
1232 dump_varpool_node_set (FILE *f, varpool_node_set set)
1233 {
1234 varpool_node_set_iterator iter;
1235
1236 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1237 {
1238 struct varpool_node *node = vsi_node (iter);
1239 fprintf (f, " %s", varpool_node_name (node));
1240 }
1241 fprintf (f, "\n");
1242 }
1243
1244 /* Dump content of SET to stderr. */
1245
1246 DEBUG_FUNCTION void
1247 debug_varpool_node_set (varpool_node_set set)
1248 {
1249 dump_varpool_node_set (stderr, set);
1250 }
1251
1252
/* Simple ipa profile pass propagating frequencies across the callgraph.
   Iterates cgraph_propagate_frequency to a fixed point, using each
   node's AUX pointer as a "needs revisiting" mark.  Returns 0 (no
   special TODO flags).  */

static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;

  /* Fill ORDER with nodes in reverse topological order, then walk it
     backwards so that a node is visited before its callees; when
     propagation changes a local node, mark its local callees via AUX so
     the fixed-point loop below reconsiders them.  */
  order_pos = cgraph_postorder (order);
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
	{
	  for (e = order[i]->callees; e; e = e->next_callee)
	    if (e->callee->local.local && !e->callee->aux)
	      {
		something_changed = true;
		e->callee->aux = (void *)1;
	      }
	}
      /* Clear our own mark once processed; callees appear earlier in
	 ORDER, so their marks set above survive for later iterations.  */
      order[i]->aux = NULL;
    }

  /* Re-run propagation on marked nodes until no node changes.  */
  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
	{
	  if (order[i]->aux && cgraph_propagate_frequency (order[i]))
	    {
	      for (e = order[i]->callees; e; e = e->next_callee)
		if (e->callee->local.local && !e->callee->aux)
		  {
		    something_changed = true;
		    e->callee->aux = (void *)1;
		  }
	    }
	  order[i]->aux = NULL;
	}
    }
  free (order);
  return 0;
}
1299
/* Gate for pass_ipa_profile: run only when -fipa-profile is enabled.  */

static bool
gate_ipa_profile (void)
{
  return flag_ipa_profile;
}
1305
/* Pass descriptor for the IPA profile propagation pass.  Purely a
   whole-program analysis/propagation pass; it carries no LTO summary
   streaming hooks and no per-function transform.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1333
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   Builds a void(void) FUNCTION_DECL around BODY, gimplifies it and
   hands it to the cgraph.  Temporarily sets current_function_decl and
   cfun; both are restored to NULL before returning.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* COUNTER makes each generated function name unique within this
     translation unit.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build the void(void) function declaration and its RESULT_DECL.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without target ctor/dtor support, collect2 must see this symbol, so
     make it public and keep it from being optimized away.  */
  if (!targetm.have_ctors_dtors)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Record the ctor/dtor role and its priority on the decl.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
1403
1404
/* A vector of FUNCTION_DECLs declared as static constructors,
   collected by record_cdtor_fn and consumed by build_cdtor_fns.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors,
   collected by record_cdtor_fn and consumed by build_cdtor_fns.  */
static VEC(tree, heap) *static_dtors;
1409
1410 /* When target does not have ctors and dtors, we call all constructor
1411 and destructor by special initialization/destruction function
1412 recognized by collect2.
1413
1414 When we are going to build this function, collect all constructors and
1415 destructors and turn them into normal functions. */
1416
1417 static void
1418 record_cdtor_fn (struct cgraph_node *node)
1419 {
1420 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1421 VEC_safe_push (tree, heap, static_ctors, node->decl);
1422 if (DECL_STATIC_DESTRUCTOR (node->decl))
1423 VEC_safe_push (tree, heap, static_dtors, node->decl);
1424 node = cgraph_node (node->decl);
1425 node->local.disregard_inline_limits = 1;
1426 }
1427
1428 /* Define global constructors/destructor functions for the CDTORS, of
1429 which they are LEN. The CDTORS are sorted by initialization
1430 priority. If CTOR_P is true, these are constructors; otherwise,
1431 they are destructors. */
1432
1433 static void
1434 build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
1435 {
1436 size_t i,j;
1437 size_t len = VEC_length (tree, cdtors);
1438
1439 i = 0;
1440 while (i < len)
1441 {
1442 tree body;
1443 tree fn;
1444 priority_type priority;
1445
1446 priority = 0;
1447 body = NULL_TREE;
1448 j = i;
1449 do
1450 {
1451 priority_type p;
1452 fn = VEC_index (tree, cdtors, j);
1453 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1454 if (j == i)
1455 priority = p;
1456 else if (p != priority)
1457 break;
1458 j++;
1459 }
1460 while (j < len);
1461
1462 /* When there is only one cdtor and target supports them, do nothing. */
1463 if (j == i + 1
1464 && targetm.have_ctors_dtors)
1465 {
1466 i++;
1467 continue;
1468 }
1469 /* Find the next batch of constructors/destructors with the same
1470 initialization priority. */
1471 for (;i < j; i++)
1472 {
1473 tree call;
1474 fn = VEC_index (tree, cdtors, i);
1475 call = build_call_expr (fn, 0);
1476 if (ctor_p)
1477 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1478 else
1479 DECL_STATIC_DESTRUCTOR (fn) = 0;
1480 /* We do not want to optimize away pure/const calls here.
1481 When optimizing, these should be already removed, when not
1482 optimizing, we want user to be able to breakpoint in them. */
1483 TREE_SIDE_EFFECTS (call) = 1;
1484 append_to_statement_list (call, &body);
1485 }
1486 while (i < len);
1487 gcc_assert (body != NULL_TREE);
1488 /* Generate a function to call all the function of like
1489 priority. */
1490 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
1491 }
1492 }
1493
1494 /* Comparison function for qsort. P1 and P2 are actually of type
1495 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1496 used to determine the sort order. */
1497
1498 static int
1499 compare_ctor (const void *p1, const void *p2)
1500 {
1501 tree f1;
1502 tree f2;
1503 int priority1;
1504 int priority2;
1505
1506 f1 = *(const tree *)p1;
1507 f2 = *(const tree *)p2;
1508 priority1 = DECL_INIT_PRIORITY (f1);
1509 priority2 = DECL_INIT_PRIORITY (f2);
1510
1511 if (priority1 < priority2)
1512 return -1;
1513 else if (priority1 > priority2)
1514 return 1;
1515 else
1516 /* Ensure a stable sort. Constructors are executed in backwarding
1517 order to make LTO initialize braries first. */
1518 return DECL_UID (f2) - DECL_UID (f1);
1519 }
1520
1521 /* Comparison function for qsort. P1 and P2 are actually of type
1522 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1523 used to determine the sort order. */
1524
1525 static int
1526 compare_dtor (const void *p1, const void *p2)
1527 {
1528 tree f1;
1529 tree f2;
1530 int priority1;
1531 int priority2;
1532
1533 f1 = *(const tree *)p1;
1534 f2 = *(const tree *)p2;
1535 priority1 = DECL_FINI_PRIORITY (f1);
1536 priority2 = DECL_FINI_PRIORITY (f2);
1537
1538 if (priority1 < priority2)
1539 return -1;
1540 else if (priority1 > priority2)
1541 return 1;
1542 else
1543 /* Ensure a stable sort. */
1544 return DECL_UID (f1) - DECL_UID (f2);
1545 }
1546
1547 /* Generate functions to call static constructors and destructors
1548 for targets that do not support .ctors/.dtors sections. These
1549 functions have magic names which are detected by collect2. */
1550
1551 static void
1552 build_cdtor_fns (void)
1553 {
1554 if (!VEC_empty (tree, static_ctors))
1555 {
1556 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1557 qsort (VEC_address (tree, static_ctors),
1558 VEC_length (tree, static_ctors),
1559 sizeof (tree),
1560 compare_ctor);
1561 build_cdtor (/*ctor_p=*/true, static_ctors);
1562 }
1563
1564 if (!VEC_empty (tree, static_dtors))
1565 {
1566 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1567 qsort (VEC_address (tree, static_dtors),
1568 VEC_length (tree, static_dtors),
1569 sizeof (tree),
1570 compare_dtor);
1571 build_cdtor (/*ctor_p=*/false, static_dtors);
1572 }
1573 }
1574
1575 /* Look for constructors and destructors and produce function calling them.
1576 This is needed for targets not supporting ctors or dtors, but we perform the
1577 transformation also at linktime to merge possibly numberous
1578 constructors/destructors into single function to improve code locality and
1579 reduce size. */
1580
1581 static unsigned int
1582 ipa_cdtor_merge (void)
1583 {
1584 struct cgraph_node *node;
1585 for (node = cgraph_nodes; node; node = node->next)
1586 if (node->analyzed
1587 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1588 || DECL_STATIC_DESTRUCTOR (node->decl)))
1589 record_cdtor_fn (node);
1590 build_cdtor_fns ();
1591 VEC_free (tree, heap, static_ctors);
1592 VEC_free (tree, heap, static_dtors);
1593 return 0;
1594 }
1595
1596 /* Perform the pass when we have no ctors/dtors support
1597 or at LTO time to merge multiple constructors into single
1598 function. */
1599
1600 static bool
1601 gate_ipa_cdtor_merge (void)
1602 {
1603 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1604 }
1605
/* Pass descriptor for static constructor/destructor merging.  No LTO
   summary streaming hooks and no per-function transform are needed.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,			        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};