* jvspec.c (jvgenmain_spec): Don't handle -fnew-verifier.
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order.  Returns the number of nodes stored.  ORDER must
   have room for cgraph_n_nodes entries.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge, last;	/* &last is used as a sentinel.  */
  int pass;

  /* Explicit DFS stack; at most one entry per cgraph node.  */
  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 starts DFS only from nodes that may be called indirectly or
     have their address taken; pass 1 sweeps up everything left, e.g.
     nodes reachable only through cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!cgraph_only_called_directly_p (node)
		  && !node->address_taken)))
	{
	  /* During the walk NODE->aux is the not-yet-visited caller edge,
	     or &last once all callers have been processed.  */
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      /* Unvisited caller: push NODE2 and descend into it.  */
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers of NODE2 were emitted; emit it and pop.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  /* Clear AUX so later passes can reuse it.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
109
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
111 to INLINED_TO. */
112
113 static void
114 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
115 {
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
119 {
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
122 }
123 }
124
125 /* Add cgraph NODE to queue starting at FIRST.
126
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
131 reachable. */
132
133 static void
134 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
135 {
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
138 return;
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
142 return;
143 node->aux = *first;
144 *first = node;
145 }
146
/* Add varpool NODE to queue starting at FIRST.  The queue is chained
   through the AUX pointers; no duplicate check is done here, callers
   test NODE->needed first.  */

static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  node->aux = *first;
  *first = node;
}
155
156 /* Process references. */
157
158 static void
159 process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
163 {
164 int i;
165 struct ipa_ref *ref;
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
167 {
168 if (ref->refered_type == IPA_REF_CGRAPH)
169 {
170 struct cgraph_node *node = ipa_ref_node (ref);
171 if (!node->reachable
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
174 {
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
177 }
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189 }
190
/* Return true when variable NODE can be removed from the varpool
   if all references to it are eliminated.  (The original comment said
   "function NODE", but this predicate operates on varpool nodes.)  */

static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
  return (!node->force_output && !node->used_from_other_partition
  	  && (DECL_COMDAT (node->decl) || !node->externally_visible));
}
200
201 /* Return true when function can be marked local. */
202
203 static bool
204 cgraph_local_node_p (struct cgraph_node *node)
205 {
206 return (cgraph_only_called_directly_p (node)
207 && node->analyzed
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
212 }
213
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  FILE, when non-NULL,
   receives a dump of what was reclaimed.  Returns true if the callgraph or
   varpool changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Both worklists are AUX-linked and terminated by a pointer to 1;
     see enqueue_cgraph_node / enqueue_varpool_node.  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Seed the worklist with roots: nodes that must be kept even when no
     direct calls or references remain.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	&& ((!DECL_EXTERNAL (node->decl))
	    || before_inlining_p))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklist, one for cgraph nodes other for varpools and
     are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
  	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Pointer to 2 marks a node processed while unreachable; it may
	     be re-queued should it become reachable later.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
	         && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* AUX set but not reachable: keep the node for cloning, drop
	 analysis results.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      /* AUX clear: the node was never enqueued, i.e. fully unreachable.  */
      if (!node->aux)
	{
	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
	    cgraph_remove_node (node);
	  else
	    {
	      struct cgraph_edge *e;

	      /* See if there is reachable caller.  */
	      for (e = node->callers; e; e = e->next_caller)
		if (e->caller->reachable)
		  break;

	      /* If so, we need to keep node in the callgraph.  */
	      if (e || node->needed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->analyzed = false;
		      node->local.inlinable = false;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		  /* Unlink NODE from the clone tree.  */
		  if (node->prev_sibling_clone)
		    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		  else if (node->clone_of)
		    node->clone_of->clones = node->next_sibling_clone;
		  if (node->next_sibling_clone)
		    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
		  node->clone_of = NULL;
		  node->next_sibling_clone = NULL;
		  node->prev_sibling_clone = NULL;
		}
	      else
		cgraph_remove_node (node);
	    }
	  changed = true;
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* NOTE(review): any remaining incoming reference must be an address
	   reference (asserted below), so FOUND simply means "at least one
	   address-take remains".  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
512
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Only consider finalized variables all of whose references are visible
     at IPA level; otherwise we could miss an address-take or a store.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
        struct ipa_ref *ref;
	/* Scan all references; stop early once both facts are known.  */
        for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
569
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is the -fwhole-program setting in effect; ALIASED is true
   when NODE is the target of an alias pair.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  if (!node->local.finalized)
    return false;
  /* Plain static or external-only declarations are never visible.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p && DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;	/* Fall through to the remaining checks below.  */
  else if (!whole_program)
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  else if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
	return true;
      if (node->same_comdat_group)
	{
	  struct cgraph_node *next;

	  /* If more than one function is in the same COMDAT group, it must
	     be shared even if just one function in the comdat group has
	     address taken.  */
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (next->address_taken || !next->analyzed)
	      return true;
	}
    }
  /* Remaining conditions that pin a symbol as visible regardless of
     -fwhole-program.  */
  if (node->local.used_from_object_file)
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  return false;
}
625
626 /* Dissolve the same_comdat_group list in which NODE resides. */
627
628 static void
629 dissolve_same_comdat_group_list (struct cgraph_node *node)
630 {
631 struct cgraph_node *n = node, *next;
632 do
633 {
634 next = n->same_comdat_group;
635 n->same_comdat_group = NULL;
636 n = next;
637 }
638 while (n != node);
639 }
640
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM is the effective -fwhole-program setting.  Always
   returns 0 (the pass-todo flags).  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
      	          || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  /* Same-body aliases share the visibility of their function.  */
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
      		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
	  && (((!whole_program
		/* We can privatize comdat readonly variables whose address is
		   not taken, but doing so is not going to bring us
		   optimization opportunities until we start reordering
		   datastructures.  */
		|| DECL_COMDAT (vnode->decl)
		|| DECL_WEAK (vnode->decl))
	       /* When doing linktime optimizations, all hidden symbols will
		  become local.  */
	       && (!in_lto_p
		   || DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
		   /* We can get prevailing decision in other object file.
		      In this case we do not set used_from_object_file.  */
		   || !vnode->finalized))
	      || DECL_PRESERVE_P (vnode->decl)
	      || vnode->used_from_object_file
	      || pointer_set_contains (aliased_vnodes, vnode)
	      || lookup_attribute ("externally_visible",
				   DECL_ATTRIBUTES (vnode->decl))))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
830
831 /* Local function pass handling visibilities. This happens before LTO streaming
832 so in particular -fwhole-program should be ignored at this level. */
833
834 static unsigned int
835 local_function_and_variable_visibility (void)
836 {
837 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
838 }
839
/* Pass descriptor for the early, pre-LTO visibility pass.  */

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
859
860 /* Do not re-run on ltrans stage. */
861
862 static bool
863 gate_whole_program_function_and_variable_visibility (void)
864 {
865 return !flag_ltrans;
866 }
867
/* Bring functions local at LTO time with -fwhole-program.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  function_and_variable_visibility (flag_whole_program);

  /* Everything still externally visible after the visibility update must
     be kept, so mark it needed.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
897
/* Pass descriptor for the LTO-time whole-program visibility pass.  */

struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};
926
927 /* Hash a cgraph node set element. */
928
929 static hashval_t
930 hash_cgraph_node_set_element (const void *p)
931 {
932 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
933 return htab_hash_pointer (element->node);
934 }
935
936 /* Compare two cgraph node set elements. */
937
938 static int
939 eq_cgraph_node_set_element (const void *p1, const void *p2)
940 {
941 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
942 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
943
944 return e1->node == e2->node;
945 }
946
947 /* Create a new cgraph node set. */
948
949 cgraph_node_set
950 cgraph_node_set_new (void)
951 {
952 cgraph_node_set new_node_set;
953
954 new_node_set = ggc_alloc_cgraph_node_set_def ();
955 new_node_set->hashtab = htab_create_ggc (10,
956 hash_cgraph_node_set_element,
957 eq_cgraph_node_set_element,
958 NULL);
959 new_node_set->nodes = NULL;
960 return new_node_set;
961 }
962
963 /* Add cgraph_node NODE to cgraph_node_set SET. */
964
965 void
966 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
967 {
968 void **slot;
969 cgraph_node_set_element element;
970 struct cgraph_node_set_element_def dummy;
971
972 dummy.node = node;
973 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
974
975 if (*slot != HTAB_EMPTY_ENTRY)
976 {
977 element = (cgraph_node_set_element) *slot;
978 gcc_assert (node == element->node
979 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
980 == node));
981 return;
982 }
983
984 /* Insert node into hash table. */
985 element = ggc_alloc_cgraph_node_set_element_def ();
986 element->node = node;
987 element->index = VEC_length (cgraph_node_ptr, set->nodes);
988 *slot = element;
989
990 /* Insert into node vector. */
991 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
992 }
993
/* Remove cgraph_node NODE from cgraph_node_set SET.  Removing an element
   that is not present is a no-op.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1033
1034 /* Find NODE in SET and return an iterator to it if found. A null iterator
1035 is returned if NODE is not in SET. */
1036
1037 cgraph_node_set_iterator
1038 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1039 {
1040 void **slot;
1041 struct cgraph_node_set_element_def dummy;
1042 cgraph_node_set_element element;
1043 cgraph_node_set_iterator csi;
1044
1045 dummy.node = node;
1046 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1047 if (slot == NULL)
1048 csi.index = (unsigned) ~0;
1049 else
1050 {
1051 element = (cgraph_node_set_element) *slot;
1052 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1053 == node);
1054 csi.index = element->index;
1055 }
1056 csi.set = set;
1057
1058 return csi;
1059 }
1060
1061 /* Dump content of SET to file F. */
1062
1063 void
1064 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1065 {
1066 cgraph_node_set_iterator iter;
1067
1068 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1069 {
1070 struct cgraph_node *node = csi_node (iter);
1071 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1072 }
1073 fprintf (f, "\n");
1074 }
1075
/* Dump content of SET to stderr.  Intended to be called from the
   debugger.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}
1083
1084 /* Hash a varpool node set element. */
1085
1086 static hashval_t
1087 hash_varpool_node_set_element (const void *p)
1088 {
1089 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1090 return htab_hash_pointer (element->node);
1091 }
1092
1093 /* Compare two varpool node set elements. */
1094
1095 static int
1096 eq_varpool_node_set_element (const void *p1, const void *p2)
1097 {
1098 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1099 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1100
1101 return e1->node == e2->node;
1102 }
1103
1104 /* Create a new varpool node set. */
1105
1106 varpool_node_set
1107 varpool_node_set_new (void)
1108 {
1109 varpool_node_set new_node_set;
1110
1111 new_node_set = ggc_alloc_varpool_node_set_def ();
1112 new_node_set->hashtab = htab_create_ggc (10,
1113 hash_varpool_node_set_element,
1114 eq_varpool_node_set_element,
1115 NULL);
1116 new_node_set->nodes = NULL;
1117 return new_node_set;
1118 }
1119
/* Add varpool_node NODE to varpool_node_set SET.  Adding a node that is
   already in SET is a no-op.  */

void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_element element;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already a member; just verify that the hash table and the
	 node vector agree about its position.  */
      element = (varpool_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (varpool_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  */
  element = ggc_alloc_varpool_node_set_element_def ();
  element->node = node;
  element->index = VEC_length (varpool_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
}
1150
/* Remove varpool_node NODE from varpool_node_set SET.  The node vector
   is kept dense by moving its last element into the vacated slot.
   Removing a node that is not in SET is a no-op.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* Not in the set; nothing to do.  */
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  /* Hash table and node vector must agree about NODE's position.  */
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* Look up the former last node so its recorded index can be
	 updated to NODE's old slot.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1190
/* Find NODE in SET and return an iterator to it if found.  A null
   iterator (index == (unsigned) ~0) is returned if NODE is not in
   SET.  */

varpool_node_set_iterator
varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  struct varpool_node_set_element_def dummy;
  varpool_node_set_element element;
  varpool_node_set_iterator vsi;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    vsi.index = (unsigned) ~0;
  else
    {
      element = (varpool_node_set_element) *slot;
      /* Hash table and node vector must agree about NODE's position.  */
      gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
		  == node);
      vsi.index = element->index;
    }
  vsi.set = set;

  return vsi;
}
1217
1218 /* Dump content of SET to file F. */
1219
1220 void
1221 dump_varpool_node_set (FILE *f, varpool_node_set set)
1222 {
1223 varpool_node_set_iterator iter;
1224
1225 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1226 {
1227 struct varpool_node *node = vsi_node (iter);
1228 fprintf (f, " %s", varpool_node_name (node));
1229 }
1230 fprintf (f, "\n");
1231 }
1232
/* Dump content of SET to stderr.  Intended to be called from the
   debugger.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
1240
1241
1242 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1243
1244 static unsigned int
1245 ipa_profile (void)
1246 {
1247 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1248 struct cgraph_edge *e;
1249 int order_pos;
1250 bool something_changed = false;
1251 int i;
1252
1253 order_pos = cgraph_postorder (order);
1254 for (i = order_pos - 1; i >= 0; i--)
1255 {
1256 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1257 {
1258 for (e = order[i]->callees; e; e = e->next_callee)
1259 if (e->callee->local.local && !e->callee->aux)
1260 {
1261 something_changed = true;
1262 e->callee->aux = (void *)1;
1263 }
1264 }
1265 order[i]->aux = NULL;
1266 }
1267
1268 while (something_changed)
1269 {
1270 something_changed = false;
1271 for (i = order_pos - 1; i >= 0; i--)
1272 {
1273 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1274 {
1275 for (e = order[i]->callees; e; e = e->next_callee)
1276 if (e->callee->local.local && !e->callee->aux)
1277 {
1278 something_changed = true;
1279 e->callee->aux = (void *)1;
1280 }
1281 }
1282 order[i]->aux = NULL;
1283 }
1284 }
1285 free (order);
1286 return 0;
1287 }
1288
1289 static bool
1290 gate_ipa_profile (void)
1291 {
1292 return flag_ipa_profile;
1293 }
1294
/* Pass descriptor for the simple IPA profile propagation pass above.
   It has no LTO summaries or transforms; only the gate and execute
   hooks are used.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1322
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* Counter makes each generated function's name unique within this
     translation unit.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a "void f (void)" FUNCTION_DECL to hold BODY.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without native ctor/dtor support the function must be visible to
     collect2 and must survive unused-function removal.  */
  if (!targetm.have_ctors_dtors)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the decl as a constructor or destructor and record its
     initialization/finalization priority.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* Hand the finished function to the cgraph so it gets compiled.  */
  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
1392
1393
/* A vector of FUNCTION_DECLs declared as static constructors.
   Populated by record_cdtor_fn, consumed and emptied by
   build_cdtor_fns.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.
   Populated by record_cdtor_fn, consumed and emptied by
   build_cdtor_fns.  */
static VEC(tree, heap) *static_dtors;
1398
1399 /* When target does not have ctors and dtors, we call all constructor
1400 and destructor by special initialization/destruction function
1401 recognized by collect2.
1402
1403 When we are going to build this function, collect all constructors and
1404 destructors and turn them into normal functions. */
1405
1406 static void
1407 record_cdtor_fn (struct cgraph_node *node)
1408 {
1409 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1410 VEC_safe_push (tree, heap, static_ctors, node->decl);
1411 if (DECL_STATIC_DESTRUCTOR (node->decl))
1412 VEC_safe_push (tree, heap, static_dtors, node->decl);
1413 node = cgraph_node (node->decl);
1414 node->local.disregard_inline_limits = 1;
1415 }
1416
1417 /* Define global constructors/destructor functions for the CDTORS, of
1418 which they are LEN. The CDTORS are sorted by initialization
1419 priority. If CTOR_P is true, these are constructors; otherwise,
1420 they are destructors. */
1421
1422 static void
1423 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
1424 {
1425 size_t i,j;
1426
1427 i = 0;
1428 while (i < len)
1429 {
1430 tree body;
1431 tree fn;
1432 priority_type priority;
1433
1434 priority = 0;
1435 body = NULL_TREE;
1436 j = i;
1437 do
1438 {
1439 priority_type p;
1440 fn = cdtors[i];
1441 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1442 if (j == i)
1443 priority = p;
1444 else if (p != priority)
1445 break;
1446 j++;
1447 }
1448 while (j < len);
1449
1450 /* When there is only once constructor and target supports them, do nothing. */
1451 if (j == i + 1
1452 && targetm.have_ctors_dtors)
1453 {
1454 i++;
1455 continue;
1456 }
1457 /* Find the next batch of constructors/destructors with the same
1458 initialization priority. */
1459 do
1460 {
1461 priority_type p;
1462 tree call;
1463 fn = cdtors[i];
1464 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1465 if (p != priority)
1466 break;
1467 call = build_call_expr (fn, 0);
1468 if (ctor_p)
1469 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1470 else
1471 DECL_STATIC_DESTRUCTOR (fn) = 0;
1472 /* We do not want to optimize away pure/const calls here.
1473 When optimizing, these should be already removed, when not
1474 optimizing, we want user to be able to breakpoint in them. */
1475 TREE_SIDE_EFFECTS (call) = 1;
1476 append_to_statement_list (call, &body);
1477 ++i;
1478 }
1479 while (i < len);
1480 gcc_assert (body != NULL_TREE);
1481 /* Generate a function to call all the function of like
1482 priority. */
1483 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
1484 }
1485 }
1486
/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}
1513
1514 /* Comparison function for qsort. P1 and P2 are actually of type
1515 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1516 used to determine the sort order. */
1517
1518 static int
1519 compare_dtor (const void *p1, const void *p2)
1520 {
1521 tree f1;
1522 tree f2;
1523 int priority1;
1524 int priority2;
1525
1526 f1 = *(const tree *)p1;
1527 f2 = *(const tree *)p2;
1528 priority1 = DECL_FINI_PRIORITY (f1);
1529 priority2 = DECL_FINI_PRIORITY (f2);
1530
1531 if (priority1 < priority2)
1532 return -1;
1533 else if (priority1 > priority2)
1534 return 1;
1535 else
1536 /* Ensure a stable sort. */
1537 return DECL_UID (f1) - DECL_UID (f2);
1538 }
1539
1540 /* Generate functions to call static constructors and destructors
1541 for targets that do not support .ctors/.dtors sections. These
1542 functions have magic names which are detected by collect2. */
1543
1544 static void
1545 build_cdtor_fns (void)
1546 {
1547 if (!VEC_empty (tree, static_ctors))
1548 {
1549 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1550 qsort (VEC_address (tree, static_ctors),
1551 VEC_length (tree, static_ctors),
1552 sizeof (tree),
1553 compare_ctor);
1554 build_cdtor (/*ctor_p=*/true,
1555 VEC_address (tree, static_ctors),
1556 VEC_length (tree, static_ctors));
1557 VEC_truncate (tree, static_ctors, 0);
1558 }
1559
1560 if (!VEC_empty (tree, static_dtors))
1561 {
1562 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1563 qsort (VEC_address (tree, static_dtors),
1564 VEC_length (tree, static_dtors),
1565 sizeof (tree),
1566 compare_dtor);
1567 build_cdtor (/*ctor_p=*/false,
1568 VEC_address (tree, static_dtors),
1569 VEC_length (tree, static_dtors));
1570 VEC_truncate (tree, static_dtors, 0);
1571 }
1572 }
1573
1574 /* Look for constructors and destructors and produce function calling them.
1575 This is needed for targets not supporting ctors or dtors, but we perform the
1576 transformation also at linktime to merge possibly numberous
1577 constructors/destructors into single function to improve code locality and
1578 reduce size. */
1579
1580 static unsigned int
1581 ipa_cdtor_merge (void)
1582 {
1583 struct cgraph_node *node;
1584 for (node = cgraph_nodes; node; node = node->next)
1585 if (node->analyzed
1586 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1587 || DECL_STATIC_DESTRUCTOR (node->decl)))
1588 record_cdtor_fn (node);
1589 build_cdtor_fns ();
1590 VEC_free (tree, heap, static_ctors);
1591 VEC_free (tree, heap, static_dtors);
1592 return 0;
1593 }
1594
1595 /* Perform the pass when we have no ctors/dtors support
1596 or at LTO time to merge multiple constructors into single
1597 function. */
1598
1599 static bool
1600 gate_ipa_cdtor_merge (void)
1601 {
1602 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1603 }
1604
/* Pass descriptor for the constructor/destructor merging pass above.
   It has no LTO summaries or transforms; only the gate and execute
   hooks are used.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,			        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};