cgraph.h (struct varpool_node): Add const_value_known.
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32 #include "target.h"
33 #include "tree-iterator.h"
34
35 /* Fill array order with all nodes with output flag set in the reverse
36 topological order. */
37
38 int
39 cgraph_postorder (struct cgraph_node **order)
40 {
/* Each node's AUX field is overloaded during the walk: it holds the next
   caller edge still to be visited, or the address of LAST once every
   caller of the node has been processed.  */
41 struct cgraph_node *node, *node2;
42 int stack_size = 0;
43 int order_pos = 0;
44 struct cgraph_edge *edge, last;
45 int pass;
46
47 struct cgraph_node **stack =
48 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
49
50 /* We have to deal with cycles nicely, so use a depth first traversal
51 output algorithm. Ignore the fact that some functions won't need
52 to be output and put them into order as well, so we get dependencies
53 right through inline functions. */
54 for (node = cgraph_nodes; node; node = node->next)
55 node->aux = NULL;
/* Pass 0 seeds the walk only from roots (nodes not exclusively called
   directly and without their address taken); pass 1 picks up whatever
   remains, e.g. nodes only reachable through cycles.  */
56 for (pass = 0; pass < 2; pass++)
57 for (node = cgraph_nodes; node; node = node->next)
58 if (!node->aux
59 && (pass
60 || (!cgraph_only_called_directly_p (node)
61 && !node->address_taken)))
62 {
63 node2 = node;
64 if (!node->callers)
65 node->aux = &last;
66 else
67 node->aux = node->callers;
68 while (node2)
69 {
70 while (node2->aux != &last)
71 {
72 edge = (struct cgraph_edge *) node2->aux;
73 if (edge->next_caller)
74 node2->aux = edge->next_caller;
75 else
76 node2->aux = &last;
77 /* Break possible cycles involving always-inline
78 functions by ignoring edges from always-inline
79 functions to non-always-inline functions. */
80 if (edge->caller->local.disregard_inline_limits
81 && !edge->callee->local.disregard_inline_limits)
82 continue;
83 if (!edge->caller->aux)
84 {
85 if (!edge->caller->callers)
86 edge->caller->aux = &last;
87 else
88 edge->caller->aux = edge->caller->callers;
89 stack[stack_size++] = node2;
90 node2 = edge->caller;
91 break;
92 }
93 }
/* All callers of NODE2 are done: emit it and pop the DFS stack.  */
94 if (node2->aux == &last)
95 {
96 order[order_pos++] = node2;
97 if (stack_size)
98 node2 = stack[--stack_size];
99 else
100 node2 = NULL;
101 }
102 }
103 }
104 free (stack);
105 for (node = cgraph_nodes; node; node = node->next)
106 node->aux = NULL;
107 return order_pos;
108 }
109
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
111 to INLINED_TO. */
112
113 static void
114 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
115 {
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
119 {
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
122 }
123 }
124
125 /* Add cgraph NODE to queue starting at FIRST.
126
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
131 reachable. */
132
133 static void
134 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
135 {
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
138 return;
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
142 return;
143 node->aux = *first;
144 *first = node;
145 }
146
147 /* Add varpool NODE to queue starting at FIRST. */
148
149 static void
150 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
151 {
152 node->aux = *first;
153 *first = node;
154 }
155
156 /* Process references. */
157
158 static void
159 process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
163 {
164 int i;
165 struct ipa_ref *ref;
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
167 {
168 if (ref->refered_type == IPA_REF_CGRAPH)
169 {
170 struct cgraph_node *node = ipa_ref_node (ref);
171 if (!node->reachable
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
174 {
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
177 }
178 }
179 else
180 {
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
182 if (!node->needed)
183 {
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
186 }
187 }
188 }
189 }
190
191 /* Return true when function NODE can be removed from callgraph
192 if all direct calls are eliminated. */
193
194 static inline bool
195 varpool_can_remove_if_no_refs (struct varpool_node *node)
196 {
197 return (!node->force_output && !node->used_from_other_partition
198 && (DECL_COMDAT (node->decl) || !node->externally_visible));
199 }
200
201 /* Return true when function can be marked local. */
202
203 static bool
204 cgraph_local_node_p (struct cgraph_node *node)
205 {
206 return (cgraph_only_called_directly_p (node)
207 && node->analyzed
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
212 }
213
214 /* Perform reachability analysis and reclaim all unreachable nodes.
215 If BEFORE_INLINING_P is true this function is called before inlining
216 decisions has been made. If BEFORE_INLINING_P is false this function also
217 removes unneeded bodies of extern inline functions. */
218
219 bool
220 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
221 {
/* Both worklists are terminated by the sentinel pointer 1; nodes are
   threaded through their AUX fields (see enqueue_cgraph_node and
   enqueue_varpool_node).  */
222 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
223 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
224 struct cgraph_node *node, *next;
225 struct varpool_node *vnode, *vnext;
226 bool changed = false;
227
228 #ifdef ENABLE_CHECKING
229 verify_cgraph ();
230 #endif
231 if (file)
232 fprintf (file, "\nReclaiming functions:");
233 #ifdef ENABLE_CHECKING
234 for (node = cgraph_nodes; node; node = node->next)
235 gcc_assert (!node->aux);
236 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
237 gcc_assert (!vnode->aux);
238 #endif
239 varpool_reset_queue ();
/* Seed the worklists with the roots: functions that must be kept
   regardless of references, and variables that cannot be removed.  */
240 for (node = cgraph_nodes; node; node = node->next)
241 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
242 && ((!DECL_EXTERNAL (node->decl))
243 || before_inlining_p))
244 {
245 gcc_assert (!node->global.inlined_to);
246 enqueue_cgraph_node (node, &first);
247 node->reachable = true;
248 }
249 else
250 {
251 gcc_assert (!node->aux);
252 node->reachable = false;
253 }
254 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
255 {
256 vnode->next_needed = NULL;
257 vnode->prev_needed = NULL;
258 if (!varpool_can_remove_if_no_refs (vnode))
259 {
260 vnode->needed = false;
261 varpool_mark_needed_node (vnode);
262 enqueue_varpool_node (vnode, &first_varpool);
263 }
264 else
265 vnode->needed = false;
266 }
267
268 /* Perform reachability analysis. As a special case do not consider
269 extern inline functions not inlined as live because we won't output
270 them at all.
271
272 We maintain two worklists, one for cgraph nodes other for varpools and
273 are finished once both are empty. */
274
275 while (first != (struct cgraph_node *) (void *) 1
276 || first_varpool != (struct varpool_node *) (void *) 1)
277 {
278 if (first != (struct cgraph_node *) (void *) 1)
279 {
280 struct cgraph_edge *e;
281 node = first;
282 first = (struct cgraph_node *) first->aux;
283 if (!node->reachable)
284 node->aux = (void *)2;
285 
286 /* If we found this node reachable, first mark on the callees
287 reachable too, unless they are direct calls to extern inline functions
288 we decided to not inline. */
289 if (node->reachable)
290 {
291 for (e = node->callees; e; e = e->next_callee)
292 if (!e->callee->reachable
293 && node->analyzed
294 && (!e->inline_failed || !e->callee->analyzed
295 || (!DECL_EXTERNAL (e->callee->decl))
296 || before_inlining_p))
297 {
298 e->callee->reachable = true;
299 enqueue_cgraph_node (e->callee, &first);
300 }
301 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
302 }
303
304 /* If any function in a comdat group is reachable, force
305 all other functions in the same comdat group to be
306 also reachable. */
307 if (node->same_comdat_group
308 && node->reachable
309 && !node->global.inlined_to)
310 {
311 for (next = node->same_comdat_group;
312 next != node;
313 next = next->same_comdat_group)
314 if (!next->reachable)
315 {
316 next->reachable = true;
317 enqueue_cgraph_node (next, &first);
318 }
319 }
320
321 /* We can freely remove inline clones even if they are cloned, however if
322 function is clone of real clone, we must keep it around in order to
323 make materialize_clones produce function body with the changes
324 applied. */
325 while (node->clone_of && !node->clone_of->aux
326 && !gimple_has_body_p (node->decl))
327 {
328 bool noninline = node->clone_of->decl != node->decl;
329 node = node->clone_of;
330 if (noninline && !node->reachable && !node->aux)
331 {
332 enqueue_cgraph_node (node, &first);
333 break;
334 }
335 }
336 }
337 if (first_varpool != (struct varpool_node *) (void *) 1)
338 {
339 vnode = first_varpool;
340 first_varpool = (struct varpool_node *)first_varpool->aux;
341 vnode->aux = NULL;
342 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
343 /* If any function in a comdat group is reachable, force
344 all other functions in the same comdat group to be
345 also reachable. */
346 if (vnode->same_comdat_group)
347 {
348 struct varpool_node *next;
349 for (next = vnode->same_comdat_group;
350 next != vnode;
351 next = next->same_comdat_group)
352 if (!next->needed)
353 {
354 varpool_mark_needed_node (next);
355 enqueue_varpool_node (next, &first_varpool);
356 }
357 }
358 }
359 }
360
361 /* Remove unreachable nodes.
362
363 Completely unreachable functions can be fully removed from the callgraph.
364 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
365 callgraph (so we still have edges to them). We remove function body then.
366
367 Also we need to care functions that are unreachable but we need to keep them around
368 for later cloning. In this case we also turn them to unanalyzed nodes, but
369 keep the body around. */
370 for (node = cgraph_nodes; node; node = next)
371 {
372 next = node->next;
/* AUX set but not reachable means the body is kept only for cloning;
   strip edges, references and the analyzed flag.  */
373 if (node->aux && !node->reachable)
374 {
375 cgraph_node_remove_callees (node);
376 ipa_remove_all_references (&node->ref_list);
377 node->analyzed = false;
378 node->local.inlinable = false;
379 }
380 if (!node->aux)
381 {
382 node->global.inlined_to = NULL;
383 if (file)
384 fprintf (file, " %s", cgraph_node_name (node));
385 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
386 cgraph_remove_node (node);
387 else
388 {
389 struct cgraph_edge *e;
390
391 /* See if there is reachable caller. */
392 for (e = node->callers; e; e = e->next_caller)
393 if (e->caller->reachable)
394 break;
395
396 /* If so, we need to keep node in the callgraph. */
397 if (e || node->needed)
398 {
399 struct cgraph_node *clone;
400
401 /* If there are still clones, we must keep body around.
402 Otherwise we can just remove the body but keep the clone. */
403 for (clone = node->clones; clone;
404 clone = clone->next_sibling_clone)
405 if (clone->aux)
406 break;
407 if (!clone)
408 {
409 cgraph_release_function_body (node);
410 node->analyzed = false;
411 node->local.inlinable = false;
412 }
413 else
414 gcc_assert (!clone->in_other_partition);
415 cgraph_node_remove_callees (node);
416 ipa_remove_all_references (&node->ref_list);
/* Detach the node from its clone tree before it survives as an
   unanalyzed shell.  */
417 if (node->prev_sibling_clone)
418 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
419 else if (node->clone_of)
420 node->clone_of->clones = node->next_sibling_clone;
421 if (node->next_sibling_clone)
422 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
423 node->clone_of = NULL;
424 node->next_sibling_clone = NULL;
425 node->prev_sibling_clone = NULL;
426 }
427 else
428 cgraph_remove_node (node);
429 }
430 changed = true;
431 }
432 }
433 for (node = cgraph_nodes; node; node = node->next)
434 {
435 /* Inline clones might be kept around so their materializing allows further
436 cloning. If the function the clone is inlined into is removed, we need
437 to turn it into normal clone. */
438 if (node->global.inlined_to
439 && !node->callers)
440 {
441 gcc_assert (node->clones);
442 node->global.inlined_to = NULL;
443 update_inlined_to_pointer (node, node);
444 }
445 node->aux = NULL;
446 }
447
448 if (file)
449 fprintf (file, "\n");
450
451 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
452 are undesirable at -O0 since we do not want to remove anything. */
453 if (!optimize)
454 return changed;
455
456 if (file)
457 fprintf (file, "Reclaiming variables:");
458 for (vnode = varpool_nodes; vnode; vnode = vnext)
459 {
460 vnext = vnode->next;
461 if (!vnode->needed)
462 {
463 if (file)
464 fprintf (file, " %s", varpool_node_name (vnode));
465 varpool_remove_node (vnode);
466 changed = true;
467 }
468 }
469
470 /* Now update address_taken flags and try to promote functions to be local. */
471
472 if (file)
473 fprintf (file, "\nClearing address taken flags:");
474 for (node = cgraph_nodes; node; node = node->next)
475 if (node->address_taken
476 && !node->reachable_from_other_partition)
477 {
478 int i;
479 struct ipa_ref *ref;
480 bool found = false;
/* Any remaining referring entry must be an address reference; a single
   one is enough to keep the flag.  */
481 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
482 && !found; i++)
483 {
484 gcc_assert (ref->use == IPA_REF_ADDR);
485 found = true;
486 }
487 if (!found)
488 {
489 if (file)
490 fprintf (file, " %s", cgraph_node_name (node));
491 node->address_taken = false;
492 changed = true;
493 if (cgraph_local_node_p (node))
494 {
495 node->local.local = true;
496 if (file)
497 fprintf (file, " (local)");
498 }
499 }
500 }
501
502 #ifdef ENABLE_CHECKING
503 verify_cgraph ();
504 #endif
505
506 /* Reclaim alias pairs for functions that have disappeared from the
507 call graph. */
508 remove_unreachable_alias_pairs ();
509
510 return changed;
511 }
512
513 /* Discover variables that have no longer address taken or that are read only
514 and update their flags.
515
516 FIXME: This can not be done in between gimplify and omp_expand since
517 readonly flag plays role on what is shared and what is not. Currently we do
518 this transformation as part of whole program visibility and re-do at
519 ipa-reference pass (to take into account cloning), but it would
520 make sense to do it before early optimizations. */
521
522 void
523 ipa_discover_readonly_nonaddressable_vars (void)
524 {
525 struct varpool_node *vnode;
526 if (dump_file)
527 fprintf (dump_file, "Clearing variable flags:");
/* Only consider finalized variables whose references are all visible
   to the IPA machinery, and that still carry a flag we might clear.  */
528 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
529 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
530 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
531 {
532 bool written = false;
533 bool address_taken = false;
534 int i;
535 struct ipa_ref *ref;
/* Classify every reference; stop early once we know the variable is
   both written and address-taken.  */
536 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
537 && (!written || !address_taken); i++)
538 switch (ref->use)
539 {
540 case IPA_REF_ADDR:
541 address_taken = true;
542 break;
543 case IPA_REF_LOAD:
544 break;
545 case IPA_REF_STORE:
546 written = true;
547 break;
548 }
549 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
550 {
551 if (dump_file)
552 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
553 TREE_ADDRESSABLE (vnode->decl) = 0;
554 }
555 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
556 /* Making variable in explicit section readonly can cause section
557 type conflict.
558 See e.g. gcc.c-torture/compile/pr23237.c */
559 && DECL_SECTION_NAME (vnode->decl) == NULL)
560 {
561 if (dump_file)
562 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
563 TREE_READONLY (vnode->decl) = 1;
/* A newly read-only variable may now have a compile-time-known value.  */
564 vnode->const_value_known |= varpool_decide_const_value_known (vnode);
565 }
566 }
567 if (dump_file)
568 fprintf (dump_file, "\n");
569 }
570
571 /* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is the -fwhole-program setting in effect; ALIASED is true
   when the node is the target of an alias pair.  The checks below are
   order-dependent: early exits decide most cases.  */
572
573 static bool
574 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
575 {
576 if (!node->local.finalized)
577 return false;
578 if (!DECL_COMDAT (node->decl)
579 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
580 return false;
581
582 /* Do not even try to be smart about aliased nodes. Until we properly
583 represent everything by same body alias, these are just evil. */
584 if (aliased)
585 return true;
586
587 /* When doing link time optimizations, hidden symbols become local. */
588 if (in_lto_p && DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
589 /* Be sure that node is defined in IR file, not in other object
590 file. In that case we don't set used_from_other_object_file. */
591 && node->analyzed)
592 ;
593 else if (!whole_program)
594 return true;
595 /* COMDAT functions must be shared only if they have address taken,
596 otherwise we can produce our own private implementation with
597 -fwhole-program. */
598 else if (DECL_COMDAT (node->decl))
599 {
600 if (node->address_taken || !node->analyzed)
601 return true;
602 if (node->same_comdat_group)
603 {
604 struct cgraph_node *next;
605
606 /* If more than one function is in the same COMDAT group, it must
607 be shared even if just one function in the comdat group has
608 address taken. */
609 for (next = node->same_comdat_group;
610 next != node;
611 next = next->same_comdat_group)
612 if (next->address_taken || !next->analyzed)
613 return true;
614 }
615 }
/* Remaining reasons a symbol must stay visible even with whole-program
   knowledge: object-file uses, "used"/"externally_visible" attributes,
   and main itself.  */
616 if (node->local.used_from_object_file)
617 return true;
618 if (DECL_PRESERVE_P (node->decl))
619 return true;
620 if (MAIN_NAME_P (DECL_NAME (node->decl)))
621 return true;
622 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
623 return true;
624 return false;
625 }
626
627 /* Dissolve the same_comdat_group list in which NODE resides. */
628
629 static void
630 dissolve_same_comdat_group_list (struct cgraph_node *node)
631 {
632 struct cgraph_node *n = node, *next;
633 do
634 {
635 next = n->same_comdat_group;
636 n->same_comdat_group = NULL;
637 n = next;
638 }
639 while (n != node);
640 }
641
642 /* Mark visibility of all functions.
643
644 A local function is one whose calls can occur only in the current
645 compilation unit and all its calls are explicit, so we can change
646 its calling convention. We simply mark all static functions whose
647 address is not taken as local.
648
649 We also change the TREE_PUBLIC flag of all declarations that are public
650 in language point of view but we want to overwrite this default
651 via visibilities for the backend point of view. */
652
653 static unsigned int
654 function_and_variable_visibility (bool whole_program)
655 {
656 struct cgraph_node *node;
657 struct varpool_node *vnode;
658 struct pointer_set_t *aliased_nodes = pointer_set_create ();
659 struct pointer_set_t *aliased_vnodes = pointer_set_create ();
660 unsigned i;
661 alias_pair *p;
662
663 /* Discover aliased nodes. */
664 FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
665 {
666 if (dump_file)
667 fprintf (dump_file, "Alias %s->%s",
668 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
669 IDENTIFIER_POINTER (p->target));
670
671 if ((node = cgraph_node_for_asm (p->target)) != NULL)
672 {
673 gcc_assert (node->needed);
674 pointer_set_insert (aliased_nodes, node);
675 if (dump_file)
676 fprintf (dump_file, " node %s/%i",
677 cgraph_node_name (node), node->uid);
678 }
679 else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
680 {
681 gcc_assert (vnode->needed);
682 pointer_set_insert (aliased_vnodes, vnode);
683 if (dump_file)
684 fprintf (dump_file, " varpool node %s",
685 varpool_node_name (vnode));
686 }
687 if (dump_file)
688 fprintf (dump_file, "\n");
689 }
690
691 for (node = cgraph_nodes; node; node = node->next)
692 {
693 /* C++ FE on lack of COMDAT support create local COMDAT functions
694 (that ought to be shared but can not due to object format
695 limitations). It is necessary to keep the flag to make rest of C++ FE
696 happy. Clear the flag here to avoid confusion in middle-end. */
697 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
698 DECL_COMDAT (node->decl) = 0;
699 /* For external decls stop tracking same_comdat_group, it doesn't matter
700 what comdat group they are in when they won't be emitted in this TU,
701 and simplifies later passes. */
702 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
703 {
704 #ifdef ENABLE_CHECKING
705 struct cgraph_node *n;
706
707 for (n = node->same_comdat_group;
708 n != node;
709 n = n->same_comdat_group)
710 /* If at least one of same comdat group functions is external,
711 all of them have to be, otherwise it is a front-end bug. */
712 gcc_assert (DECL_EXTERNAL (n->decl));
713 #endif
714 dissolve_same_comdat_group_list (node);
715 }
716 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
717 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
718 if (cgraph_externally_visible_p (node, whole_program,
719 pointer_set_contains (aliased_nodes,
720 node)))
721 {
722 gcc_assert (!node->global.inlined_to);
723 node->local.externally_visible = true;
724 }
725 else
726 node->local.externally_visible = false;
/* Symbols that turned out non-visible get demoted to local decls,
   together with all their same-body aliases.  */
727 if (!node->local.externally_visible && node->analyzed
728 && !DECL_EXTERNAL (node->decl))
729 {
730 struct cgraph_node *alias;
731 gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
732 cgraph_make_decl_local (node->decl);
733 for (alias = node->same_body; alias; alias = alias->next)
734 cgraph_make_decl_local (alias->decl);
735 if (node->same_comdat_group)
736 /* cgraph_externally_visible_p has already checked all other nodes
737 in the group and they will all be made local. We need to
738 dissolve the group at once so that the predicate does not
739 segfault though. */
740 dissolve_same_comdat_group_list (node);
741 }
742 node->local.local = cgraph_local_node_p (node);
743 }
744 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
745 {
746 /* weak flag makes no sense on local variables. */
747 gcc_assert (!DECL_WEAK (vnode->decl)
748 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
749 /* In several cases declarations can not be common:
750
751 - when declaration has initializer
752 - when it is in weak
753 - when it has specific section
754 - when it resides in non-generic address space.
755 - if declaration is local, it will get into .local common section
756 so common flag is not needed. Frontends still produce these in
757 certain cases, such as for:
758
759 static int a __attribute__ ((common))
760
761 Canonicalize things here and clear the redundant flag. */
762 if (DECL_COMMON (vnode->decl)
763 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
764 || (DECL_INITIAL (vnode->decl)
765 && DECL_INITIAL (vnode->decl) != error_mark_node)
766 || DECL_WEAK (vnode->decl)
767 || DECL_SECTION_NAME (vnode->decl) != NULL
768 || ! (ADDR_SPACE_GENERIC_P
769 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
770 DECL_COMMON (vnode->decl) = 0;
771 /* Even extern variables might have initializers known.
772 See, for example testsuite/g++.dg/opt/static3.C */
773 vnode->const_value_known |= varpool_decide_const_value_known (vnode);
774 }
775 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
776 {
777 if (!vnode->finalized)
778 continue;
779 if (vnode->needed
780 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
781 && (((!whole_program
782 /* We can privatize comdat readonly variables whose address is
783 not taken, but doing so is not going to bring us
784 optimization opportunities until we start reordering
785 datastructures. */
786 || DECL_COMDAT (vnode->decl)
787 || DECL_WEAK (vnode->decl))
788 /* When doing linktime optimizations, all hidden symbols will
789 become local. */
790 && (!in_lto_p
791 || DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN
792 /* We can get prevailing decision in other object file.
793 In this case we do not set used_from_object_file. */
794 || !vnode->finalized))
795 || DECL_PRESERVE_P (vnode->decl)
796 || vnode->used_from_object_file
797 || pointer_set_contains (aliased_vnodes, vnode)
798 || lookup_attribute ("externally_visible",
799 DECL_ATTRIBUTES (vnode->decl))))
800 vnode->externally_visible = true;
801 else
802 vnode->externally_visible = false;
803 if (!vnode->externally_visible)
804 {
805 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
806 cgraph_make_decl_local (vnode->decl);
807 }
808 vnode->const_value_known |= varpool_decide_const_value_known (vnode);
809 gcc_assert (TREE_STATIC (vnode->decl));
810 }
811 pointer_set_destroy (aliased_nodes);
812 pointer_set_destroy (aliased_vnodes);
813
814 if (dump_file)
815 {
816 fprintf (dump_file, "\nMarking local functions:");
817 for (node = cgraph_nodes; node; node = node->next)
818 if (node->local.local)
819 fprintf (dump_file, " %s", cgraph_node_name (node));
820 fprintf (dump_file, "\n\n");
821 fprintf (dump_file, "\nMarking externally visible functions:");
822 for (node = cgraph_nodes; node; node = node->next)
823 if (node->local.externally_visible)
824 fprintf (dump_file, " %s", cgraph_node_name (node));
825 fprintf (dump_file, "\n\n");
826 fprintf (dump_file, "\nMarking externally visible variables:");
827 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
828 if (vnode->externally_visible)
829 fprintf (dump_file, " %s", varpool_node_name (vnode));
830 fprintf (dump_file, "\n\n");
831 }
832 cgraph_function_flags_ready = true;
833 return 0;
834 }
835
836 /* Local function pass handling visibilities. This happens before LTO streaming
837 so in particular -fwhole-program should be ignored at this level. */
838
839 static unsigned int
840 local_function_and_variable_visibility (void)
841 {
842 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
843 }
844
/* Pass descriptor for the local "visibility" simple IPA pass; see
   local_function_and_variable_visibility for the execute hook.  */
845 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
846 {
847 {
848 SIMPLE_IPA_PASS,
849 "visibility", /* name */
850 NULL, /* gate */
851 local_function_and_variable_visibility,/* execute */
852 NULL, /* sub */
853 NULL, /* next */
854 0, /* static_pass_number */
855 TV_CGRAPHOPT, /* tv_id */
856 0, /* properties_required */
857 0, /* properties_provided */
858 0, /* properties_destroyed */
859 0, /* todo_flags_start */
860 TODO_remove_functions | TODO_dump_cgraph
861 | TODO_ggc_collect /* todo_flags_finish */
862 }
863 };
864
865 /* Do not re-run on ltrans stage. */
866
867 static bool
868 gate_whole_program_function_and_variable_visibility (void)
869 {
870 return !flag_ltrans;
871 }
872
873 /* Bring functions local at LTO time with -fwhole-program. */
874
875 static unsigned int
876 whole_program_function_and_variable_visibility (void)
877 {
878 struct cgraph_node *node;
879 struct varpool_node *vnode;
880
881 function_and_variable_visibility (flag_whole_program);
882
/* Everything that stayed externally visible (and is not COMDAT) must be
   marked needed so it is kept through the rest of the compilation.  */
883 for (node = cgraph_nodes; node; node = node->next)
884 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
885 && node->local.finalized)
886 cgraph_mark_needed_node (node);
887 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
888 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
889 varpool_mark_needed_node (vnode);
890 if (dump_file)
891 {
892 fprintf (dump_file, "\nNeeded variables:");
893 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
894 if (vnode->needed)
895 fprintf (dump_file, " %s", varpool_node_name (vnode));
896 fprintf (dump_file, "\n\n");
897 }
898 if (optimize)
899 ipa_discover_readonly_nonaddressable_vars ();
900 return 0;
901 }
902
/* Pass descriptor for the LTO-time "whole-program" visibility pass; see
   whole_program_function_and_variable_visibility for the execute hook.  */
903 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
904 {
905 {
906 IPA_PASS,
907 "whole-program", /* name */
908 gate_whole_program_function_and_variable_visibility,/* gate */
909 whole_program_function_and_variable_visibility,/* execute */
910 NULL, /* sub */
911 NULL, /* next */
912 0, /* static_pass_number */
913 TV_CGRAPHOPT, /* tv_id */
914 0, /* properties_required */
915 0, /* properties_provided */
916 0, /* properties_destroyed */
917 0, /* todo_flags_start */
918 TODO_remove_functions | TODO_dump_cgraph
919 | TODO_ggc_collect /* todo_flags_finish */
920 },
921 NULL, /* generate_summary */
922 NULL, /* write_summary */
923 NULL, /* read_summary */
924 NULL, /* write_optimization_summary */
925 NULL, /* read_optimization_summary */
926 NULL, /* stmt_fixup */
927 0, /* TODOs */
928 NULL, /* function_transform */
929 NULL, /* variable_transform */
930 };
931
932 /* Hash a cgraph node set element. */
933
934 static hashval_t
935 hash_cgraph_node_set_element (const void *p)
936 {
937 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
938 return htab_hash_pointer (element->node);
939 }
940
941 /* Compare two cgraph node set elements. */
942
943 static int
944 eq_cgraph_node_set_element (const void *p1, const void *p2)
945 {
946 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
947 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
948
949 return e1->node == e2->node;
950 }
951
952 /* Create a new cgraph node set. */
953
954 cgraph_node_set
955 cgraph_node_set_new (void)
956 {
957 cgraph_node_set new_node_set;
958
959 new_node_set = ggc_alloc_cgraph_node_set_def ();
960 new_node_set->hashtab = htab_create_ggc (10,
961 hash_cgraph_node_set_element,
962 eq_cgraph_node_set_element,
963 NULL);
964 new_node_set->nodes = NULL;
965 return new_node_set;
966 }
967
968 /* Add cgraph_node NODE to cgraph_node_set SET. */
969
970 void
971 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
972 {
973 void **slot;
974 cgraph_node_set_element element;
975 struct cgraph_node_set_element_def dummy;
976
977 dummy.node = node;
978 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
979
980 if (*slot != HTAB_EMPTY_ENTRY)
981 {
982 element = (cgraph_node_set_element) *slot;
983 gcc_assert (node == element->node
984 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
985 == node));
986 return;
987 }
988
989 /* Insert node into hash table. */
990 element = ggc_alloc_cgraph_node_set_element_def ();
991 element->node = node;
992 element->index = VEC_length (cgraph_node_ptr, set->nodes);
993 *slot = element;
994
995 /* Insert into node vector. */
996 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
997 }
998
999 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1000
1001 void
1002 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
1003 {
1004 void **slot, **last_slot;
1005 cgraph_node_set_element element, last_element;
1006 struct cgraph_node *last_node;
1007 struct cgraph_node_set_element_def dummy;
1008
1009 dummy.node = node;
1010 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* Removing a node that is not in the set is a no-op.  */
1011 if (slot == NULL)
1012 return;
1013
1014 element = (cgraph_node_set_element) *slot;
1015 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1016 == node);
1017
1018 /* Remove from vector. We do this by swapping node with the last element
1019 of the vector. */
1020 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
1021 if (last_node != node)
1022 {
1023 dummy.node = last_node;
1024 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1025 last_element = (cgraph_node_set_element) *last_slot;
1026 gcc_assert (last_element);
1027
1028 /* Move the last element to the original spot of NODE.  This keeps the
   hash table's index mapping consistent with the vector.  */
1029 last_element->index = element->index;
1030 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
1031 last_node);
1032 }
1033
1034 /* Remove element from hash table. */
1035 htab_clear_slot (set->hashtab, slot);
1036 ggc_free (element);
1037 }
1038
1039 /* Find NODE in SET and return an iterator to it if found. A null iterator
1040 is returned if NODE is not in SET. */
1041
1042 cgraph_node_set_iterator
1043 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1044 {
1045 void **slot;
1046 struct cgraph_node_set_element_def dummy;
1047 cgraph_node_set_element element;
1048 cgraph_node_set_iterator csi;
1049
1050 dummy.node = node;
1051 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1052 if (slot == NULL)
1053 csi.index = (unsigned) ~0;
1054 else
1055 {
1056 element = (cgraph_node_set_element) *slot;
1057 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1058 == node);
1059 csi.index = element->index;
1060 }
1061 csi.set = set;
1062
1063 return csi;
1064 }
1065
1066 /* Dump content of SET to file F. */
1067
1068 void
1069 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1070 {
1071 cgraph_node_set_iterator iter;
1072
1073 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1074 {
1075 struct cgraph_node *node = csi_node (iter);
1076 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1077 }
1078 fprintf (f, "\n");
1079 }
1080
/* Dump content of SET to stderr.  Convenience wrapper around
   dump_cgraph_node_set, meant to be called from the debugger.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}
1088
1089 /* Hash a varpool node set element. */
1090
1091 static hashval_t
1092 hash_varpool_node_set_element (const void *p)
1093 {
1094 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1095 return htab_hash_pointer (element->node);
1096 }
1097
1098 /* Compare two varpool node set elements. */
1099
1100 static int
1101 eq_varpool_node_set_element (const void *p1, const void *p2)
1102 {
1103 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1104 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1105
1106 return e1->node == e2->node;
1107 }
1108
1109 /* Create a new varpool node set. */
1110
1111 varpool_node_set
1112 varpool_node_set_new (void)
1113 {
1114 varpool_node_set new_node_set;
1115
1116 new_node_set = ggc_alloc_varpool_node_set_def ();
1117 new_node_set->hashtab = htab_create_ggc (10,
1118 hash_varpool_node_set_element,
1119 eq_varpool_node_set_element,
1120 NULL);
1121 new_node_set->nodes = NULL;
1122 return new_node_set;
1123 }
1124
1125 /* Add varpool_node NODE to varpool_node_set SET. */
1126
1127 void
1128 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1129 {
1130 void **slot;
1131 varpool_node_set_element element;
1132 struct varpool_node_set_element_def dummy;
1133
1134 dummy.node = node;
1135 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1136
1137 if (*slot != HTAB_EMPTY_ENTRY)
1138 {
1139 element = (varpool_node_set_element) *slot;
1140 gcc_assert (node == element->node
1141 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1142 == node));
1143 return;
1144 }
1145
1146 /* Insert node into hash table. */
1147 element = ggc_alloc_varpool_node_set_element_def ();
1148 element->node = node;
1149 element->index = VEC_length (varpool_node_ptr, set->nodes);
1150 *slot = element;
1151
1152 /* Insert into node vector. */
1153 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1154 }
1155
/* Remove varpool_node NODE from varpool_node_set SET.  Removing a node
   that is not in SET is a no-op.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  /* Look NODE up in the hash table; bail out if it is not a member.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* NODE was not the last vector element; look up the element
	 wrapping the popped node so its recorded index can be fixed.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1195
1196 /* Find NODE in SET and return an iterator to it if found. A null iterator
1197 is returned if NODE is not in SET. */
1198
1199 varpool_node_set_iterator
1200 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1201 {
1202 void **slot;
1203 struct varpool_node_set_element_def dummy;
1204 varpool_node_set_element element;
1205 varpool_node_set_iterator vsi;
1206
1207 dummy.node = node;
1208 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1209 if (slot == NULL)
1210 vsi.index = (unsigned) ~0;
1211 else
1212 {
1213 element = (varpool_node_set_element) *slot;
1214 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1215 == node);
1216 vsi.index = element->index;
1217 }
1218 vsi.set = set;
1219
1220 return vsi;
1221 }
1222
1223 /* Dump content of SET to file F. */
1224
1225 void
1226 dump_varpool_node_set (FILE *f, varpool_node_set set)
1227 {
1228 varpool_node_set_iterator iter;
1229
1230 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1231 {
1232 struct varpool_node *node = vsi_node (iter);
1233 fprintf (f, " %s", varpool_node_name (node));
1234 }
1235 fprintf (f, "\n");
1236 }
1237
/* Dump content of SET to stderr.  Convenience wrapper around
   dump_varpool_node_set, meant to be called from the debugger.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
1245
1246
1247 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1248
1249 static unsigned int
1250 ipa_profile (void)
1251 {
1252 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1253 struct cgraph_edge *e;
1254 int order_pos;
1255 bool something_changed = false;
1256 int i;
1257
1258 order_pos = cgraph_postorder (order);
1259 for (i = order_pos - 1; i >= 0; i--)
1260 {
1261 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1262 {
1263 for (e = order[i]->callees; e; e = e->next_callee)
1264 if (e->callee->local.local && !e->callee->aux)
1265 {
1266 something_changed = true;
1267 e->callee->aux = (void *)1;
1268 }
1269 }
1270 order[i]->aux = NULL;
1271 }
1272
1273 while (something_changed)
1274 {
1275 something_changed = false;
1276 for (i = order_pos - 1; i >= 0; i--)
1277 {
1278 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1279 {
1280 for (e = order[i]->callees; e; e = e->next_callee)
1281 if (e->callee->local.local && !e->callee->aux)
1282 {
1283 something_changed = true;
1284 e->callee->aux = (void *)1;
1285 }
1286 }
1287 order[i]->aux = NULL;
1288 }
1289 }
1290 free (order);
1291 return 0;
1292 }
1293
1294 static bool
1295 gate_ipa_profile (void)
1296 {
1297 return flag_ipa_profile;
1298 }
1299
/* Pass descriptor for the simple IPA profile propagation pass
   implemented by ipa_profile above.  No summary streaming or per-function
   transforms are needed.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1327
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* Counter makes each generated function name unique within this
     translation unit.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a "void fn (void)" declaration to hold BODY.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors)
    {
      /* Without native ctor/dtor support the function is found by
	 collect2 by name, so it must be public and kept alive.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the decl as a constructor or destructor and record its
     priority.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* Hand the new function to the callgraph for further processing.  */
  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
1397
1398
/* A vector of FUNCTION_DECLs declared as static constructors,
   collected by record_cdtor_fn.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors,
   collected by record_cdtor_fn.  */
static VEC(tree, heap) *static_dtors;
1403
1404 /* When target does not have ctors and dtors, we call all constructor
1405 and destructor by special initialization/destruction function
1406 recognized by collect2.
1407
1408 When we are going to build this function, collect all constructors and
1409 destructors and turn them into normal functions. */
1410
1411 static void
1412 record_cdtor_fn (struct cgraph_node *node)
1413 {
1414 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1415 VEC_safe_push (tree, heap, static_ctors, node->decl);
1416 if (DECL_STATIC_DESTRUCTOR (node->decl))
1417 VEC_safe_push (tree, heap, static_dtors, node->decl);
1418 node = cgraph_node (node->decl);
1419 node->local.disregard_inline_limits = 1;
1420 }
1421
1422 /* Define global constructors/destructor functions for the CDTORS, of
1423 which they are LEN. The CDTORS are sorted by initialization
1424 priority. If CTOR_P is true, these are constructors; otherwise,
1425 they are destructors. */
1426
1427 static void
1428 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
1429 {
1430 size_t i,j;
1431
1432 i = 0;
1433 while (i < len)
1434 {
1435 tree body;
1436 tree fn;
1437 priority_type priority;
1438
1439 priority = 0;
1440 body = NULL_TREE;
1441 j = i;
1442 do
1443 {
1444 priority_type p;
1445 fn = cdtors[i];
1446 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1447 if (j == i)
1448 priority = p;
1449 else if (p != priority)
1450 break;
1451 j++;
1452 }
1453 while (j < len);
1454
1455 /* When there is only once constructor and target supports them, do nothing. */
1456 if (j == i + 1
1457 && targetm.have_ctors_dtors)
1458 {
1459 i++;
1460 continue;
1461 }
1462 /* Find the next batch of constructors/destructors with the same
1463 initialization priority. */
1464 do
1465 {
1466 priority_type p;
1467 tree call;
1468 fn = cdtors[i];
1469 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1470 if (p != priority)
1471 break;
1472 call = build_call_expr (fn, 0);
1473 if (ctor_p)
1474 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1475 else
1476 DECL_STATIC_DESTRUCTOR (fn) = 0;
1477 /* We do not want to optimize away pure/const calls here.
1478 When optimizing, these should be already removed, when not
1479 optimizing, we want user to be able to breakpoint in them. */
1480 TREE_SIDE_EFFECTS (call) = 1;
1481 append_to_statement_list (call, &body);
1482 ++i;
1483 }
1484 while (i < len);
1485 gcc_assert (body != NULL_TREE);
1486 /* Generate a function to call all the function of like
1487 priority. */
1488 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
1489 }
1490 }
1491
1492 /* Comparison function for qsort. P1 and P2 are actually of type
1493 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1494 used to determine the sort order. */
1495
1496 static int
1497 compare_ctor (const void *p1, const void *p2)
1498 {
1499 tree f1;
1500 tree f2;
1501 int priority1;
1502 int priority2;
1503
1504 f1 = *(const tree *)p1;
1505 f2 = *(const tree *)p2;
1506 priority1 = DECL_INIT_PRIORITY (f1);
1507 priority2 = DECL_INIT_PRIORITY (f2);
1508
1509 if (priority1 < priority2)
1510 return -1;
1511 else if (priority1 > priority2)
1512 return 1;
1513 else
1514 /* Ensure a stable sort. Constructors are executed in backwarding
1515 order to make LTO initialize braries first. */
1516 return DECL_UID (f2) - DECL_UID (f1);
1517 }
1518
1519 /* Comparison function for qsort. P1 and P2 are actually of type
1520 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1521 used to determine the sort order. */
1522
1523 static int
1524 compare_dtor (const void *p1, const void *p2)
1525 {
1526 tree f1;
1527 tree f2;
1528 int priority1;
1529 int priority2;
1530
1531 f1 = *(const tree *)p1;
1532 f2 = *(const tree *)p2;
1533 priority1 = DECL_FINI_PRIORITY (f1);
1534 priority2 = DECL_FINI_PRIORITY (f2);
1535
1536 if (priority1 < priority2)
1537 return -1;
1538 else if (priority1 > priority2)
1539 return 1;
1540 else
1541 /* Ensure a stable sort. */
1542 return DECL_UID (f1) - DECL_UID (f2);
1543 }
1544
1545 /* Generate functions to call static constructors and destructors
1546 for targets that do not support .ctors/.dtors sections. These
1547 functions have magic names which are detected by collect2. */
1548
1549 static void
1550 build_cdtor_fns (void)
1551 {
1552 if (!VEC_empty (tree, static_ctors))
1553 {
1554 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1555 qsort (VEC_address (tree, static_ctors),
1556 VEC_length (tree, static_ctors),
1557 sizeof (tree),
1558 compare_ctor);
1559 build_cdtor (/*ctor_p=*/true,
1560 VEC_address (tree, static_ctors),
1561 VEC_length (tree, static_ctors));
1562 VEC_truncate (tree, static_ctors, 0);
1563 }
1564
1565 if (!VEC_empty (tree, static_dtors))
1566 {
1567 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1568 qsort (VEC_address (tree, static_dtors),
1569 VEC_length (tree, static_dtors),
1570 sizeof (tree),
1571 compare_dtor);
1572 build_cdtor (/*ctor_p=*/false,
1573 VEC_address (tree, static_dtors),
1574 VEC_length (tree, static_dtors));
1575 VEC_truncate (tree, static_dtors, 0);
1576 }
1577 }
1578
1579 /* Look for constructors and destructors and produce function calling them.
1580 This is needed for targets not supporting ctors or dtors, but we perform the
1581 transformation also at linktime to merge possibly numberous
1582 constructors/destructors into single function to improve code locality and
1583 reduce size. */
1584
1585 static unsigned int
1586 ipa_cdtor_merge (void)
1587 {
1588 struct cgraph_node *node;
1589 for (node = cgraph_nodes; node; node = node->next)
1590 if (node->analyzed
1591 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1592 || DECL_STATIC_DESTRUCTOR (node->decl)))
1593 record_cdtor_fn (node);
1594 build_cdtor_fns ();
1595 VEC_free (tree, heap, static_ctors);
1596 VEC_free (tree, heap, static_dtors);
1597 return 0;
1598 }
1599
1600 /* Perform the pass when we have no ctors/dtors support
1601 or at LTO time to merge multiple constructors into single
1602 function. */
1603
1604 static bool
1605 gate_ipa_cdtor_merge (void)
1606 {
1607 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1608 }
1609
/* Pass descriptor for the cdtor merging pass implemented by
   ipa_cdtor_merge above.  No summary streaming or per-function
   transforms are needed.  */

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,			        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};