ipa.c: Include pointer-set.h
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31 #include "pointer-set.h"
32
/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Returns the number of nodes stored; ORDER must
   have room for all cgraph nodes.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  /* The address of LAST is used as a sentinel value in node->aux meaning
     "all caller edges of this node have been walked".  */
  struct cgraph_edge *edge, last;
  int pass;

  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 starts walks only from "root" nodes (not only called directly
     and address not taken); pass 1 picks up everything left, e.g. nodes
     reachable only through cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!cgraph_only_called_directly_p (node)
		  && !node->address_taken)))
	{
	  node2 = node;
	  /* aux holds the next caller edge still to visit, or &last when
	     there are none.  */
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      /* Caller not visited yet: descend into it, saving the
			 current node on the explicit DFS stack.  */
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      if (node2->aux == &last)
		{
		  /* All callers of NODE2 are done; emit it in postorder
		     and pop the stack.  */
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  /* Leave the aux fields clear for other users.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
107
108 /* Look for all functions inlined to NODE and update their inlined_to pointers
109 to INLINED_TO. */
110
111 static void
112 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
113 {
114 struct cgraph_edge *e;
115 for (e = node->callees; e; e = e->next_callee)
116 if (e->callee->global.inlined_to)
117 {
118 e->callee->global.inlined_to = inlined_to;
119 update_inlined_to_pointer (e->callee, inlined_to);
120 }
121 }
122
123 /* Add cgraph NODE to queue starting at FIRST.
124
125 The queue is linked via AUX pointers and terminated by pointer to 1.
126 We enqueue nodes at two occasions: when we find them reachable or when we find
127 their bodies needed for further clonning. In the second case we mark them
128 by pointer to 2 after processing so they are re-queue when they become
129 reachable. */
130
131 static void
132 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
133 {
134 /* Node is still in queue; do nothing. */
135 if (node->aux && node->aux != (void *) 2)
136 return;
137 /* Node was already processed as unreachable, re-enqueue
138 only if it became reachable now. */
139 if (node->aux == (void *)2 && !node->reachable)
140 return;
141 node->aux = *first;
142 *first = node;
143 }
144
/* Add varpool NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1
   (set up by the caller).  NOTE(review): no duplicate check is done here;
   callers appear to guard via the NEEDED flag — confirm before reusing.  */

static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  /* Push NODE at the head of the work list.  */
  node->aux = *first;
  *first = node;
}
153
154 /* Process references. */
155
156 static void
157 process_references (struct ipa_ref_list *list,
158 struct cgraph_node **first,
159 struct varpool_node **first_varpool,
160 bool before_inlining_p)
161 {
162 int i;
163 struct ipa_ref *ref;
164 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
165 {
166 if (ref->refered_type == IPA_REF_CGRAPH)
167 {
168 struct cgraph_node *node = ipa_ref_node (ref);
169 if (!node->reachable
170 && (!DECL_EXTERNAL (node->decl)
171 || before_inlining_p))
172 {
173 node->reachable = true;
174 enqueue_cgraph_node (node, first);
175 }
176 }
177 else
178 {
179 struct varpool_node *node = ipa_ref_varpool_node (ref);
180 if (!node->needed)
181 {
182 varpool_mark_needed_node (node);
183 enqueue_varpool_node (node, first_varpool);
184 }
185 }
186 }
187 }
188
/* Return true when variable NODE can be removed from the program if we see
   no references to it: it is not forced to be output, not used from another
   partition, and either comdat or not externally visible.  (The original
   comment here spoke of "function NODE" and "direct calls" — that was a
   copy-paste error; this predicate is about varpool variables.)  */

static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
  return (!node->force_output && !node->used_from_other_partition
	  && (DECL_COMDAT (node->decl) || !node->externally_visible));
}
198
199 /* Return true when function can be marked local. */
200
201 static bool
202 cgraph_local_node_p (struct cgraph_node *node)
203 {
204 return (cgraph_only_called_directly_p (node)
205 && node->analyzed
206 && !DECL_EXTERNAL (node->decl)
207 && !node->local.externally_visible
208 && !node->reachable_from_other_partition
209 && !node->in_other_partition);
210 }
211
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  Returns true when
   something was actually removed.  FILE, when non-NULL, receives a dump.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Both worklists are linked through the AUX pointers and terminated by a
     pointer with value 1, so an empty list is distinguishable from an
     unqueued node (aux == NULL).  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* The aux fields are used as queue links and marks below, so they must
     be clear on entry.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Seed the worklist with the roots: functions that can not be removed
     even when no direct calls or references remain.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	&& ((!DECL_EXTERNAL (node->decl))
	    || before_inlining_p))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }
  /* Likewise seed the varpool worklist with variables that must be kept.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklists, one for cgraph nodes and the other for
     varpool nodes, and are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* aux == 2 marks a node processed only because its body is needed
	     for cloning; it may be re-queued should it become reachable
	     (see enqueue_cgraph_node).  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
		 && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any variable in a comdat group is reachable, force
	     all other members of the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      if (node->aux && !node->reachable)
	{
	  /* Body needed only for cloning: drop edges and references but
	     keep the body itself.  */
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      if (!node->aux)
	{
	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
	    cgraph_remove_node (node);
	  else
	    {
	      struct cgraph_edge *e;

	      /* See if there is reachable caller.  */
	      for (e = node->callers; e; e = e->next_caller)
		if (e->caller->reachable)
		  break;

	      /* If so, we need to keep node in the callgraph.  */
	      if (e || node->needed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->analyzed = false;
		      node->local.inlinable = false;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		  /* Unlink NODE from the clone tree.  */
		  if (node->prev_sibling_clone)
		    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		  else if (node->clone_of)
		    node->clone_of->clones = node->next_sibling_clone;
		  if (node->next_sibling_clone)
		    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
		  node->clone_of = NULL;
		  node->next_sibling_clone = NULL;
		  node->prev_sibling_clone = NULL;
		}
	      else
		cgraph_remove_node (node);
	    }
	  changed = true;
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* Any remaining incoming reference must be an address reference
	   (asserted below); if none is left the flag can be cleared.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
510
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Consider only finalized variables whose references are all visible
     and which have at least one flag that could possibly be improved.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Classify every incoming reference; stop early once both flags
	   are known to be required.  */
	for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
567
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is true when the whole program is visible to the compiler
   (so non-exported symbols may be privatized); ALIASED is true when NODE
   is the target of an alias pair.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
{
  if (!node->local.finalized)
    return false;
  /* Plain static (non-public) or extern declarations are never considered
     visible by this predicate, unless comdat.  */
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p && DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN)
    ;
  else if (!whole_program)
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  else if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
	return true;
      if (node->same_comdat_group)
	{
	  struct cgraph_node *next;

	  /* If more than one function is in the same COMDAT group, it must
	     be shared even if just one function in the comdat group has
	     address taken.  */
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    if (next->address_taken || !next->analyzed)
	      return true;
	}
    }
  /* Symbols referenced from object files, explicitly preserved, named
     "main", or marked with the externally_visible attribute must stay
     visible regardless of the flags above.  */
  if (node->local.used_from_object_file)
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  return false;
}
620
621 /* Dissolve the same_comdat_group list in which NODE resides. */
622
623 static void
624 dissolve_same_comdat_group_list (struct cgraph_node *node)
625 {
626 struct cgraph_node *n = node, *next;
627 do
628 {
629 next = n->same_comdat_group;
630 n->same_comdat_group = NULL;
631 n = next;
632 }
633 while (n != node);
634 }
635
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM is true when the whole program is visible.  Always
   returns 0.  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  /* Sets of cgraph/varpool nodes that are targets of alias pairs; such
     nodes must remain externally visible.  */
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p); i++)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  /* Same-body aliases must follow the main decl's visibility.  */
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
	  && (((!whole_program
		/* We can privatize comdat readonly variables whose address is
		   not taken, but doing so is not going to bring us
		   optimization opportunities until we start reordering
		   datastructures.  */
		|| DECL_COMDAT (vnode->decl)
		|| DECL_WEAK (vnode->decl))
	       /* When doing linktime optimizations, all hidden symbols will
		  become local.  */
	       && (!in_lto_p
		   || DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN))
	      || vnode->used_from_object_file
	      || pointer_set_contains (aliased_vnodes, vnode)
	      || lookup_attribute ("externally_visible",
				   DECL_ATTRIBUTES (vnode->decl))))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
821
822 /* Local function pass handling visibilities. This happens before LTO streaming
823 so in particular -fwhole-program should be ignored at this level. */
824
825 static unsigned int
826 local_function_and_variable_visibility (void)
827 {
828 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
829 }
830
/* Pass descriptor: simple IPA pass computing local visibility before LTO
   streaming (see local_function_and_variable_visibility).  */

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility", /* name */
  NULL, /* gate */
  local_function_and_variable_visibility,/* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect /* todo_flags_finish */
 }
};
850
851 /* Do not re-run on ltrans stage. */
852
853 static bool
854 gate_whole_program_function_and_variable_visibility (void)
855 {
856 return !flag_ltrans;
857 }
858
/* Bring functions local at LTO time with -fwhole-program.  Always
   returns 0.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Recompute visibility, this time honoring -fwhole-program.  */
  function_and_variable_visibility (flag_whole_program);

  /* Whatever remains externally visible (and is not comdat) must be
     marked needed so it is not reclaimed.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
888
/* Pass descriptor: IPA pass applying -fwhole-program visibility at LTO
   time (see whole_program_function_and_variable_visibility); gated off
   on the ltrans stage.  */

struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program", /* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect /* todo_flags_finish */
 },
 NULL, /* generate_summary */
 NULL, /* write_summary */
 NULL, /* read_summary */
 NULL, /* write_optimization_summary */
 NULL, /* read_optimization_summary */
 NULL, /* stmt_fixup */
 0, /* TODOs */
 NULL, /* function_transform */
 NULL, /* variable_transform */
};
917
918 /* Hash a cgraph node set element. */
919
920 static hashval_t
921 hash_cgraph_node_set_element (const void *p)
922 {
923 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
924 return htab_hash_pointer (element->node);
925 }
926
927 /* Compare two cgraph node set elements. */
928
929 static int
930 eq_cgraph_node_set_element (const void *p1, const void *p2)
931 {
932 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
933 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
934
935 return e1->node == e2->node;
936 }
937
938 /* Create a new cgraph node set. */
939
940 cgraph_node_set
941 cgraph_node_set_new (void)
942 {
943 cgraph_node_set new_node_set;
944
945 new_node_set = ggc_alloc_cgraph_node_set_def ();
946 new_node_set->hashtab = htab_create_ggc (10,
947 hash_cgraph_node_set_element,
948 eq_cgraph_node_set_element,
949 NULL);
950 new_node_set->nodes = NULL;
951 return new_node_set;
952 }
953
954 /* Add cgraph_node NODE to cgraph_node_set SET. */
955
956 void
957 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
958 {
959 void **slot;
960 cgraph_node_set_element element;
961 struct cgraph_node_set_element_def dummy;
962
963 dummy.node = node;
964 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
965
966 if (*slot != HTAB_EMPTY_ENTRY)
967 {
968 element = (cgraph_node_set_element) *slot;
969 gcc_assert (node == element->node
970 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
971 == node));
972 return;
973 }
974
975 /* Insert node into hash table. */
976 element = ggc_alloc_cgraph_node_set_element_def ();
977 element->node = node;
978 element->index = VEC_length (cgraph_node_ptr, set->nodes);
979 *slot = element;
980
981 /* Insert into node vector. */
982 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
983 }
984
/* Remove cgraph_node NODE from cgraph_node_set SET.  Removing a node not
   in SET is a no-op.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    /* NODE is not in SET.  */
    return;

  element = (cgraph_node_set_element) *slot;
  /* Hash table and vector must agree on NODE's position.  */
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1024
1025 /* Find NODE in SET and return an iterator to it if found. A null iterator
1026 is returned if NODE is not in SET. */
1027
1028 cgraph_node_set_iterator
1029 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1030 {
1031 void **slot;
1032 struct cgraph_node_set_element_def dummy;
1033 cgraph_node_set_element element;
1034 cgraph_node_set_iterator csi;
1035
1036 dummy.node = node;
1037 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1038 if (slot == NULL)
1039 csi.index = (unsigned) ~0;
1040 else
1041 {
1042 element = (cgraph_node_set_element) *slot;
1043 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1044 == node);
1045 csi.index = element->index;
1046 }
1047 csi.set = set;
1048
1049 return csi;
1050 }
1051
1052 /* Dump content of SET to file F. */
1053
1054 void
1055 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1056 {
1057 cgraph_node_set_iterator iter;
1058
1059 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1060 {
1061 struct cgraph_node *node = csi_node (iter);
1062 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1063 }
1064 fprintf (f, "\n");
1065 }
1066
/* Dump content of SET to stderr.  Convenience wrapper around
   dump_cgraph_node_set for use from the debugger.  */

DEBUG_FUNCTION void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}
1074
1075 /* Hash a varpool node set element. */
1076
1077 static hashval_t
1078 hash_varpool_node_set_element (const void *p)
1079 {
1080 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1081 return htab_hash_pointer (element->node);
1082 }
1083
1084 /* Compare two varpool node set elements. */
1085
1086 static int
1087 eq_varpool_node_set_element (const void *p1, const void *p2)
1088 {
1089 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1090 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1091
1092 return e1->node == e2->node;
1093 }
1094
1095 /* Create a new varpool node set. */
1096
1097 varpool_node_set
1098 varpool_node_set_new (void)
1099 {
1100 varpool_node_set new_node_set;
1101
1102 new_node_set = ggc_alloc_varpool_node_set_def ();
1103 new_node_set->hashtab = htab_create_ggc (10,
1104 hash_varpool_node_set_element,
1105 eq_varpool_node_set_element,
1106 NULL);
1107 new_node_set->nodes = NULL;
1108 return new_node_set;
1109 }
1110
/* Add varpool_node NODE to varpool_node_set SET.  Adding a node that is
   already in SET is a no-op (its presence is only re-verified).  */

void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_element element;
  struct varpool_node_set_element_def dummy;

  /* Probe the hash table with a stack-allocated key; only the node
     field matters to hash_varpool_node_set_element and
     eq_varpool_node_set_element.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      /* Already present: check that the hash table and the node vector
	 agree on NODE's position.  */
      element = (varpool_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (varpool_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  */
  element = ggc_alloc_varpool_node_set_element_def ();
  element->node = node;
  /* Record the vector position in the element; this is what lets
     varpool_node_set_remove do O(1) swap-with-last removal.  */
  element->index = VEC_length (varpool_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
}
1141
/* Remove varpool_node NODE from varpool_node_set SET.  Removing a node
   that is not in SET is a no-op.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  /* Stack-allocated lookup key; only the node field is initialized.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  /* With NO_INSERT, a NULL slot means NODE is not in the set.  */
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  /* The hash table and the node vector must agree on NODE's position.  */
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* NODE was not last: find the popped element's hash entry and
	 redirect it to NODE's old vector slot.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1181
1182 /* Find NODE in SET and return an iterator to it if found. A null iterator
1183 is returned if NODE is not in SET. */
1184
1185 varpool_node_set_iterator
1186 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1187 {
1188 void **slot;
1189 struct varpool_node_set_element_def dummy;
1190 varpool_node_set_element element;
1191 varpool_node_set_iterator vsi;
1192
1193 dummy.node = node;
1194 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1195 if (slot == NULL)
1196 vsi.index = (unsigned) ~0;
1197 else
1198 {
1199 element = (varpool_node_set_element) *slot;
1200 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1201 == node);
1202 vsi.index = element->index;
1203 }
1204 vsi.set = set;
1205
1206 return vsi;
1207 }
1208
1209 /* Dump content of SET to file F. */
1210
1211 void
1212 dump_varpool_node_set (FILE *f, varpool_node_set set)
1213 {
1214 varpool_node_set_iterator iter;
1215
1216 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1217 {
1218 struct varpool_node *node = vsi_node (iter);
1219 fprintf (f, " %s", varpool_node_name (node));
1220 }
1221 fprintf (f, "\n");
1222 }
1223
/* Dump content of SET to stderr.  Convenience wrapper around
   dump_varpool_node_set for use from the debugger.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
1231
1232
1233 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1234
1235 static unsigned int
1236 ipa_profile (void)
1237 {
1238 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1239 struct cgraph_edge *e;
1240 int order_pos;
1241 bool something_changed = false;
1242 int i;
1243
1244 order_pos = cgraph_postorder (order);
1245 for (i = order_pos - 1; i >= 0; i--)
1246 {
1247 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1248 {
1249 for (e = order[i]->callees; e; e = e->next_callee)
1250 if (e->callee->local.local && !e->callee->aux)
1251 {
1252 something_changed = true;
1253 e->callee->aux = (void *)1;
1254 }
1255 }
1256 order[i]->aux = NULL;
1257 }
1258
1259 while (something_changed)
1260 {
1261 something_changed = false;
1262 for (i = order_pos - 1; i >= 0; i--)
1263 {
1264 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1265 {
1266 for (e = order[i]->callees; e; e = e->next_callee)
1267 if (e->callee->local.local && !e->callee->aux)
1268 {
1269 something_changed = true;
1270 e->callee->aux = (void *)1;
1271 }
1272 }
1273 order[i]->aux = NULL;
1274 }
1275 }
1276 free (order);
1277 return 0;
1278 }
1279
1280 static bool
1281 gate_ipa_profile (void)
1282 {
1283 return flag_ipa_profile;
1284 }
1285
/* Pass descriptor for the ipa-profile pass above.  A plain IPA pass
   with no LTO summary streaming or per-function transform hooks.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,		        /* gate */
  ipa_profile,		                /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};