intrinsic.h (gfc_check_selected_real_kind, [...]): Update prototypes.
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "cgraph.h"
26 #include "tree-pass.h"
27 #include "timevar.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "flags.h"
31
/* Fill array ORDER with all cgraph nodes in reverse topological order
   (a node is emitted only after every caller reachable from it on the
   DFS walk has been emitted).  Returns the number of nodes stored.

   During the walk NODE->aux encodes the traversal state:
     NULL       - node not visited yet;
     &last      - sentinel meaning all caller edges were processed;
     otherwise  - the next struct cgraph_edge in the caller list to scan.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge, last;   /* LAST exists only so &last is a unique sentinel.  */
  int pass;

  /* Explicit DFS stack; cgraph_n_nodes is an upper bound on its depth.  */
  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  /* Pass 0 seeds the DFS from root-like nodes (not only called directly
     and not address taken); pass 1 picks up whatever is left, e.g. nodes
     that only sit on cycles.  */
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
	  && (pass
	      || (!cgraph_only_called_directly_p (node)
		  && !node->address_taken)))
	{
	  node2 = node;
	  if (!node->callers)
	    node->aux = &last;
	  else
	    node->aux = node->callers;
	  while (node2)
	    {
	      while (node2->aux != &last)
		{
		  edge = (struct cgraph_edge *) node2->aux;
		  /* Advance the edge cursor before possibly descending.  */
		  if (edge->next_caller)
		    node2->aux = edge->next_caller;
		  else
		    node2->aux = &last;
		  /* Break possible cycles involving always-inline
		     functions by ignoring edges from always-inline
		     functions to non-always-inline functions.  */
		  if (edge->caller->local.disregard_inline_limits
		      && !edge->callee->local.disregard_inline_limits)
		    continue;
		  if (!edge->caller->aux)
		    {
		      /* Unvisited caller: initialize its cursor, push the
			 current node and descend into the caller.  */
		      if (!edge->caller->callers)
			edge->caller->aux = &last;
		      else
			edge->caller->aux = edge->caller->callers;
		      stack[stack_size++] = node2;
		      node2 = edge->caller;
		      break;
		    }
		}
	      /* All callers of NODE2 done: emit it and pop the stack.  */
	      if (node2->aux == &last)
		{
		  order[order_pos++] = node2;
		  if (stack_size)
		    node2 = stack[--stack_size];
		  else
		    node2 = NULL;
		}
	    }
	}
  free (stack);
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
106
107 /* Look for all functions inlined to NODE and update their inlined_to pointers
108 to INLINED_TO. */
109
110 static void
111 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
112 {
113 struct cgraph_edge *e;
114 for (e = node->callees; e; e = e->next_callee)
115 if (e->callee->global.inlined_to)
116 {
117 e->callee->global.inlined_to = inlined_to;
118 update_inlined_to_pointer (e->callee, inlined_to);
119 }
120 }
121
122 /* Add cgraph NODE to queue starting at FIRST.
123
124 The queue is linked via AUX pointers and terminated by pointer to 1.
125 We enqueue nodes at two occasions: when we find them reachable or when we find
126 their bodies needed for further clonning. In the second case we mark them
127 by pointer to 2 after processing so they are re-queue when they become
128 reachable. */
129
130 static void
131 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
132 {
133 /* Node is still in queue; do nothing. */
134 if (node->aux && node->aux != (void *) 2)
135 return;
136 /* Node was already processed as unreachable, re-enqueue
137 only if it became reachable now. */
138 if (node->aux == (void *)2 && !node->reachable)
139 return;
140 node->aux = *first;
141 *first = node;
142 }
143
144 /* Add varpool NODE to queue starting at FIRST. */
145
146 static void
147 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
148 {
149 node->aux = *first;
150 *first = node;
151 }
152
153 /* Process references. */
154
155 static void
156 process_references (struct ipa_ref_list *list,
157 struct cgraph_node **first,
158 struct varpool_node **first_varpool,
159 bool before_inlining_p)
160 {
161 int i;
162 struct ipa_ref *ref;
163 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
164 {
165 if (ref->refered_type == IPA_REF_CGRAPH)
166 {
167 struct cgraph_node *node = ipa_ref_node (ref);
168 if (!node->reachable
169 && (!DECL_EXTERNAL (node->decl)
170 || before_inlining_p))
171 {
172 node->reachable = true;
173 enqueue_cgraph_node (node, first);
174 }
175 }
176 else
177 {
178 struct varpool_node *node = ipa_ref_varpool_node (ref);
179 if (!node->needed)
180 {
181 varpool_mark_needed_node (node);
182 enqueue_varpool_node (node, first_varpool);
183 }
184 }
185 }
186 }
187
/* Return true when variable NODE can be removed from the varpool
   if all references to it are eliminated.  */

static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
  return (!node->force_output && !node->used_from_other_partition
	  && (DECL_COMDAT (node->decl) || !node->externally_visible));
}
197
198 /* Return true when function can be marked local. */
199
200 static bool
201 cgraph_local_node_p (struct cgraph_node *node)
202 {
203 return (cgraph_only_called_directly_p (node)
204 && node->analyzed
205 && !DECL_EXTERNAL (node->decl)
206 && !node->local.externally_visible
207 && !node->reachable_from_other_partition
208 && !node->in_other_partition);
209 }
210
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions has been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  Dumps progress to
   FILE when non-NULL; returns true if anything was removed or changed.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Work list heads; pointer-to-1 is the list terminator (see
     enqueue_cgraph_node).  */
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* The AUX fields double as work-list links; they must start out clear.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Seed the function work list with roots: nodes that must be kept even
     with no direct calls or references (external bodies count as roots
     only before inlining).  */
  for (node = cgraph_nodes; node; node = node->next)
    if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	&& ((!DECL_EXTERNAL (node->decl))
	    || before_inlining_p))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }
  /* Seed the variable work list with variables that cannot be removed
     even when unreferenced.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
	{
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklist, one for cgraph nodes other for varpools and
     are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Mark processed-but-unreachable with pointer-to-2 so
	     enqueue_cgraph_node can re-queue it if it becomes reachable.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		if (!e->callee->reachable
		    && node->analyzed
		    && (!e->inline_failed || !e->callee->analyzed
			|| (!DECL_EXTERNAL (e->callee->decl))
			|| before_inlining_p))
		  {
		    e->callee->reachable = true;
		    enqueue_cgraph_node (e->callee, &first);
		  }
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
		 && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to care functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them to unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* AUX set but not reachable: kept only for cloning; drop edges,
	 references and the analyzed status, but keep the body.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	  node->local.inlinable = false;
	}
      if (!node->aux)
	{
	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
	    cgraph_remove_node (node);
	  else
	    {
	      struct cgraph_edge *e;

	      /* See if there is reachable caller.  */
	      for (e = node->callers; e; e = e->next_caller)
		if (e->caller->reachable)
		  break;

	      /* If so, we need to keep node in the callgraph.  */
	      if (e || node->needed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      node->analyzed = false;
		      node->local.inlinable = false;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		  /* Unlink NODE from the clone tree.  */
		  if (node->prev_sibling_clone)
		    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		  else if (node->clone_of)
		    node->clone_of->clones = node->next_sibling_clone;
		  if (node->next_sibling_clone)
		    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
		  node->clone_of = NULL;
		  node->next_sibling_clone = NULL;
		  node->prev_sibling_clone = NULL;
		}
	      else
		cgraph_remove_node (node);
	    }
	  changed = true;
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal clone.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	int i;
	struct ipa_ref *ref;
	bool found = false;
	/* The iteration stops after the first referring ref (if any);
	   every such ref is asserted to be an address reference.  */
	for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		    && !found; i++)
	  {
	    gcc_assert (ref->use == IPA_REF_ADDR);
	    found = true;
	  }
	if (!found)
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
509
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Only consider finalized variables whose every reference is visible to
     IPA, and which might still gain a flag (currently addressable, or not
     yet read-only).  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Classify each referring reference; stop early once both facts
	   are established.  */
	for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->decl) = 0;
	  }
	if (!TREE_READONLY (vnode->decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
566
567 /* Return true when function NODE should be considered externally visible. */
568
569 static bool
570 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program)
571 {
572 if (!node->local.finalized)
573 return false;
574 if (!DECL_COMDAT (node->decl)
575 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
576 return false;
577 if (!whole_program)
578 return true;
579 if (DECL_PRESERVE_P (node->decl))
580 return true;
581 /* COMDAT functions must be shared only if they have address taken,
582 otherwise we can produce our own private implementation with
583 -fwhole-program. */
584 if (DECL_COMDAT (node->decl))
585 {
586 if (node->address_taken || !node->analyzed)
587 return true;
588 if (node->same_comdat_group)
589 {
590 struct cgraph_node *next;
591
592 /* If more than one function is in the same COMDAT group, it must
593 be shared even if just one function in the comdat group has
594 address taken. */
595 for (next = node->same_comdat_group;
596 next != node;
597 next = next->same_comdat_group)
598 if (next->address_taken || !next->analyzed)
599 return true;
600 }
601 }
602 if (MAIN_NAME_P (DECL_NAME (node->decl)))
603 return true;
604 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
605 return true;
606 return false;
607 }
608
609 /* Dissolve the same_comdat_group list in which NODE resides. */
610
611 static void
612 dissolve_same_comdat_group_list (struct cgraph_node *node)
613 {
614 struct cgraph_node *n = node, *next;
615 do
616 {
617 next = n->same_comdat_group;
618 n->same_comdat_group = NULL;
619 n = next;
620 }
621 while (n != node);
622 }
623
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.

   WHOLE_PROGRAM is the effective -fwhole-program setting.  Always
   returns 0 (no TODO flags).  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	 limitations).  It is necessary to keep the flag to make rest of C++ FE
	 happy.  Clear the flag here to avoid confusion in middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	      /* If at least one of same comdat group functions is external,
		 all of them have to be, otherwise it is a front-end bug.  */
	      gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      /* Demote invisible definitions to local linkage, including all their
	 same-body aliases.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  struct cgraph_node *alias;
	  gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  for (alias = node->same_body; alias; alias = alias->next)
	    cgraph_make_decl_local (alias->decl);
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though. */
	    dissolve_same_comdat_group_list (node);
	}
      node->local.local = cgraph_local_node_p (node);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
	  && (!whole_program
	      /* We can privatize comdat readonly variables whose address is not taken,
		 but doing so is not going to bring us optimization opportunities until
		 we start reordering datastructures.  */
	      || DECL_COMDAT (vnode->decl)
	      || DECL_WEAK (vnode->decl)
	      || lookup_attribute ("externally_visible",
				   DECL_ATTRIBUTES (vnode->decl))))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
766
767 /* Local function pass handling visibilities. This happens before LTO streaming
768 so in particular -fwhole-program should be ignored at this level. */
769
770 static unsigned int
771 local_function_and_variable_visibility (void)
772 {
773 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
774 }
775
/* Pass descriptor for the local visibility pass; runs unconditionally
   (NULL gate) before LTO streaming.  */

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
795
796 /* Do not re-run on ltrans stage. */
797
798 static bool
799 gate_whole_program_function_and_variable_visibility (void)
800 {
801 return !flag_ltrans;
802 }
803
/* Bring functions local at LTO time with -fwhole-program.  Also marks
   the remaining externally visible functions and variables as needed,
   and (when optimizing) discovers read-only / non-addressable variables.
   Always returns 0.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  function_and_variable_visibility (flag_whole_program);

  /* Whatever stayed externally visible (and is not COMDAT) must be kept.  */
  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
	&& node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->needed)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
833
/* Pass descriptor for the whole-program visibility IPA pass; gated off
   at ltrans stage (see gate above).  */

struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
862
863 /* Hash a cgraph node set element. */
864
865 static hashval_t
866 hash_cgraph_node_set_element (const void *p)
867 {
868 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
869 return htab_hash_pointer (element->node);
870 }
871
872 /* Compare two cgraph node set elements. */
873
874 static int
875 eq_cgraph_node_set_element (const void *p1, const void *p2)
876 {
877 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
878 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
879
880 return e1->node == e2->node;
881 }
882
883 /* Create a new cgraph node set. */
884
885 cgraph_node_set
886 cgraph_node_set_new (void)
887 {
888 cgraph_node_set new_node_set;
889
890 new_node_set = ggc_alloc_cgraph_node_set_def ();
891 new_node_set->hashtab = htab_create_ggc (10,
892 hash_cgraph_node_set_element,
893 eq_cgraph_node_set_element,
894 NULL);
895 new_node_set->nodes = NULL;
896 return new_node_set;
897 }
898
899 /* Add cgraph_node NODE to cgraph_node_set SET. */
900
901 void
902 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
903 {
904 void **slot;
905 cgraph_node_set_element element;
906 struct cgraph_node_set_element_def dummy;
907
908 dummy.node = node;
909 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
910
911 if (*slot != HTAB_EMPTY_ENTRY)
912 {
913 element = (cgraph_node_set_element) *slot;
914 gcc_assert (node == element->node
915 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
916 == node));
917 return;
918 }
919
920 /* Insert node into hash table. */
921 element = ggc_alloc_cgraph_node_set_element_def ();
922 element->node = node;
923 element->index = VEC_length (cgraph_node_ptr, set->nodes);
924 *slot = element;
925
926 /* Insert into node vector. */
927 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
928 }
929
/* Remove cgraph_node NODE from cgraph_node_set SET.  A no-op when NODE
   is not a member.  Uses swap-with-last so the node vector stays dense;
   element indices are updated accordingly.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  /* Hash table and vector must agree on NODE's position.  */
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
969
970 /* Find NODE in SET and return an iterator to it if found. A null iterator
971 is returned if NODE is not in SET. */
972
973 cgraph_node_set_iterator
974 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
975 {
976 void **slot;
977 struct cgraph_node_set_element_def dummy;
978 cgraph_node_set_element element;
979 cgraph_node_set_iterator csi;
980
981 dummy.node = node;
982 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
983 if (slot == NULL)
984 csi.index = (unsigned) ~0;
985 else
986 {
987 element = (cgraph_node_set_element) *slot;
988 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
989 == node);
990 csi.index = element->index;
991 }
992 csi.set = set;
993
994 return csi;
995 }
996
997 /* Dump content of SET to file F. */
998
999 void
1000 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1001 {
1002 cgraph_node_set_iterator iter;
1003
1004 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1005 {
1006 struct cgraph_node *node = csi_node (iter);
1007 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1008 }
1009 fprintf (f, "\n");
1010 }
1011
1012 /* Dump content of SET to stderr. */
1013
1014 DEBUG_FUNCTION void
1015 debug_cgraph_node_set (cgraph_node_set set)
1016 {
1017 dump_cgraph_node_set (stderr, set);
1018 }
1019
1020 /* Hash a varpool node set element. */
1021
1022 static hashval_t
1023 hash_varpool_node_set_element (const void *p)
1024 {
1025 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1026 return htab_hash_pointer (element->node);
1027 }
1028
1029 /* Compare two varpool node set elements. */
1030
1031 static int
1032 eq_varpool_node_set_element (const void *p1, const void *p2)
1033 {
1034 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1035 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1036
1037 return e1->node == e2->node;
1038 }
1039
1040 /* Create a new varpool node set. */
1041
1042 varpool_node_set
1043 varpool_node_set_new (void)
1044 {
1045 varpool_node_set new_node_set;
1046
1047 new_node_set = ggc_alloc_varpool_node_set_def ();
1048 new_node_set->hashtab = htab_create_ggc (10,
1049 hash_varpool_node_set_element,
1050 eq_varpool_node_set_element,
1051 NULL);
1052 new_node_set->nodes = NULL;
1053 return new_node_set;
1054 }
1055
1056 /* Add varpool_node NODE to varpool_node_set SET. */
1057
1058 void
1059 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1060 {
1061 void **slot;
1062 varpool_node_set_element element;
1063 struct varpool_node_set_element_def dummy;
1064
1065 dummy.node = node;
1066 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1067
1068 if (*slot != HTAB_EMPTY_ENTRY)
1069 {
1070 element = (varpool_node_set_element) *slot;
1071 gcc_assert (node == element->node
1072 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1073 == node));
1074 return;
1075 }
1076
1077 /* Insert node into hash table. */
1078 element = ggc_alloc_varpool_node_set_element_def ();
1079 element->node = node;
1080 element->index = VEC_length (varpool_node_ptr, set->nodes);
1081 *slot = element;
1082
1083 /* Insert into node vector. */
1084 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1085 }
1086
/* Remove varpool_node NODE from varpool_node_set SET.  A no-op when
   NODE is not a member of SET.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  /* Look NODE up in the hash table; NO_INSERT yields NULL if absent.  */
  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  /* The vector entry at the recorded index must still be NODE.  */
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector. We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      /* NODE was not the last entry, so the popped last node must be
	 relocated into NODE's old slot; find its hash element so its
	 recorded index can be updated to match.  */
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
  ggc_free (element);
}
1126
1127 /* Find NODE in SET and return an iterator to it if found. A null iterator
1128 is returned if NODE is not in SET. */
1129
1130 varpool_node_set_iterator
1131 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1132 {
1133 void **slot;
1134 struct varpool_node_set_element_def dummy;
1135 varpool_node_set_element element;
1136 varpool_node_set_iterator vsi;
1137
1138 dummy.node = node;
1139 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1140 if (slot == NULL)
1141 vsi.index = (unsigned) ~0;
1142 else
1143 {
1144 element = (varpool_node_set_element) *slot;
1145 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1146 == node);
1147 vsi.index = element->index;
1148 }
1149 vsi.set = set;
1150
1151 return vsi;
1152 }
1153
1154 /* Dump content of SET to file F. */
1155
1156 void
1157 dump_varpool_node_set (FILE *f, varpool_node_set set)
1158 {
1159 varpool_node_set_iterator iter;
1160
1161 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1162 {
1163 struct varpool_node *node = vsi_node (iter);
1164 fprintf (f, " %s", varpool_node_name (node));
1165 }
1166 fprintf (f, "\n");
1167 }
1168
/* Dump content of SET to stderr.  Convenience wrapper around
   dump_varpool_node_set, intended to be called from the debugger.  */

DEBUG_FUNCTION void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
1176
1177
1178 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1179
1180 static unsigned int
1181 ipa_profile (void)
1182 {
1183 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1184 struct cgraph_edge *e;
1185 int order_pos;
1186 bool something_changed = false;
1187 int i;
1188
1189 order_pos = cgraph_postorder (order);
1190 for (i = order_pos - 1; i >= 0; i--)
1191 {
1192 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1193 {
1194 for (e = order[i]->callees; e; e = e->next_callee)
1195 if (e->callee->local.local && !e->callee->aux)
1196 {
1197 something_changed = true;
1198 e->callee->aux = (void *)1;
1199 }
1200 }
1201 order[i]->aux = NULL;
1202 }
1203
1204 while (something_changed)
1205 {
1206 something_changed = false;
1207 for (i = order_pos - 1; i >= 0; i--)
1208 {
1209 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1210 {
1211 for (e = order[i]->callees; e; e = e->next_callee)
1212 if (e->callee->local.local && !e->callee->aux)
1213 {
1214 something_changed = true;
1215 e->callee->aux = (void *)1;
1216 }
1217 }
1218 order[i]->aux = NULL;
1219 }
1220 }
1221 free (order);
1222 return 0;
1223 }
1224
1225 static bool
1226 gate_ipa_profile (void)
1227 {
1228 return flag_ipa_profile;
1229 }
1230
/* Pass descriptor for the ipa-profile pass: a whole-program IPA_PASS
   with no LTO summary streaming and no per-function transform hooks,
   gated on -fipa-profile.  */

struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",		        /* name */
  gate_ipa_profile,		        /* gate */
  ipa_profile,			        /* execute */
  NULL,				        /* sub */
  NULL,				        /* next */
  0,				        /* static_pass_number */
  TV_IPA_PROFILE,		        /* tv_id */
  0,	                                /* properties_required */
  0,				        /* properties_provided */
  0,				        /* properties_destroyed */
  0,				        /* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 NULL,				        /* generate_summary */
 NULL,				        /* write_summary */
 NULL,				        /* read_summary */
 NULL,				        /* write_optimization_summary */
 NULL,				        /* read_optimization_summary */
 NULL,				        /* stmt_fixup */
 0,				        /* TODOs */
 NULL,			                /* function_transform */
 NULL				        /* variable_transform */
};