/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "calls.h"
#include "stringpool.h"
#include "predict.h"
#include "basic-block.h"
#include "hash-map.h"
#include "is-a.h"
#include "plugin-api.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "flags.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "alloc-pool.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "profile.h"
#include "params.h"
#include "internal-fn.h"
#include "tree-ssa-alias.h"
#include "gimple.h"
#include "dbgcnt.h"

/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Look for all functions inlined to NODE and update their inlined_to
   pointers to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node,
                           struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to the queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by a pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable and when
   we find their bodies needed for further cloning.  In the second case we
   mark them by a pointer to 2 after processing, so they are re-queued once
   they become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}
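
/* A minimal standalone sketch (not GCC code; all names below are
   hypothetical) of the queue discipline used above: the worklist is
   threaded through the AUX pointers, (void *) 1 terminates it, and
   (void *) 2 marks a node that was already processed as part of the
   boundary and may be re-queued once it later becomes reachable.

     struct snode { void *aux; };

     #define QUEUE_END ((void *) 1)
     #define IN_BOUNDARY ((void *) 2)

     static void
     enqueue (snode *n, snode **first, bool reachable_now)
     {
       if (n->aux && n->aux != IN_BOUNDARY)
         return;                        // already queued or processed
       if (n->aux == IN_BOUNDARY && !reachable_now)
         return;                        // nothing new to learn
       n->aux = *first;                 // push on the intrusive list
       *first = n;
     }

     static void
     drain (snode **first)
     {
       while (*first != (snode *) QUEUE_END)
         {
           snode *n = *first;
           *first = (snode *) n->aux;
           // Visit n here; boundary nodes get n->aux = IN_BOUNDARY so
           // they can come back when they become reachable.
         }
     }  */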

/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    bool before_inlining_p,
                    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;

      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (((before_inlining_p
                    && (symtab->state < IPA_SSA
                        || !lookup_attribute ("always_inline",
                                              DECL_ATTRIBUTES (node->decl)))))
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (TREE_CODE (node->decl) == VAR_DECL
                      && flag_wpa
                      && ctor_for_folding (node->decl)
                         != error_mark_node))))
        reachable->add (node);
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try trivial devirtualization when no target or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               hash_set<symtab_node *> *reachable,
                               bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets
        (edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in an anonymous
             namespace; either we will find a use of the virtual table
             defining them, or they are unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                   (method_class_type (TREE_TYPE (n->decl))))
            continue;

          /* Prior to inlining, keep alive bodies of possible targets for
             devirtualization.  */
          if (n->definition
              && (before_inlining_p
                  && (symtab->state < IPA_SSA
                      || !lookup_attribute ("always_inline",
                                            DECL_ATTRIBUTES (n->decl)))))
            reachable->add (n);

          /* Even after inlining we want to keep the possible targets in
             the boundary, so late passes can still produce a direct call
             even if the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_node::get_create
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              location_t locus;
              if (edge->call_stmt)
                locus = gimple_location (edge->call_stmt);
              else
                locus = UNKNOWN_LOCATION;
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s/%i to %s/%i\n",
                               edge->caller->name (), edge->caller->order,
                               target->name (),
                               target->order);
            }
          edge = edge->make_direct (target);
          if (inline_summary_vec)
            inline_update_overall_summary (node);
          else if (edge->call_stmt)
            {
              edge->redirect_call_stmt_to_callee ();

              /* Call to __builtin_unreachable shouldn't be
                 instrumented.  */
              if (!targets.length ())
                gimple_call_set_with_bounds (edge->call_stmt, false);
            }
        }
    }
}
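
/* A user-level illustration (example code, not GCC internals) of the
   trivial devirtualization above.  Because B is final, the set of
   possible targets of the indirect call is known exactly, so the edge
   can be made direct; with an empty target set the call would instead
   be redirected to __builtin_unreachable.

     struct A { virtual int f () = 0; };
     struct B final : A { int f () { return 42; } };

     int
     call (B *b)
     {
       A *a = b;
       return a->f ();   // static type B is final: the target set is
                         // { B::f }, so the call is made direct
     }  */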

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining, in the hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

     BEFORE_INLINING_P specifies whether we are before or after inlining.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need the bodies of their origins for later
     materialization; this means that we want to keep the body even if the
     origin is otherwise unreachable.  To avoid the origin sitting in the
     callgraph and being walked by IPA passes, we turn it into an
     unanalyzed node with the body defined.

     We maintain the set of function declarations whose bodies need to
     stay in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how
     to reshape the callgraph and preserve the body when an offline copy
     of a function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as
     DECL_EXTERNAL variables with DECL_INITIAL set.  We finalize these and
     keep reachable ones around for constant folding purposes.  After
     inlining we however stop walking their references to let everything
     static referenced by them be removed when it is otherwise
     unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols
   that need to stay) and symbols that are in the boundary (i.e. external
   symbols referenced by reachable symbols or origins of clones).  The
   queue is represented as a linked list via AUX pointers, terminated
   by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary
   we always turn the definition into a declaration, but we may keep the
   function body around based on body_needed_for_clonning.

   All symbols that enter the queue have a non-zero AUX pointer and are in
   the boundary.  Pointer set REACHABLE is used to track reachable
   symbols.

   Every symbol can be visited twice: once as part of the boundary and
   once as a real reachable symbol.  enqueue_node needs to decide whether
   the node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to the
   constant 2.  */

bool
symbol_table::remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone
     and thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not
     be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
          && !node->global.inlined_to
          && !node->in_other_partition
          && !node->can_remove_if_no_direct_calls_and_refs_p ())
        {
          gcc_assert (!node->global.inlined_to);
          reachable.add (node);
          enqueue_node (node, &first, &reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
        && !vnode->in_other_partition)
      {
        reachable.add (vnode);
        enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX
         pointer for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        node->aux = (void *) 2;
      else
        {
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
              origin_node->used_as_abstract_origin = true;
              enqueue_node (origin_node, &first, &reachable);
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->comdat_local_p ()
                    && !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, before_inlining_p, &reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to
             extern inline functions we decided not to inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (opt_for_fn (cnode->decl, optimize)
                  && opt_for_fn (cnode->decl, flag_devirtualize))
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (&reachable_call_targets,
                                                       e, &first, &reachable,
                                                       before_inlining_p);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || before_inlining_p))
                    {
                      /* Be sure that we will not optimize out the alias
                         target body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && before_inlining_p)
                        reachable.add (e->callee->function_symbol ());
                      reachable.add (e->callee);
                    }
                  enqueue_node (e->callee, &first, &reachable);
                }

              /* When an inline clone exists, mark the body to be preserved
                 so that we don't kill it when removing the offline copy of
                 the function.  */
              if (cnode->global.inlined_to)
                body_needed_for_clonning.add (cnode->decl);

              /* For non-inline clones, force their origins to the boundary
                 and ensure that the body is not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      body_needed_for_clonning.add (cnode->decl);
                      enqueue_node (cnode, &first, &reachable);
                    }
                }
            }
          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
        }
      /* When we see a constructor of an external variable, keep referred
         nodes in the boundary.  This will also hold initializers of the
         external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, &reachable);
        }
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s/%i", node->name (), node->order);
          node->remove ();
          changed = true;
        }
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
          if (!body_needed_for_clonning.contains (node->decl))
            node->release_body ();
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition)
            {
              if (file)
                fprintf (file, " %s/%i", node->name (), node->order);
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local.local = false;
              node->remove_callees ();
              node->remove_from_same_comdat_group ();
              node->remove_all_references ();
              changed = true;
              if (node->thunk.thunk_p
                  && node->thunk.add_pointer_bounds_args)
                {
                  node->thunk.thunk_p = false;
                  node->thunk.add_pointer_bounds_args = false;
                }
            }
        }
      else
        gcc_assert (node->clone_of || !node->has_gimple_body_p ()
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that materializing them allows
     further cloning.  If the function the clone is inlined into is
     removed, we need to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track for
             all external variables whether they are defined in another
             partition or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          if (file)
            fprintf (file, " %s/%i", vnode->name (), vnode->order);
          vnode->remove ();
          changed = true;
        }
      else if (!reachable.contains (vnode))
        {
          tree init;
          if (vnode->definition)
            {
              if (file)
                fprintf (file, " %s", vnode->name ());
              changed = true;
            }
          /* Keep the body if it may be useful for constant folding.  */
          if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
              && !POINTER_BOUNDS_P (vnode->decl))
            vnode->remove_initializer ();
          else
            DECL_INITIAL (vnode->decl) = init;
          vnode->body_removed = true;
          vnode->definition = false;
          vnode->analyzed = false;
          vnode->aux = NULL;

          vnode->remove_from_same_comdat_group ();

          vnode->remove_all_references ();
        }
      else
        vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be
     local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
        && !node->used_from_other_partition)
      {
        if (!node->call_for_symbol_thunks_and_aliases
               (has_addr_references_p, NULL, true)
            && (!node->instrumentation_clone
                || !node->instrumented_version
                || !node->instrumented_version->address_taken))
          {
            if (file)
              fprintf (file, " %s", node->name ());
            node->address_taken = false;
            changed = true;
            if (node->local_p ())
              {
                node->local.local = true;
                if (file)
                  fprintf (file, " (local)");
              }
          }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps the profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
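
/* A user-level illustration (example code, not GCC internals) of the
   reclamation above.  Given:

     static int unused (void) { return 1; }   // no callers, no refs
     static int helper (void) { return 2; }
     int entry (void) { return helper (); }

   `entry' is externally visible and therefore obviously needed, `helper'
   becomes reachable through the call in `entry', and `unused' is never
   enqueued, so the sweep removes it from the symbol table entirely.  */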

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN and
   READ as needed; also clear EXPLICIT_REFS if the references to the given
   variable do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs
              && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (dyn_cast<varpool_node *> (ref->referring),
                            written, address_taken, read, explicit_refs);
        break;
      case IPA_REF_CHKP:
        gcc_unreachable ();
      }
}

/* Set the TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set the writeonly bit and clear the initializer, since it will not be
   needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        vnode->remove_all_references ();
    }
  return false;
}

/* Clear the addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken, or that are
   read-only, and update their flags.

   FIXME: This cannot be done between gimplification and omp_expand since
   the readonly flag plays a role in deciding what is shared and what is
   not.  Currently we do this transformation as part of whole program
   visibility and redo it in the ipa-reference pass (to take cloning into
   account), but it would make sense to do it before early
   optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read,
                            &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            vnode->call_for_node_and_aliases (clear_addressable_bit, NULL,
                                              true);
          }
        if (!address_taken && !written
            /* Making a variable in an explicit section read-only can
               cause a section type conflict.
               See e.g. gcc.c-torture/compile/pr23237.c.  */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            vnode->call_for_node_and_aliases (set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            vnode->call_for_node_and_aliases (set_writeonly_bit, NULL,
                                              true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
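
/* A user-level illustration (example code, not GCC internals) of what
   this discovery finds.  Given a translation unit such as:

     static int counter;            // only ever stored to
     static int limit = 100;        // only ever read, address not taken
     static int seen;               // address escapes

     int *escape (void) { return &seen; }
     void reset (void) { counter = 0; }
     int  get_limit (void) { return limit; }

   the pass clears TREE_ADDRESSABLE on `counter' and `limit', marks
   `counter' write-only (so its stores can later be removed), and marks
   `limit' TREE_READONLY so its value can be propagated.  `seen' keeps all
   its flags because its address is taken.  */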

/* Free the inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make functions unreachable.  We cannot
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    inline_free_summary ();
    return 0;
  }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for a chkp static vars constructor) or 'B' (for a chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2
   should be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, in case
       we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}
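
/* Worked example (hypothetical values) of the priority encoding above:
   with WHICH == 'I', the default priority (65535) and a counter of 0,
   the buffer passed to get_file_function_name is "I_65535_0".  collect2
   gathers the resulting global names, sorts them, and arranges for the
   constructors to run in priority order at startup.

     char buf[16];
     sprintf (buf, "%c_%.5d_%d", 'I', 65535, 0);   // -> "I_65535_0"
  */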

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for a chkp static vars constructor) or 'B' (for a chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When the target does not have ctors and dtors, we call all
   constructors and destructors from special initialization/destruction
   functions recognized by collect2.

   When we are about to build such a function, collect all constructors
   and destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Find the batch of constructors/destructors with the same
         initialization priority.  */
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
         nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Otherwise, chain calls to the whole batch into one body.  */
      for (; i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should already be removed; when not
             optimizing, we want the user to be able to breakpoint in
             them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in reverse
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}
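
/* A standalone sketch (hypothetical types, not GCC code) of the
   tie-breaking idiom used by both comparators above: qsort is not a
   stable sort, so when the primary keys compare equal, an
   always-distinct secondary key (standing in for DECL_UID) decides,
   making the order deterministic across runs.

     struct fn_entry { int priority; int uid; };

     static int
     cmp_entry (const void *p1, const void *p2)
     {
       const fn_entry *f1 = (const fn_entry *) p1;
       const fn_entry *f2 = (const fn_entry *) p2;
       if (f1->priority != f2->priority)
         return f1->priority < f2->priority ? -1 : 1;
       return f1->uid - f2->uid;       // deterministic tie-break
     }  */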

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling
   them.  This is needed for targets not supporting ctors or dtors, but we
   perform the transformation also at link time to merge possibly numerous
   constructors/destructors into a single function, to improve code
   locality and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single-user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single-user dataflow.
   Here we want to associate each variable with the single function that
   may access it.

   FUNCTION is the current single user of a variable; VAR is another
   variable whose lattice value is met into it.  The lattice is stored in
   SINGLE_USER_MAP.

   We represent:
   - TOP by no entry in SINGLE_USER_MAP
   - BOTTOM by BOTTOM in the AUX pointer (to save lookups)
   - a known single user by a cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
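
/* The function above is the meet of a three-level lattice:

       TOP                 (no entry in SINGLE_USER_MAP: no known user)
        |
       any single cgraph_node
        |
       BOTTOM              (two or more distinct users)

   i.e. meet (TOP, x) = x, meet (x, x) = x and meet (x, y) = BOTTOM for
   x != y.  Values only ever move down the lattice, which is why the
   fixpoint iteration in ipa_single_use below terminates.  A condensed
   sketch (hypothetical types) of the same operation:

     node *meet (node *a, node *b)     // null encodes TOP
     {
       if (a == BOTTOM || b == BOTTOM)
         return BOTTOM;
       if (!a) return b;
       if (!b) return a;
       return a == b ? a : BOTTOM;
     }  */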

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see whether they all occur in a single
   function FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
                       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
        {
          if (cnode->global.inlined_to)
            cnode = cnode->global.inlined_to;
          if (!function)
            function = cnode;
          else if (function != cnode)
            function = BOTTOM;
        }
      else
        function = meet (function,
                         dyn_cast <varpool_node *> (ref->referring),
                         single_user_map);
    }
  return function;
}

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
        /* Enqueue symbol for dataflow.  */
        var->aux = first;
        first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *) first->aux;

      f = single_user_map.get (var);
      if (f)
        orig_user = *f;
      else
        orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If the user differs, enqueue all references.  */
      if (user != orig_user)
        {
          unsigned int i;
          ipa_ref *ref;

          single_user_map.put (var, user);

          /* Enqueue all aliases for re-processing.  */
          for (i = 0; var->iterate_referring (i, ref); i++)
            if (ref->use == IPA_REF_ALIAS
                && !ref->referring->aux)
              {
                ref->referring->aux = first;
                first = dyn_cast <varpool_node *> (ref->referring);
              }
          /* Enqueue all users for re-processing.  */
          for (i = 0; var->iterate_reference (i, ref); i++)
            if (!ref->referred->aux
                && ref->referred->definition
                && is_a <varpool_node *> (ref->referred))
              {
                ref->referred->aux = first;
                first = dyn_cast <varpool_node *> (ref->referred);
              }

          /* If the user is BOTTOM, just punt on this var.  */
          if (user == BOTTOM)
            var->aux = BOTTOM;
          else
            var->aux = NULL;
        }
      else
        var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
        {
#ifdef ENABLE_CHECKING
          gcc_assert (single_user_map.get (var));
#endif
          if (dump_file)
            {
              fprintf (dump_file,
                       "Variable %s/%i is used by a single function\n",
                       var->name (), var->order);
            }
          var->used_by_single_function = true;
        }
      var->aux = NULL;
    }
  return 0;
}
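
/* A user-level illustration (example code, not GCC internals) of the
   property computed above:

     static int cache;                  // referenced only by lookup
     int lookup (int i) { cache += i; return cache; }

     static int shared;                 // referenced by two functions
     int get (void) { return shared; }
     void put (int v) { shared = v; }

   `cache' ends up with used_by_single_function set, while `shared' meets
   two distinct users, falls to BOTTOM, and keeps the flag clear.  */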

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}