/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "calls.h"
#include "stringpool.h"
#include "predict.h"
#include "basic-block.h"
#include "plugin-api.h"
#include "hard-reg-set.h"
#include "function.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "flags.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "profile.h"
#include "params.h"
#include "internal-fn.h"
#include "tree-ssa-alias.h"
#include "gimple.h"
#include "dbgcnt.h"


/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Look for all functions inlined to NODE and update their inlined_to
   pointers to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
	e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by a pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable and when
   we find their bodies are needed for further cloning.  In the second case
   we mark them with a pointer to 2 after processing, so they are re-queued
   once they become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}

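/* To illustrate the AUX protocol above, a node moves through these
   states during the walk:

     aux == NULL   never enqueued yet;
     aux == 2      processed while in the boundary, eligible for
		   re-queueing once it becomes reachable;
     otherwise     on the queue, with AUX linking to the next node and
		   the chain terminated by a pointer to 1.  */
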
/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && ((TREE_CODE (node->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
			|| (symtab->state < IPA_SSA
			    && lookup_attribute
				 ("always_inline",
				  DECL_ATTRIBUTES (body->decl))))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->decl)
			   != error_mark_node))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no target or only one
   target is possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  symtab_node *body = n->function_symbol ();

	  /* Prior to inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && before_inlining_p)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s/%i to %s/%i\n",
			       edge->caller->name (), edge->caller->order,
			       target->name (),
			       target->order);
	    }
	  edge = edge->make_direct (target);
	  if (inline_summaries)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}

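/* As a small, hypothetical C++ example of the devirtualization above:

     struct S final { virtual int f (); };
     int g (S *p) { return p->f (); }

   S is final, so S::f is the only possible target of the indirect call
   in g and the edge can be made direct.  With no possible targets at
   all, the call is redirected to __builtin_unreachable instead.  */
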
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies
     need to stay in memory until inlining in the hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need the bodies of their origins for later
     materialization; this means that we want to keep the body even if the
     origin is unreachable otherwise.  To prevent the origin from sitting
     in the callgraph and being walked by IPA passes, we turn it into an
     unanalyzed node with the body defined.

     We maintain the set of function declarations whose bodies need to
     stay in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how
     to reshape the callgraph and preserve the body when an offline copy
     of a function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as
     DECL_EXTERNAL variables with DECL_INITIAL set.  We finalize these and
     keep reachable ones around for constant folding purposes.  After
     inlining we however stop walking their references to let everything
     static referenced by them be removed when it is otherwise
     unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols
   that need to stay) and symbols that are in the boundary (i.e. external
   symbols referenced by reachable symbols or origins of clones).  The
   queue is represented as a linked list via AUX pointers, terminated
   by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary
   we always turn a definition into a declaration, but we may keep the
   function body around based on body_needed_for_clonning.

   All symbols that enter the queue have their AUX pointer non-zero and
   are in the boundary.  Pointer set REACHABLE is used to track reachable
   symbols.

   Every symbol can be visited twice: once as part of the boundary and
   once as a real reachable symbol.  enqueue_node needs to decide whether
   the node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to
   constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone
     and thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be
     called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX
	 pointer for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *) 2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
		= cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to
	     extern inline functions we decided not to inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
				  || (symtab->state < IPA_SSA
				      && lookup_attribute
					   ("always_inline",
					    DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When an inline clone exists, mark its body to be preserved
		 so that when the offline copy of the function is removed we
		 don't kill it.  */
	      if (cnode->global.inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For instrumentation clones we always need the original
		 function node for proper LTO privatization.  */
	      if (cnode->instrumentation_clone
		  && cnode->definition)
		{
		  gcc_assert (cnode->instrumented_version || in_lto_p);
		  if (cnode->instrumented_version)
		    {
		      enqueue_node (cnode->instrumented_version, &first,
				    &reachable);
		      reachable.add (cnode->instrumented_version);
		    }
		}

	      /* For non-inline clones, force their origins to the boundary
		 and ensure that their bodies are not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk.thunk_p)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see the constructor of an external variable, keep referred
	 nodes in the boundary.  This will also hold initializers of the
	 external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk.thunk_p)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl)
		   && !node->alias && !node->thunk.thunk_p)
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk.thunk_p)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that materializing them allows
     further cloning.  If the function the clone is inlined into is
     removed, we need to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track, for
	     all external variables, whether they are defined in another
	     partition or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly look
	     up the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s/%i", ref->referred->name (),
			 ref->referred->order);
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
	  vnext = next_variable (vnode);
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
	      && !POINTER_BOUNDS_P (vnode->decl))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be
     local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	       (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ())
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

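/* A sketch of the extern inline handling described above, on a
   hypothetical translation unit:

     extern inline int sq (int x) { return x * x; }
     int f (int x) { return sq (x); }

   Before inlining, sq sits in the boundary with its body preserved so
   it can still be inlined into f; once inlining is done, the body is
   released and only a declaration of sq remains.  */
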
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read);
       i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      case IPA_REF_CHKP:
	gcc_unreachable ();
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be
   needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
	  if (vnode->num_references ())
	    *(bool *) data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken, or that
   are read-only, and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.
   Currently we do this transformation as part of whole program
   visibility and re-do it at the ipa-reference pass (to take cloning
   into account), but it would make sense to do it before early
   optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making a variable in an explicit section read-only can cause
	       a section type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

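/* For illustration, on a hypothetical translation unit:

     static int a;               // only loaded: becomes read-only
     static int b;               // only stored: becomes write-only
     static int c; int *p = &c;  // address taken: flags left alone

   For b, set_writeonly_bit also drops the initializer (and with it the
   references the initializer held), which is what makes the function
   return true and trigger another unreachable-symbol removal.  */
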
/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make functions unreachable.  We cannot
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2
   should be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just in
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

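/* For example, the first final constructor with default priority gets
   WHICH_BUF "I_65535_0"; get_file_function_name then adds its usual
   "_GLOBAL__" prefix and a file-specific suffix, producing the magic
   name collect2 scans for (the exact spelling is up to
   get_file_function_name).  */
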
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When the target does not support ctors and dtors, we call all
   constructors and destructors from special initialization/destruction
   functions recognized by collect2.

   When we are going to build such a function, collect all constructors
   and destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      j = i;
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
	 nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Otherwise, generate a call to each cdtor in the batch.  */
      for (; i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want the user to be able to breakpoint in
	     them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

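/* For example, given static ctors with priorities {100, 100, 65535} on
   a target without native ctor support, the loop above emits two
   wrapper functions: one calling both priority-100 ctors and one
   calling the default-priority ctor, each built via
   cgraph_build_static_cdtor_1 ('I', ..., true).  */
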
/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in reverse
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

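/* E.g. two ctors with equal priority compare by DECL_UID in descending
   order, which keeps the result deterministic while preserving the
   reverse execution order noted above.  */
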
/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling
   them.  This is needed for targets not supporting ctors or dtors, but
   we perform the transformation also at linktime to merge possibly
   numerous constructors/destructors into a single function to improve
   code locality and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate each variable with the single function that
   may access it.

   FUNCTION is the current single user of a variable; VAR is a variable
   whose lattice value is to be merged in.  The lattice is stored in
   SINGLE_USER_MAP.

   We represent:
   - TOP by no entry in SINGLE_USER_MAP
   - BOTTOM by BOTTOM in the AUX pointer (to save lookups)
   - a known single user by a cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

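/* A worked example of the meet: with no entry for VAR in the map,
   meet (f, var, map) just returns f (VAR is still TOP); if the map
   records f as VAR's single user, the result stays f; if it records a
   different function g, the value falls to BOTTOM, meaning "more than
   one user".  */
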
/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they all occur within a single
   function FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that
   may possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *) first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If the user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
#ifdef ENABLE_CHECKING
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_assert (single_user_map.get (var));
#endif
	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s/%i is used by single function\n",
		       var->name (), var->order);
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}

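/* For instance, on a hypothetical unit

     static int counter;
     static void tick (void) { counter++; }

   counter is referenced only from tick, so the dataflow settles on
   tick as its single user and used_by_single_function is set.  Were a
   second function to touch counter, the lattice value would drop to
   BOTTOM and the flag would stay clear.  */
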
namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}