/* Basic IPA optimizations and utilities.
- Copyright (C) 2003-2014 Free Software Foundation, Inc.
+ Copyright (C) 2003-2018 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "target.h"
#include "tree.h"
-#include "calls.h"
+#include "gimple.h"
+#include "alloc-pool.h"
+#include "tree-pass.h"
#include "stringpool.h"
-#include "predict.h"
-#include "basic-block.h"
-#include "hash-map.h"
-#include "is-a.h"
-#include "plugin-api.h"
-#include "vec.h"
-#include "hashtab.h"
-#include "hash-set.h"
-#include "machmode.h"
-#include "hard-reg-set.h"
-#include "input.h"
-#include "function.h"
-#include "ipa-ref.h"
#include "cgraph.h"
-#include "tree-pass.h"
-#include "gimple-expr.h"
#include "gimplify.h"
-#include "flags.h"
-#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
-#include "alloc-pool.h"
+#include "symbol-summary.h"
+#include "tree-vrp.h"
#include "ipa-prop.h"
-#include "ipa-inline.h"
-#include "tree-inline.h"
-#include "profile.h"
-#include "params.h"
-#include "internal-fn.h"
-#include "tree-ssa-alias.h"
-#include "gimple.h"
+#include "ipa-fnsummary.h"
#include "dbgcnt.h"
-
+#include "debug.h"
+#include "attribs.h"
/* Return true when NODE has ADDR reference. */
static bool
has_addr_references_p (struct cgraph_node *node,
- void *data ATTRIBUTE_UNUSED)
+ void *)
{
int i;
struct ipa_ref *ref = NULL;
return false;
}
+/* Return true when NODE can be target of an indirect call. */
+
+static bool
+is_indirect_call_target_p (struct cgraph_node *node, void *)
+{
+ return node->indirect_call_target;
+}
+
/* Look for all functions inlined to NODE and update their inlined_to pointers
to INLINED_TO. */
*first = node;
}
+/* Return true if NODE may get inlined later.
+ This is used to keep DECL_EXTERNAL function bodies around long enough
+ so inliner can process them. */
+
+static bool
+possible_inline_candidate_p (symtab_node *node)
+{
+ if (symtab->state >= IPA_SSA_AFTER_INLINING)
+ return false;
+ cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
+ if (!cnode)
+ return false;
+ if (DECL_UNINLINABLE (cnode->decl))
+ return false;
+ if (opt_for_fn (cnode->decl, optimize))
+ return true;
+ if (symtab->state >= IPA_SSA)
+ return false;
+ return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
+}
+
/* Process references. */
static void
process_references (symtab_node *snode,
symtab_node **first,
- bool before_inlining_p,
hash_set<symtab_node *> *reachable)
{
int i;
for (i = 0; snode->iterate_reference (i, ref); i++)
{
symtab_node *node = ref->referred;
+ symtab_node *body = node->ultimate_alias_target ();
if (node->definition && !node->in_other_partition
&& ((!DECL_EXTERNAL (node->decl) || node->alias)
- || (((before_inlining_p
- && (symtab->state < IPA_SSA
- || !lookup_attribute ("always_inline",
- DECL_ATTRIBUTES (node->decl)))))
- /* We use variable constructors during late complation for
+ || (possible_inline_candidate_p (node)
+ /* We use variable constructors during late compilation for
constant folding. Keep references alive so partitioning
knows about potential references. */
- || (TREE_CODE (node->decl) == VAR_DECL
- && flag_wpa
- && ctor_for_folding (node->decl)
- != error_mark_node))))
- reachable->add (node);
+ || (VAR_P (node->decl)
+ && (flag_wpa
+ || flag_incremental_link
+ == INCREMENTAL_LINK_LTO)
+ && dyn_cast <varpool_node *> (node)
+ ->ctor_useable_for_folding_p ()))))
+ {
+ /* Be sure that we will not optimize out alias target
+ body. */
+ if (DECL_EXTERNAL (node->decl)
+ && node->alias
+ && symtab->state < IPA_SSA_AFTER_INLINING)
+ reachable->add (body);
+ reachable->add (node);
+ }
enqueue_node (node, first, reachable);
}
}
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
struct cgraph_edge *edge,
symtab_node **first,
- hash_set<symtab_node *> *reachable,
- bool before_inlining_p)
+ hash_set<symtab_node *> *reachable)
{
unsigned int i;
void *cache_token;
unused. */
if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
&& type_in_anonymous_namespace_p
- (method_class_type (TREE_TYPE (n->decl))))
+ (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
continue;
+ n->indirect_call_target = true;
+ symtab_node *body = n->function_symbol ();
+
/* Prior inlining, keep alive bodies of possible targets for
devirtualization. */
- if (n->definition
- && (before_inlining_p
- && (symtab->state < IPA_SSA
- || !lookup_attribute ("always_inline",
- DECL_ATTRIBUTES (n->decl)))))
- reachable->add (n);
-
+ if (n->definition
+ && (possible_inline_candidate_p (body)
+ && opt_for_fn (body->decl, flag_devirtualize)))
+ {
+ /* Be sure that we will not optimize out alias target
+ body. */
+ if (DECL_EXTERNAL (n->decl)
+ && n->alias
+ && symtab->state < IPA_SSA_AFTER_INLINING)
+ reachable->add (body);
+ reachable->add (n);
+ }
/* Even after inlining we want to keep the possible targets in the
boundary, so late passes can still produce direct call even if
the chance for inlining is lost. */
(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
if (dump_enabled_p ())
- {
- location_t locus;
- if (edge->call_stmt)
- locus = gimple_location (edge->call_stmt);
- else
- locus = UNKNOWN_LOCATION;
- dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
- "devirtualizing call in %s/%i to %s/%i\n",
- edge->caller->name (), edge->caller->order,
- target->name (),
- target->order);
+ {
+ dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
+ "devirtualizing call in %s to %s\n",
+ edge->caller->dump_name (),
+ target->dump_name ());
}
edge = edge->make_direct (target);
- if (inline_summary_vec)
- inline_update_overall_summary (node);
+ if (ipa_fn_summaries)
+ ipa_update_overall_fn_summary (node);
else if (edge->call_stmt)
- {
- edge->redirect_call_stmt_to_callee ();
-
- /* Call to __builtin_unreachable shouldn't be instrumented. */
- if (!targets.length ())
- gimple_call_set_with_bounds (edge->call_stmt, false);
- }
+ edge->redirect_call_stmt_to_callee ();
}
}
}
After inlining we release their bodies and turn them into unanalyzed
nodes even when they are reachable.
- BEFORE_INLINING_P specify whether we are before or after inlining.
-
- virtual functions are kept in callgraph even if they seem unreachable in
hope calls to them will be devirtualized.
we set AUX pointer of processed symbols in the boundary to constant 2. */
bool
-symbol_table::remove_unreachable_nodes (bool before_inlining_p, FILE *file)
+symbol_table::remove_unreachable_nodes (FILE *file)
{
symtab_node *first = (symtab_node *) (void *) 1;
struct cgraph_node *node, *next;
build_type_inheritance_graph ();
if (file)
fprintf (file, "\nReclaiming functions:");
-#ifdef ENABLE_CHECKING
- FOR_EACH_FUNCTION (node)
- gcc_assert (!node->aux);
- FOR_EACH_VARIABLE (vnode)
- gcc_assert (!vnode->aux);
-#endif
+ if (flag_checking)
+ {
+ FOR_EACH_FUNCTION (node)
+ gcc_assert (!node->aux);
+ FOR_EACH_VARIABLE (vnode)
+ gcc_assert (!vnode->aux);
+ }
/* Mark functions whose bodies are obviously needed.
This is mostly when they can be referenced externally. Inline clones
are special since their declarations are shared with master clone and thus
FOR_EACH_FUNCTION (node)
{
node->used_as_abstract_origin = false;
+ node->indirect_call_target = false;
if (node->definition
&& !node->global.inlined_to
&& !node->in_other_partition
/* If we are processing symbol in boundary, mark its AUX pointer for
possible later re-processing in enqueue_node. */
if (in_boundary_p)
- node->aux = (void *)2;
+ {
+ node->aux = (void *)2;
+ if (node->alias && node->analyzed)
+ enqueue_node (node->get_alias_target (), &first, &reachable);
+ }
else
{
if (TREE_CODE (node->decl) == FUNCTION_DECL
&& DECL_ABSTRACT_ORIGIN (node->decl))
{
struct cgraph_node *origin_node
- = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
- origin_node->used_as_abstract_origin = true;
- enqueue_node (origin_node, &first, &reachable);
+ = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
+ if (origin_node && !origin_node->used_as_abstract_origin)
+ {
+ origin_node->used_as_abstract_origin = true;
+ gcc_assert (!origin_node->prev_sibling_clone);
+ gcc_assert (!origin_node->next_sibling_clone);
+ for (cgraph_node *n = origin_node->clones; n;
+ n = n->next_sibling_clone)
+ if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
+ n->used_as_abstract_origin = true;
+ }
}
/* If any symbol in a comdat group is reachable, force
all externally visible symbols in the same comdat
enqueue_node (next, &first, &reachable);
}
/* Mark references as reachable. */
- process_references (node, &first, before_inlining_p, &reachable);
+ process_references (node, &first, &reachable);
}
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
next = e->next_callee;
if (e->indirect_info->polymorphic)
walk_polymorphic_call_targets (&reachable_call_targets,
- e, &first, &reachable,
- before_inlining_p);
+ e, &first, &reachable);
}
}
for (e = cnode->callees; e; e = e->next_callee)
{
+ symtab_node *body = e->callee->function_symbol ();
if (e->callee->definition
&& !e->callee->in_other_partition
&& (!e->inline_failed
|| !DECL_EXTERNAL (e->callee->decl)
|| e->callee->alias
- || before_inlining_p))
+ || possible_inline_candidate_p (e->callee)))
{
/* Be sure that we will not optimize out alias target
body. */
if (DECL_EXTERNAL (e->callee->decl)
&& e->callee->alias
- && before_inlining_p)
- reachable.add (e->callee->function_symbol ());
+ && symtab->state < IPA_SSA_AFTER_INLINING)
+ reachable.add (body);
reachable.add (e->callee);
}
enqueue_node (e->callee, &first, &reachable);
}
}
+ else if (cnode->thunk.thunk_p)
+ enqueue_node (cnode->callees->callee, &first, &reachable);
+
/* If any reachable function has simd clones, mark them as
reachable as well. */
if (cnode->simd_clones)
if (!node->aux)
{
if (file)
- fprintf (file, " %s/%i", node->name (), node->order);
+ fprintf (file, " %s", node->dump_name ());
node->remove ();
changed = true;
}
/* If node is unreachable, remove its body. */
else if (!reachable.contains (node))
{
- if (!body_needed_for_clonning.contains (node->decl))
+ /* We keep definitions of thunks and aliases in the boundary so
+ we can walk to the ultimate alias targets and function symbols
+ reliably. */
+ if (node->alias || node->thunk.thunk_p)
+ ;
+ else if (!body_needed_for_clonning.contains (node->decl)
+ && !node->alias && !node->thunk.thunk_p)
node->release_body ();
else if (!node->clone_of)
gcc_assert (in_lto_p || DECL_RESULT (node->decl));
- if (node->definition)
+ if (node->definition && !node->alias && !node->thunk.thunk_p)
{
if (file)
- fprintf (file, " %s/%i", node->name (), node->order);
+ fprintf (file, " %s", node->dump_name ());
node->body_removed = true;
node->analyzed = false;
node->definition = false;
node->cpp_implicit_alias = false;
node->alias = false;
+ node->transparent_alias = false;
node->thunk.thunk_p = false;
node->weakref = false;
/* After early inlining we drop always_inline attributes on
if (!node->in_other_partition)
node->local.local = false;
node->remove_callees ();
- node->remove_from_same_comdat_group ();
node->remove_all_references ();
changed = true;
if (node->thunk.thunk_p
or not. */
&& (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
{
+ struct ipa_ref *ref = NULL;
+
+ /* First remove the aliases, so varpool::remove can possibly lookup
+ the constructor and save it for future use. */
+ while (vnode->iterate_direct_aliases (0, ref))
+ {
+ if (file)
+ fprintf (file, " %s", ref->referred->dump_name ());
+ ref->referring->remove ();
+ }
if (file)
- fprintf (file, " %s/%i", vnode->name (), vnode->order);
+ fprintf (file, " %s", vnode->dump_name ());
+ vnext = next_variable (vnode);
+ /* Signal removal to the debug machinery. */
+ if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
+ {
+ vnode->definition = false;
+ (*debug_hooks->late_global_decl) (vnode->decl);
+ }
vnode->remove ();
changed = true;
}
- else if (!reachable.contains (vnode))
+ else if (!reachable.contains (vnode) && !vnode->alias)
{
tree init;
if (vnode->definition)
changed = true;
}
/* Keep body if it may be useful for constant folding. */
- if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
- && !POINTER_BOUNDS_P (vnode->decl))
+ if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
+ || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
vnode->remove_initializer ();
else
DECL_INITIAL (vnode->decl) = init;
if (node->address_taken
&& !node->used_from_other_partition)
{
- if (!node->call_for_symbol_thunks_and_aliases
- (has_addr_references_p, NULL, true)
- && (!node->instrumentation_clone
- || !node->instrumented_version
- || !node->instrumented_version->address_taken))
+ if (!node->call_for_symbol_and_aliases
+ (has_addr_references_p, NULL, true))
{
if (file)
fprintf (file, " %s", node->name ());
node->address_taken = false;
changed = true;
- if (node->local_p ())
+ if (node->local_p ()
+ /* Virtual functions may be kept in cgraph just because
+ of possible later devirtualization. Do not mark them as
+ local too early so we won't optimize them out before
+ we are done with polymorphic call analysis. */
+ && (symtab->state >= IPA_SSA_AFTER_INLINING
+ || !node->call_for_symbol_and_aliases
+ (is_indirect_call_target_p, NULL, true)))
{
node->local.local = true;
if (file)
if (file)
fprintf (file, "\n");
-#ifdef ENABLE_CHECKING
- symtab_node::verify_symtab_nodes ();
-#endif
+ symtab_node::checking_verify_symtab_nodes ();
/* If we removed something, perhaps profile could be improved. */
- if (changed && optimize && inline_edge_summary_vec.exists ())
+ if (changed && (optimize || in_lto_p) && ipa_call_summaries)
FOR_EACH_DEFINED_FUNCTION (node)
ipa_propagate_frequency (node);
process_references (dyn_cast<varpool_node *> (ref->referring), written,
address_taken, read, explicit_refs);
break;
- case IPA_REF_CHKP:
- gcc_unreachable ();
}
}
/* Set writeonly bit and clear the initalizer, since it will not be needed. */
bool
-set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
+set_writeonly_bit (varpool_node *vnode, void *data)
{
vnode->writeonly = true;
- if (optimize)
+ if (optimize || in_lto_p)
{
DECL_INITIAL (vnode->decl) = NULL;
if (!vnode->alias)
- vnode->remove_all_references ();
+ {
+ if (vnode->num_references ())
+ *(bool *)data = true;
+ vnode->remove_all_references ();
+ }
}
return false;
}
return false;
}
-/* Discover variables that have no longer address taken or that are read only
- and update their flags.
+/* Discover variables that have no longer address taken, are read-only or
+ write-only and update their flags.
+
+ Return true when unreachable symbol removal should be done.
FIXME: This can not be done in between gimplify and omp_expand since
readonly flag plays role on what is shared and what is not. Currently we do
ipa-reference pass (to take into account clonning), but it would
make sense to do it before early optimizations. */
-void
-ipa_discover_readonly_nonaddressable_vars (void)
+bool
+ipa_discover_variable_flags (void)
{
+ if (!flag_ipa_reference_addressable)
+ return false;
+
+ bool remove_p = false;
varpool_node *vnode;
if (dump_file)
fprintf (dump_file, "Clearing variable flags:");
bool read = false;
bool explicit_refs = true;
- process_references (vnode, &written, &address_taken, &read, &explicit_refs);
+ process_references (vnode, &written, &address_taken, &read,
+ &explicit_refs);
if (!explicit_refs)
continue;
if (!address_taken)
{
if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
fprintf (dump_file, " %s (non-addressable)", vnode->name ());
- vnode->call_for_node_and_aliases (clear_addressable_bit, NULL, true);
+ vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
+ true);
}
if (!address_taken && !written
/* Making variable in explicit section readonly can cause section
{
if (!TREE_READONLY (vnode->decl) && dump_file)
fprintf (dump_file, " %s (read-only)", vnode->name ());
- vnode->call_for_node_and_aliases (set_readonly_bit, NULL, true);
+ vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
}
if (!vnode->writeonly && !read && !address_taken && written)
{
if (dump_file)
fprintf (dump_file, " %s (write-only)", vnode->name ());
- vnode->call_for_node_and_aliases (set_writeonly_bit, NULL, true);
+ vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
+ true);
}
}
if (dump_file)
fprintf (dump_file, "\n");
-}
-
-/* Free inline summary. */
-
-namespace {
-
-const pass_data pass_data_ipa_free_inline_summary =
-{
- SIMPLE_IPA_PASS, /* type */
- "free-inline-summary", /* name */
- OPTGROUP_NONE, /* optinfo_flags */
- TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- /* Early optimizations may make function unreachable. We can not
- remove unreachable functions as part of the ealry opts pass because
- TODOs are run before subpasses. Do it here. */
- ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
-};
-
-class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
-{
-public:
- pass_ipa_free_inline_summary (gcc::context *ctxt)
- : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
- {}
-
- /* opt_pass methods: */
- virtual unsigned int execute (function *)
- {
- inline_free_summary ();
- return 0;
- }
-
-}; // class pass_ipa_free_inline_summary
-
-} // anon namespace
-
-simple_ipa_opt_pass *
-make_pass_ipa_free_inline_summary (gcc::context *ctxt)
-{
- return new pass_ipa_free_inline_summary (ctxt);
+ return remove_p;
}
/* Generate and emit a static constructor or destructor. WHICH must
- be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
- (for chp static vars constructor) or 'B' (for chkp static bounds
- constructor). BODY is a STATEMENT_LIST containing GENERIC
+ be one of 'I' (for a constructor), 'D' (for a destructor).
+ BODY is a STATEMENT_LIST containing GENERIC
statements. PRIORITY is the initialization priority for this
constructor or destructor.
TREE_STATIC (decl) = 1;
TREE_USED (decl) = 1;
DECL_ARTIFICIAL (decl) = 1;
+ DECL_IGNORED_P (decl) = 1;
DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
DECL_SAVED_TREE (decl) = body;
if (!targetm.have_ctors_dtors && final)
DECL_UNINLINABLE (decl) = 1;
DECL_INITIAL (decl) = make_node (BLOCK);
+ BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
TREE_USED (DECL_INITIAL (decl)) = 1;
DECL_SOURCE_LOCATION (decl) = input_location;
DECL_STATIC_CONSTRUCTOR (decl) = 1;
decl_init_priority_insert (decl, priority);
break;
- case 'P':
- DECL_STATIC_CONSTRUCTOR (decl) = 1;
- DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
- NULL,
- NULL_TREE);
- decl_init_priority_insert (decl, priority);
- break;
- case 'B':
- DECL_STATIC_CONSTRUCTOR (decl) = 1;
- DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
- NULL,
- NULL_TREE);
- decl_init_priority_insert (decl, priority);
- break;
case 'D':
DECL_STATIC_DESTRUCTOR (decl) = 1;
decl_fini_priority_insert (decl, priority);
}
/* Generate and emit a static constructor or destructor. WHICH must
- be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
- (for chkp static vars constructor) or 'B' (for chkp static bounds
- constructor). BODY is a STATEMENT_LIST containing GENERIC
+ be one of 'I' (for a constructor) or 'D' (for a destructor).
+ BODY is a STATEMENT_LIST containing GENERIC
statements. PRIORITY is the initialization priority for this
constructor or destructor. */
cgraph_build_static_cdtor_1 (which, body, priority, false);
}
-/* A vector of FUNCTION_DECLs declared as static constructors. */
-static vec<tree> static_ctors;
-/* A vector of FUNCTION_DECLs declared as static destructors. */
-static vec<tree> static_dtors;
-
/* When target does not have ctors and dtors, we call all constructor
and destructor by special initialization/destruction function
recognized by collect2.
destructors and turn them into normal functions. */
static void
-record_cdtor_fn (struct cgraph_node *node)
+record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
if (DECL_STATIC_CONSTRUCTOR (node->decl))
- static_ctors.safe_push (node->decl);
+ ctors->safe_push (node->decl);
if (DECL_STATIC_DESTRUCTOR (node->decl))
- static_dtors.safe_push (node->decl);
+ dtors->safe_push (node->decl);
node = cgraph_node::get (node->decl);
DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}
they are destructors. */
static void
-build_cdtor (bool ctor_p, vec<tree> cdtors)
+build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
size_t i,j;
size_t len = cdtors.length ();
functions have magic names which are detected by collect2. */
static void
-build_cdtor_fns (void)
+build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
- if (!static_ctors.is_empty ())
+ if (!ctors->is_empty ())
{
gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
- static_ctors.qsort (compare_ctor);
- build_cdtor (/*ctor_p=*/true, static_ctors);
+ ctors->qsort (compare_ctor);
+ build_cdtor (/*ctor_p=*/true, *ctors);
}
- if (!static_dtors.is_empty ())
+ if (!dtors->is_empty ())
{
gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
- static_dtors.qsort (compare_dtor);
- build_cdtor (/*ctor_p=*/false, static_dtors);
+ dtors->qsort (compare_dtor);
+ build_cdtor (/*ctor_p=*/false, *dtors);
}
}
static unsigned int
ipa_cdtor_merge (void)
{
+ /* A vector of FUNCTION_DECLs declared as static constructors. */
+ auto_vec<tree, 20> ctors;
+ /* A vector of FUNCTION_DECLs declared as static destructors. */
+ auto_vec<tree, 20> dtors;
struct cgraph_node *node;
FOR_EACH_DEFINED_FUNCTION (node)
if (DECL_STATIC_CONSTRUCTOR (node->decl)
|| DECL_STATIC_DESTRUCTOR (node->decl))
- record_cdtor_fn (node);
- build_cdtor_fns ();
- static_ctors.release ();
- static_dtors.release ();
+ record_cdtor_fn (node, &ctors, &dtors);
+ build_cdtor_fns (&ctors, &dtors);
return 0;
}
/* Perform the pass when we have no ctors/dtors support
or at LTO time to merge multiple constructors into single
function. */
- return !targetm.have_ctors_dtors || (optimize && in_lto_p);
+ return !targetm.have_ctors_dtors || in_lto_p;
}
} // anon namespace
function = BOTTOM;
}
else
- function = meet (function, dyn_cast <varpool_node *> (ref->referring), single_user_map);
+ function = meet (function, dyn_cast <varpool_node *> (ref->referring),
+ single_user_map);
}
return function;
}
/* Pass setting used_by_single_function flag.
- This flag is set on variable when there is only one function that may possibly
- referr to it. */
+ This flag is set on variable when there is only one function that may
+ possibly refer to it. */
static unsigned int
ipa_single_use (void)
single_user_map.put (var, user);
/* Enqueue all aliases for re-processing. */
- for (i = 0; var->iterate_referring (i, ref); i++)
- if (ref->use == IPA_REF_ALIAS
- && !ref->referring->aux)
+ for (i = 0; var->iterate_direct_aliases (i, ref); i++)
+ if (!ref->referring->aux)
{
ref->referring->aux = first;
first = dyn_cast <varpool_node *> (ref->referring);
{
if (var->aux != BOTTOM)
{
-#ifdef ENABLE_CHECKING
- if (!single_user_map.get (var))
- gcc_assert (single_user_map.get (var));
-#endif
+ /* Not having the single user known means that the VAR is
+ unreachable. Either someone forgot to remove unreachable
+ variables or the reachability here is wrong. */
+
+ gcc_checking_assert (single_user_map.get (var));
+
if (dump_file)
{
- fprintf (dump_file, "Variable %s/%i is used by single function\n",
- var->name (), var->order);
+ fprintf (dump_file, "Variable %s is used by single function\n",
+ var->dump_name ());
}
var->used_by_single_function = true;
}
{}
/* opt_pass methods: */
- virtual bool gate (function *);
virtual unsigned int execute (function *) { return ipa_single_use (); }
}; // class pass_ipa_single_use
-bool
-pass_ipa_single_use::gate (function *)
-{
- return optimize;
-}
-
} // anon namespace
ipa_opt_pass_d *
{
return new pass_ipa_single_use (ctxt);
}
+
+/* Materialize all clones. */
+
+namespace {
+
+const pass_data pass_data_materialize_all_clones =
+{
+ SIMPLE_IPA_PASS, /* type */
+ "materialize-all-clones", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_IPA_OPT, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+};
+
+class pass_materialize_all_clones : public simple_ipa_opt_pass
+{
+public:
+ pass_materialize_all_clones (gcc::context *ctxt)
+ : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ virtual unsigned int execute (function *)
+ {
+ symtab->materialize_all_clones ();
+ return 0;
+ }
+
+}; // class pass_materialize_all_clones
+
+} // anon namespace
+
+simple_ipa_opt_pass *
+make_pass_materialize_all_clones (gcc::context *ctxt)
+{
+ return new pass_materialize_all_clones (ctxt);
+}