/* Callgraph based analysis of static variables.
- Copyright (C) 2004-2013 Free Software Foundation, Inc.
+ Copyright (C) 2004-2015 Free Software Foundation, Inc.
Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
This file is part of GCC.
#include "system.h"
#include "coretypes.h"
#include "tm.h"
+#include "hash-set.h"
+#include "machmode.h"
+#include "vec.h"
+#include "double-int.h"
+#include "input.h"
+#include "alias.h"
+#include "symtab.h"
+#include "wide-int.h"
+#include "inchash.h"
#include "tree.h"
+#include "fold-const.h"
#include "print-tree.h"
#include "calls.h"
+#include "predict.h"
+#include "hard-reg-set.h"
+#include "input.h"
+#include "function.h"
+#include "dominance.h"
+#include "cfg.h"
+#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
+#include "hash-map.h"
+#include "plugin-api.h"
+#include "ipa-ref.h"
+#include "cgraph.h"
#include "ipa-utils.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
-
-static struct pointer_set_t *visited_nodes;
+#include "varasm.h"
/* Lattice values for const and pure functions. Everything starts out
being const, then may drop to pure and then neither depending on
bool looping;
bool can_throw;
+
+ /* If function can call free, munmap or otherwise make previously
+ non-trapping memory accesses trapping. */
+ bool can_free;
};
/* State used when we know nothing about function. */
static struct funct_state_d varying_state
- = { IPA_NEITHER, IPA_NEITHER, true, true, true };
+ = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };
typedef struct funct_state_d * funct_state;
static vec<funct_state> funct_state_vec;
-/* Holders of ipa cgraph hooks: */
-static struct cgraph_node_hook_list *function_insertion_hook_holder;
-static struct cgraph_2node_hook_list *node_duplication_hook_holder;
-static struct cgraph_node_hook_list *node_removal_hook_holder;
+static bool gate_pure_const (void);
+
+namespace {
+
+const pass_data pass_data_ipa_pure_const =
+{
+ IPA_PASS, /* type */
+ "pure-const", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_IPA_PURE_CONST, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+};
+
+class pass_ipa_pure_const : public ipa_opt_pass_d
+{
+public:
+ pass_ipa_pure_const(gcc::context *ctxt);
+
+ /* opt_pass methods: */
+ bool gate (function *) { return gate_pure_const (); }
+ unsigned int execute (function *fun);
+
+ void register_hooks (void);
+
+private:
+ bool init_p;
+
+ /* Holders of ipa cgraph hooks: */
+ struct cgraph_node_hook_list *function_insertion_hook_holder;
+ struct cgraph_2node_hook_list *node_duplication_hook_holder;
+ struct cgraph_node_hook_list *node_removal_hook_holder;
+
+}; // class pass_ipa_pure_const
+
+} // anon namespace
/* Try to guess if function body will always be visible to compiler
when compiling the call and whether compiler will be able
/* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
is true if the function is known to be finite. The diagnostic is
- controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
+ controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
OPTION, this function may initialize it and it is always returned
by the function. */
-static struct pointer_set_t *
+static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
- struct pointer_set_t *warned_about,
+ hash_set<tree> *warned_about,
const char * attrib_name)
{
if (!option_enabled (option, &global_options))
return warned_about;
if (!warned_about)
- warned_about = pointer_set_create ();
- if (pointer_set_contains (warned_about, decl))
+ warned_about = new hash_set<tree>;
+ if (warned_about->contains (decl))
return warned_about;
- pointer_set_insert (warned_about, decl);
+ warned_about->add (decl);
warning_at (DECL_SOURCE_LOCATION (decl),
option,
known_finite
static void
warn_function_pure (tree decl, bool known_finite)
{
- static struct pointer_set_t *warned_about;
+ static hash_set<tree> *warned_about;
warned_about
= suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
static void
warn_function_const (tree decl, bool known_finite)
{
- static struct pointer_set_t *warned_about;
+ static hash_set<tree> *warned_about;
warned_about
= suggest_attribute (OPT_Wsuggest_attribute_const, decl,
known_finite, warned_about, "const");
static void
warn_function_noreturn (tree decl)
{
- static struct pointer_set_t *warned_about;
+ static hash_set<tree> *warned_about;
if (!lang_hooks.missing_noreturn_ok_p (decl)
&& targetm.warn_func_return (decl))
warned_about
*looping = true;
*state = IPA_CONST;
return true;
+ default:
+ break;
}
return false;
}
the entire call expression. */
static void
-check_call (funct_state local, gimple call, bool ipa)
+check_call (funct_state local, gcall *call, bool ipa)
{
int flags = gimple_call_flags (call);
tree callee_t = gimple_call_fndecl (call);
enum pure_const_state_e call_state;
bool call_looping;
+ if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
+ && !nonfreeing_call_p (call))
+ local->can_free = true;
+
if (special_builtin_state (&call_state, &call_looping, callee_t))
{
worse_state (&local->pure_const_state, &local->looping,
break;
}
}
+ else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
+ local->can_free = true;
/* When not in IPA mode, we can still handle self recursion. */
if (!ipa && callee_t
/* Wrapper around check_decl for loads in local mode. */
static bool
-check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
+check_load (gimple, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false, false);
/* Wrapper around check_decl for stores in local mode. */
static bool
-check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
+check_store (gimple, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, false);
/* Wrapper around check_decl for loads in ipa mode. */
static bool
-check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
+check_ipa_load (gimple, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false, true);
/* Wrapper around check_decl for stores in ipa mode. */
static bool
-check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
+check_ipa_store (gimple, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, true);
if (is_gimple_debug (stmt))
return;
+ /* Do consider clobber as side effects before IPA, so we rather inline
+ C++ destructors and keep clobber semantics than eliminate them.
+
+ TODO: We may get smarter during early optimizations on these and let
+ functions containing only clobbers to be optimized more. This is a common
+ case of C++ destructors. */
+
+ if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
+ return;
+
if (dump_file)
{
fprintf (dump_file, " scanning: ");
switch (gimple_code (stmt))
{
case GIMPLE_CALL:
- check_call (local, stmt, ipa);
+ check_call (local, as_a <gcall *> (stmt), ipa);
break;
case GIMPLE_LABEL:
- if (DECL_NONLOCAL (gimple_label_label (stmt)))
+ if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
/* Target of long jump. */
{
if (dump_file)
}
break;
case GIMPLE_ASM:
- if (gimple_asm_clobbers_memory_p (stmt))
+ if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
{
if (dump_file)
fprintf (dump_file, " memory asm clobber is not const/pure\n");
/* Abandon all hope, ye who enter here. */
local->pure_const_state = IPA_NEITHER;
+ local->can_free = true;
}
- if (gimple_asm_volatile_p (stmt))
+ if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
{
if (dump_file)
fprintf (dump_file, " volatile is not const/pure\n");
/* Abandon all hope, ye who enter here. */
local->pure_const_state = IPA_NEITHER;
- local->looping = true;
+ local->looping = true;
+ local->can_free = true;
}
return;
default:
l->looping_previously_known = true;
l->looping = false;
l->can_throw = false;
+ l->can_free = false;
state_from_flags (&l->state_previously_known, &l->looping_previously_known,
flags_from_decl_or_type (fn->decl),
- cgraph_node_cannot_return (fn));
+ fn->cannot_return_p ());
if (fn->thunk.thunk_p || fn->alias)
{
/* Thunk gets propagated through, so nothing interesting happens. */
gcc_assert (ipa);
+ if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
+ l->pure_const_state = IPA_NEITHER;
return l;
}
gsi_next (&gsi))
{
check_stmt (&gsi, l, ipa);
- if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
+ if (l->pure_const_state == IPA_NEITHER
+ && l->looping
+ && l->can_throw
+ && l->can_free)
goto end;
}
}
fprintf (dump_file, "Function is locally const.\n");
if (l->pure_const_state == IPA_PURE)
fprintf (dump_file, "Function is locally pure.\n");
+ if (l->can_free)
+ fprintf (dump_file, "Function can locally free.\n");
}
return l;
}
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
- if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
+ if (node->get_availability () < AVAIL_INTERPOSABLE)
return;
/* There are some shared nodes, in particular the initializers on
static declarations. We do not need to scan them more than once
since all we would be interested in are the addressof
operations. */
- visited_nodes = pointer_set_create ();
- if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
+ if (node->get_availability () > AVAIL_INTERPOSABLE
+ && opt_for_fn (node->decl, flag_ipa_pure_const))
set_function_state (node, analyze_function (node, true));
- pointer_set_destroy (visited_nodes);
- visited_nodes = NULL;
}
/* Called when new clone is inserted to callgraph late. */
}
\f
-static void
+void
+pass_ipa_pure_const::
register_hooks (void)
{
- static bool init_p = false;
-
if (init_p)
return;
init_p = true;
node_removal_hook_holder =
- cgraph_add_node_removal_hook (&remove_node_data, NULL);
+ symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
node_duplication_hook_holder =
- cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
+ symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
function_insertion_hook_holder =
- cgraph_add_function_insertion_hook (&add_new_function, NULL);
+ symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}
{
struct cgraph_node *node;
- register_hooks ();
-
- /* There are some shared nodes, in particular the initializers on
- static declarations. We do not need to scan them more than once
- since all we would be interested in are the addressof
- operations. */
- visited_nodes = pointer_set_create ();
+ pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
+ pass->register_hooks ();
/* Process all of the functions.
- We process AVAIL_OVERWRITABLE functions. We can not use the results
+ We process AVAIL_INTERPOSABLE functions. We can not use the results
by default, but the info can be used at LTO with -fwhole-program or
when function got cloned and the clone is AVAILABLE. */
FOR_EACH_DEFINED_FUNCTION (node)
- if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ if (node->get_availability () >= AVAIL_INTERPOSABLE
+ && opt_for_fn (node->decl, flag_ipa_pure_const))
set_function_state (node, analyze_function (node, true));
-
- pointer_set_destroy (visited_nodes);
- visited_nodes = NULL;
}
bp_pack_value (&bp, fs->looping_previously_known, 1);
bp_pack_value (&bp, fs->looping, 1);
bp_pack_value (&bp, fs->can_throw, 1);
+ bp_pack_value (&bp, fs->can_free, 1);
streamer_write_bitpack (&bp);
}
}
struct lto_file_decl_data *file_data;
unsigned int j = 0;
- register_hooks ();
+ pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
+ pass->register_hooks ();
+
while ((file_data = file_data_vec[j++]))
{
const char *data;
fs = XCNEW (struct funct_state_d);
index = streamer_read_uhwi (ib);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
set_function_state (node, fs);
/* Note that the flags must be read in the opposite
fs->looping_previously_known = bp_unpack_value (&bp, 1);
fs->looping = bp_unpack_value (&bp, 1);
fs->can_throw = bp_unpack_value (&bp, 1);
+ fs->can_free = bp_unpack_value (&bp, 1);
if (dump_file)
{
int flags = flags_from_decl_or_type (node->decl);
fprintf (dump_file," function is previously known looping\n");
if (fs->can_throw)
fprintf (dump_file," function is locally throwing\n");
+ if (fs->can_free)
+ fprintf (dump_file," function can locally free\n");
}
}
{
struct cgraph_edge *e;
for (e = node->callees; e; e = e->next_callee)
- if (cgraph_function_node (e->callee, NULL) == node)
+ if (e->callee->function_symbol () == node)
return true;
return false;
}
+/* Return true if N is cdtor that is not const or pure. In this case we may
+ need to remove unreachable function if it is marked const/pure. */
+
+static bool
+cdtor_p (cgraph_node *n, void *)
+{
+ if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
+ return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
+ return false;
+}
+
/* Produce transitive closure over the callgraph and compute pure/const
attributes. */
-static void
+static bool
propagate_pure_const (void)
{
struct cgraph_node *node;
struct cgraph_node *w;
struct cgraph_node **order =
- XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
+ XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
int order_pos;
int i;
struct ipa_dfs_info * w_info;
+ bool remove_p = false;
order_pos = ipa_reduced_postorder (order, true, false, NULL);
if (dump_file)
{
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
ipa_print_order (dump_file, "reduced", order, order_pos);
}
struct cgraph_edge *e;
struct cgraph_edge *ie;
int i;
- struct ipa_ref *ref;
+ struct ipa_ref *ref = NULL;
funct_state w_l = get_function_state (w);
if (dump_file && (dump_flags & TDF_DETAILS))
break;
/* For overwritable nodes we can not assume anything. */
- if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
+ if (w->get_availability () == AVAIL_INTERPOSABLE)
{
worse_state (&pure_const_state, &looping,
w_l->state_previously_known,
for (e = w->callees; e; e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->
+ function_or_virtual_thunk_symbol (&avail);
enum pure_const_state_e edge_state = IPA_CONST;
bool edge_looping = false;
e->callee->name (),
e->callee->order);
}
- if (avail > AVAIL_OVERWRITABLE)
+ if (avail > AVAIL_INTERPOSABLE)
{
funct_state y_l = get_function_state (y);
if (dump_file && (dump_flags & TDF_DETAILS))
y_l->looping);
}
if (y_l->pure_const_state > IPA_PURE
- && cgraph_edge_cannot_lead_to_return (e))
+ && e->cannot_lead_to_return_p ())
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
else
state_from_flags (&edge_state, &edge_looping,
flags_from_decl_or_type (y->decl),
- cgraph_edge_cannot_lead_to_return (e));
+ e->cannot_lead_to_return_p ());
/* Merge the results with what we already know. */
better_state (&edge_state, &edge_looping,
fprintf (dump_file, " Indirect call");
state_from_flags (&edge_state, &edge_looping,
ie->indirect_info->ecf_flags,
- cgraph_edge_cannot_lead_to_return (ie));
+ ie->cannot_lead_to_return_p ());
/* Merge the results with what we already know. */
better_state (&edge_state, &edge_looping,
w_l->state_previously_known,
break;
/* And finally all loads and stores. */
- for (i = 0; ipa_ref_list_reference_iterate (&w->ref_list, i, ref); i++)
+ for (i = 0; w->iterate_reference (i, ref); i++)
{
enum pure_const_state_e ref_state = IPA_CONST;
bool ref_looping = false;
{
case IPA_REF_LOAD:
/* readonly reads are safe. */
- if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
+ if (TREE_READONLY (ref->referred->decl))
break;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " nonreadonly global var read\n");
ref_state = IPA_PURE;
break;
case IPA_REF_STORE:
- if (ipa_ref_cannot_lead_to_return (ref))
+ if (ref->cannot_lead_to_return ())
break;
ref_state = IPA_NEITHER;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " global var write\n");
break;
case IPA_REF_ADDR:
+ case IPA_REF_CHKP:
break;
+ default:
+ gcc_unreachable ();
}
better_state (&ref_state, &ref_looping,
w_l->state_previously_known,
pure_const_names [pure_const_state],
looping);
+ /* Find the worst state of can_free for any node in the cycle. */
+ bool can_free = false;
+ w = node;
+ while (w && !can_free)
+ {
+ struct cgraph_edge *e;
+ funct_state w_l = get_function_state (w);
+
+ if (w_l->can_free
+ || w->get_availability () == AVAIL_INTERPOSABLE
+ || w->indirect_calls)
+ can_free = true;
+
+ for (e = w->callees; e && !can_free; e = e->next_callee)
+ {
+ enum availability avail;
+ struct cgraph_node *y = e->callee->
+ function_or_virtual_thunk_symbol (&avail);
+
+ if (avail > AVAIL_INTERPOSABLE)
+ can_free = get_function_state (y)->can_free;
+ else
+ can_free = true;
+ }
+ w_info = (struct ipa_dfs_info *) w->aux;
+ w = w_info->next_cycle;
+ }
+
/* Copy back the region's pure_const_state which is shared by
all nodes in the region. */
w = node;
enum pure_const_state_e this_state = pure_const_state;
bool this_looping = looping;
+ w_l->can_free = can_free;
+ w->nonfreeing_fn = !can_free;
+ if (!can_free && dump_file)
+ fprintf (dump_file, "Function found not to call free: %s\n",
+ w->name ());
+
if (w_l->state_previously_known != IPA_NEITHER
&& this_state > w_l->state_previously_known)
{
w_l->pure_const_state = this_state;
w_l->looping = this_looping;
- switch (this_state)
- {
- case IPA_CONST:
- if (!TREE_READONLY (w->decl))
- {
- warn_function_const (w->decl, !this_looping);
- if (dump_file)
- fprintf (dump_file, "Function found to be %sconst: %s\n",
- this_looping ? "looping " : "",
- w->name ());
- }
- cgraph_set_const_flag (w, true, this_looping);
- break;
+ /* Inline clones share declaration with their offline copies;
+ do not modify their declarations since the offline copy may
+ be different. */
+ if (!w->global.inlined_to)
+ switch (this_state)
+ {
+ case IPA_CONST:
+ if (!TREE_READONLY (w->decl))
+ {
+ warn_function_const (w->decl, !this_looping);
+ if (dump_file)
+ fprintf (dump_file, "Function found to be %sconst: %s\n",
+ this_looping ? "looping " : "",
+ w->name ());
+ }
+ remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
+ NULL, true);
+ w->set_const_flag (true, this_looping);
+ break;
- case IPA_PURE:
- if (!DECL_PURE_P (w->decl))
- {
- warn_function_pure (w->decl, !this_looping);
- if (dump_file)
- fprintf (dump_file, "Function found to be %spure: %s\n",
- this_looping ? "looping " : "",
- w->name ());
- }
- cgraph_set_pure_flag (w, true, this_looping);
- break;
+ case IPA_PURE:
+ if (!DECL_PURE_P (w->decl))
+ {
+ warn_function_pure (w->decl, !this_looping);
+ if (dump_file)
+ fprintf (dump_file, "Function found to be %spure: %s\n",
+ this_looping ? "looping " : "",
+ w->name ());
+ }
+ remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
+ NULL, true);
+ w->set_pure_flag (true, this_looping);
+ break;
- default:
- break;
- }
+ default:
+ break;
+ }
w_info = (struct ipa_dfs_info *) w->aux;
w = w_info->next_cycle;
}
ipa_free_postorder_info ();
free (order);
+ return remove_p;
}
/* Produce transitive closure over the callgraph and compute nothrow
struct cgraph_node *node;
struct cgraph_node *w;
struct cgraph_node **order =
- XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
+ XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
int order_pos;
int i;
struct ipa_dfs_info * w_info;
order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
if (dump_file)
{
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
}
/* Find the worst state for any node in the cycle. */
w = node;
- while (w)
+ while (w && !can_throw)
{
struct cgraph_edge *e, *ie;
funct_state w_l = get_function_state (w);
if (w_l->can_throw
- || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
+ || w->get_availability () == AVAIL_INTERPOSABLE)
can_throw = true;
- if (can_throw)
- break;
-
- for (e = w->callees; e; e = e->next_callee)
+ for (e = w->callees; e && !can_throw; e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->
+ function_or_virtual_thunk_symbol (&avail);
- if (avail > AVAIL_OVERWRITABLE)
+ if (avail > AVAIL_INTERPOSABLE)
{
funct_state y_l = get_function_state (y);
- if (can_throw)
- break;
if (y_l->can_throw && !TREE_NOTHROW (w->decl)
&& e->can_throw_external)
can_throw = true;
else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
can_throw = true;
}
- for (ie = node->indirect_calls; ie; ie = ie->next_callee)
+ for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
if (ie->can_throw_external)
- {
- can_throw = true;
- break;
- }
+ can_throw = true;
w_info = (struct ipa_dfs_info *) w->aux;
w = w_info->next_cycle;
}
funct_state w_l = get_function_state (w);
if (!can_throw && !TREE_NOTHROW (w->decl))
{
- cgraph_set_nothrow_flag (w, true);
- if (dump_file)
- fprintf (dump_file, "Function found to be nothrow: %s\n",
- w->name ());
+ /* Inline clones share declaration with their offline copies;
+ do not modify their declarations since the offline copy may
+ be different. */
+ if (!w->global.inlined_to)
+ {
+ w->set_nothrow_flag (true);
+ if (dump_file)
+ fprintf (dump_file, "Function found to be nothrow: %s\n",
+ w->name ());
+ }
}
else if (can_throw && !TREE_NOTHROW (w->decl))
w_l->can_throw = true;
/* Produce the global information by performing a transitive closure
on the local information that was produced by generate_summary. */
-static unsigned int
-propagate (void)
+unsigned int
+pass_ipa_pure_const::
+execute (function *)
{
struct cgraph_node *node;
+ bool remove_p;
- cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
- cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
- cgraph_remove_node_removal_hook (node_removal_hook_holder);
+ symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
+ symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
+ symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
/* Nothrow makes more function to not lead to return and improve
later analysis. */
propagate_nothrow ();
- propagate_pure_const ();
+ remove_p = propagate_pure_const ();
/* Cleanup. */
FOR_EACH_FUNCTION (node)
if (has_function_state (node))
free (get_function_state (node));
funct_state_vec.release ();
- return 0;
+ return remove_p ? TODO_remove_functions : 0;
}
static bool
gate_pure_const (void)
{
- return (flag_ipa_pure_const
- /* Don't bother doing anything if the program has errors. */
- && !seen_error ());
+ return flag_ipa_pure_const || in_lto_p;
}
-namespace {
-
-const pass_data pass_data_ipa_pure_const =
+pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
+ : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
+ pure_const_generate_summary, /* generate_summary */
+ pure_const_write_summary, /* write_summary */
+ pure_const_read_summary, /* read_summary */
+ NULL, /* write_optimization_summary */
+ NULL, /* read_optimization_summary */
+ NULL, /* stmt_fixup */
+ 0, /* function_transform_todo_flags_start */
+ NULL, /* function_transform */
+ NULL), /* variable_transform */
+ init_p(false),
+ function_insertion_hook_holder(NULL),
+ node_duplication_hook_holder(NULL),
+ node_removal_hook_holder(NULL)
{
- IPA_PASS, /* type */
- "pure-const", /* name */
- OPTGROUP_NONE, /* optinfo_flags */
- true, /* has_gate */
- true, /* has_execute */
- TV_IPA_PURE_CONST, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- 0, /* todo_flags_finish */
-};
-
-class pass_ipa_pure_const : public ipa_opt_pass_d
-{
-public:
- pass_ipa_pure_const (gcc::context *ctxt)
- : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
- pure_const_generate_summary, /* generate_summary */
- pure_const_write_summary, /* write_summary */
- pure_const_read_summary, /* read_summary */
- NULL, /* write_optimization_summary */
- NULL, /* read_optimization_summary */
- NULL, /* stmt_fixup */
- 0, /* function_transform_todo_flags_start */
- NULL, /* function_transform */
- NULL) /* variable_transform */
- {}
-
- /* opt_pass methods: */
- bool gate () { return gate_pure_const (); }
- unsigned int execute () { return propagate (); }
-
-}; // class pass_ipa_pure_const
-
-} // anon namespace
+}
ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
return true;
}
- if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ if (node->get_availability () <= AVAIL_INTERPOSABLE)
{
if (dump_file)
fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
ipa_pure_const. This pass is effective when executed together with
other optimization passes in early optimization pass queue. */
-static unsigned int
-local_pure_const (void)
+namespace {
+
+const pass_data pass_data_local_pure_const =
+{
+ GIMPLE_PASS, /* type */
+ "local-pure-const", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_IPA_PURE_CONST, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+};
+
+class pass_local_pure_const : public gimple_opt_pass
+{
+public:
+ pass_local_pure_const (gcc::context *ctxt)
+ : gimple_opt_pass (pass_data_local_pure_const, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
+ virtual bool gate (function *) { return gate_pure_const (); }
+ virtual unsigned int execute (function *);
+
+}; // class pass_local_pure_const
+
+unsigned int
+pass_local_pure_const::execute (function *fun)
{
bool changed = false;
funct_state l;
bool skip;
struct cgraph_node *node;
- node = cgraph_get_node (current_function_decl);
+ node = cgraph_node::get (current_function_decl);
skip = skip_function_for_local_pure_const (node);
if (!warn_suggest_attribute_const
&& !warn_suggest_attribute_pure
/* Do NORETURN discovery. */
if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
- && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 0)
+ && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
{
- warn_function_noreturn (cfun->decl);
+ warn_function_noreturn (fun->decl);
if (dump_file)
- fprintf (dump_file, "Function found to be noreturn: %s\n",
- current_function_name ());
+ fprintf (dump_file, "Function found to be noreturn: %s\n",
+ current_function_name ());
/* Update declaration and reduce profile to executed once. */
TREE_THIS_VOLATILE (current_function_decl) = 1;
if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
- node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
+ node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
changed = true;
}
warn_function_const (current_function_decl, !l->looping);
if (!skip)
{
- cgraph_set_const_flag (node, true, l->looping);
+ node->set_const_flag (true, l->looping);
changed = true;
}
if (dump_file)
{
if (!skip)
{
- cgraph_set_const_flag (node, true, false);
+ node->set_const_flag (true, false);
changed = true;
}
if (dump_file)
{
if (!skip)
{
- cgraph_set_pure_flag (node, true, l->looping);
+ node->set_pure_flag (true, l->looping);
changed = true;
}
warn_function_pure (current_function_decl, !l->looping);
{
if (!skip)
{
- cgraph_set_pure_flag (node, true, false);
+ node->set_pure_flag (true, false);
changed = true;
}
if (dump_file)
}
if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
{
- cgraph_set_nothrow_flag (node, true);
+ node->set_nothrow_flag (true);
changed = true;
if (dump_file)
fprintf (dump_file, "Function found to be nothrow: %s\n",
return 0;
}
+} // anon namespace
+
+gimple_opt_pass *
+make_pass_local_pure_const (gcc::context *ctxt)
+{
+ return new pass_local_pure_const (ctxt);
+}
+
+/* Emit noreturn warnings. */
+
namespace {
-const pass_data pass_data_local_pure_const =
+const pass_data pass_data_warn_function_noreturn =
{
GIMPLE_PASS, /* type */
- "local-pure-const", /* name */
+ "*warn_function_noreturn", /* name */
OPTGROUP_NONE, /* optinfo_flags */
- true, /* has_gate */
- true, /* has_execute */
- TV_IPA_PURE_CONST, /* tv_id */
- 0, /* properties_required */
+ TV_NONE, /* tv_id */
+ PROP_cfg, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
0, /* todo_flags_finish */
};
-class pass_local_pure_const : public gimple_opt_pass
+class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
- pass_local_pure_const (gcc::context *ctxt)
- : gimple_opt_pass (pass_data_local_pure_const, ctxt)
+ pass_warn_function_noreturn (gcc::context *ctxt)
+ : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
{}
/* opt_pass methods: */
- opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
- bool gate () { return gate_pure_const (); }
- unsigned int execute () { return local_pure_const (); }
+ virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
+ virtual unsigned int execute (function *fun)
+ {
+ if (!TREE_THIS_VOLATILE (current_function_decl)
+ && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
+ warn_function_noreturn (current_function_decl);
+ return 0;
+ }
-}; // class pass_local_pure_const
+}; // class pass_warn_function_noreturn
} // anon namespace
gimple_opt_pass *
-make_pass_local_pure_const (gcc::context *ctxt)
-{
- return new pass_local_pure_const (ctxt);
-}
-
-/* Emit noreturn warnings. */
-
-static unsigned int
-execute_warn_function_noreturn (void)
+make_pass_warn_function_noreturn (gcc::context *ctxt)
{
- if (!TREE_THIS_VOLATILE (current_function_decl)
- && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 0)
- warn_function_noreturn (current_function_decl);
- return 0;
+ return new pass_warn_function_noreturn (ctxt);
}
-static bool
-gate_warn_function_noreturn (void)
-{
- return warn_suggest_attribute_noreturn;
-}
+/* Simple local pass for nothrow discovery reusing the analysis from
+ ipa_pure_const. This pass is effective when executed together with
+ other optimization passes in early optimization pass queue. */
namespace {
-const pass_data pass_data_warn_function_noreturn =
+const pass_data pass_data_nothrow =
{
GIMPLE_PASS, /* type */
- "*warn_function_noreturn", /* name */
+ "nothrow", /* name */
OPTGROUP_NONE, /* optinfo_flags */
- true, /* has_gate */
- true, /* has_execute */
- TV_NONE, /* tv_id */
- PROP_cfg, /* properties_required */
+ TV_IPA_PURE_CONST, /* tv_id */
+ 0, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
0, /* todo_flags_finish */
};
-class pass_warn_function_noreturn : public gimple_opt_pass
+class pass_nothrow : public gimple_opt_pass
{
public:
- pass_warn_function_noreturn (gcc::context *ctxt)
- : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
+ pass_nothrow (gcc::context *ctxt)
+ : gimple_opt_pass (pass_data_nothrow, ctxt)
{}
/* opt_pass methods: */
- bool gate () { return gate_warn_function_noreturn (); }
- unsigned int execute () { return execute_warn_function_noreturn (); }
+ opt_pass * clone () { return new pass_nothrow (m_ctxt); }
+ virtual bool gate (function *) { return optimize; }
+ virtual unsigned int execute (function *);
-}; // class pass_warn_function_noreturn
+}; // class pass_nothrow
+
+unsigned int
+pass_nothrow::execute (function *)
+{
+ struct cgraph_node *node;
+ basic_block this_block;
+
+ if (TREE_NOTHROW (current_function_decl))
+ return 0;
+
+ node = cgraph_node::get (current_function_decl);
+
+ /* We run during lowering, we can not really use availability yet. */
+ if (cgraph_node::get (current_function_decl)->get_availability ()
+ <= AVAIL_INTERPOSABLE)
+ {
+ if (dump_file)
+ fprintf (dump_file, "Function is interposable;"
+ " not analyzing.\n");
+ return true;
+ }
+
+ FOR_EACH_BB_FN (this_block, cfun)
+ {
+ for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
+ if (stmt_can_throw_external (gsi_stmt (gsi)))
+ {
+ if (is_gimple_call (gsi_stmt (gsi)))
+ {
+ tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
+ if (callee_t && recursive_call_p (current_function_decl,
+ callee_t))
+ continue;
+ }
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "Statement can throw: ");
+ print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
+ }
+ return 0;
+ }
+ }
+
+ node->set_nothrow_flag (true);
+ if (dump_file)
+ fprintf (dump_file, "Function found to be nothrow: %s\n",
+ current_function_name ());
+ return 0;
+}
} // anon namespace
gimple_opt_pass *
-make_pass_warn_function_noreturn (gcc::context *ctxt)
+make_pass_nothrow (gcc::context *ctxt)
{
- return new pass_warn_function_noreturn (ctxt);
+ return new pass_nothrow (ctxt);
}
-
-