/* Callgraph based analysis of static variables.
- Copyright (C) 2004-2014 Free Software Foundation, Inc.
+ Copyright (C) 2004-2016 Free Software Foundation, Inc.
Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "target.h"
#include "tree.h"
-#include "print-tree.h"
+#include "gimple.h"
+#include "tree-pass.h"
+#include "tree-streamer.h"
+#include "cgraph.h"
+#include "diagnostic.h"
#include "calls.h"
-#include "predict.h"
-#include "vec.h"
-#include "hashtab.h"
-#include "hash-set.h"
-#include "machmode.h"
-#include "hard-reg-set.h"
-#include "input.h"
-#include "function.h"
-#include "dominance.h"
-#include "cfg.h"
#include "cfganal.h"
-#include "basic-block.h"
-#include "tree-ssa-alias.h"
-#include "internal-fn.h"
#include "tree-eh.h"
-#include "gimple-expr.h"
-#include "is-a.h"
-#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
-#include "tree-inline.h"
-#include "tree-pass.h"
#include "langhooks.h"
-#include "hash-map.h"
-#include "plugin-api.h"
-#include "ipa-ref.h"
-#include "cgraph.h"
#include "ipa-utils.h"
-#include "flags.h"
-#include "diagnostic.h"
#include "gimple-pretty-print.h"
-#include "langhooks.h"
-#include "target.h"
-#include "lto-streamer.h"
-#include "data-streamer.h"
-#include "tree-streamer.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
/* Wrapper around check_decl for loads in local mode. */
static bool
-check_load (gimple, tree op, tree, void *data)
+check_load (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false, false);
/* Wrapper around check_decl for stores in local mode. */
static bool
-check_store (gimple, tree op, tree, void *data)
+check_store (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, false);
/* Wrapper around check_decl for loads in ipa mode. */
static bool
-check_ipa_load (gimple, tree op, tree, void *data)
+check_ipa_load (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false, true);
/* Wrapper around check_decl for stores in ipa mode. */
static bool
-check_ipa_store (gimple, tree op, tree, void *data)
+check_ipa_store (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, true);
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
- gimple stmt = gsi_stmt (*gsip);
+ gimple *stmt = gsi_stmt (*gsip);
if (is_gimple_debug (stmt))
return;
+ /* Do consider clobber as side effects before IPA, so we rather inline
+ C++ destructors and keep clobber semantics than eliminate them.
+
+ TODO: We may get smarter during early optimizations on these and let
+ functions containing only clobbers to be optimized more. This is a common
+ case of C++ destructors. */
+
+ if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
+ return;
+
if (dump_file)
{
fprintf (dump_file, " scanning: ");
}
}
+/* Edge predicate for the nothrow propagation: return true (ignore the
+   edge) unless the call can throw externally and the callee, resolved
+   through aliases and thunks, is neither known nothrow nor interposable.  */
static bool
-ignore_edge (struct cgraph_edge *e)
+ignore_edge_for_nothrow (struct cgraph_edge *e)
{
- return (!e->can_throw_external);
+ if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
+ return true;
+
+ enum availability avail;
+ cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail);
+ return (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl));
}
/* Return true if NODE is self recursive function.
return false;
}
+/* Return true if N is a static constructor or destructor that is not
+   already known const or pure.  Callers use this to learn whether newly
+   discovered const/pure flags may make such a function unreachable, in
+   which case it has to be removed.  The void * argument only satisfies
+   the call_for_symbol_and_aliases callback signature.  */
+
+static bool
+cdtor_p (cgraph_node *n, void *)
+{
+ if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
+ return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
+ return false;
+}
+
+/* Edge predicate for ipa_reduced_postorder in the pure/const propagation:
+   return true (ignore the edge) when the callee can be interposed at link
+   time, because nothing can then be assumed about its pure/const state.  */
+
+static bool
+ignore_edge_for_pure_const (struct cgraph_edge *e)
+{
+ enum availability avail;
+ e->callee->function_or_virtual_thunk_symbol (&avail);
+ return (avail <= AVAIL_INTERPOSABLE);
+}
+
+
/* Produce transitive closure over the callgraph and compute pure/const
attributes. */
-static void
+static bool
propagate_pure_const (void)
{
struct cgraph_node *node;
int order_pos;
int i;
struct ipa_dfs_info * w_info;
+ bool remove_p = false;
- order_pos = ipa_reduced_postorder (order, true, false, NULL);
+ order_pos = ipa_reduced_postorder (order, true, false,
+ ignore_edge_for_pure_const);
if (dump_file)
{
cgraph_node::dump_cgraph (dump_file);
if (pure_const_state == IPA_NEITHER)
break;
- /* For overwritable nodes we can not assume anything. */
+ /* For interposable nodes we can not assume anything. */
if (w->get_availability () == AVAIL_INTERPOSABLE)
{
worse_state (&pure_const_state, &looping,
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file,
- " Overwritable. state %s looping %i\n",
+ " Interposable. state %s looping %i\n",
pure_const_names[w_l->state_previously_known],
w_l->looping_previously_known);
}
looping = true;
/* Now walk the edges and merge in callee properties. */
- for (e = w->callees; e; e = e->next_callee)
+ for (e = w->callees; e && pure_const_state != IPA_NEITHER;
+ e = e->next_callee)
{
enum availability avail;
struct cgraph_node *y = e->callee->
if (pure_const_state == IPA_NEITHER)
break;
}
- if (pure_const_state == IPA_NEITHER)
- break;
/* Now process the indirect call. */
- for (ie = w->indirect_calls; ie; ie = ie->next_callee)
+ for (ie = w->indirect_calls;
+ ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
{
enum pure_const_state_e edge_state = IPA_CONST;
bool edge_looping = false;
if (pure_const_state == IPA_NEITHER)
break;
}
- if (pure_const_state == IPA_NEITHER)
- break;
/* And finally all loads and stores. */
- for (i = 0; w->iterate_reference (i, ref); i++)
+ for (i = 0; w->iterate_reference (i, ref)
+ && pure_const_state != IPA_NEITHER; i++)
{
enum pure_const_state_e ref_state = IPA_CONST;
bool ref_looping = false;
&& this_state > w_l->state_previously_known)
{
this_state = w_l->state_previously_known;
- this_looping |= w_l->looping_previously_known;
+ if (this_state == IPA_NEITHER)
+ this_looping = w_l->looping_previously_known;
}
if (!this_looping && self_recursive_p (w))
this_looping = true;
this_looping ? "looping " : "",
w->name ());
}
+ remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
+ NULL, true);
w->set_const_flag (true, this_looping);
break;
this_looping ? "looping " : "",
w->name ());
}
+ remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
+ NULL, true);
w->set_pure_flag (true, this_looping);
break;
ipa_free_postorder_info ();
free (order);
+ return remove_p;
}
/* Produce transitive closure over the callgraph and compute nothrow
int i;
struct ipa_dfs_info * w_info;
- order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
+ order_pos = ipa_reduced_postorder (order, true, false,
+ ignore_edge_for_nothrow);
if (dump_file)
{
cgraph_node::dump_cgraph (dump_file);
while (w && !can_throw)
{
struct cgraph_edge *e, *ie;
- funct_state w_l = get_function_state (w);
- if (w_l->can_throw
- || w->get_availability () == AVAIL_INTERPOSABLE)
- can_throw = true;
-
- for (e = w->callees; e && !can_throw; e = e->next_callee)
+ if (!TREE_NOTHROW (w->decl))
{
- enum availability avail;
- struct cgraph_node *y = e->callee->
- function_or_virtual_thunk_symbol (&avail);
+ funct_state w_l = get_function_state (w);
- if (avail > AVAIL_INTERPOSABLE)
+ if (w_l->can_throw
+ || w->get_availability () == AVAIL_INTERPOSABLE)
+ can_throw = true;
+
+ for (e = w->callees; e && !can_throw; e = e->next_callee)
{
- funct_state y_l = get_function_state (y);
+ enum availability avail;
+
+ if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
+ continue;
+
+ struct cgraph_node *y = e->callee->
+ function_or_virtual_thunk_symbol (&avail);
- if (y_l->can_throw && !TREE_NOTHROW (w->decl)
- && e->can_throw_external)
+ /* We can use info about the callee only if we know it can
+ not be interposed. */
+ if (avail <= AVAIL_INTERPOSABLE
+ || (!TREE_NOTHROW (y->decl)
+ && get_function_state (y)->can_throw))
can_throw = true;
}
- else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
- can_throw = true;
+ for (ie = w->indirect_calls; ie && !can_throw;
+ ie = ie->next_callee)
+ if (ie->can_throw_external
+ && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
+ can_throw = true;
}
- for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
- if (ie->can_throw_external)
- can_throw = true;
w_info = (struct ipa_dfs_info *) w->aux;
w = w_info->next_cycle;
}
execute (function *)
{
struct cgraph_node *node;
+ bool remove_p;
symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
/* Nothrow makes more function to not lead to return and improve
later analysis. */
propagate_nothrow ();
- propagate_pure_const ();
+ remove_p = propagate_pure_const ();
/* Cleanup. */
FOR_EACH_FUNCTION (node)
if (has_function_state (node))
free (get_function_state (node));
funct_state_vec.release ();
- return 0;
+ return remove_p ? TODO_remove_functions : 0;
}
static bool
if (node->get_availability () <= AVAIL_INTERPOSABLE)
{
if (dump_file)
- fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
+ fprintf (dump_file, "Function is not available or interposable; not analyzing.\n");
return true;
}
return false;
{
return new pass_warn_function_noreturn (ctxt);
}
+
+/* Simple local pass for nothrow discovery reusing the analysis from
+ ipa_pure_const. This pass is effective when executed together with
+ other optimization passes in early optimization pass queue. */
+
+namespace {
+
+/* Pass descriptor for the local nothrow discovery pass.  It reuses the
+   ipa-pure-const timevar and requests no properties or TODOs of its own.  */
+const pass_data pass_data_nothrow =
+{
+ GIMPLE_PASS, /* type */
+ "nothrow", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_IPA_PURE_CONST, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+};
+
+/* Gimple pass wrapping the per-function nothrow analysis in
+   pass_nothrow::execute; gated on any optimization level.  */
+class pass_nothrow : public gimple_opt_pass
+{
+public:
+ pass_nothrow (gcc::context *ctxt)
+ : gimple_opt_pass (pass_data_nothrow, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ opt_pass * clone () { return new pass_nothrow (m_ctxt); }
+ virtual bool gate (function *) { return optimize; }
+ virtual unsigned int execute (function *);
+
+}; // class pass_nothrow
+
+/* Local analysis: mark the current function nothrow when no statement in
+   its body can throw externally.  Recursive calls to the function itself
+   are disregarded, since any throwing execution would have to reach a
+   non-recursive throwing statement eventually.  Returns a TODO_* flag
+   mask (always 0 here).  */
+
+unsigned int
+pass_nothrow::execute (function *)
+{
+  struct cgraph_node *node;
+  basic_block this_block;
+
+  /* Nothing to discover if the declaration is already nothrow.  */
+  if (TREE_NOTHROW (current_function_decl))
+    return 0;
+
+  node = cgraph_node::get (current_function_decl);
+
+  /* We run during lowering; we can not really use availability yet.
+     An interposable definition may be replaced at link time by a body
+     that throws, so do not analyze it.  (Reuse NODE instead of looking
+     the symbol up a second time.)  */
+  if (node->get_availability () <= AVAIL_INTERPOSABLE)
+    {
+      if (dump_file)
+	fprintf (dump_file, "Function is interposable;"
+		 " not analyzing.\n");
+      /* Was "return true;" — execute returns a mask of TODO_* flags, so
+	 the implicit bool->unsigned conversion set a stray flag bit.  */
+      return 0;
+    }
+
+  FOR_EACH_BB_FN (this_block, cfun)
+    {
+      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
+	   !gsi_end_p (gsi);
+	   gsi_next (&gsi))
+	if (stmt_can_throw_external (gsi_stmt (gsi)))
+	  {
+	    /* Self-recursive calls do not by themselves make the
+	       function throwing.  */
+	    if (is_gimple_call (gsi_stmt (gsi)))
+	      {
+		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
+		if (callee_t && recursive_call_p (current_function_decl,
+						  callee_t))
+		  continue;
+	      }
+
+	    if (dump_file)
+	      {
+		fprintf (dump_file, "Statement can throw: ");
+		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
+	      }
+	    return 0;
+	  }
+    }
+
+  node->set_nothrow_flag (true);
+  if (dump_file)
+    fprintf (dump_file, "Function found to be nothrow: %s\n",
+	     current_function_name ());
+  return 0;
+}
+
+} // anon namespace
+
+/* Factory used by the pass manager (passes.def) to instantiate the
+   nothrow discovery pass.  */
+gimple_opt_pass *
+make_pass_nothrow (gcc::context *ctxt)
+{
+ return new pass_nothrow (ctxt);
+}