/* SCC value numbering for trees
- Copyright (C) 2006-2013 Free Software Foundation, Inc.
+ Copyright (C) 2006-2014 Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@dberlin.org>
This file is part of GCC.
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
+#include "hash-table.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "gimple-fold.h"
+#include "tree-eh.h"
+#include "gimple-expr.h"
+#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
-#include "hash-table.h"
#include "alloc-pool.h"
#include "flags.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
+#include "tree-cfg.h"
+#include "domwalk.h"
/* This algorithm is based on the SCC algorithm presented by Keith
Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
if (!is_gimple_assign (def_stmt))
return vn->valnum;
- /* FIXME tuples. This is incomplete and likely will miss some
- simplifications. */
+ /* Note that we can valueize here because we clear the cached
+ simplified expressions after each optimistic iteration. */
code = gimple_assign_rhs_code (def_stmt);
switch (TREE_CODE_CLASS (code))
{
0)) == SSA_NAME)
expr = fold_build1 (code,
gimple_expr_type (def_stmt),
- TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
+ vn_valueize (TREE_OPERAND
+ (gimple_assign_rhs1 (def_stmt), 0)));
break;
case tcc_unary:
expr = fold_build1 (code,
gimple_expr_type (def_stmt),
- gimple_assign_rhs1 (def_stmt));
+ vn_valueize (gimple_assign_rhs1 (def_stmt)));
break;
case tcc_binary:
expr = fold_build2 (code,
gimple_expr_type (def_stmt),
- gimple_assign_rhs1 (def_stmt),
- gimple_assign_rhs2 (def_stmt));
+ vn_valueize (gimple_assign_rhs1 (def_stmt)),
+ vn_valueize (gimple_assign_rhs2 (def_stmt)));
break;
case tcc_exceptional:
tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
{
- double_int off
- = tree_to_double_int (this_offset)
- + tree_to_double_int (bit_offset)
- .rshift (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT));
- if (off.fits_shwi ()
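+ /* The constant byte offset of the field is its offset in bytes
+ plus its bit offset converted to bytes, when that fits.  */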
+ offset_int off
+ = (wi::to_offset (this_offset)
+ + wi::lrshift (wi::to_offset (bit_offset),
+ LOG2_BITS_PER_UNIT));
+ if (wi::fits_shwi_p (off)
/* Prohibit value-numbering zero offset components
of addresses the same before the pass folding
__builtin_object_size had a chance to run
(checking cfun->after_inlining does the
trick here). */
&& (TREE_CODE (orig) != ADDR_EXPR
- || !off.is_zero ()
+ || off != 0
|| cfun->after_inlining))
- temp.off = off.low;
+ temp.off = off.to_shwi ();
}
}
}
&& TREE_CODE (temp.op1) == INTEGER_CST
&& TREE_CODE (temp.op2) == INTEGER_CST)
{
- double_int off = tree_to_double_int (temp.op0);
- off += -tree_to_double_int (temp.op1);
- off *= tree_to_double_int (temp.op2);
- if (off.fits_shwi ())
- temp.off = off.low;
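+ /* The constant byte offset of the element is
+ (index - low bound) * element size, when that fits.  */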
+ offset_int off = ((wi::to_offset (temp.op0)
+ - wi::to_offset (temp.op1))
+ * wi::to_offset (temp.op2));
+ if (wi::fits_shwi_p (off))
+ temp.off = off.to_shwi ();
}
break;
case VAR_DECL:
gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
if (addr_base != TREE_OPERAND (op->op0, 0))
{
- double_int off = tree_to_double_int (mem_op->op0);
- off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
- off += double_int::from_shwi (addr_offset);
- mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
+ offset_int off = offset_int::from (mem_op->op0, SIGNED);
+ off += addr_offset;
+ mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
op->op0 = build_fold_addr_expr (addr_base);
if (tree_fits_shwi_p (mem_op->op0))
mem_op->off = tree_to_shwi (mem_op->op0);
vn_reference_op_t mem_op = &(*ops)[i - 1];
gimple def_stmt;
enum tree_code code;
- double_int off;
+ offset_int off;
def_stmt = SSA_NAME_DEF_STMT (op->op0);
if (!is_gimple_assign (def_stmt))
&& code != POINTER_PLUS_EXPR)
return;
- off = tree_to_double_int (mem_op->op0);
- off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
+ off = offset_int::from (mem_op->op0, SIGNED);
/* The only thing we have to do is from &OBJ.foo.bar add the offset
from .foo.bar to the preceding MEM_REF offset and replace the
|| TREE_CODE (addr_base) != MEM_REF)
return;
- off += double_int::from_shwi (addr_offset);
+ off += addr_offset;
off += mem_ref_offset (addr_base);
op->op0 = TREE_OPERAND (addr_base, 0);
}
|| TREE_CODE (ptroff) != INTEGER_CST)
return;
- off += tree_to_double_int (ptroff);
+ off += wi::to_offset (ptroff);
op->op0 = ptr;
}
- mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
+ mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
if (tree_fits_shwi_p (mem_op->op0))
mem_op->off = tree_to_shwi (mem_op->op0);
else
&& TREE_CODE (vro->op1) == INTEGER_CST
&& TREE_CODE (vro->op2) == INTEGER_CST)
{
- double_int off = tree_to_double_int (vro->op0);
- off += -tree_to_double_int (vro->op1);
- off *= tree_to_double_int (vro->op2);
- if (off.fits_shwi ())
- vro->off = off.low;
+ offset_int off = ((wi::to_offset (vro->op0)
+ - wi::to_offset (vro->op1))
+ * wi::to_offset (vro->op2));
+ if (wi::fits_shwi_p (off))
+ vro->off = off.to_shwi ();
}
}
of VUSE. */
static void *
-vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
+ bool disambiguate_only)
{
vn_reference_t vr = (vn_reference_t)vr_;
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
lhs_ref_ok = true;
}
}
+ else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
+ && gimple_call_num_args (def_stmt) <= 4)
+ {
+ /* For builtin calls, valueize their arguments and call the
+ alias oracle again. Valueization may improve points-to
+ info of pointers and constify size and position arguments.
+ Originally this was motivated by PR61034 which has
+ conditional calls to free falsely clobbering ref because
+ of imprecise points-to info of the argument. */
+ tree oldargs[4];
+ bool valueized_anything = false;
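+ /* Temporarily substitute the arguments' current value numbers,
+ remembering the original arguments.  */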
+ for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
+ {
+ oldargs[i] = gimple_call_arg (def_stmt, i);
+ if (TREE_CODE (oldargs[i]) == SSA_NAME
+ && VN_INFO (oldargs[i])->valnum != oldargs[i])
+ {
+ gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
+ valueized_anything = true;
+ }
+ }
+ if (valueized_anything)
+ {
+ bool res = call_may_clobber_ref_p_1 (def_stmt, ref);
+ for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
+ gimple_call_set_arg (def_stmt, i, oldargs[i]);
+ if (!res)
+ return NULL;
+ }
+ }
+
+ if (disambiguate_only)
+ return (void *)-1;
base = ao_ref_base (ref);
offset = ref->offset;
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
int i, j;
- vec<vn_reference_op_s>
- rhs = vNULL;
+ auto_vec<vn_reference_op_s> rhs;
vn_reference_op_t vro;
ao_ref r;
vr->operands.truncate (i + 1 + rhs.length ());
FOR_EACH_VEC_ELT (rhs, j, vro)
vr->operands[i + 1 + j] = *vro;
- rhs.release ();
vr->operands = valueize_refs (vr->operands);
vr->hashcode = vn_reference_compute_hash (vr);
tree currval = SSA_VAL (from);
HOST_WIDE_INT toff, coff;
+ /* The only things we allow as value numbers are ssa_names
+ and invariants. So assert that here. We don't allow VN_TOP
+ as visiting a stmt should produce a value-number other than
+ that.
+ ??? Still VN_TOP can happen for unreachable code, so force
+ it to varying in that case. Not all code is prepared to
+ get VN_TOP on valueization. */
+ if (to == VN_TOP)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Forcing value number to varying on "
+ "receiving VN_TOP\n");
+ to = from;
+ }
+
+ gcc_assert (to != NULL_TREE
+ && (TREE_CODE (to) == SSA_NAME
+ || is_gimple_min_invariant (to)));
+
if (from != to)
{
if (currval == from)
to = from;
}
- /* The only thing we allow as value numbers are VN_TOP, ssa_names
- and invariants. So assert that here. */
- gcc_assert (to != NULL_TREE
- && (to == VN_TOP
- || TREE_CODE (to) == SSA_NAME
- || is_gimple_min_invariant (to)));
-
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Setting value number of ");
}
static bool expr_has_constants (tree expr);
-static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
changed. */
if (vnresult)
{
- if (vnresult->result_vdef)
+ if (vnresult->result_vdef && vdef)
changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
if (!vnresult->result && lhs)
|| TREE_CODE (val) == VIEW_CONVERT_EXPR)
&& TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
{
- tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
+ tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
if ((CONVERT_EXPR_P (tem)
|| TREE_CODE (tem) == VIEW_CONVERT_EXPR)
&& (tem = fold_unary_ignore_overflow (TREE_CODE (val),
tree result;
tree sameval = VN_TOP;
bool allsame = true;
- unsigned i;
/* TODO: We could check for this in init_sccvn, and replace this
with a gcc_assert. */
/* See if all non-TOP arguments have the same value. TOP is
equivalent to everything, so we can ignore it. */
- for (i = 0; i < gimple_phi_num_args (phi); i++)
- {
- tree def = PHI_ARG_DEF (phi, i);
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
+ if (e->flags & EDGE_EXECUTABLE)
+ {
+ tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
- if (TREE_CODE (def) == SSA_NAME)
- def = SSA_VAL (def);
- if (def == VN_TOP)
- continue;
- if (sameval == VN_TOP)
- {
- sameval = def;
- }
- else
- {
- if (!expressions_equal_p (def, sameval))
- {
- allsame = false;
- break;
- }
- }
- }
+ if (TREE_CODE (def) == SSA_NAME)
+ def = SSA_VAL (def);
+ if (def == VN_TOP)
+ continue;
+ if (sameval == VN_TOP)
+ {
+ sameval = def;
+ }
+ else
+ {
+ if (!expressions_equal_p (def, sameval))
+ {
+ allsame = false;
+ break;
+ }
+ }
+ }
/* If all value numbered to the same value, the phi node has that
value. */
if (is_gimple_min_invariant (sameval))
{
VN_INFO (PHI_RESULT (phi))->has_constants = true;
- VN_INFO (PHI_RESULT (phi))->expr = sameval;
+ if (sameval != VN_TOP)
+ VN_INFO (PHI_RESULT (phi))->expr = sameval;
}
else
{
VN_INFO (PHI_RESULT (phi))->has_constants = false;
- VN_INFO (PHI_RESULT (phi))->expr = sameval;
+ if (sameval != VN_TOP)
+ VN_INFO (PHI_RESULT (phi))->expr = sameval;
}
if (TREE_CODE (sameval) == SSA_NAME)
return false;
}
-/* Replace SSA_NAMES in expr with their value numbers, and return the
- result.
- This is performed in place. */
-
-static tree
-valueize_expr (tree expr)
-{
- switch (TREE_CODE_CLASS (TREE_CODE (expr)))
- {
- case tcc_binary:
- TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
- /* Fallthru. */
- case tcc_unary:
- TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
- break;
- default:;
- }
- return expr;
-}
-
/* Simplify the binary expression RHS, and return the result if
simplified. */
if (VN_INFO (op0)->has_constants
|| TREE_CODE_CLASS (code) == tcc_comparison
|| code == COMPLEX_EXPR)
- op0 = valueize_expr (vn_get_expr_for (op0));
+ op0 = vn_get_expr_for (op0);
else
op0 = vn_valueize (op0);
}
{
if (VN_INFO (op1)->has_constants
|| code == COMPLEX_EXPR)
- op1 = valueize_expr (vn_get_expr_for (op1));
+ op1 = vn_get_expr_for (op1);
else
op1 = vn_valueize (op1);
}
orig_op0 = op0;
if (VN_INFO (op0)->has_constants)
- op0 = valueize_expr (vn_get_expr_for (op0));
+ op0 = vn_get_expr_for (op0);
else if (CONVERT_EXPR_CODE_P (code)
|| code == REALPART_EXPR
|| code == IMAGPART_EXPR
{
/* We want to do tree-combining on conversion-like expressions.
Make sure we feed only SSA_NAMEs or constants to fold though. */
- tree tem = valueize_expr (vn_get_expr_for (op0));
+ tree tem = vn_get_expr_for (op0);
if (UNARY_CLASS_P (tem)
|| BINARY_CLASS_P (tem)
|| TREE_CODE (tem) == VIEW_CONVERT_EXPR
else if (is_gimple_call (stmt))
{
tree lhs = gimple_call_lhs (stmt);
-
- /* ??? We could try to simplify calls. */
-
if (lhs && TREE_CODE (lhs) == SSA_NAME)
{
- if (stmt_has_constants (stmt))
- VN_INFO (lhs)->has_constants = true;
- else
+ /* Try constant folding based on our current lattice. */
+ tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
+ vn_valueize);
+ if (simplified)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "call ");
+ print_gimple_expr (dump_file, stmt, 0, 0);
+ fprintf (dump_file, " simplified to ");
+ print_generic_expr (dump_file, simplified, 0);
+ if (TREE_CODE (lhs) == SSA_NAME)
+ fprintf (dump_file, " has constants %d\n",
+ expr_has_constants (simplified));
+ else
+ fprintf (dump_file, "\n");
+ }
+ }
+ /* Setting value numbers to constants will occasionally
+ screw up phi congruence because constants are not
+ uniquely associated with a single ssa name that can be
+ looked up. */
+ if (simplified
+ && is_gimple_min_invariant (simplified))
{
- /* We reset expr and constantness here because we may
- have been value numbering optimistically, and
- iterating. They may become non-constant in this case,
- even if they were optimistically constant. */
- VN_INFO (lhs)->has_constants = false;
- VN_INFO (lhs)->expr = NULL_TREE;
+ VN_INFO (lhs)->expr = simplified;
+ VN_INFO (lhs)->has_constants = true;
+ changed = set_ssa_val_to (lhs, simplified);
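+ /* The simplified call is treated as not changing memory:
+ value-number its virtual definition to the incoming virtual use.  */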
+ if (gimple_vdef (stmt))
+ changed |= set_ssa_val_to (gimple_vdef (stmt),
+ gimple_vuse (stmt));
+ goto done;
}
-
- if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ else if (simplified
+ && TREE_CODE (simplified) == SSA_NAME)
{
- changed = defs_to_varying (stmt);
+ changed = visit_copy (lhs, simplified);
+ if (gimple_vdef (stmt))
+ changed |= set_ssa_val_to (gimple_vdef (stmt),
+ gimple_vuse (stmt));
goto done;
}
+ else
+ {
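+ /* The call did not simplify; fall back to tracking whether the
+ statement references constants.  */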
+ if (stmt_has_constants (stmt))
+ VN_INFO (lhs)->has_constants = true;
+ else
+ {
+ /* We reset expr and constantness here because we may
+ have been value numbering optimistically, and
+ iterating. They may become non-constant in this case,
+ even if they were optimistically constant. */
+ VN_INFO (lhs)->has_constants = false;
+ VN_INFO (lhs)->expr = NULL_TREE;
+ }
+
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ {
+ changed = defs_to_varying (stmt);
+ goto done;
+ }
+ }
}
if (!gimple_call_internal_p (stmt)
static bool
extract_and_process_scc_for_name (tree name)
{
- vec<tree> scc = vNULL;
+ auto_vec<tree> scc;
tree x;
/* Found an SCC, pop the components off the SCC stack and
"SCC size %u exceeding %u\n", scc.length (),
(unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
- scc.release ();
return false;
}
process_scc (scc);
- scc.release ();
-
return true;
}
shared_lookup_phiargs.create (0);
shared_lookup_references.create (0);
- rpo_numbers = XNEWVEC (int, last_basic_block);
+ rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
rpo_numbers_temp =
XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
set_value_id_for_result (vr->result, &vr->value_id);
}
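+/* Dominator walker that value-numbers the controlling statement of
+   each basic block and marks outgoing edges that provably cannot be
+   taken as not executable.  Sets FAIL when value-numbering an operand
+   of such a statement bails out.  */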
+class cond_dom_walker : public dom_walker
+{
+public:
+ cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
+
+ virtual void before_dom_children (basic_block);
+
+ bool fail;
+};
+
+void
+cond_dom_walker::before_dom_children (basic_block bb)
+{
+ edge e;
+ edge_iterator ei;
+
+ if (fail)
+ return;
+
+ /* If any of the predecessor edges that do not come from blocks dominated
+ by us are still marked as possibly executable, consider this block
+ reachable. */
+ bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
+ reachable |= (e->flags & EDGE_EXECUTABLE);
+
+ /* If the block is not reachable, all outgoing edges are not
+ executable. */
+ if (!reachable)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Marking all outgoing edges of unreachable "
+ "BB %d as not executable\n", bb->index);
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ e->flags &= ~EDGE_EXECUTABLE;
+ return;
+ }
+
+ gimple stmt = last_stmt (bb);
+ if (!stmt)
+ return;
+
+ /* Value-number the last stmt's SSA uses. */
+ ssa_op_iter i;
+ tree op;
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
+ if (VN_INFO (op)->visited == false
+ && !DFS (op))
+ {
+ fail = true;
+ return;
+ }
+
+ /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
+ if value-numbering can prove they are not reachable. Handling
+ computed gotos is also possible. */
+ tree val;
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_COND:
+ {
+ tree lhs = gimple_cond_lhs (stmt);
+ tree rhs = gimple_cond_rhs (stmt);
+ /* Work hard in computing the condition and take into account
+ the valueization of the defining stmt. */
+ if (TREE_CODE (lhs) == SSA_NAME)
+ lhs = vn_get_expr_for (lhs);
+ if (TREE_CODE (rhs) == SSA_NAME)
+ rhs = vn_get_expr_for (rhs);
+ val = fold_binary (gimple_cond_code (stmt),
+ boolean_type_node, lhs, rhs);
+ break;
+ }
+ case GIMPLE_SWITCH:
+ val = gimple_switch_index (stmt);
+ break;
+ case GIMPLE_GOTO:
+ val = gimple_goto_dest (stmt);
+ break;
+ default:
+ val = NULL_TREE;
+ break;
+ }
+ if (!val)
+ return;
+
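+ /* If the value number of the controlling value selects a single
+ outgoing edge, all other outgoing edges cannot be taken.  */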
+ edge taken = find_taken_edge (bb, vn_valueize (val));
+ if (!taken)
+ return;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
+ "not executable\n", bb->index, bb->index, taken->dest->index);
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e != taken)
+ e->flags &= ~EDGE_EXECUTABLE;
+}
+
/* Do SCCVN. Returns true if it finished, false if we bailed out
due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
how we use the alias oracle walking during the VN process. */
bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
+ basic_block bb;
size_t i;
tree param;
VN_INFO (def)->valnum = def;
}
+ /* Mark all edges as possibly executable. */
+ FOR_ALL_BB_FN (bb, cfun)
+ {
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ e->flags |= EDGE_EXECUTABLE;
+ }
+
+ /* Walk all blocks in dominator order, value-numbering the last stmt's
+ SSA uses and deciding whether outgoing edges are not executable. */
+ cond_dom_walker walker;
+ walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
+ if (walker.fail)
+ {
+ free_scc_vn ();
+ return false;
+ }
+
+ /* Value-number remaining SSA names. */
for (i = 1; i < num_ssa_names; ++i)
{
tree name = ssa_name (i);