/* SCC value numbering for trees
- Copyright (C) 2006, 2007, 2008, 2009, 2010
- Free Software Foundation, Inc.
+ Copyright (C) 2006-2014 Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@dberlin.org>
This file is part of GCC.
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
+#include "stor-layout.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
-#include "tree-flow.h"
+#include "hash-table.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "gimple-fold.h"
+#include "tree-eh.h"
+#include "gimple-expr.h"
+#include "is-a.h"
#include "gimple.h"
+#include "gimplify.h"
+#include "gimple-ssa.h"
+#include "tree-phinodes.h"
+#include "ssa-iterators.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
+#include "expr.h"
+#include "tree-dfa.h"
+#include "tree-ssa.h"
#include "dumpfile.h"
-#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
-#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
-#include "gimple-fold.h"
+#include "tree-cfg.h"
+#include "domwalk.h"
/* This algorithm is based on the SCC algorithm presented by Keith
Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
structure copies.
*/
+
+/* vn_nary_op hashtable helpers. */
+
+struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
+{
+ typedef vn_nary_op_s value_type;
+ typedef vn_nary_op_s compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
+};
+
+/* Return the computed hashcode for nary operation P1. */
+
+inline hashval_t
+vn_nary_op_hasher::hash (const value_type *vno1)
+{
+ return vno1->hashcode;
+}
+
+/* Compare nary operations P1 and P2 and return true if they are
+ equivalent. */
+
+inline bool
+vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
+{
+ return vn_nary_op_eq (vno1, vno2);
+}
+
+typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
+typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
+
+
+/* vn_phi hashtable helpers. */
+
+static int
+vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
+
+struct vn_phi_hasher
+{
+ typedef vn_phi_s value_type;
+ typedef vn_phi_s compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
+ static inline void remove (value_type *);
+};
+
+/* Return the computed hashcode for phi operation P1. */
+
+inline hashval_t
+vn_phi_hasher::hash (const value_type *vp1)
+{
+ return vp1->hashcode;
+}
+
+/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
+
+inline bool
+vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
+{
+ return vn_phi_eq (vp1, vp2);
+}
+
+/* Free a phi operation structure VP. */
+
+inline void
+vn_phi_hasher::remove (value_type *phi)
+{
+ phi->phiargs.release ();
+}
+
+typedef hash_table <vn_phi_hasher> vn_phi_table_type;
+typedef vn_phi_table_type::iterator vn_phi_iterator_type;
+
+
+/* Compare two reference operands P1 and P2 for equality. Return true if
+ they are equal, and false otherwise. */
+
+static int
+vn_reference_op_eq (const void *p1, const void *p2)
+{
+ const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
+ const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
+
+ return (vro1->opcode == vro2->opcode
+ /* We do not care for differences in type qualification. */
+ && (vro1->type == vro2->type
+ || (vro1->type && vro2->type
+ && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
+ TYPE_MAIN_VARIANT (vro2->type))))
+ && expressions_equal_p (vro1->op0, vro2->op0)
+ && expressions_equal_p (vro1->op1, vro2->op1)
+ && expressions_equal_p (vro1->op2, vro2->op2));
+}
+
+/* Free a reference operation structure VP. */
+
+static inline void
+free_reference (vn_reference_s *vr)
+{
+ vr->operands.release ();
+}
+
+
+/* vn_reference hashtable helpers. */
+
+struct vn_reference_hasher
+{
+ typedef vn_reference_s value_type;
+ typedef vn_reference_s compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
+ static inline void remove (value_type *);
+};
+
+/* Return the hashcode for a given reference operation P1. */
+
+inline hashval_t
+vn_reference_hasher::hash (const value_type *vr1)
+{
+ return vr1->hashcode;
+}
+
+inline bool
+vn_reference_hasher::equal (const value_type *v, const compare_type *c)
+{
+ return vn_reference_eq (v, c);
+}
+
+inline void
+vn_reference_hasher::remove (value_type *v)
+{
+ free_reference (v);
+}
+
+typedef hash_table <vn_reference_hasher> vn_reference_table_type;
+typedef vn_reference_table_type::iterator vn_reference_iterator_type;
+
+
/* The set of hashtables and alloc_pool's for their items. */
typedef struct vn_tables_s
{
- htab_t nary;
- htab_t phis;
- htab_t references;
+ vn_nary_op_table_type nary;
+ vn_phi_table_type phis;
+ vn_reference_table_type references;
struct obstack nary_obstack;
alloc_pool phis_pool;
alloc_pool references_pool;
} *vn_tables_t;
-static htab_t constant_to_value_id;
+
+/* vn_constant hashtable helpers. */
+
+struct vn_constant_hasher : typed_free_remove <vn_constant_s>
+{
+ typedef vn_constant_s value_type;
+ typedef vn_constant_s compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
+};
+
+/* Hash table hash function for vn_constant_t. */
+
+inline hashval_t
+vn_constant_hasher::hash (const value_type *vc1)
+{
+ return vc1->hashcode;
+}
+
+/* Hash table equality function for vn_constant_t. */
+
+inline bool
+vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
+{
+ if (vc1->hashcode != vc2->hashcode)
+ return false;
+
+ return vn_constant_eq_with_type (vc1->constant, vc2->constant);
+}
+
+static hash_table <vn_constant_hasher> constant_to_value_id;
static bitmap constant_value_ids;
detection. */
static unsigned int next_dfs_num;
-static VEC (tree, heap) *sccstack;
+static vec<tree> sccstack;
-DEF_VEC_P(vn_ssa_aux_t);
-DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
/* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
are allocated on an obstack for locality reasons, and to free them
- without looping over the VEC. */
+ without looping over the vec. */
-static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
+static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t info = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  /* The info must have been allocated by VN_INFO_GET beforehand.  */
  gcc_checking_assert (info);
  return info;
}
/* Set the value numbering info for a given SSA name NAME to VALUE.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  unsigned int version = SSA_NAME_VERSION (name);
  vn_ssa_aux_table[version] = value;
}
/* Initialize the value numbering info for a given SSA name.
newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
memset (newinfo, 0, sizeof (struct vn_ssa_aux));
- if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
- VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
- SSA_NAME_VERSION (name) + 1);
- VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
- SSA_NAME_VERSION (name), newinfo);
+ if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
+ vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
+ vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
return newinfo;
}
if (!is_gimple_assign (def_stmt))
return vn->valnum;
- /* FIXME tuples. This is incomplete and likely will miss some
- simplifications. */
+ /* Note that we can valueize here because we clear the cached
+ simplified expressions after each optimistic iteration. */
code = gimple_assign_rhs_code (def_stmt);
switch (TREE_CODE_CLASS (code))
{
0)) == SSA_NAME)
expr = fold_build1 (code,
gimple_expr_type (def_stmt),
- TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
+ vn_valueize (TREE_OPERAND
+ (gimple_assign_rhs1 (def_stmt), 0)));
break;
case tcc_unary:
expr = fold_build1 (code,
gimple_expr_type (def_stmt),
- gimple_assign_rhs1 (def_stmt));
+ vn_valueize (gimple_assign_rhs1 (def_stmt)));
break;
case tcc_binary:
expr = fold_build2 (code,
gimple_expr_type (def_stmt),
- gimple_assign_rhs1 (def_stmt),
- gimple_assign_rhs2 (def_stmt));
+ vn_valueize (gimple_assign_rhs1 (def_stmt)),
+ vn_valueize (gimple_assign_rhs2 (def_stmt)));
break;
case tcc_exceptional:
return expr;
}
+/* Return the vn_kind the expression computed by the stmt should be
+ associated with. */
-/* Free a phi operation structure VP. */
-
-static void
-free_phi (void *vp)
-{
- vn_phi_t phi = (vn_phi_t) vp;
- VEC_free (tree, heap, phi->phiargs);
-}
-
-/* Free a reference operation structure VP. */
-
-static void
-free_reference (void *vp)
-{
- vn_reference_t vr = (vn_reference_t) vp;
- VEC_free (vn_reference_op_s, heap, vr->operands);
-}
-
-/* Hash table equality function for vn_constant_t. */
-
-static int
-vn_constant_eq (const void *p1, const void *p2)
+enum vn_kind
+vn_get_stmt_kind (gimple stmt)
{
- const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
- const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
-
- if (vc1->hashcode != vc2->hashcode)
- return false;
-
- return vn_constant_eq_with_type (vc1->constant, vc2->constant);
-}
-
-/* Hash table hash function for vn_constant_t. */
-
-static hashval_t
-vn_constant_hash (const void *p1)
-{
- const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
- return vc1->hashcode;
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_CALL:
+ return VN_REFERENCE;
+ case GIMPLE_PHI:
+ return VN_PHI;
+ case GIMPLE_ASSIGN:
+ {
+ enum tree_code code = gimple_assign_rhs_code (stmt);
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ switch (get_gimple_rhs_class (code))
+ {
+ case GIMPLE_UNARY_RHS:
+ case GIMPLE_BINARY_RHS:
+ case GIMPLE_TERNARY_RHS:
+ return VN_NARY;
+ case GIMPLE_SINGLE_RHS:
+ switch (TREE_CODE_CLASS (code))
+ {
+ case tcc_reference:
+ /* VOP-less references can go through unary case. */
+ if ((code == REALPART_EXPR
+ || code == IMAGPART_EXPR
+ || code == VIEW_CONVERT_EXPR
+ || code == BIT_FIELD_REF)
+ && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
+ return VN_NARY;
+
+ /* Fallthrough. */
+ case tcc_declaration:
+ return VN_REFERENCE;
+
+ case tcc_constant:
+ return VN_CONSTANT;
+
+ default:
+ if (code == ADDR_EXPR)
+ return (is_gimple_min_invariant (rhs1)
+ ? VN_CONSTANT : VN_REFERENCE);
+ else if (code == CONSTRUCTOR)
+ return VN_NARY;
+ return VN_NONE;
+ }
+ default:
+ return VN_NONE;
+ }
+ }
+ default:
+ return VN_NONE;
+ }
}
/* Lookup a value id for CONSTANT and return it. If it does not
unsigned int
get_constant_value_id (tree constant)
{
- void **slot;
+ vn_constant_s **slot;
struct vn_constant_s vc;
vc.hashcode = vn_hash_constant_with_type (constant);
vc.constant = constant;
- slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
- vc.hashcode, NO_INSERT);
+ slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, NO_INSERT);
if (slot)
- return ((vn_constant_t)*slot)->value_id;
+ return (*slot)->value_id;
return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, allocate
   a new one and record it.  Return the value id in either case.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  vn_constant_s **slot
    = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, INSERT);
  if (*slot)
    return (*slot)->value_id;

  /* Not seen before: allocate a fresh entry and a fresh value id.  */
  vn_constant_t vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
return bitmap_bit_p (constant_value_ids, v);
}
-/* Compare two reference operands P1 and P2 for equality. Return true if
- they are equal, and false otherwise. */
-
-static int
-vn_reference_op_eq (const void *p1, const void *p2)
-{
- const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
- const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
-
- return (vro1->opcode == vro2->opcode
- /* We do not care for differences in type qualification. */
- && (vro1->type == vro2->type
- || (vro1->type && vro2->type
- && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
- TYPE_MAIN_VARIANT (vro2->type))))
- && expressions_equal_p (vro1->op0, vro2->op0)
- && expressions_equal_p (vro1->op1, vro2->op1)
- && expressions_equal_p (vro1->op2, vro2->op2));
-}
-
/* Compute the hash for a reference operand VRO1. */
static hashval_t
return result;
}
-/* Return the hashcode for a given reference operation P1. */
-
-static hashval_t
-vn_reference_hash (const void *p1)
-{
- const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
- return vr1->hashcode;
-}
-
/* Compute a hash for the reference operation VR1 and return it. */
hashval_t
HOST_WIDE_INT off = -1;
bool deref = false;
- FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
+ FOR_EACH_VEC_ELT (vr1->operands, i, vro)
{
if (vro->opcode == MEM_REF)
deref = true;
return result;
}
-/* Return true if reference operations P1 and P2 are equivalent. This
+/* Return true if reference operations VR1 and VR2 are equivalent. This
means they have the same set of operands and vuses. */
-int
-vn_reference_eq (const void *p1, const void *p2)
+bool
+vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
unsigned i, j;
- const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
- const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
if (vr1->hashcode != vr2->hashcode)
return false;
vn_reference_op_t vro1, vro2;
vn_reference_op_s tem1, tem2;
bool deref1 = false, deref2 = false;
- for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
+ for (; vr1->operands.iterate (i, &vro1); i++)
{
if (vro1->opcode == MEM_REF)
deref1 = true;
break;
off1 += vro1->off;
}
- for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
+ for (; vr2->operands.iterate (j, &vro2); j++)
{
if (vro2->opcode == MEM_REF)
deref2 = true;
++j;
++i;
}
- while (VEC_length (vn_reference_op_s, vr1->operands) != i
- || VEC_length (vn_reference_op_s, vr2->operands) != j);
+ while (vr1->operands.length () != i
+ || vr2->operands.length () != j);
return true;
}
vn_reference_op_s's. */
void
-copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
+copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
if (TREE_CODE (ref) == TARGET_MEM_REF)
{
vn_reference_op_s temp;
+ result->reserve (3);
+
memset (&temp, 0, sizeof (temp));
temp.type = TREE_TYPE (ref);
temp.opcode = TREE_CODE (ref);
temp.op1 = TMR_STEP (ref);
temp.op2 = TMR_OFFSET (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+ result->quick_push (temp);
memset (&temp, 0, sizeof (temp));
temp.type = NULL_TREE;
temp.opcode = ERROR_MARK;
temp.op0 = TMR_INDEX2 (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+ result->quick_push (temp);
memset (&temp, 0, sizeof (temp));
temp.type = NULL_TREE;
temp.opcode = TREE_CODE (TMR_BASE (ref));
temp.op0 = TMR_BASE (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+ result->quick_push (temp);
return;
}
/* For non-calls, store the information that makes up the address. */
-
+ tree orig = ref;
while (ref)
{
vn_reference_op_s temp;
case MEM_REF:
/* The base address gets its own vn_reference_op_s structure. */
temp.op0 = TREE_OPERAND (ref, 1);
- if (host_integerp (TREE_OPERAND (ref, 1), 0))
- temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
+ if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
+ temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
break;
case BIT_FIELD_REF:
/* Record bits and position. */
tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
{
- double_int off
- = double_int_add (tree_to_double_int (this_offset),
- double_int_rshift
- (tree_to_double_int (bit_offset),
- BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT),
- HOST_BITS_PER_DOUBLE_INT, true));
- if (double_int_fits_in_shwi_p (off))
- temp.off = off.low;
+ offset_int off
+ = (wi::to_offset (this_offset)
+ + wi::lrshift (wi::to_offset (bit_offset),
+ LOG2_BITS_PER_UNIT));
+ if (wi::fits_shwi_p (off)
+ /* Prohibit value-numbering zero offset components
+ of addresses the same before the pass folding
+ __builtin_object_size had a chance to run
+ (checking cfun->after_inlining does the
+ trick here). */
+ && (TREE_CODE (orig) != ADDR_EXPR
+ || off != 0
+ || cfun->after_inlining))
+ temp.off = off.to_shwi ();
}
}
}
&& TREE_CODE (temp.op1) == INTEGER_CST
&& TREE_CODE (temp.op2) == INTEGER_CST)
{
- double_int off = tree_to_double_int (temp.op0);
- off = double_int_add (off,
- double_int_neg
- (tree_to_double_int (temp.op1)));
- off = double_int_mul (off, tree_to_double_int (temp.op2));
- if (double_int_fits_in_shwi_p (off))
- temp.off = off.low;
+ offset_int off = ((wi::to_offset (temp.op0)
+ - wi::to_offset (temp.op1))
+ * wi::to_offset (temp.op2));
+ if (wi::fits_shwi_p (off))
+ temp.off = off.to_shwi();
}
break;
case VAR_DECL:
temp.opcode = MEM_REF;
temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
temp.off = 0;
- VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+ result->safe_push (temp);
temp.opcode = ADDR_EXPR;
- temp.op0 = build_fold_addr_expr (ref);
+ temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
temp.type = TREE_TYPE (temp.op0);
temp.off = -1;
break;
default:
gcc_unreachable ();
}
- VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+ result->safe_push (temp);
if (REFERENCE_CLASS_P (ref)
|| TREE_CODE (ref) == MODIFY_EXPR
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *ops)
+ vec<vn_reference_op_s> ops)
{
vn_reference_op_t op;
unsigned i;
alias_set_type base_alias_set = -1;
/* First get the final access size from just the outermost expression. */
- op = VEC_index (vn_reference_op_s, ops, 0);
+ op = &ops[0];
if (op->opcode == COMPONENT_REF)
size_tree = DECL_SIZE (op->op0);
else if (op->opcode == BIT_FIELD_REF)
}
if (size_tree != NULL_TREE)
{
- if (!host_integerp (size_tree, 1))
+ if (!tree_fits_uhwi_p (size_tree))
size = -1;
else
- size = TREE_INT_CST_LOW (size_tree);
+ size = tree_to_uhwi (size_tree);
}
/* Initially, maxsize is the same as the accessed element size.
/* Compute cumulative bit-offset for nested component-refs and array-refs,
and find the ultimate containing object. */
- FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
+ FOR_EACH_VEC_ELT (ops, i, op)
{
switch (op->opcode)
{
&& op->op0
&& DECL_P (TREE_OPERAND (op->op0, 0)))
{
- vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
+ vn_reference_op_t pop = &ops[i-1];
base = TREE_OPERAND (op->op0, 0);
if (pop->off == -1)
{
/* And now the usual component-reference style ops. */
case BIT_FIELD_REF:
- offset += tree_low_cst (op->op1, 0);
+ offset += tree_to_shwi (op->op1);
break;
case COMPONENT_REF:
parts manually. */
if (op->op1
- || !host_integerp (DECL_FIELD_OFFSET (field), 1))
+ || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
max_size = -1;
else
{
- offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
+ offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
* BITS_PER_UNIT);
offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
}
case ARRAY_RANGE_REF:
case ARRAY_REF:
/* We recorded the lower bound and the element size. */
- if (!host_integerp (op->op0, 0)
- || !host_integerp (op->op1, 0)
- || !host_integerp (op->op2, 0))
+ if (!tree_fits_shwi_p (op->op0)
+ || !tree_fits_shwi_p (op->op1)
+ || !tree_fits_shwi_p (op->op2))
max_size = -1;
else
{
- HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
- hindex -= TREE_INT_CST_LOW (op->op1);
- hindex *= TREE_INT_CST_LOW (op->op2);
+ HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
+ hindex -= tree_to_shwi (op->op1);
+ hindex *= tree_to_shwi (op->op2);
hindex *= BITS_PER_UNIT;
offset += hindex;
}
void
copy_reference_ops_from_call (gimple call,
- VEC(vn_reference_op_s, heap) **result)
+ vec<vn_reference_op_s> *result)
{
vn_reference_op_s temp;
unsigned i;
temp.type = TREE_TYPE (lhs);
temp.op0 = lhs;
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+ result->safe_push (temp);
}
/* Copy the type, opcode, function being called and static chain. */
temp.op0 = gimple_call_fn (call);
temp.op1 = gimple_call_chain (call);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+ result->safe_push (temp);
/* Copy the call arguments. As they can be references as well,
just chain them together. */
}
}
-/* Create a vector of vn_reference_op_s structures from REF, a
- REFERENCE_CLASS_P tree. The vector is not shared. */
-
-static VEC(vn_reference_op_s, heap) *
-create_reference_ops_from_ref (tree ref)
-{
- VEC (vn_reference_op_s, heap) *result = NULL;
-
- copy_reference_ops_from_ref (ref, &result);
- return result;
-}
-
/* Create a vector of vn_reference_op_s structures from CALL, a
call statement. The vector is not shared. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
- VEC (vn_reference_op_s, heap) *result = NULL;
+ vec<vn_reference_op_s> result = vNULL;
copy_reference_ops_from_call (call, &result);
return result;
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
*I_P to point to the last element of the replacement. */
void
-vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
+vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
unsigned int *i_p)
{
unsigned int i = *i_p;
- vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
- vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
+ vn_reference_op_t op = &(*ops)[i];
+ vn_reference_op_t mem_op = &(*ops)[i - 1];
tree addr_base;
- HOST_WIDE_INT addr_offset;
+ HOST_WIDE_INT addr_offset = 0;
/* The only thing we have to do is from &OBJ.foo.bar add the offset
from .foo.bar to the preceding MEM_REF offset and replace the
addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
&addr_offset);
gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
- if (addr_base != op->op0)
+ if (addr_base != TREE_OPERAND (op->op0, 0))
{
- double_int off = tree_to_double_int (mem_op->op0);
- off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
- off = double_int_add (off, shwi_to_double_int (addr_offset));
- mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
+ offset_int off = offset_int::from (mem_op->op0, SIGNED);
+ off += addr_offset;
+ mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
op->op0 = build_fold_addr_expr (addr_base);
- if (host_integerp (mem_op->op0, 0))
- mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+ if (tree_fits_shwi_p (mem_op->op0))
+ mem_op->off = tree_to_shwi (mem_op->op0);
else
mem_op->off = -1;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
*I_P to point to the last element of the replacement. */
static void
-vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
+vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
unsigned int *i_p)
{
unsigned int i = *i_p;
- vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
- vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
+ vn_reference_op_t op = &(*ops)[i];
+ vn_reference_op_t mem_op = &(*ops)[i - 1];
gimple def_stmt;
enum tree_code code;
- double_int off;
+ offset_int off;
def_stmt = SSA_NAME_DEF_STMT (op->op0);
if (!is_gimple_assign (def_stmt))
&& code != POINTER_PLUS_EXPR)
return;
- off = tree_to_double_int (mem_op->op0);
- off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
+ off = offset_int::from (mem_op->op0, SIGNED);
/* The only thing we have to do is from &OBJ.foo.bar add the offset
from .foo.bar to the preceding MEM_REF offset and replace the
|| TREE_CODE (addr_base) != MEM_REF)
return;
- off = double_int_add (off, shwi_to_double_int (addr_offset));
- off = double_int_add (off, mem_ref_offset (addr_base));
+ off += addr_offset;
+ off += mem_ref_offset (addr_base);
op->op0 = TREE_OPERAND (addr_base, 0);
}
else
|| TREE_CODE (ptroff) != INTEGER_CST)
return;
- off = double_int_add (off, tree_to_double_int (ptroff));
+ off += wi::to_offset (ptroff);
op->op0 = ptr;
}
- mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
- if (host_integerp (mem_op->op0, 0))
- mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+ mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
+ if (tree_fits_shwi_p (mem_op->op0))
+ mem_op->off = tree_to_shwi (mem_op->op0);
else
mem_op->off = -1;
if (TREE_CODE (op->op0) == SSA_NAME)
tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
- VEC (vn_reference_op_s, heap) *operands = ref->operands;
+ vec<vn_reference_op_s> operands = ref->operands;
vn_reference_op_t op;
/* Try to simplify the translated expression if it is
a call to a builtin function with at most two arguments. */
- op = VEC_index (vn_reference_op_s, operands, 0);
+ op = &operands[0];
if (op->opcode == CALL_EXPR
&& TREE_CODE (op->op0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
&& DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
- && VEC_length (vn_reference_op_s, operands) >= 2
- && VEC_length (vn_reference_op_s, operands) <= 3)
+ && operands.length () >= 2
+ && operands.length () <= 3)
{
vn_reference_op_t arg0, arg1 = NULL;
bool anyconst = false;
- arg0 = VEC_index (vn_reference_op_s, operands, 1);
- if (VEC_length (vn_reference_op_s, operands) > 2)
- arg1 = VEC_index (vn_reference_op_s, operands, 2);
+ arg0 = &operands[1];
+ if (operands.length () > 2)
+ arg1 = &operands[2];
if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
|| (arg0->opcode == ADDR_EXPR
&& is_gimple_min_invariant (arg0->op0)))
else if (op->opcode == ARRAY_REF
&& TREE_CODE (op->op0) == INTEGER_CST
&& integer_zerop (op->op1)
- && VEC_length (vn_reference_op_s, operands) == 2)
+ && operands.length () == 2)
{
vn_reference_op_t arg0;
- arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ arg0 = &operands[1];
if (arg0->opcode == STRING_CST
&& (TYPE_MODE (op->type)
== TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
&& GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
&& GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
+ && tree_int_cst_sgn (op->op0) >= 0
&& compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
return build_int_cst_type (op->type,
(TREE_STRING_POINTER (arg0->op0)
the vector passed in is returned. *VALUEIZED_ANYTHING will specify
whether any operands were valueized. */
-static VEC (vn_reference_op_s, heap) *
-valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
+static vec<vn_reference_op_s>
+valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
vn_reference_op_t vro;
unsigned int i;
*valueized_anything = false;
- FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
+ FOR_EACH_VEC_ELT (orig, i, vro)
{
if (vro->opcode == SSA_NAME
|| (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
if (i > 0
&& vro->op0
&& TREE_CODE (vro->op0) == ADDR_EXPR
- && VEC_index (vn_reference_op_s,
- orig, i - 1)->opcode == MEM_REF)
+ && orig[i - 1].opcode == MEM_REF)
vn_reference_fold_indirect (&orig, &i);
else if (i > 0
&& vro->opcode == SSA_NAME
- && VEC_index (vn_reference_op_s,
- orig, i - 1)->opcode == MEM_REF)
+ && orig[i - 1].opcode == MEM_REF)
vn_reference_maybe_forwprop_address (&orig, &i);
/* If it transforms a non-constant ARRAY_REF into a constant
one, adjust the constant offset. */
&& TREE_CODE (vro->op1) == INTEGER_CST
&& TREE_CODE (vro->op2) == INTEGER_CST)
{
- double_int off = tree_to_double_int (vro->op0);
- off = double_int_add (off,
- double_int_neg
- (tree_to_double_int (vro->op1)));
- off = double_int_mul (off, tree_to_double_int (vro->op2));
- if (double_int_fits_in_shwi_p (off))
- vro->off = off.low;
+ offset_int off = ((wi::to_offset (vro->op0)
+ - wi::to_offset (vro->op1))
+ * wi::to_offset (vro->op2));
+ if (wi::fits_shwi_p (off))
+ vro->off = off.to_shwi ();
}
}
return orig;
}
-static VEC (vn_reference_op_s, heap) *
-valueize_refs (VEC (vn_reference_op_s, heap) *orig)
+static vec<vn_reference_op_s>
+valueize_refs (vec<vn_reference_op_s> orig)
{
bool tem;
return valueize_refs_1 (orig, &tem);
}
-static VEC(vn_reference_op_s, heap) *shared_lookup_references;
+static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
REFERENCE_CLASS_P tree. The vector is shared among all callers of
this function. *VALUEIZED_ANYTHING will specify whether any
operands were valueized. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
if (!ref)
- return NULL;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ return vNULL;
+ shared_lookup_references.truncate (0);
copy_reference_ops_from_ref (ref, &shared_lookup_references);
shared_lookup_references = valueize_refs_1 (shared_lookup_references,
valueized_anything);
call statement. The vector is shared among all callers of
this function. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
if (!call)
- return NULL;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ return vNULL;
+ shared_lookup_references.truncate (0);
copy_reference_ops_from_call (call, &shared_lookup_references);
shared_lookup_references = valueize_refs (shared_lookup_references);
return shared_lookup_references;
static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
- void **slot;
+ vn_reference_s **slot;
hashval_t hash;
hash = vr->hashcode;
- slot = htab_find_slot_with_hash (current_info->references, vr,
- hash, NO_INSERT);
+ slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
if (!slot && current_info == optimistic_info)
- slot = htab_find_slot_with_hash (valid_info->references, vr,
- hash, NO_INSERT);
+ slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
if (slot)
{
if (vnresult)
with the current VUSE and performs the expression lookup. */
static void *
-vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
+ unsigned int cnt, void *vr_)
{
vn_reference_t vr = (vn_reference_t)vr_;
- void **slot;
+ vn_reference_s **slot;
hashval_t hash;
+ /* This bounds the stmt walks we perform on reference lookups
+ to O(1) instead of O(N) where N is the number of dominating
+ stores. */
+ if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
+ return (void *)-1;
+
if (last_vuse_ptr)
*last_vuse_ptr = vuse;
vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
hash = vr->hashcode;
- slot = htab_find_slot_with_hash (current_info->references, vr,
- hash, NO_INSERT);
+ slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
if (!slot && current_info == optimistic_info)
- slot = htab_find_slot_with_hash (valid_info->references, vr,
- hash, NO_INSERT);
+ slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
if (slot)
return *slot;
vn_reference_lookup_or_insert_for_pieces (tree vuse,
alias_set_type set,
tree type,
- VEC (vn_reference_op_s,
- heap) *operands,
+ vec<vn_reference_op_s,
+ va_heap> operands,
tree value)
{
struct vn_reference_s vr1;
else
value_id = get_or_alloc_constant_value_id (value);
return vn_reference_insert_pieces (vuse, set, type,
- VEC_copy (vn_reference_op_s, heap,
- operands), value, value_id);
+ operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
of VUSE. */
static void *
-vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
+ bool disambiguate_only)
{
vn_reference_t vr = (vn_reference_t)vr_;
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
tree base;
HOST_WIDE_INT offset, maxsize;
- static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
+ static vec<vn_reference_op_s>
+ lhs_ops = vNULL;
ao_ref lhs_ref;
bool lhs_ref_ok = false;
/* First try to disambiguate after value-replacing in the definitions LHS. */
if (is_gimple_assign (def_stmt))
{
- VEC (vn_reference_op_s, heap) *tem;
+ vec<vn_reference_op_s> tem;
tree lhs = gimple_assign_lhs (def_stmt);
bool valueized_anything = false;
/* Avoid re-allocation overhead. */
- VEC_truncate (vn_reference_op_s, lhs_ops, 0);
+ lhs_ops.truncate (0);
copy_reference_ops_from_ref (lhs, &lhs_ops);
tem = lhs_ops;
lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
lhs_ref_ok = true;
}
}
+ else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
+ && gimple_call_num_args (def_stmt) <= 4)
+ {
+ /* For builtin calls valueize its arguments and call the
+ alias oracle again. Valueization may improve points-to
+ info of pointers and constify size and position arguments.
+ Originally this was motivated by PR61034 which has
+ conditional calls to free falsely clobbering ref because
+ of imprecise points-to info of the argument. */
+ tree oldargs[4];
+ bool valueized_anything = false;
+ for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
+ {
+ oldargs[i] = gimple_call_arg (def_stmt, i);
+ if (TREE_CODE (oldargs[i]) == SSA_NAME
+ && VN_INFO (oldargs[i])->valnum != oldargs[i])
+ {
+ gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
+ valueized_anything = true;
+ }
+ }
+ if (valueized_anything)
+ {
+ bool res = call_may_clobber_ref_p_1 (def_stmt, ref);
+ for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
+ gimple_call_set_arg (def_stmt, i, oldargs[i]);
+ if (!res)
+ return NULL;
+ }
+ }
+
+ if (disambiguate_only)
+ return (void *)-1;
base = ao_ref_base (ref);
offset = ref->offset;
if (is_gimple_reg_type (vr->type)
&& gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
&& integer_zerop (gimple_call_arg (def_stmt, 1))
- && host_integerp (gimple_call_arg (def_stmt, 2), 1)
+ && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
&& TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
{
tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
- size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
+ size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
if ((unsigned HOST_WIDE_INT)size2 / 8
- == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
+ == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
&& maxsize2 != -1
&& operand_equal_p (base, base2, 0)
&& offset2 <= offset
/* 3) Assignment from a constant. We can use folds native encode/interpret
routines to extract the assigned bits. */
- else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
+ else if (vn_walk_kind == VN_WALKREWRITE
+ && CHAR_BIT == 8 && BITS_PER_UNIT == 8
&& ref->size == maxsize
&& maxsize % BITS_PER_UNIT == 0
&& offset % BITS_PER_UNIT == 0
if (i < CONSTRUCTOR_NELTS (ctor))
{
constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
- if (compare_tree_int (elt->index, i) == 0)
- val = elt->value;
+ if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
+ {
+ if (TREE_CODE (TREE_TYPE (elt->value))
+ != VECTOR_TYPE)
+ val = elt->value;
+ }
}
}
if (val)
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
int i, j;
- VEC (vn_reference_op_s, heap) *rhs = NULL;
+ auto_vec<vn_reference_op_s> rhs;
vn_reference_op_t vro;
ao_ref r;
/* Find the common base of ref and the lhs. lhs_ops already
contains valueized operands for the lhs. */
- i = VEC_length (vn_reference_op_s, vr->operands) - 1;
- j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
+ i = vr->operands.length () - 1;
+ j = lhs_ops.length () - 1;
while (j >= 0 && i >= 0
- && vn_reference_op_eq (VEC_index (vn_reference_op_s,
- vr->operands, i),
- VEC_index (vn_reference_op_s, lhs_ops, j)))
+ && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
{
i--;
j--;
don't care here - further lookups with the rewritten operands
will simply fail if we messed up types too badly. */
if (j == 0 && i >= 0
- && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
- && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
- && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
- == VEC_index (vn_reference_op_s, vr->operands, i)->off))
+ && lhs_ops[0].opcode == MEM_REF
+ && lhs_ops[0].off != -1
+ && (lhs_ops[0].off == vr->operands[i].off))
i--, j--;
/* i now points to the first additional op.
/* Now re-write REF to be based on the rhs of the assignment. */
copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
/* We need to pre-pend vr->operands[0..i] to rhs. */
- if (i + 1 + VEC_length (vn_reference_op_s, rhs)
- > VEC_length (vn_reference_op_s, vr->operands))
+ if (i + 1 + rhs.length () > vr->operands.length ())
{
- VEC (vn_reference_op_s, heap) *old = vr->operands;
- VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
- i + 1 + VEC_length (vn_reference_op_s, rhs));
+ vec<vn_reference_op_s> old = vr->operands;
+ vr->operands.safe_grow (i + 1 + rhs.length ());
if (old == shared_lookup_references
&& vr->operands != old)
- shared_lookup_references = NULL;
+ shared_lookup_references = vNULL;
}
else
- VEC_truncate (vn_reference_op_s, vr->operands,
- i + 1 + VEC_length (vn_reference_op_s, rhs));
- FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
- VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
- VEC_free (vn_reference_op_s, heap, rhs);
+ vr->operands.truncate (i + 1 + rhs.length ());
+ FOR_EACH_VEC_ELT (rhs, j, vro)
+ vr->operands[i + 1 + j] = *vro;
vr->operands = valueize_refs (vr->operands);
vr->hashcode = vn_reference_compute_hash (vr);
|| TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
&& (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
|| TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
- && host_integerp (gimple_call_arg (def_stmt, 2), 1))
+ && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
{
tree lhs, rhs;
ao_ref r;
if (!tem)
return (void *)-1;
if (TREE_CODE (tem) == MEM_REF
- && host_integerp (TREE_OPERAND (tem, 1), 1))
+ && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
{
lhs = TREE_OPERAND (tem, 0);
- lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
}
else if (DECL_P (tem))
lhs = build_fold_addr_expr (tem);
if (!tem)
return (void *)-1;
if (TREE_CODE (tem) == MEM_REF
- && host_integerp (TREE_OPERAND (tem, 1), 1))
+ && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
{
rhs = TREE_OPERAND (tem, 0);
- rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
}
else if (DECL_P (tem))
rhs = build_fold_addr_expr (tem);
&& TREE_CODE (rhs) != ADDR_EXPR)
return (void *)-1;
- copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
+ copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
/* The bases of the destination and the references have to agree. */
if ((TREE_CODE (base) != MEM_REF
&& !DECL_P (base))
|| (TREE_CODE (base) == MEM_REF
&& (TREE_OPERAND (base, 0) != lhs
- || !host_integerp (TREE_OPERAND (base, 1), 1)))
+ || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
|| (DECL_P (base)
&& (TREE_CODE (lhs) != ADDR_EXPR
|| TREE_OPERAND (lhs, 0) != base)))
/* And the access has to be contained within the memcpy destination. */
at = offset / BITS_PER_UNIT;
if (TREE_CODE (base) == MEM_REF)
- at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
+ at += tree_to_uhwi (TREE_OPERAND (base, 1));
if (lhs_offset > at
|| lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
return (void *)-1;
/* Make room for 2 operands in the new reference. */
- if (VEC_length (vn_reference_op_s, vr->operands) < 2)
+ if (vr->operands.length () < 2)
{
- VEC (vn_reference_op_s, heap) *old = vr->operands;
- VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
+ vec<vn_reference_op_s> old = vr->operands;
+ vr->operands.safe_grow_cleared (2);
if (old == shared_lookup_references
&& vr->operands != old)
- shared_lookup_references = NULL;
+ shared_lookup_references.create (0);
}
else
- VEC_truncate (vn_reference_op_s, vr->operands, 2);
+ vr->operands.truncate (2);
/* The looked-through reference is a simple MEM_REF. */
memset (&op, 0, sizeof (op));
op.opcode = MEM_REF;
op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
op.off = at - lhs_offset + rhs_offset;
- VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
+ vr->operands[0] = op;
op.type = TREE_TYPE (rhs);
op.opcode = TREE_CODE (rhs);
op.op0 = rhs;
op.off = -1;
- VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
+ vr->operands[1] = op;
vr->hashcode = vn_reference_compute_hash (vr);
/* Adjust *ref from the new operands. */
tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *operands,
+ vec<vn_reference_op_s> operands,
vn_reference_t *vnresult, vn_lookup_kind kind)
{
struct vn_reference_s vr1;
*vnresult = NULL;
vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
- VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
- VEC_length (vn_reference_op_s, operands));
- memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
- VEC_address (vn_reference_op_s, operands),
+ shared_lookup_references.truncate (0);
+ shared_lookup_references.safe_grow (operands.length ());
+ memcpy (shared_lookup_references.address (),
+ operands.address (),
sizeof (vn_reference_op_s)
- * VEC_length (vn_reference_op_s, operands));
+ * operands.length ());
vr1.operands = operands = shared_lookup_references
= valueize_refs (shared_lookup_references);
vr1.type = type;
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
- VEC_free (vn_reference_op_s, heap, vr1.operands);
+ vr1.operands.release ();
}
if (*vnresult)
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_t *vnresult)
{
- VEC (vn_reference_op_s, heap) *operands;
+ vec<vn_reference_op_s> operands;
struct vn_reference_s vr1;
tree cst;
bool valuezied_anything;
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
- VEC_free (vn_reference_op_s, heap, vr1.operands);
+ vr1.operands.release ();
if (wvnresult)
{
if (vnresult)
vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
- void **slot;
+ vn_reference_s **slot;
vn_reference_t vr1;
+ bool tem;
vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
if (TREE_CODE (result) == SSA_NAME)
else
vr1->value_id = get_or_alloc_constant_value_id (result);
vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
- vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
+ vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
vr1->type = TREE_TYPE (op);
vr1->set = get_alias_set (op);
vr1->hashcode = vn_reference_compute_hash (vr1);
vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
vr1->result_vdef = vdef;
- slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
- INSERT);
+ slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
+ INSERT);
/* Because we lookup stores using vuses, and value number failures
using the vdefs (see visit_reference_op_store for how and why),
vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *operands,
+ vec<vn_reference_op_s> operands,
tree result, unsigned int value_id)
{
- void **slot;
+ vn_reference_s **slot;
vn_reference_t vr1;
vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
result = SSA_VAL (result);
vr1->result = result;
- slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
- INSERT);
+ slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
+ INSERT);
/* At this point we should have all the things inserted that we have
seen before, and we should never try inserting something that
return hash;
}
-/* Return the computed hashcode for nary operation P1. */
-
-static hashval_t
-vn_nary_op_hash (const void *p1)
-{
- const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
- return vno1->hashcode;
-}
-
-/* Compare nary operations P1 and P2 and return true if they are
+/* Compare nary operations VNO1 and VNO2 and return true if they are
equivalent. */
-int
-vn_nary_op_eq (const void *p1, const void *p2)
+bool
+vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
- const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
- const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
unsigned i;
if (vno1->hashcode != vno2->hashcode)
case VIEW_CONVERT_EXPR:
return 1;
+ case BIT_FIELD_REF:
+ return 3;
+
case CONSTRUCTOR:
return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
break;
+ case BIT_FIELD_REF:
+ vno->length = 3;
+ vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
+ vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
+ vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
+ break;
+
case CONSTRUCTOR:
vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
for (i = 0; i < vno->length; ++i)
break;
default:
+ gcc_checking_assert (!gimple_assign_single_p (stmt));
vno->length = gimple_num_ops (stmt) - 1;
for (i = 0; i < vno->length; ++i)
vno->op[i] = gimple_op (stmt, i + 1);
static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
- void **slot;
+ vn_nary_op_s **slot;
if (vnresult)
*vnresult = NULL;
vno->hashcode = vn_nary_op_compute_hash (vno);
- slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
- NO_INSERT);
+ slot = current_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
if (!slot && current_info == optimistic_info)
- slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
- NO_INSERT);
+ slot = valid_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
if (!slot)
return NULL_TREE;
if (vnresult)
- *vnresult = (vn_nary_op_t)*slot;
- return ((vn_nary_op_t)*slot)->result;
+ *vnresult = *slot;
+ return (*slot)->result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
VNO->HASHCODE first. */
static vn_nary_op_t
-vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
+vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
+ bool compute_hash)
{
- void **slot;
+ vn_nary_op_s **slot;
if (compute_hash)
vno->hashcode = vn_nary_op_compute_hash (vno);
- slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
+ slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
gcc_assert (!*slot);
*slot = vno;
/* If all PHI arguments are constants we need to distinguish
the PHI node via its type. */
- type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
- result += (INTEGRAL_TYPE_P (type)
- + (INTEGRAL_TYPE_P (type)
- ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
+ type = vp1->type;
+ result += vn_hash_type (type);
- FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
+ FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
{
if (phi1op == VN_TOP)
continue;
return result;
}
-/* Return the computed hashcode for phi operation P1. */
-
-static hashval_t
-vn_phi_hash (const void *p1)
-{
- const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
- return vp1->hashcode;
-}
-
/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
static int
-vn_phi_eq (const void *p1, const void *p2)
+vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
- const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
- const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
-
if (vp1->hashcode != vp2->hashcode)
return false;
/* If the PHI nodes do not have compatible types
they are not the same. */
- if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
- TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
+ if (!types_compatible_p (vp1->type, vp2->type))
return false;
/* Any phi in the same block will have its arguments in the
same edge order, because of how we store phi nodes. */
- FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
+ FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
{
- tree phi2op = VEC_index (tree, vp2->phiargs, i);
+ tree phi2op = vp2->phiargs[i];
if (phi1op == VN_TOP || phi2op == VN_TOP)
continue;
if (!expressions_equal_p (phi1op, phi2op))
return false;
}
-static VEC(tree, heap) *shared_lookup_phiargs;
+static vec<tree> shared_lookup_phiargs;
/* Lookup PHI in the current hash table, and return the resulting
value number if it exists in the hash table. Return NULL_TREE if
static tree
vn_phi_lookup (gimple phi)
{
- void **slot;
+ vn_phi_s **slot;
struct vn_phi_s vp1;
unsigned i;
- VEC_truncate (tree, shared_lookup_phiargs, 0);
+ shared_lookup_phiargs.truncate (0);
/* Canonicalize the SSA_NAME's to their value number. */
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree def = PHI_ARG_DEF (phi, i);
def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
- VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
+ shared_lookup_phiargs.safe_push (def);
}
+ vp1.type = TREE_TYPE (gimple_phi_result (phi));
vp1.phiargs = shared_lookup_phiargs;
vp1.block = gimple_bb (phi);
vp1.hashcode = vn_phi_compute_hash (&vp1);
- slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
- NO_INSERT);
+ slot = current_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
if (!slot && current_info == optimistic_info)
- slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
- NO_INSERT);
+ slot = valid_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
if (!slot)
return NULL_TREE;
- return ((vn_phi_t)*slot)->result;
+ return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
- void **slot;
+ vn_phi_s **slot;
vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
unsigned i;
- VEC (tree, heap) *args = NULL;
+ vec<tree> args = vNULL;
/* Canonicalize the SSA_NAME's to their value number. */
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree def = PHI_ARG_DEF (phi, i);
def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
- VEC_safe_push (tree, heap, args, def);
+ args.safe_push (def);
}
vp1->value_id = VN_INFO (result)->value_id;
+ vp1->type = TREE_TYPE (gimple_phi_result (phi));
vp1->phiargs = args;
vp1->block = gimple_bb (phi);
vp1->result = result;
vp1->hashcode = vn_phi_compute_hash (vp1);
- slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
- INSERT);
+ slot = current_info->phis.find_slot_with_hash (vp1, vp1->hashcode, INSERT);
/* Because we iterate over phi operations more than once, it's
possible the slot might already exist here, hence no assert. */
/* Print set of components in strongly connected component SCC to OUT. */
static void
-print_scc (FILE *out, VEC (tree, heap) *scc)
+print_scc (FILE *out, vec<tree> scc)
{
tree var;
unsigned int i;
fprintf (out, "SCC consists of:");
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
{
fprintf (out, " ");
print_generic_expr (out, var, 0);
set_ssa_val_to (tree from, tree to)
{
tree currval = SSA_VAL (from);
+ HOST_WIDE_INT toff, coff;
+
+ /* The only thing we allow as value numbers are ssa_names
+ and invariants. So assert that here. We don't allow VN_TOP
+ as visiting a stmt should produce a value-number other than
+ that.
+ ??? Still VN_TOP can happen for unreachable code, so force
+ it to varying in that case. Not all code is prepared to
+ get VN_TOP on valueization. */
+ if (to == VN_TOP)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Forcing value number to varying on "
+ "receiving VN_TOP\n");
+ to = from;
+ }
+
+ gcc_assert (to != NULL_TREE
+ && (TREE_CODE (to) == SSA_NAME
+ || is_gimple_min_invariant (to)));
if (from != to)
{
to = from;
}
- /* The only thing we allow as value numbers are VN_TOP, ssa_names
- and invariants. So assert that here. */
- gcc_assert (to != NULL_TREE
- && (to == VN_TOP
- || TREE_CODE (to) == SSA_NAME
- || is_gimple_min_invariant (to)));
-
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Setting value number of ");
print_generic_expr (dump_file, to, 0);
}
- if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
+ if (currval != to
+ && !operand_equal_p (currval, to, 0)
+ /* ??? For addresses involving volatile objects or types operand_equal_p
+ does not reliably detect ADDR_EXPRs as equal. We know we are only
+ getting invariant gimple addresses here, so can use
+ get_addr_base_and_unit_offset to do this comparison. */
+ && !(TREE_CODE (currval) == ADDR_EXPR
+ && TREE_CODE (to) == ADDR_EXPR
+ && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
+ == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
+ && coff == toff))
{
VN_INFO (from)->valnum = to;
if (dump_file && (dump_flags & TDF_DETAILS))
}
static bool expr_has_constants (tree expr);
-static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
changed. */
static bool
visit_copy (tree lhs, tree rhs)
{
- /* Follow chains of copies to their destination. */
- while (TREE_CODE (rhs) == SSA_NAME
- && SSA_VAL (rhs) != rhs)
- rhs = SSA_VAL (rhs);
-
/* The copy may have a more interesting constant filled expression
(we don't, since we know our RHS is just an SSA name). */
- if (TREE_CODE (rhs) == SSA_NAME)
- {
- VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
- VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
- }
+ VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
+ VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
+
+ /* And finally valueize. */
+ rhs = SSA_VAL (rhs);
return set_ssa_val_to (lhs, rhs);
}
if (vnresult)
{
- if (vnresult->result_vdef)
+ if (vnresult->result_vdef && vdef)
changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
if (!vnresult->result && lhs)
}
else
{
- void **slot;
+ vn_reference_s **slot;
vn_reference_t vr2;
if (vdef)
changed |= set_ssa_val_to (vdef, vdef);
vr2->hashcode = vr1.hashcode;
vr2->result = lhs;
vr2->result_vdef = vdef;
- slot = htab_find_slot_with_hash (current_info->references,
- vr2, vr2->hashcode, INSERT);
+ slot = current_info->references.find_slot_with_hash (vr2, vr2->hashcode,
+ INSERT);
if (*slot)
free_reference (*slot);
*slot = vr2;
|| TREE_CODE (val) == VIEW_CONVERT_EXPR)
&& TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
{
- tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
+ tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
if ((CONVERT_EXPR_P (tem)
|| TREE_CODE (tem) == VIEW_CONVERT_EXPR)
&& (tem = fold_unary_ignore_overflow (TREE_CODE (val),
tree result;
tree sameval = VN_TOP;
bool allsame = true;
- unsigned i;
/* TODO: We could check for this in init_sccvn, and replace this
with a gcc_assert. */
/* See if all non-TOP arguments have the same value. TOP is
equivalent to everything, so we can ignore it. */
- for (i = 0; i < gimple_phi_num_args (phi); i++)
- {
- tree def = PHI_ARG_DEF (phi, i);
-
- if (TREE_CODE (def) == SSA_NAME)
- def = SSA_VAL (def);
- if (def == VN_TOP)
- continue;
- if (sameval == VN_TOP)
- {
- sameval = def;
- }
- else
- {
- if (!expressions_equal_p (def, sameval))
- {
- allsame = false;
- break;
- }
- }
- }
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
+ if (e->flags & EDGE_EXECUTABLE)
+ {
+ tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
+
+ if (TREE_CODE (def) == SSA_NAME)
+ def = SSA_VAL (def);
+ if (def == VN_TOP)
+ continue;
+ if (sameval == VN_TOP)
+ {
+ sameval = def;
+ }
+ else
+ {
+ if (!expressions_equal_p (def, sameval))
+ {
+ allsame = false;
+ break;
+ }
+ }
+ }
/* If all value numbered to the same value, the phi node has that
value. */
if (is_gimple_min_invariant (sameval))
{
VN_INFO (PHI_RESULT (phi))->has_constants = true;
- VN_INFO (PHI_RESULT (phi))->expr = sameval;
+ if (sameval != VN_TOP)
+ VN_INFO (PHI_RESULT (phi))->expr = sameval;
}
else
{
VN_INFO (PHI_RESULT (phi))->has_constants = false;
- VN_INFO (PHI_RESULT (phi))->expr = sameval;
+ if (sameval != VN_TOP)
+ VN_INFO (PHI_RESULT (phi))->expr = sameval;
}
if (TREE_CODE (sameval) == SSA_NAME)
static bool
stmt_has_constants (gimple stmt)
{
+ tree tem;
+
if (gimple_code (stmt) != GIMPLE_ASSIGN)
return false;
switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
{
- case GIMPLE_UNARY_RHS:
- return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
+ case GIMPLE_TERNARY_RHS:
+ tem = gimple_assign_rhs3 (stmt);
+ if (TREE_CODE (tem) == SSA_NAME)
+ tem = SSA_VAL (tem);
+ if (is_gimple_min_invariant (tem))
+ return true;
+ /* Fallthru. */
case GIMPLE_BINARY_RHS:
- return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
- || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
- case GIMPLE_TERNARY_RHS:
- return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
- || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
- || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
+ tem = gimple_assign_rhs2 (stmt);
+ if (TREE_CODE (tem) == SSA_NAME)
+ tem = SSA_VAL (tem);
+ if (is_gimple_min_invariant (tem))
+ return true;
+ /* Fallthru. */
+
case GIMPLE_SINGLE_RHS:
/* Constants inside reference ops are rarely interesting, but
it can take a lot of looking to find them. */
- return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
+ case GIMPLE_UNARY_RHS:
+ tem = gimple_assign_rhs1 (stmt);
+ if (TREE_CODE (tem) == SSA_NAME)
+ tem = SSA_VAL (tem);
+ return is_gimple_min_invariant (tem);
+
default:
gcc_unreachable ();
}
return false;
}
-/* Replace SSA_NAMES in expr with their value numbers, and return the
- result.
- This is performed in place. */
-
-static tree
-valueize_expr (tree expr)
-{
- switch (TREE_CODE_CLASS (TREE_CODE (expr)))
- {
- case tcc_binary:
- TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
- /* Fallthru. */
- case tcc_unary:
- TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
- break;
- default:;
- }
- return expr;
-}
-
/* Simplify the binary expression RHS, and return the result if
simplified. */
if (VN_INFO (op0)->has_constants
|| TREE_CODE_CLASS (code) == tcc_comparison
|| code == COMPLEX_EXPR)
- op0 = valueize_expr (vn_get_expr_for (op0));
+ op0 = vn_get_expr_for (op0);
else
op0 = vn_valueize (op0);
}
{
if (VN_INFO (op1)->has_constants
|| code == COMPLEX_EXPR)
- op1 = valueize_expr (vn_get_expr_for (op1));
+ op1 = vn_get_expr_for (op1);
else
op1 = vn_valueize (op1);
}
/* Pointer plus constant can be represented as invariant address.
Do so to allow further propagation, see also tree forwprop. */
if (code == POINTER_PLUS_EXPR
- && host_integerp (op1, 1)
+ && tree_fits_uhwi_p (op1)
&& TREE_CODE (op0) == ADDR_EXPR
&& is_gimple_min_invariant (op0))
return build_invariant_address (TREE_TYPE (op0),
TREE_OPERAND (op0, 0),
- TREE_INT_CST_LOW (op1));
+ tree_to_uhwi (op1));
/* Avoid folding if nothing changed. */
if (op0 == gimple_assign_rhs1 (stmt)
orig_op0 = op0;
if (VN_INFO (op0)->has_constants)
- op0 = valueize_expr (vn_get_expr_for (op0));
+ op0 = vn_get_expr_for (op0);
else if (CONVERT_EXPR_CODE_P (code)
|| code == REALPART_EXPR
|| code == IMAGPART_EXPR
{
/* We want to do tree-combining on conversion-like expressions.
Make sure we feed only SSA_NAMEs or constants to fold though. */
- tree tem = valueize_expr (vn_get_expr_for (op0));
+ tree tem = vn_get_expr_for (op0);
if (UNARY_CLASS_P (tem)
|| BINARY_CLASS_P (tem)
|| TREE_CODE (tem) == VIEW_CONVERT_EXPR
}
else
{
- switch (get_gimple_rhs_class (code))
+ /* First try to lookup the simplified expression. */
+ if (simplified)
{
- case GIMPLE_UNARY_RHS:
- case GIMPLE_BINARY_RHS:
- case GIMPLE_TERNARY_RHS:
- changed = visit_nary_op (lhs, stmt);
- break;
- case GIMPLE_SINGLE_RHS:
- switch (TREE_CODE_CLASS (code))
+ enum gimple_rhs_class rhs_class;
+
+
+ rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
+ if ((rhs_class == GIMPLE_UNARY_RHS
+ || rhs_class == GIMPLE_BINARY_RHS
+ || rhs_class == GIMPLE_TERNARY_RHS)
+ && valid_gimple_rhs_p (simplified))
{
- case tcc_reference:
- /* VOP-less references can go through unary case. */
- if ((code == REALPART_EXPR
- || code == IMAGPART_EXPR
- || code == VIEW_CONVERT_EXPR
- || code == BIT_FIELD_REF)
- && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
+ tree result = vn_nary_op_lookup (simplified, NULL);
+ if (result)
{
- changed = visit_nary_op (lhs, stmt);
- break;
+ changed = set_ssa_val_to (lhs, result);
+ goto done;
}
- /* Fallthrough. */
- case tcc_declaration:
- changed = visit_reference_op_load (lhs, rhs1, stmt);
- break;
- default:
- if (code == ADDR_EXPR)
- {
- changed = visit_nary_op (lhs, stmt);
- break;
- }
- else if (code == CONSTRUCTOR)
- {
- changed = visit_nary_op (lhs, stmt);
- break;
- }
- changed = defs_to_varying (stmt);
}
+ }
+
+ /* Otherwise visit the original statement. */
+ switch (vn_get_stmt_kind (stmt))
+ {
+ case VN_NARY:
+ changed = visit_nary_op (lhs, stmt);
+ break;
+ case VN_REFERENCE:
+ changed = visit_reference_op_load (lhs, rhs1, stmt);
break;
default:
changed = defs_to_varying (stmt);
else if (is_gimple_call (stmt))
{
tree lhs = gimple_call_lhs (stmt);
-
- /* ??? We could try to simplify calls. */
-
if (lhs && TREE_CODE (lhs) == SSA_NAME)
{
- if (stmt_has_constants (stmt))
- VN_INFO (lhs)->has_constants = true;
- else
+ /* Try constant folding based on our current lattice. */
+ tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
+ vn_valueize);
+ if (simplified)
{
- /* We reset expr and constantness here because we may
- have been value numbering optimistically, and
- iterating. They may become non-constant in this case,
- even if they were optimistically constant. */
- VN_INFO (lhs)->has_constants = false;
- VN_INFO (lhs)->expr = NULL_TREE;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "call ");
+ print_gimple_expr (dump_file, stmt, 0, 0);
+ fprintf (dump_file, " simplified to ");
+ print_generic_expr (dump_file, simplified, 0);
+ if (TREE_CODE (lhs) == SSA_NAME)
+ fprintf (dump_file, " has constants %d\n",
+ expr_has_constants (simplified));
+ else
+ fprintf (dump_file, "\n");
+ }
}
-
- if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ /* Setting value numbers to constants will occasionally
+ screw up phi congruence because constants are not
+ uniquely associated with a single ssa name that can be
+ looked up. */
+ if (simplified
+ && is_gimple_min_invariant (simplified))
+ {
+ VN_INFO (lhs)->expr = simplified;
+ VN_INFO (lhs)->has_constants = true;
+ changed = set_ssa_val_to (lhs, simplified);
+ if (gimple_vdef (stmt))
+ changed |= set_ssa_val_to (gimple_vdef (stmt),
+ gimple_vuse (stmt));
+ goto done;
+ }
+ else if (simplified
+ && TREE_CODE (simplified) == SSA_NAME)
{
- changed = defs_to_varying (stmt);
+ changed = visit_copy (lhs, simplified);
+ if (gimple_vdef (stmt))
+ changed |= set_ssa_val_to (gimple_vdef (stmt),
+ gimple_vuse (stmt));
goto done;
}
+ else
+ {
+ if (stmt_has_constants (stmt))
+ VN_INFO (lhs)->has_constants = true;
+ else
+ {
+ /* We reset expr and constantness here because we may
+ have been value numbering optimistically, and
+ iterating. They may become non-constant in this case,
+ even if they were optimistically constant. */
+ VN_INFO (lhs)->has_constants = false;
+ VN_INFO (lhs)->expr = NULL_TREE;
+ }
+
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ {
+ changed = defs_to_varying (stmt);
+ goto done;
+ }
+ }
}
if (!gimple_call_internal_p (stmt)
We can value number 2 calls to the same function with the
same vuse and the same operands which are not subsequent
the same, because there is no code in the program that can
- compare the 2 values. */
- || gimple_vdef (stmt)))
+ compare the 2 values... */
+ || (gimple_vdef (stmt)
+ /* ... unless the call returns a pointer which does
+ not alias with anything else. In which case the
+ information that the values are distinct are encoded
+ in the IL. */
+ && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
changed = visit_reference_op_call (lhs, stmt);
else
changed = defs_to_varying (stmt);
array will give you the members in RPO order. */
static void
-sort_scc (VEC (tree, heap) *scc)
+sort_scc (vec<tree> scc)
{
- VEC_qsort (tree, scc, compare_ops);
+ /* vec<T>::qsort with compare_ops yields the members in RPO order
+    (see the comment above this function).  */
+ scc.qsort (compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO. */
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
- void **slot;
+ vn_phi_s **slot;
memcpy (phi, ophi, sizeof (*phi));
- ophi->phiargs = NULL;
- slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
+ /* The memcpy shared OPHI's phiargs vector with PHI; give OPHI a fresh
+    empty vec (the vec<> analogue of the old "ophi->phiargs = NULL"),
+    presumably so PHI alone keeps the argument vector.  */
+ ophi->phiargs.create (0);
+ slot = info->phis.find_slot_with_hash (phi, phi->hashcode, INSERT);
gcc_assert (!*slot);
*slot = phi;
}
copy_reference (vn_reference_t oref, vn_tables_t info)
{
vn_reference_t ref;
- void **slot;
+ vn_reference_s **slot;
ref = (vn_reference_t) pool_alloc (info->references_pool);
memcpy (ref, oref, sizeof (*ref));
- oref->operands = NULL;
- slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
- INSERT);
+ /* REF took over the operands vector via the memcpy; reset OREF's to
+    a fresh empty vec (vec<> analogue of "oref->operands = NULL").  */
+ oref->operands.create (0);
+ slot = info->references.find_slot_with_hash (ref, ref->hashcode, INSERT);
if (*slot)
free_reference (*slot);
*slot = ref;
/* Process a strongly connected component in the SSA graph. */
static void
-process_scc (VEC (tree, heap) *scc)
+process_scc (vec<tree> scc)
{
tree var;
unsigned int i;
unsigned int iterations = 0;
bool changed = true;
- htab_iterator hi;
+ vn_nary_op_iterator_type hin;
+ vn_phi_iterator_type hip;
+ vn_reference_iterator_type hir;
vn_nary_op_t nary;
vn_phi_t phi;
vn_reference_t ref;
/* If the SCC has a single member, just visit it. */
- if (VEC_length (tree, scc) == 1)
+ if (scc.length () == 1)
{
- tree use = VEC_index (tree, scc, 0);
+ tree use = scc[0];
if (VN_INFO (use)->use_processed)
return;
/* We need to make sure it doesn't form a cycle itself, which can
/* As we are value-numbering optimistically we have to
clear the expression tables and the simplified expressions
in each iteration until we converge. */
- htab_empty (optimistic_info->nary);
- htab_empty (optimistic_info->phis);
- htab_empty (optimistic_info->references);
+ optimistic_info->nary.empty ();
+ optimistic_info->phis.empty ();
+ optimistic_info->references.empty ();
obstack_free (&optimistic_info->nary_obstack, NULL);
gcc_obstack_init (&optimistic_info->nary_obstack);
empty_alloc_pool (optimistic_info->phis_pool);
empty_alloc_pool (optimistic_info->references_pool);
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
VN_INFO (var)->expr = NULL_TREE;
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
changed |= visit_use (var);
}
/* Finally, copy the contents of the no longer used optimistic
table to the valid table. */
- FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
+ FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hin)
copy_nary (nary, valid_info);
- FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
+ FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hip)
copy_phi (phi, valid_info);
- FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
+ FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->references,
+ ref, vn_reference_t, hir)
copy_reference (ref, valid_info);
current_info = valid_info;
}
-DEF_VEC_O(ssa_op_iter);
-DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
and process them. Returns true if all went well, false if
static bool
extract_and_process_scc_for_name (tree name)
{
- VEC (tree, heap) *scc = NULL;
+ auto_vec<tree> scc;
tree x;
/* Found an SCC, pop the components off the SCC stack and
process them. */
do
{
- x = VEC_pop (tree, sccstack);
+ x = sccstack.pop ();
VN_INFO (x)->on_sccstack = false;
- VEC_safe_push (tree, heap, scc, x);
+ scc.safe_push (x);
} while (x != name);
/* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
- if (VEC_length (tree, scc)
+ if (scc.length ()
> (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
{
if (dump_file)
fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
- "SCC size %u exceeding %u\n", VEC_length (tree, scc),
+ "SCC size %u exceeding %u\n", scc.length (),
(unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
+
return false;
}
- if (VEC_length (tree, scc) > 1)
+ if (scc.length () > 1)
sort_scc (scc);
if (dump_file && (dump_flags & TDF_DETAILS))
process_scc (scc);
- VEC_free (tree, heap, scc);
-
return true;
}
static bool
DFS (tree name)
{
- VEC(ssa_op_iter, heap) *itervec = NULL;
- VEC(tree, heap) *namevec = NULL;
+ vec<ssa_op_iter> itervec = vNULL;
+ vec<tree> namevec = vNULL;
use_operand_p usep = NULL;
gimple defstmt;
tree use;
VN_INFO (name)->visited = true;
VN_INFO (name)->low = VN_INFO (name)->dfsnum;
- VEC_safe_push (tree, heap, sccstack, name);
+ sccstack.safe_push (name);
VN_INFO (name)->on_sccstack = true;
defstmt = SSA_NAME_DEF_STMT (name);
if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
if (!extract_and_process_scc_for_name (name))
{
- VEC_free (tree, heap, namevec);
- VEC_free (ssa_op_iter, heap, itervec);
+ namevec.release ();
+ itervec.release ();
return false;
}
/* Check if we are done. */
- if (VEC_empty (tree, namevec))
+ if (namevec.is_empty ())
{
- VEC_free (tree, heap, namevec);
- VEC_free (ssa_op_iter, heap, itervec);
+ namevec.release ();
+ itervec.release ();
return true;
}
/* Restore the last use walker and continue walking there. */
use = name;
- name = VEC_pop (tree, namevec);
- memcpy (&iter, VEC_last (ssa_op_iter, itervec),
+ name = namevec.pop ();
+ memcpy (&iter, &itervec.last (),
sizeof (ssa_op_iter));
- VEC_pop (ssa_op_iter, itervec);
+ itervec.pop ();
goto continue_walking;
}
{
/* Recurse by pushing the current use walking state on
the stack and starting over. */
- VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
- VEC_safe_push(tree, heap, namevec, name);
+ itervec.safe_push (iter);
+ namevec.safe_push (name);
name = use;
goto start_over;
static void
allocate_vn_table (vn_tables_t table)
{
- table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
- table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
- table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
- free_reference);
+ table->phis.create (23);
+ table->nary.create (23);
+ table->references.create (23);
gcc_obstack_init (&table->nary_obstack);
table->phis_pool = create_alloc_pool ("VN phis",
static void
free_vn_table (vn_tables_t table)
{
- htab_delete (table->phis);
- htab_delete (table->nary);
- htab_delete (table->references);
+ table->phis.dispose ();
+ table->nary.dispose ();
+ table->references.dispose ();
obstack_free (&table->nary_obstack, NULL);
free_alloc_pool (table->phis_pool);
free_alloc_pool (table->references_pool);
int *rpo_numbers_temp;
calculate_dominance_info (CDI_DOMINATORS);
- sccstack = NULL;
- constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
- free);
+ sccstack.create (0);
+ constant_to_value_id.create (23);
constant_value_ids = BITMAP_ALLOC (NULL);
next_dfs_num = 1;
next_value_id = 1;
- vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
+ vn_ssa_aux_table.create (num_ssa_names + 1);
/* VEC_alloc doesn't actually grow it to the right size, it just
preallocates the space to do so. */
- VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
- num_ssa_names + 1);
+ vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
gcc_obstack_init (&vn_ssa_aux_obstack);
- shared_lookup_phiargs = NULL;
- shared_lookup_references = NULL;
- rpo_numbers = XNEWVEC (int, last_basic_block);
- rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
+ shared_lookup_phiargs.create (0);
+ shared_lookup_references.create (0);
+ rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
+ rpo_numbers_temp =
+ XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
/* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
the i'th block in RPO order is bb. We want to map bb's to RPO
numbers, so we need to rearrange this array. */
- for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
+ for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
rpo_numbers[rpo_numbers_temp[j]] = j;
XDELETE (rpo_numbers_temp);
{
size_t i;
- htab_delete (constant_to_value_id);
+ constant_to_value_id.dispose ();
BITMAP_FREE (constant_value_ids);
- VEC_free (tree, heap, shared_lookup_phiargs);
- VEC_free (vn_reference_op_s, heap, shared_lookup_references);
+ shared_lookup_phiargs.release ();
+ shared_lookup_references.release ();
XDELETEVEC (rpo_numbers);
for (i = 0; i < num_ssa_names; i++)
release_ssa_name (name);
}
obstack_free (&vn_ssa_aux_obstack, NULL);
- VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
+ vn_ssa_aux_table.release ();
- VEC_free (tree, heap, sccstack);
+ sccstack.release ();
free_vn_table (valid_info);
XDELETE (valid_info);
free_vn_table (optimistic_info);
XDELETE (optimistic_info);
}
-/* Set *ID if we computed something useful in RESULT. */
+/* Set *ID according to RESULT. */
static void
set_value_id_for_result (tree result, unsigned int *id)
{
- if (result)
- {
- if (TREE_CODE (result) == SSA_NAME)
- *id = VN_INFO (result)->value_id;
- else if (is_gimple_min_invariant (result))
- *id = get_or_alloc_constant_value_id (result);
- }
+ if (result && TREE_CODE (result) == SSA_NAME)
+ *id = VN_INFO (result)->value_id;
+ else if (result && is_gimple_min_invariant (result))
+ *id = get_or_alloc_constant_value_id (result);
+ /* Behavior change relative to the removed code: a null or
+    non-invariant RESULT now receives a fresh value id instead of
+    leaving *ID untouched.  */
+ else
+ *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables. */
static void
set_hashtable_value_ids (void)
{
- htab_iterator hi;
+ /* Each hash_table specialization has its own iterator type, so one
+    iterator per table replaces the old single htab_iterator.  */
+ vn_nary_op_iterator_type hin;
+ vn_phi_iterator_type hip;
+ vn_reference_iterator_type hir;
vn_nary_op_t vno;
vn_reference_t vr;
vn_phi_t vp;
/* Now set the value ids of the things we had put in the hash
table. */
- FOR_EACH_HTAB_ELEMENT (valid_info->nary,
- vno, vn_nary_op_t, hi)
+ FOR_EACH_HASH_TABLE_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hin)
set_value_id_for_result (vno->result, &vno->value_id);
- FOR_EACH_HTAB_ELEMENT (valid_info->phis,
- vp, vn_phi_t, hi)
+ FOR_EACH_HASH_TABLE_ELEMENT (valid_info->phis, vp, vn_phi_t, hip)
set_value_id_for_result (vp->result, &vp->value_id);
- FOR_EACH_HTAB_ELEMENT (valid_info->references,
- vr, vn_reference_t, hi)
+ FOR_EACH_HASH_TABLE_ELEMENT (valid_info->references, vr, vn_reference_t, hir)
set_value_id_for_result (vr->result, &vr->value_id);
}
+/* Dominator walker that evaluates each block's controlling statement
+   using value numbers and clears EDGE_EXECUTABLE on outgoing edges
+   that cannot be taken.  */
+class cond_dom_walker : public dom_walker
+{
+public:
+ cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
+
+ virtual void before_dom_children (basic_block);
+
+ bool fail; /* Set when value numbering a condition operand failed.  */
+};
+
+/* Clear EDGE_EXECUTABLE on all of BB's outgoing edges when BB is
+   proved unreachable, or on all but the provably-taken edge of BB's
+   final GIMPLE_COND, GIMPLE_SWITCH or GIMPLE_GOTO.  Sets the FAIL
+   member if DFS value numbering of a statement operand fails.  */
+void
+cond_dom_walker::before_dom_children (basic_block bb)
+{
+ edge e;
+ edge_iterator ei;
+
+ if (fail)
+ return;
+
+ /* If any of the predecessor edges that do not come from blocks dominated
+ by us are still marked as possibly executable consider this block
+ reachable. */
+ bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
+ reachable |= (e->flags & EDGE_EXECUTABLE);
+
+ /* If the block is not reachable all outgoing edges are not
+ executable. */
+ if (!reachable)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Marking all outgoing edges of unreachable "
+ "BB %d as not executable\n", bb->index);
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ e->flags &= ~EDGE_EXECUTABLE;
+ return;
+ }
+
+ gimple stmt = last_stmt (bb);
+ if (!stmt)
+ return;
+
+ /* Value-number the last stmts SSA uses. */
+ ssa_op_iter i;
+ tree op;
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
+ if (VN_INFO (op)->visited == false
+ && !DFS (op))
+ {
+ fail = true;
+ return;
+ }
+
+ /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
+ if value-numbering can prove they are not reachable. Handling
+ computed gotos is also possible. */
+ tree val;
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_COND:
+ {
+ tree lhs = gimple_cond_lhs (stmt);
+ tree rhs = gimple_cond_rhs (stmt);
+ /* Work hard in computing the condition and take into account
+ the valueization of the defining stmt. */
+ if (TREE_CODE (lhs) == SSA_NAME)
+ lhs = vn_get_expr_for (lhs);
+ if (TREE_CODE (rhs) == SSA_NAME)
+ rhs = vn_get_expr_for (rhs);
+ val = fold_binary (gimple_cond_code (stmt),
+ boolean_type_node, lhs, rhs);
+ break;
+ }
+ case GIMPLE_SWITCH:
+ val = gimple_switch_index (stmt);
+ break;
+ case GIMPLE_GOTO:
+ val = gimple_goto_dest (stmt);
+ break;
+ default:
+ val = NULL_TREE;
+ break;
+ }
+ if (!val)
+ return;
+
+ edge taken = find_taken_edge (bb, vn_valueize (val));
+ if (!taken)
+ return;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
+ "not executable\n", bb->index, bb->index, taken->dest->index);
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e != taken)
+ e->flags &= ~EDGE_EXECUTABLE;
+}
+
/* Do SCCVN. Returns true if it finished, false if we bailed out
due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
how we use the alias oracle walking during the VN process. */
bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
+ basic_block bb;
size_t i;
tree param;
- bool changed = true;
default_vn_walk_kind = default_vn_walk_kind_;
VN_INFO (def)->valnum = def;
}
+ /* Mark all edges as possibly executable. */
+ FOR_ALL_BB_FN (bb, cfun)
+ {
+ edge_iterator ei;
+ edge e;
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ e->flags |= EDGE_EXECUTABLE;
+ }
+
+ /* Walk all blocks in dominator order, value-numbering the last stmts
+ SSA uses and decide whether outgoing edges are not executable. */
+ cond_dom_walker walker;
+ walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
+ if (walker.fail)
+ {
+ free_scc_vn ();
+ return false;
+ }
+
+ /* Value-number remaining SSA names. */
for (i = 1; i < num_ssa_names; ++i)
{
tree name = ssa_name (i);
info->value_id = get_or_alloc_constant_value_id (info->valnum);
}
- /* Propagate until they stop changing. */
- while (changed)
+ /* Propagate. */
+ for (i = 1; i < num_ssa_names; ++i)
{
- changed = false;
- for (i = 1; i < num_ssa_names; ++i)
- {
- tree name = ssa_name (i);
- vn_ssa_aux_t info;
- if (!name)
- continue;
- info = VN_INFO (name);
- if (TREE_CODE (info->valnum) == SSA_NAME
- && info->valnum != name
- && info->value_id != VN_INFO (info->valnum)->value_id)
- {
- changed = true;
- info->value_id = VN_INFO (info->valnum)->value_id;
- }
- }
+ tree name = ssa_name (i);
+ vn_ssa_aux_t info;
+ if (!name)
+ continue;
+ info = VN_INFO (name);
+ if (TREE_CODE (info->valnum) == SSA_NAME
+ && info->valnum != name
+ && info->value_id != VN_INFO (info->valnum)->value_id)
+ info->value_id = VN_INFO (info->valnum)->value_id;
}
set_hashtable_value_ids ();