+2017-09-14 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * tree.h (build_vector): Take a vec<tree> instead of a tree *.
+ * tree.c (build_vector): Likewise.
+ (build_vector_from_ctor): Update accordingly.
+ (build_vector_from_val): Likewise.
+ * gimple-fold.c (gimple_fold_stmt_to_constant_1): Likewise.
+ * tree-ssa-forwprop.c (simplify_vector_constructor): Likewise.
+ * tree-vect-generic.c (add_rshift): Likewise.
+ (expand_vector_divmod): Likewise.
+ (optimize_vector_constructor): Likewise.
+ * tree-vect-slp.c (vect_get_constant_vectors): Likewise.
+ (vect_transform_slp_perm_load): Likewise.
+ (vect_schedule_slp_instance): Likewise.
+ * tree-vect-stmts.c (vectorizable_bswap): Likewise.
+ (vectorizable_call): Likewise.
+ (vect_gen_perm_mask_any): Likewise. Add elements in order.
+ * expmed.c (make_tree): Likewise.
+ * fold-const.c (fold_negate_expr_1): Use auto_vec<tree> when building
+ a vector passed to build_vector.
+ (fold_convert_const): Likewise.
+ (exact_inverse): Likewise.
+ (fold_ternary_loc): Likewise.
+ (fold_relational_const): Likewise.
+ (const_binop): Likewise. Use VECTOR_CST_ELT directly when operating
+ on VECTOR_CSTs, rather than going through vec_cst_ctor_to_array.
+ (const_unop): Likewise. Store the reduction accumulator in a
+ variable rather than an array.
+ (vec_cst_ctor_to_array): Take the number of elements as a parameter.
+ (fold_vec_perm): Update calls accordingly. Use auto_vec<tree> for
+ the new vector, rather than constructing it after the input arrays.
+ (native_interpret_vector): Use auto_vec<tree> when building
+ a vector passed to build_vector. Add elements in order.
+ * tree-vect-loop.c (get_initial_defs_for_reduction): Use
+ auto_vec<tree> when building a vector passed to build_vector.
+ (vect_create_epilog_for_reduction): Likewise.
+ (vectorizable_induction): Likewise.
+ (get_initial_def_for_reduction): Likewise. Fix indentation of
+ case statements.
+ * config/sparc/sparc.c (sparc_handle_vis_mul8x16): Change n_elts
+ to a vec<tree> *.
+ (sparc_fold_builtin): Use auto_vec<tree> when building a vector
+ passed to build_vector.
+
2017-09-14 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
-   the result into the array N_ELTS, whose elements are of INNER_TYPE. */
+   the result into the vector N_ELTS, whose elements are of INNER_TYPE. */
static void
-sparc_handle_vis_mul8x16 (tree *n_elts, enum sparc_builtins fncode,
+sparc_handle_vis_mul8x16 (vec<tree> *n_elts, enum sparc_builtins fncode,
tree inner_type, tree cst0, tree cst1)
{
unsigned i, num = VECTOR_CST_NELTS (cst0);
int val
= sparc_vis_mul8x16 (TREE_INT_CST_LOW (VECTOR_CST_ELT (cst0, i)),
TREE_INT_CST_LOW (VECTOR_CST_ELT (cst1, i)));
- n_elts[i] = build_int_cst (inner_type, val);
+ n_elts->quick_push (build_int_cst (inner_type, val));
}
break;
int val
= sparc_vis_mul8x16 (TREE_INT_CST_LOW (VECTOR_CST_ELT (cst0, i)),
scale);
- n_elts[i] = build_int_cst (inner_type, val);
+ n_elts->quick_push (build_int_cst (inner_type, val));
}
break;
int val
= sparc_vis_mul8x16 (TREE_INT_CST_LOW (VECTOR_CST_ELT (cst0, i)),
scale);
- n_elts[i] = build_int_cst (inner_type, val);
+ n_elts->quick_push (build_int_cst (inner_type, val));
}
break;
if (TREE_CODE (arg0) == VECTOR_CST)
{
tree inner_type = TREE_TYPE (rtype);
- tree *n_elts;
unsigned i;
- n_elts = XALLOCAVEC (tree, VECTOR_CST_NELTS (arg0));
+ auto_vec<tree, 32> n_elts (VECTOR_CST_NELTS (arg0));
for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
- n_elts[i] = build_int_cst (inner_type,
- TREE_INT_CST_LOW
- (VECTOR_CST_ELT (arg0, i)) << 4);
+ {
+ unsigned HOST_WIDE_INT val
+ = TREE_INT_CST_LOW (VECTOR_CST_ELT (arg0, i));
+ n_elts.quick_push (build_int_cst (inner_type, val << 4));
+ }
return build_vector (rtype, n_elts);
}
break;
if (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
{
tree inner_type = TREE_TYPE (rtype);
- tree *n_elts = XALLOCAVEC (tree, VECTOR_CST_NELTS (arg0));
- sparc_handle_vis_mul8x16 (n_elts, code, inner_type, arg0, arg1);
+ auto_vec<tree, 32> n_elts (VECTOR_CST_NELTS (arg0));
+ sparc_handle_vis_mul8x16 (&n_elts, code, inner_type, arg0, arg1);
return build_vector (rtype, n_elts);
}
break;
if (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
{
- tree *n_elts = XALLOCAVEC (tree, 2 * VECTOR_CST_NELTS (arg0));
+ auto_vec<tree, 32> n_elts (2 * VECTOR_CST_NELTS (arg0));
unsigned i;
for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
{
- n_elts[2*i] = VECTOR_CST_ELT (arg0, i);
- n_elts[2*i+1] = VECTOR_CST_ELT (arg1, i);
+ n_elts.quick_push (VECTOR_CST_ELT (arg0, i));
+ n_elts.quick_push (VECTOR_CST_ELT (arg1, i));
}
return build_vector (rtype, n_elts);
{
int units = CONST_VECTOR_NUNITS (x);
tree itype = TREE_TYPE (type);
- tree *elts;
int i;
/* Build a tree with vector elements. */
- elts = XALLOCAVEC (tree, units);
- for (i = units - 1; i >= 0; --i)
+ auto_vec<tree, 32> elts (units);
+ for (i = 0; i < units; ++i)
{
rtx elt = CONST_VECTOR_ELT (x, i);
- elts[i] = make_tree (itype, elt);
+ elts.quick_push (make_tree (itype, elt));
}
return build_vector (type, elts);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
-static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);
case VECTOR_CST:
{
int count = VECTOR_CST_NELTS (t), i;
- tree *elts = XALLOCAVEC (tree, count);
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
- elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
- if (elts[i] == NULL_TREE)
+ tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
{
tree type = TREE_TYPE (arg1);
int count = VECTOR_CST_NELTS (arg1), i;
- tree *elts = XALLOCAVEC (tree, count);
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
tree elem1 = VECTOR_CST_ELT (arg1, i);
tree elem2 = VECTOR_CST_ELT (arg2, i);
- elts[i] = const_binop (code, elem1, elem2);
+ tree elt = const_binop (code, elem1, elem2);
/* It is possible that const_binop cannot handle the given
code and return NULL_TREE */
- if (elts[i] == NULL_TREE)
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
{
tree type = TREE_TYPE (arg1);
int count = VECTOR_CST_NELTS (arg1), i;
- tree *elts = XALLOCAVEC (tree, count);
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
tree elem1 = VECTOR_CST_ELT (arg1, i);
- elts[i] = const_binop (code, elem1, arg2);
+ tree elt = const_binop (code, elem1, arg2);
/* It is possible that const_binop cannot handle the given
code and return NULL_TREE. */
- if (elts[i] == NULL_TREE)
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
{
- tree *elts;
unsigned int out_nelts, in_nelts, i;
if (TREE_CODE (arg1) != VECTOR_CST
gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
&& out_nelts == TYPE_VECTOR_SUBPARTS (type));
- elts = XALLOCAVEC (tree, out_nelts);
- if (!vec_cst_ctor_to_array (arg1, elts)
- || !vec_cst_ctor_to_array (arg2, elts + in_nelts))
- return NULL_TREE;
-
+ auto_vec<tree, 32> elts (out_nelts);
for (i = 0; i < out_nelts; i++)
{
- elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
- ? NOP_EXPR : FIX_TRUNC_EXPR,
- TREE_TYPE (type), elts[i]);
- if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ tree elt = (i < in_nelts
+ ? VECTOR_CST_ELT (arg1, i)
+ : VECTOR_CST_ELT (arg2, i - in_nelts));
+ elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
+ ? NOP_EXPR : FIX_TRUNC_EXPR,
+ TREE_TYPE (type), elt);
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case VEC_WIDEN_MULT_ODD_EXPR:
{
unsigned int out_nelts, in_nelts, out, ofs, scale;
- tree *elts;
if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
return NULL_TREE;
gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
&& out_nelts == TYPE_VECTOR_SUBPARTS (type));
- elts = XALLOCAVEC (tree, in_nelts * 2);
- if (!vec_cst_ctor_to_array (arg1, elts)
- || !vec_cst_ctor_to_array (arg2, elts + in_nelts))
- return NULL_TREE;
-
if (code == VEC_WIDEN_MULT_LO_EXPR)
scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
else if (code == VEC_WIDEN_MULT_HI_EXPR)
else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
scale = 1, ofs = 1;
+ auto_vec<tree, 32> elts (out_nelts);
for (out = 0; out < out_nelts; out++)
{
- unsigned int in1 = (out << scale) + ofs;
- unsigned int in2 = in1 + in_nelts;
- tree t1, t2;
-
- t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
- t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
+ unsigned int in = (out << scale) + ofs;
+ tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg1, in));
+ tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg2, in));
if (t1 == NULL_TREE || t2 == NULL_TREE)
return NULL_TREE;
- elts[out] = const_binop (MULT_EXPR, t1, t2);
- if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
+ tree elt = const_binop (MULT_EXPR, t1, t2);
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
/* Perform BIT_NOT_EXPR on each element individually. */
else if (TREE_CODE (arg0) == VECTOR_CST)
{
- tree *elements;
tree elem;
unsigned count = VECTOR_CST_NELTS (arg0), i;
- elements = XALLOCAVEC (tree, count);
+ auto_vec<tree, 32> elements (count);
for (i = 0; i < count; i++)
{
elem = VECTOR_CST_ELT (arg0, i);
elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
if (elem == NULL_TREE)
break;
- elements[i] = elem;
+ elements.quick_push (elem);
}
if (i == count)
return build_vector (type, elements);
case VEC_UNPACK_FLOAT_HI_EXPR:
{
unsigned int out_nelts, in_nelts, i;
- tree *elts;
enum tree_code subcode;
if (TREE_CODE (arg0) != VECTOR_CST)
out_nelts = in_nelts / 2;
gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
- elts = XALLOCAVEC (tree, in_nelts);
- if (!vec_cst_ctor_to_array (arg0, elts))
- return NULL_TREE;
-
+ unsigned int offset = 0;
if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
|| code == VEC_UNPACK_FLOAT_LO_EXPR))
- elts += out_nelts;
+ offset = out_nelts;
if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
subcode = NOP_EXPR;
else
subcode = FLOAT_EXPR;
+ auto_vec<tree, 32> elts (out_nelts);
for (i = 0; i < out_nelts; i++)
{
- elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
- if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ tree elt = fold_convert_const (subcode, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg0, i + offset));
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case REDUC_PLUS_EXPR:
{
unsigned int nelts, i;
- tree *elts;
enum tree_code subcode;
if (TREE_CODE (arg0) != VECTOR_CST)
return NULL_TREE;
nelts = VECTOR_CST_NELTS (arg0);
- elts = XALLOCAVEC (tree, nelts);
- if (!vec_cst_ctor_to_array (arg0, elts))
- return NULL_TREE;
-
switch (code)
{
case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
default: gcc_unreachable ();
}
+ tree res = VECTOR_CST_ELT (arg0, 0);
for (i = 1; i < nelts; i++)
{
- elts[0] = const_binop (subcode, elts[0], elts[i]);
- if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
+ res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i));
+ if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
return NULL_TREE;
}
- return elts[0];
+ return res;
}
default:
{
int len = VECTOR_CST_NELTS (arg1);
tree elttype = TREE_TYPE (type);
- tree *v = XALLOCAVEC (tree, len);
+ auto_vec<tree, 32> v (len);
for (int i = 0; i < len; ++i)
{
tree elt = VECTOR_CST_ELT (arg1, i);
tree cvt = fold_convert_const (code, elttype, elt);
if (cvt == NULL_TREE)
return NULL_TREE;
- v[i] = cvt;
+ v.quick_push (cvt);
}
return build_vector (type, v);
}
{
tree etype, elem;
int i, size, count;
- tree *elements;
etype = TREE_TYPE (type);
size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
if (size * count > len)
return NULL_TREE;
- elements = XALLOCAVEC (tree, count);
- for (i = count - 1; i >= 0; i--)
+ auto_vec<tree, 32> elements (count);
+ for (i = 0; i < count; ++i)
{
elem = native_interpret_expr (etype, ptr+(i*size), size);
if (!elem)
return NULL_TREE;
- elements[i] = elem;
+ elements.quick_push (elem);
}
return build_vector (type, elements);
}
/* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
- CONSTRUCTOR ARG into array ELTS and return true if successful. */
+ CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
+ true if successful. */
static bool
-vec_cst_ctor_to_array (tree arg, tree *elts)
+vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
+ unsigned int i;
if (TREE_CODE (arg) == VECTOR_CST)
{
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
bool need_ctor = false;
gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
|| TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts * 3);
- if (!vec_cst_ctor_to_array (arg0, elts)
- || !vec_cst_ctor_to_array (arg1, elts + nelts))
+ tree *in_elts = XALLOCAVEC (tree, nelts * 2);
+ if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
+ || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
return NULL_TREE;
+ auto_vec<tree, 32> out_elts (nelts);
for (i = 0; i < nelts; i++)
{
- if (!CONSTANT_CLASS_P (elts[sel[i]]))
+ if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
need_ctor = true;
- elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
+ out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
}
if (need_ctor)
vec<constructor_elt, va_gc> *v;
vec_alloc (v, nelts);
for (i = 0; i < nelts; i++)
- CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
+ CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
return build_constructor (type, v);
}
else
- return build_vector (type, &elts[2 * nelts]);
+ return build_vector (type, out_elts);
}
/* Try to fold a pointer difference of type TYPE two address expressions of
exact_inverse (tree type, tree cst)
{
REAL_VALUE_TYPE r;
- tree unit_type, *elts;
+ tree unit_type;
machine_mode mode;
unsigned vec_nelts, i;
return NULL_TREE;
case VECTOR_CST:
- vec_nelts = VECTOR_CST_NELTS (cst);
- elts = XALLOCAVEC (tree, vec_nelts);
- unit_type = TREE_TYPE (type);
- mode = TYPE_MODE (unit_type);
+ {
+ vec_nelts = VECTOR_CST_NELTS (cst);
+ unit_type = TREE_TYPE (type);
+ mode = TYPE_MODE (unit_type);
- for (i = 0; i < vec_nelts; i++)
- {
- r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
- if (!exact_real_inverse (mode, &r))
- return NULL_TREE;
- elts[i] = build_real (unit_type, r);
- }
+ auto_vec<tree, 32> elts (vec_nelts);
+ for (i = 0; i < vec_nelts; i++)
+ {
+ r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
+ if (!exact_real_inverse (mode, &r))
+ return NULL_TREE;
+ elts.quick_push (build_real (unit_type, r));
+ }
- return build_vector (type, elts);
+ return build_vector (type, elts);
+ }
default:
return NULL_TREE;
if (n == 1)
return VECTOR_CST_ELT (arg0, idx);
- tree *vals = XALLOCAVEC (tree, n);
+ auto_vec<tree, 32> vals (n);
for (unsigned i = 0; i < n; ++i)
- vals[i] = VECTOR_CST_ELT (arg0, idx + i);
+ vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
return build_vector (type, vals);
}
}
if (need_mask_canon && arg2 == op2)
{
- tree *tsel = XALLOCAVEC (tree, nelts);
tree eltype = TREE_TYPE (TREE_TYPE (arg2));
+ auto_vec<tree, 32> tsel (nelts);
for (i = 0; i < nelts; i++)
- tsel[i] = build_int_cst (eltype, sel[i]);
+ tsel.quick_push (build_int_cst (eltype, sel[i]));
op2 = build_vector (TREE_TYPE (arg2), tsel);
changed = true;
}
else
{
unsigned int nelts = VECTOR_CST_NELTS (arg0);
- tree *elts = XALLOCAVEC (tree, nelts);
- memcpy (elts, VECTOR_CST_ELTS (arg0), sizeof (tree) * nelts);
+ auto_vec<tree, 32> elts (nelts);
+ elts.quick_grow (nelts);
+ memcpy (&elts[0], VECTOR_CST_ELTS (arg0),
+ sizeof (tree) * nelts);
elts[k] = arg1;
return build_vector (type, elts);
}
return constant_boolean_node (true, type);
}
unsigned count = VECTOR_CST_NELTS (op0);
- tree *elts = XALLOCAVEC (tree, count);
gcc_assert (VECTOR_CST_NELTS (op1) == count
&& TYPE_VECTOR_SUBPARTS (type) == count);
+ auto_vec<tree, 32> elts (count);
for (unsigned i = 0; i < count; i++)
{
tree elem_type = TREE_TYPE (type);
if (tem == NULL_TREE)
return NULL_TREE;
- elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
+ elts.quick_push (build_int_cst (elem_type,
+ integer_zerop (tem) ? 0 : -1));
}
return build_vector (type, elts);
&& (CONSTRUCTOR_NELTS (rhs)
== TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
{
- unsigned i;
- tree val, *vec;
+ unsigned i, nelts;
+ tree val;
- vec = XALLOCAVEC (tree,
- TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
+ nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
+ auto_vec<tree, 32> vec (nelts);
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
{
val = (*valueize) (val);
if (TREE_CODE (val) == INTEGER_CST
|| TREE_CODE (val) == REAL_CST
|| TREE_CODE (val) == FIXED_CST)
- vec[i] = val;
+ vec.quick_push (val);
else
return NULL_TREE;
}
}
else
{
- tree mask_type, *mask_elts;
+ tree mask_type;
if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
return false;
|| GET_MODE_SIZE (TYPE_MODE (mask_type))
!= GET_MODE_SIZE (TYPE_MODE (type)))
return false;
- mask_elts = XALLOCAVEC (tree, nelts);
+ auto_vec<tree, 32> mask_elts (nelts);
for (i = 0; i < nelts; i++)
- mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
+ mask_elts.quick_push (build_int_cst (TREE_TYPE (mask_type), sel[i]));
op2 = build_vector (mask_type, mask_elts);
if (conv_code == ERROR_MARK)
gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
if (op != unknown_optab
&& optab_handler (op, TYPE_MODE (type)) != CODE_FOR_nothing)
{
- tree *vec = XALLOCAVEC (tree, nunits);
+ auto_vec<tree, 32> vec (nunits);
for (i = 0; i < nunits; i++)
- vec[i] = build_int_cst (TREE_TYPE (type), shiftcnts[i]);
+ vec.quick_push (build_int_cst (TREE_TYPE (type), shiftcnts[i]));
return gimplify_build2 (gsi, RSHIFT_EXPR, type, op0,
build_vector (type, vec));
}
unsigned int i;
signop sign_p = TYPE_SIGN (TREE_TYPE (type));
unsigned HOST_WIDE_INT mask = GET_MODE_MASK (TYPE_MODE (TREE_TYPE (type)));
- tree *vec;
tree cur_op, mulcst, tem;
optab op;
mode = -2;
}
- vec = XALLOCAVEC (tree, nunits);
-
if (use_pow2)
{
tree addend = NULL_TREE;
mask_type = build_same_sized_truth_vector_type (type);
zero = build_zero_cst (type);
cond = build2 (LT_EXPR, mask_type, op0, zero);
+ auto_vec<tree, 32> vec (nunits);
for (i = 0; i < nunits; i++)
- vec[i] = build_int_cst (TREE_TYPE (type),
- (HOST_WIDE_INT_1U
- << shifts[i]) - 1);
+ vec.quick_push (build_int_cst (TREE_TYPE (type),
+ (HOST_WIDE_INT_1U
+ << shifts[i]) - 1));
cst = build_vector (type, vec);
addend = make_ssa_name (type);
stmt = gimple_build_assign (addend, VEC_COND_EXPR, cond,
else
{
tree mask;
+ auto_vec<tree, 32> vec (nunits);
for (i = 0; i < nunits; i++)
- vec[i] = build_int_cst (TREE_TYPE (type),
- (HOST_WIDE_INT_1U
- << shifts[i]) - 1);
+ vec.quick_push (build_int_cst (TREE_TYPE (type),
+ (HOST_WIDE_INT_1U
+ << shifts[i]) - 1));
mask = build_vector (type, vec);
op = optab_for_tree_code (BIT_AND_EXPR, type, optab_default);
if (op != unknown_optab
return NULL_TREE;
}
+ auto_vec<tree, 32> vec (nunits);
for (i = 0; i < nunits; i++)
- vec[i] = build_int_cst (TREE_TYPE (type), mulc[i]);
+ vec.quick_push (build_int_cst (TREE_TYPE (type), mulc[i]));
mulcst = build_vector (type, vec);
cur_op = gimplify_build2 (gsi, MULT_HIGHPART_EXPR, type, cur_op, mulcst);
unsigned int i, j, nelts = TYPE_VECTOR_SUBPARTS (type);
bool all_same = true;
constructor_elt *elt;
- tree *cst;
gimple *g;
tree base = NULL_TREE;
optab op;
}
if (all_same)
return;
- cst = XALLOCAVEC (tree, nelts);
+ auto_vec<tree, 32> cst (nelts);
for (i = 0; i < nelts; i++)
{
- tree this_base = CONSTRUCTOR_ELT (rhs, i)->value;;
- cst[i] = build_zero_cst (TREE_TYPE (base));
+ tree this_base = CONSTRUCTOR_ELT (rhs, i)->value;
+ tree elt = build_zero_cst (TREE_TYPE (base));
while (this_base != base)
{
g = SSA_NAME_DEF_STMT (this_base);
- cst[i] = fold_binary (PLUS_EXPR, TREE_TYPE (base),
- cst[i], gimple_assign_rhs2 (g));
- if (cst[i] == NULL_TREE
- || TREE_CODE (cst[i]) != INTEGER_CST
- || TREE_OVERFLOW (cst[i]))
+ elt = fold_binary (PLUS_EXPR, TREE_TYPE (base),
+ elt, gimple_assign_rhs2 (g));
+ if (elt == NULL_TREE
+ || TREE_CODE (elt) != INTEGER_CST
+ || TREE_OVERFLOW (elt))
return;
this_base = gimple_assign_rhs1 (g);
}
+ cst.quick_push (elt);
}
for (i = 0; i < nelts; i++)
CONSTRUCTOR_ELT (rhs, i)->value = base;
enum tree_code code = gimple_assign_rhs_code (stmt);
tree def_for_init;
tree init_def;
- tree *elts;
int i;
bool nested_in_vect_loop = false;
REAL_VALUE_TYPE real_init_val = dconst0;
switch (code)
{
- case WIDEN_SUM_EXPR:
- case DOT_PROD_EXPR:
- case SAD_EXPR:
- case PLUS_EXPR:
- case MINUS_EXPR:
- case BIT_IOR_EXPR:
- case BIT_XOR_EXPR:
- case MULT_EXPR:
- case BIT_AND_EXPR:
+ case WIDEN_SUM_EXPR:
+ case DOT_PROD_EXPR:
+ case SAD_EXPR:
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ case MULT_EXPR:
+ case BIT_AND_EXPR:
+ {
-	  /* ADJUSMENT_DEF is NULL when called from
+	  /* ADJUSTMENT_DEF is NULL when called from
vect_create_epilog_for_reduction to vectorize double reduction. */
if (adjustment_def)
def_for_init = build_int_cst (scalar_type, int_init_val);
/* Create a vector of '0' or '1' except the first element. */
- elts = XALLOCAVEC (tree, nunits);
+ auto_vec<tree, 32> elts (nunits);
+ elts.quick_grow (nunits);
for (i = nunits - 2; i >= 0; --i)
elts[i + 1] = def_for_init;
/* Option1: the first element is '0' or '1' as well. */
- if (adjustment_def)
- {
+ if (adjustment_def)
+ {
elts[0] = def_for_init;
- init_def = build_vector (vectype, elts);
- break;
- }
+
+ init_def = build_vector (vectype, elts);
+ break;
+ }
/* Option2: the first element is INIT_VAL. */
elts[0] = init_val;
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]);
init_def = build_constructor (vectype, v);
}
+ }
+ break;
- break;
-
- case MIN_EXPR:
- case MAX_EXPR:
- case COND_EXPR:
+ case MIN_EXPR:
+ case MAX_EXPR:
+ case COND_EXPR:
+ {
if (adjustment_def)
{
*adjustment_def = NULL_TREE;
if (! gimple_seq_empty_p (stmts))
gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
init_def = build_vector_from_val (vectype, init_val);
- break;
+ }
+ break;
- default:
- gcc_unreachable ();
+ default:
+ gcc_unreachable ();
}
return init_def;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
unsigned nunits;
tree vec_cst;
- tree *elts;
unsigned j, number_of_places_left_in_vector;
tree vector_type, scalar_type;
tree vop;
number_of_places_left_in_vector = nunits;
constant_p = true;
- elts = XALLOCAVEC (tree, nunits);
+ auto_vec<tree, 32> elts (nunits);
+ elts.quick_grow (nunits);
for (j = 0; j < number_of_copies; j++)
{
for (i = group_size - 1; stmts.iterate (i, &stmt); i--)
vector size (STEP). */
/* Create a {1,2,3,...} vector. */
- tree *vtemp = XALLOCAVEC (tree, nunits_out);
+ auto_vec<tree, 32> vtemp (nunits_out);
for (k = 0; k < nunits_out; ++k)
- vtemp[k] = build_int_cst (cr_index_scalar_type, k + 1);
+ vtemp.quick_push (build_int_cst (cr_index_scalar_type, k + 1));
tree series_vect = build_vector (cr_index_vector_type, vtemp);
/* Create a vector of the step value. */
unsigned ivn;
for (ivn = 0; ivn < nivs; ++ivn)
{
- tree *elts = XALLOCAVEC (tree, nunits);
+ auto_vec<tree, 32> elts (nunits);
bool constant_p = true;
for (unsigned eltn = 0; eltn < nunits; ++eltn)
{
}
if (! CONSTANT_CLASS_P (elt))
constant_p = false;
- elts[eltn] = elt;
+ elts.quick_push (elt);
}
if (constant_p)
new_vec = build_vector (vectype, elts);
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
unsigned nunits;
tree vec_cst;
- tree *elts;
unsigned j, number_of_places_left_in_vector;
tree vector_type;
tree vop;
number_of_places_left_in_vector = nunits;
constant_p = true;
- elts = XALLOCAVEC (tree, nunits);
+ auto_vec<tree, 32> elts (nunits);
+ elts.quick_grow (nunits);
bool place_after_defs = false;
for (j = 0; j < number_of_copies; j++)
{
if (! noop_p)
{
- tree *mask_elts = XALLOCAVEC (tree, nunits);
+ auto_vec<tree, 32> mask_elts (nunits);
for (int l = 0; l < nunits; ++l)
- mask_elts[l] = build_int_cst (mask_element_type,
- mask[l]);
+ mask_elts.quick_push (build_int_cst (mask_element_type,
+ mask[l]));
mask_vec = build_vector (mask_type, mask_elts);
}
unsigned k = 0, l;
for (j = 0; j < v0.length (); ++j)
{
- tree *melts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (vectype));
- for (l = 0; l < TYPE_VECTOR_SUBPARTS (vectype); ++l)
+ unsigned int nunits = TYPE_VECTOR_SUBPARTS (vectype);
+ auto_vec<tree, 32> melts (nunits);
+ for (l = 0; l < nunits; ++l)
{
if (k >= group_size)
k = 0;
- melts[l] = build_int_cst
- (meltype, mask[k++] * TYPE_VECTOR_SUBPARTS (vectype) + l);
+ tree t = build_int_cst (meltype, mask[k++] * nunits + l);
+ melts.quick_push (t);
}
tmask = build_vector (mvectype, melts);
if (! char_vectype)
return false;
- unsigned char *elts
- = XALLOCAVEC (unsigned char, TYPE_VECTOR_SUBPARTS (char_vectype));
+ unsigned int num_bytes = TYPE_VECTOR_SUBPARTS (char_vectype);
+ unsigned char *elts = XALLOCAVEC (unsigned char, num_bytes);
unsigned char *elt = elts;
- unsigned word_bytes = TYPE_VECTOR_SUBPARTS (char_vectype) / nunits;
+ unsigned word_bytes = num_bytes / nunits;
for (unsigned i = 0; i < nunits; ++i)
for (unsigned j = 0; j < word_bytes; ++j)
*elt++ = (i + 1) * word_bytes - j - 1;
return true;
}
- tree *telts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (char_vectype));
- for (unsigned i = 0; i < TYPE_VECTOR_SUBPARTS (char_vectype); ++i)
- telts[i] = build_int_cst (char_type_node, elts[i]);
+ auto_vec<tree, 32> telts (num_bytes);
+ for (unsigned i = 0; i < num_bytes; ++i)
+ telts.quick_push (build_int_cst (char_type_node, elts[i]));
tree bswap_vconst = build_vector (char_vectype, telts);
/* Transform. */
if (gimple_call_internal_p (stmt)
&& gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
{
- tree *v = XALLOCAVEC (tree, nunits_out);
- int k;
- for (k = 0; k < nunits_out; ++k)
- v[k] = build_int_cst (unsigned_type_node, j * nunits_out + k);
+ auto_vec<tree, 32> v (nunits_out);
+ for (int k = 0; k < nunits_out; ++k)
+ v.quick_push (build_int_cst (unsigned_type_node,
+ j * nunits_out + k));
tree cst = build_vector (vectype_out, v);
tree new_var
= vect_get_new_ssa_name (vectype_out, vect_simple_var, "cst_");
tree
vect_gen_perm_mask_any (tree vectype, const unsigned char *sel)
{
- tree mask_elt_type, mask_type, mask_vec, *mask_elts;
+ tree mask_elt_type, mask_type, mask_vec;
int i, nunits;
nunits = TYPE_VECTOR_SUBPARTS (vectype);
(int_mode_for_mode (TYPE_MODE (TREE_TYPE (vectype))).require (), 1);
mask_type = get_vectype_for_scalar_type (mask_elt_type);
- mask_elts = XALLOCAVEC (tree, nunits);
- for (i = nunits - 1; i >= 0; i--)
- mask_elts[i] = build_int_cst (mask_elt_type, sel[i]);
+ auto_vec<tree, 32> mask_elts (nunits);
+ for (i = 0; i < nunits; ++i)
+ mask_elts.quick_push (build_int_cst (mask_elt_type, sel[i]));
mask_vec = build_vector (mask_type, mask_elts);
return mask_vec;
}
/* Return a new VECTOR_CST node whose type is TYPE and whose values
- are in a list pointed to by VALS. */
+ are given by VALS. */
tree
-build_vector (tree type, tree *vals MEM_STAT_DECL)
+build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
{
+ unsigned int nelts = vals.length ();
+ gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
int over = 0;
unsigned cnt = 0;
- tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
+ tree v = make_vector (nelts);
TREE_TYPE (v) = type;
/* Iterate through elements and check for overflow. */
- for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
+ for (cnt = 0; cnt < nelts; ++cnt)
{
tree value = vals[cnt];
tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
- tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
- unsigned HOST_WIDE_INT idx, pos = 0;
+ unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
+ unsigned HOST_WIDE_INT idx;
tree value;
+ auto_vec<tree, 32> vec (nelts);
FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
{
if (TREE_CODE (value) == VECTOR_CST)
for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
- vec[pos++] = VECTOR_CST_ELT (value, i);
+ vec.quick_push (VECTOR_CST_ELT (value, i));
else
- vec[pos++] = value;
+ vec.quick_push (value);
}
- while (pos < TYPE_VECTOR_SUBPARTS (type))
- vec[pos++] = build_zero_cst (TREE_TYPE (type));
+ while (vec.length () < nelts)
+ vec.quick_push (build_zero_cst (TREE_TYPE (type)));
return build_vector (type, vec);
}
if (CONSTANT_CLASS_P (sc))
{
- tree *v = XALLOCAVEC (tree, nunits);
+ auto_vec<tree, 32> v (nunits);
for (i = 0; i < nunits; ++i)
- v[i] = sc;
+ v.quick_push (sc);
return build_vector (vectype, v);
}
else
extern tree build_int_cstu (tree type, unsigned HOST_WIDE_INT cst);
extern tree build_int_cst_type (tree, HOST_WIDE_INT);
extern tree make_vector (unsigned CXX_MEM_STAT_INFO);
-extern tree build_vector (tree, tree * CXX_MEM_STAT_INFO);
+extern tree build_vector (tree, vec<tree> CXX_MEM_STAT_INFO);
extern tree build_vector_from_ctor (tree, vec<constructor_elt, va_gc> *);
extern tree build_vector_from_val (tree, tree);
extern void recompute_constructor_flags (tree);