static void expand_vector_operations_1 (gimple_stmt_iterator *);
+/* Return the number of elements in a vector type TYPE that we have
+   already decided needs to be expanded piecewise.  We don't support
+   this kind of expansion for variable-length vectors, since we should
+   always check for target support before introducing uses of those.
+   NOTE(review): assumes TYPE_VECTOR_SUBPARTS is a compile-time
+   constant here; if it becomes a poly_uint64 later, this needs
+   .to_constant () — confirm against the tree at apply time.  */
+static unsigned int
+nunits_for_known_piecewise_op (const_tree type)
+{
+  return TYPE_VECTOR_SUBPARTS (type);
+}
+
+/* Return true if TYPE1 has more elements than TYPE2, where either
+   type may be a vector or a scalar.  A scalar counts as a single
+   element; known_gt makes the answer conservatively false unless
+   the comparison holds for all possible runtime lengths.  */
+
+static inline bool
+subparts_gt (tree type1, tree type2)
+{
+  poly_uint64 n1 = VECTOR_TYPE_P (type1) ? TYPE_VECTOR_SUBPARTS (type1) : 1;
+  poly_uint64 n2 = VECTOR_TYPE_P (type2) ? TYPE_VECTOR_SUBPARTS (type2) : 1;
+  return known_gt (n1, n2);
+}
/* Build a constant of type TYPE, made of VALUE's bits replicated
every TYPE_SIZE (INNER_TYPE) bits to fit TYPE's precision. */
vec<constructor_elt, va_gc> *v;
tree part_width = TYPE_SIZE (inner_type);
tree index = bitsize_int (0);
- int nunits = TYPE_VECTOR_SUBPARTS (type);
+ int nunits = nunits_for_known_piecewise_op (type);
int delta = tree_to_uhwi (part_width)
/ tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
int i;
if (INTEGRAL_TYPE_P (TREE_TYPE (type))
&& parts_per_word >= 4
- && TYPE_VECTOR_SUBPARTS (type) >= 4)
+ && nunits_for_known_piecewise_op (type) >= 4)
return expand_vector_parallel (gsi, f_parallel,
type, a, b, code);
else
add_rshift (gimple_stmt_iterator *gsi, tree type, tree op0, int *shiftcnts)
{
optab op;
- unsigned int i, nunits = TYPE_VECTOR_SUBPARTS (type);
+ unsigned int i, nunits = nunits_for_known_piecewise_op (type);
bool scalar_shift = true;
for (i = 1; i < nunits; i++)
bool has_vector_shift = true;
int mode = -1, this_mode;
int pre_shift = -1, post_shift;
- unsigned int nunits = TYPE_VECTOR_SUBPARTS (type);
+ unsigned int nunits = nunits_for_known_piecewise_op (type);
int *shifts = XALLOCAVEC (int, nunits * 4);
int *pre_shifts = shifts + nunits;
int *post_shifts = pre_shifts + nunits;
tree index = bitsize_int (0);
tree comp_width = width;
tree comp_index = index;
- int nunits = TYPE_VECTOR_SUBPARTS (type);
int i;
location_t loc = gimple_location (gsi_stmt (*gsi));
warning_at (loc, OPT_Wvector_operation_performance,
"vector condition will be expanded piecewise");
+ int nunits = nunits_for_known_piecewise_op (type);
vec_alloc (v, nunits);
for (i = 0; i < nunits; i++)
{
vect_type = TREE_TYPE (vect);
vect_elt_type = TREE_TYPE (vect_type);
- elements = TYPE_VECTOR_SUBPARTS (vect_type);
+ elements = nunits_for_known_piecewise_op (vect_type);
if (TREE_CODE (idx) == INTEGER_CST)
{
tree vector_compute_type
= type_for_widest_vector_mode (TREE_TYPE (type), op);
if (vector_compute_type != NULL_TREE
- && (TYPE_VECTOR_SUBPARTS (vector_compute_type)
- < TYPE_VECTOR_SUBPARTS (compute_type))
+ && subparts_gt (compute_type, vector_compute_type)
&& TYPE_VECTOR_SUBPARTS (vector_compute_type) > 1
&& (optab_handler (op, TYPE_MODE (vector_compute_type))
!= CODE_FOR_nothing))
return compute_type;
}
-/* Helper function of expand_vector_operations_1. Return number of
- vector elements for vector types or 1 for other types. */
-
-static inline int
-count_type_subparts (tree type)
-{
- return VECTOR_TYPE_P (type) ? TYPE_VECTOR_SUBPARTS (type) : 1;
-}
-
static tree
do_cond (gimple_stmt_iterator *gsi, tree inner_type, tree a, tree b,
tree bitpos, tree bitsize, enum tree_code code,
/* The rtl expander will expand vector/scalar as vector/vector
if necessary. Pick one with wider vector type. */
tree compute_vtype = get_compute_type (code, opv, type);
- if (count_type_subparts (compute_vtype)
- > count_type_subparts (compute_type))
+ if (subparts_gt (compute_vtype, compute_type))
{
compute_type = compute_vtype;
op = opv;
tree compute_rtype = get_compute_type (RSHIFT_EXPR, opr, type);
/* The rtl expander will expand vector/scalar as vector/vector
if necessary. Pick one with wider vector type. */
- if (count_type_subparts (compute_lvtype)
- > count_type_subparts (compute_ltype))
+ if (subparts_gt (compute_lvtype, compute_ltype))
{
compute_ltype = compute_lvtype;
opl = oplv;
}
- if (count_type_subparts (compute_rvtype)
- > count_type_subparts (compute_rtype))
+ if (subparts_gt (compute_rvtype, compute_rtype))
{
compute_rtype = compute_rvtype;
opr = oprv;
/* Pick the narrowest type from LSHIFT_EXPR, RSHIFT_EXPR and
BIT_IOR_EXPR. */
compute_type = compute_ltype;
- if (count_type_subparts (compute_type)
- > count_type_subparts (compute_rtype))
+ if (subparts_gt (compute_type, compute_rtype))
compute_type = compute_rtype;
- if (count_type_subparts (compute_type)
- > count_type_subparts (compute_otype))
+ if (subparts_gt (compute_type, compute_otype))
compute_type = compute_otype;
/* Verify all 3 operations can be performed in that type. */
if (compute_type != TREE_TYPE (type))