tree t;
if (!expand_vec_cmp_expr_p (TREE_TYPE (op0), type, code)
&& !expand_vec_cond_expr_p (type, TREE_TYPE (op0), code))
- t = expand_vector_piecewise (gsi, do_compare, type,
- TREE_TYPE (TREE_TYPE (op0)), op0, op1, code);
+ {
+ /* If TYPE is a boolean vector whose mode is a scalar integer
+ narrower than one element-width per subpart (an integer
+ bitmask representation, e.g. AVX512-style masks), compare the
+ element pairs one at a time and insert each 1-bit result
+ directly into the bitmask, instead of expanding into a full
+ vector piecewise. */
+ if (VECTOR_BOOLEAN_TYPE_P (type)
+ && SCALAR_INT_MODE_P (TYPE_MODE (type))
+ && known_lt (GET_MODE_BITSIZE (TYPE_MODE (type)),
+ TYPE_VECTOR_SUBPARTS (type)
+ * GET_MODE_BITSIZE (SCALAR_TYPE_MODE
+ (TREE_TYPE (type)))))
+ {
+ tree inner_type = TREE_TYPE (TREE_TYPE (op0));
+ tree part_width = TYPE_SIZE (inner_type);
+ tree index = bitsize_int (0);
+ int nunits = nunits_for_known_piecewise_op (TREE_TYPE (op0));
+ int prec = GET_MODE_PRECISION (SCALAR_TYPE_MODE (type));
+ tree ret_type = build_nonstandard_integer_type (prec, 1);
+ tree ret_inner_type = boolean_type_node;
+ int i;
+ location_t loc = gimple_location (gsi_stmt (*gsi));
+ t = build_zero_cst (ret_type);
+
+ /* Each elementwise comparison must yield a 1-bit value so it
+ can be inserted as a single bit of the mask. */
+ if (TYPE_PRECISION (ret_inner_type) != 1)
+ ret_inner_type = build_nonstandard_integer_type (1, 1);
+ warning_at (loc, OPT_Wvector_operation_performance,
+ "vector operation will be expanded piecewise");
+ for (i = 0; i < nunits;
+ i++, index = int_const_binop (PLUS_EXPR, index, part_width))
+ {
+ tree a = tree_vec_extract (gsi, inner_type, op0, part_width,
+ index);
+ tree b = tree_vec_extract (gsi, inner_type, op1, part_width,
+ index);
+ tree result = gimplify_build2 (gsi, code, ret_inner_type, a, b);
+ t = gimplify_build3 (gsi, BIT_INSERT_EXPR, ret_type, t, result,
+ bitsize_int (i));
+ }
+ t = gimplify_build1 (gsi, VIEW_CONVERT_EXPR, type, t);
+ }
+ else
+ t = expand_vector_piecewise (gsi, do_compare, type,
+ TREE_TYPE (TREE_TYPE (op0)), op0, op1,
+ code);
+ }
else
t = NULL_TREE;
tree a1 = a;
tree a2 = NULL_TREE;
bool a_is_comparison = false;
+ bool a_is_scalar_bitmask = false;
tree b = gimple_assign_rhs2 (stmt);
tree c = gimple_assign_rhs3 (stmt);
vec<constructor_elt, va_gc> *v;
warning_at (loc, OPT_Wvector_operation_performance,
"vector condition will be expanded piecewise");
+ /* If A is a boolean vector held in a scalar integer mode narrower
+ than one element-width per subpart (an integer bitmask), view-
+ convert it to a plain unsigned integer so individual mask bits
+ can be tested in the per-element loop below. */
+ if (!a_is_comparison
+ && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (a))
+ && SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (a)))
+ && known_lt (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (a))),
+ TYPE_VECTOR_SUBPARTS (TREE_TYPE (a))
+ * GET_MODE_BITSIZE (SCALAR_TYPE_MODE
+ (TREE_TYPE (TREE_TYPE (a))))))
+ {
+ a_is_scalar_bitmask = true;
+ int prec = GET_MODE_PRECISION (SCALAR_TYPE_MODE (TREE_TYPE (a)));
+ tree atype = build_nonstandard_integer_type (prec, 1);
+ a = gimplify_build1 (gsi, VIEW_CONVERT_EXPR, atype, a);
+ }
+
int nunits = nunits_for_known_piecewise_op (type);
vec_alloc (v, nunits);
for (i = 0; i < nunits; i++)
comp_width, comp_index);
aa = fold_build2 (TREE_CODE (a), cond_type, aa1, aa2);
}
+ /* Select on bit I of the scalar bitmask: mask the bit out with
+ BIT_AND and compare the result against zero. */
+ else if (a_is_scalar_bitmask)
+ {
+ wide_int w = wi::set_bit_in_zero (i, TYPE_PRECISION (TREE_TYPE (a)));
+ result = gimplify_build2 (gsi, BIT_AND_EXPR, TREE_TYPE (a),
+ a, wide_int_to_tree (TREE_TYPE (a), w));
+ aa = fold_build2 (NE_EXPR, boolean_type_node, result,
+ build_zero_cst (TREE_TYPE (a)));
+ }
else
aa = tree_vec_extract (gsi, cond_type, a, width, index);
result = gimplify_build3 (gsi, COND_EXPR, inner_type, aa, bb, cc);
/* A scalar operation pretending to be a vector one. */
if (VECTOR_BOOLEAN_TYPE_P (type)
&& !VECTOR_MODE_P (TYPE_MODE (type))
- && TYPE_MODE (type) != BLKmode)
+ && TYPE_MODE (type) != BLKmode
+ /* For a comparison, take the early return (leave the statement
+ alone) only when the compared operands are themselves boolean
+ vectors with a scalar, non-BLK mode; otherwise the return is
+ skipped and lowering continues below. */
+ && (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) != tcc_comparison
+ || (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (rhs1))
+ && !VECTOR_MODE_P (TYPE_MODE (TREE_TYPE (rhs1)))
+ && TYPE_MODE (TREE_TYPE (rhs1)) != BLKmode)))
return;
/* If the vector operation is operating on all same vector elements