+2016-10-25 Jakub Jelinek <jakub@redhat.com>
+
+ PR target/78102
+ * optabs.def (vcondeq_optab, vec_cmpeq_optab): New optabs.
+ * optabs.c (expand_vec_cond_expr): For comparison codes
+ EQ_EXPR and NE_EXPR, attempt vcondeq_optab as fallback.
+ (expand_vec_cmp_expr): For comparison codes
+ EQ_EXPR and NE_EXPR, attempt vec_cmpeq_optab as fallback.
+ * optabs-tree.h (expand_vec_cmp_expr_p, expand_vec_cond_expr_p):
+ Add enum tree_code argument.
+ * optabs-query.h (get_vec_cmp_eq_icode, get_vcond_eq_icode): New
+ inline functions.
+ * optabs-tree.c (expand_vec_cmp_expr_p): Add CODE argument. For
+ CODE EQ_EXPR or NE_EXPR, attempt to use vec_cmpeq_optab as
+ fallback.
+ (expand_vec_cond_expr_p): Add CODE argument. For CODE EQ_EXPR or
+ NE_EXPR, attempt to use vcondeq_optab as fallback.
+ * tree-vect-generic.c (expand_vector_comparison,
+ expand_vector_divmod, expand_vector_condition): Adjust
+ expand_vec_cmp_expr_p and expand_vec_cond_expr_p callers.
+ * tree-vect-stmts.c (vectorizable_condition,
+ vectorizable_comparison): Likewise.
+ * tree-vect-patterns.c (vect_recog_mixed_size_cond_pattern,
+ check_bool_pattern, search_type_for_mask_1): Likewise.
+ * expr.c (do_store_flag): Likewise.
+ * doc/md.texi (@code{vec_cmpeq@var{m}@var{n}},
+ @code{vcondeq@var{m}@var{n}}): Document.
+ * config/i386/sse.md (vec_cmpeqv2div2di, vcondeq<VI8F_128:mode>v2di):
+ New expanders.
+
2016-10-25 Jeff Law <law@redhat.com>
* config/v850/v850.c (v850_handle_data_area_attribute): Fix fallthru
DONE;
})
+(define_expand "vec_cmpeqv2div2di"
+  [(set (match_operand:V2DI 0 "register_operand")
+        (match_operator:V2DI 1 ""
+          [(match_operand:V2DI 2 "register_operand")
+           (match_operand:V2DI 3 "vector_operand")]))]
+  "TARGET_SSE4_1"
+{
+  bool ok = ix86_expand_int_vec_cmp (operands);
+  gcc_assert (ok);
+  DONE;
+})
+
(define_expand "vcond<V_512:mode><VF_512:mode>"
[(set (match_operand:V_512 0 "register_operand")
(if_then_else:V_512
DONE;
})
+(define_expand "vcondeq<VI8F_128:mode>v2di"
+  [(set (match_operand:VI8F_128 0 "register_operand")
+        (if_then_else:VI8F_128
+          (match_operator 3 ""
+            [(match_operand:V2DI 4 "vector_operand")
+             (match_operand:V2DI 5 "general_operand")])
+          (match_operand:VI8F_128 1)
+          (match_operand:VI8F_128 2)))]
+  "TARGET_SSE4_1"
+{
+  bool ok = ix86_expand_int_vcond (operands);
+  gcc_assert (ok);
+  DONE;
+})
+
(define_mode_iterator VEC_PERM_AVX2
[V16QI V8HI V4SI V2DI V4SF V2DF
(V32QI "TARGET_AVX2") (V16HI "TARGET_AVX2")
@item @samp{vec_cmpu@var{m}@var{n}}
Similar to @code{vec_cmp@var{m}@var{n}} but perform unsigned vector comparison.
+@cindex @code{vec_cmpeq@var{m}@var{n}} instruction pattern
+@item @samp{vec_cmpeq@var{m}@var{n}}
+Similar to @code{vec_cmp@var{m}@var{n}} but performs only equality and
+non-equality vector comparisons.  If the @code{vec_cmp@var{m}@var{n}}
+or @code{vec_cmpu@var{m}@var{n}} instruction pattern is supported,
+it will be preferred over @code{vec_cmpeq@var{m}@var{n}}, so there is
+no need to define this instruction pattern if the others are supported.
+
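As a point of reference (an illustrative GNU C sketch, not part of the patch; the v2di typedef and the function names are invented here), the source-level comparisons that reach this pattern when vec_cmp/vec_cmpu are not provided look like this, with each lane yielding an all-ones or all-zeros mask element:

typedef long long v2di __attribute__ ((vector_size (16)));

/* Element-wise equality: lane i is -1 if x[i] == y[i], else 0.  */
v2di
eq_mask (v2di x, v2di y)
{
  return x == y;
}

/* Element-wise non-equality: the complementary mask.  */
v2di
ne_mask (v2di x, v2di y)
{
  return x != y;
}

The new gcc.target/i386/pr78102.c test below exercises the same forms and expects pcmpeqq to be emitted for each of them.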
@cindex @code{vcond@var{m}@var{n}} instruction pattern
@item @samp{vcond@var{m}@var{n}}
Output a conditional vector move. Operand 0 is the destination to
Similar to @code{vcond@var{m}@var{n}} but performs unsigned vector
comparison.
+@cindex @code{vcondeq@var{m}@var{n}} instruction pattern
+@item @samp{vcondeq@var{m}@var{n}}
+Similar to @code{vcond@var{m}@var{n}} but performs only equality and
+non-equality vector comparisons.  If the @code{vcond@var{m}@var{n}}
+or @code{vcondu@var{m}@var{n}} instruction pattern is supported,
+it will be preferred over @code{vcondeq@var{m}@var{n}}, so there is
+no need to define this instruction pattern if the others are supported.
+
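To make the selection semantics concrete (again an illustrative sketch rather than patch content; the typedef and the helper name are invented), an EQ-driven vcondeq expansion has to compute op0[i] = (op4[i] == op5[i]) ? op1[i] : op2[i], which in GNU C can be written with the comparison mask and bitwise operations:

typedef long long v2di __attribute__ ((vector_size (16)));

/* Select c[i] where a[i] == b[i], otherwise d[i].  */
v2di
select_eq (v2di a, v2di b, v2di c, v2di d)
{
  v2di m = (a == b);           /* all-ones lanes where equal, zero lanes elsewhere */
  return (m & c) | (~m & d);   /* blend the two source operands under the mask */
}

The actual x86 expansion goes through ix86_expand_int_vcond, which on SSE4.1 can use a blend instruction instead of the three explicit bitwise operations.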
@cindex @code{vcond_mask_@var{m}@var{n}} instruction pattern
@item @samp{vcond_mask_@var{m}@var{n}}
Similar to @code{vcond@var{m}@var{n}} but operand 3 holds a pre-computed
{
tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
if (VECTOR_BOOLEAN_TYPE_P (ops->type)
- && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type))
+ && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
return expand_vec_cmp_expr (ops->type, ifexp, target);
else
{
return convert_optab_handler (tab, vmode, mask_mode);
}
+/* Return insn code for a comparison operator with VMODE
+   resulting in MASK_MODE (only for EQ/NE).  */
+
+static inline enum insn_code
+get_vec_cmp_eq_icode (machine_mode vmode, machine_mode mask_mode)
+{
+ return convert_optab_handler (vec_cmpeq_optab, vmode, mask_mode);
+}
+
/* Return insn code for a conditional operator with a comparison in
mode CMODE, unsigned if UNS is true, resulting in a value of mode VMODE. */
return convert_optab_handler (vcond_mask_optab, vmode, mmode);
}
+/* Return insn code for a conditional operator with a comparison in
+ mode CMODE (only EQ/NE), resulting in a value of mode VMODE. */
+
+static inline enum insn_code
+get_vcond_eq_icode (machine_mode vmode, machine_mode cmode)
+{
+ return convert_optab_handler (vcondeq_optab, vmode, cmode);
+}
+
/* Enumerates the possible extraction_insn operations. */
enum extraction_pattern { EP_insv, EP_extv, EP_extzv };
and resulting mask with MASK_TYPE. */
bool
-expand_vec_cmp_expr_p (tree value_type, tree mask_type)
+expand_vec_cmp_expr_p (tree value_type, tree mask_type, enum tree_code code)
{
- enum insn_code icode = get_vec_cmp_icode (TYPE_MODE (value_type),
- TYPE_MODE (mask_type),
- TYPE_UNSIGNED (value_type));
- return (icode != CODE_FOR_nothing);
+ if (get_vec_cmp_icode (TYPE_MODE (value_type), TYPE_MODE (mask_type),
+ TYPE_UNSIGNED (value_type)) != CODE_FOR_nothing)
+ return true;
+ if ((code == EQ_EXPR || code == NE_EXPR)
+ && (get_vec_cmp_eq_icode (TYPE_MODE (value_type), TYPE_MODE (mask_type))
+ != CODE_FOR_nothing))
+ return true;
+ return false;
}
/* Return TRUE iff, appropriate vector insns are available
with operand vector types in CMP_OP_TYPE. */
bool
-expand_vec_cond_expr_p (tree value_type, tree cmp_op_type)
+expand_vec_cond_expr_p (tree value_type, tree cmp_op_type, enum tree_code code)
{
machine_mode value_mode = TYPE_MODE (value_type);
machine_mode cmp_op_mode = TYPE_MODE (cmp_op_type);
return true;
if (GET_MODE_SIZE (value_mode) != GET_MODE_SIZE (cmp_op_mode)
- || GET_MODE_NUNITS (value_mode) != GET_MODE_NUNITS (cmp_op_mode)
- || get_vcond_icode (TYPE_MODE (value_type), TYPE_MODE (cmp_op_type),
- TYPE_UNSIGNED (cmp_op_type)) == CODE_FOR_nothing)
+ || GET_MODE_NUNITS (value_mode) != GET_MODE_NUNITS (cmp_op_mode))
return false;
+
+ if (get_vcond_icode (TYPE_MODE (value_type), TYPE_MODE (cmp_op_type),
+ TYPE_UNSIGNED (cmp_op_type)) == CODE_FOR_nothing
+ && ((code != EQ_EXPR && code != NE_EXPR)
+ || get_vcond_eq_icode (TYPE_MODE (value_type),
+ TYPE_MODE (cmp_op_type)) == CODE_FOR_nothing))
+ return false;
+
return true;
}
optab optab_for_tree_code (enum tree_code, const_tree, enum optab_subtype);
bool supportable_convert_operation (enum tree_code, tree, tree, tree *,
enum tree_code *);
-bool expand_vec_cmp_expr_p (tree, tree);
-bool expand_vec_cond_expr_p (tree, tree);
+bool expand_vec_cmp_expr_p (tree, tree, enum tree_code);
+bool expand_vec_cond_expr_p (tree, tree, enum tree_code);
void init_tree_optimization_optabs (tree);
#endif
icode = get_vcond_icode (mode, cmp_op_mode, unsignedp);
if (icode == CODE_FOR_nothing)
- return 0;
+ {
+ if (tcode == EQ_EXPR || tcode == NE_EXPR)
+ icode = get_vcond_eq_icode (mode, cmp_op_mode);
+ if (icode == CODE_FOR_nothing)
+ return 0;
+ }
comparison = vector_compare_rtx (tcode, op0a, op0b, unsignedp, icode, 4);
rtx_op1 = expand_normal (op1);
icode = get_vec_cmp_icode (vmode, mask_mode, unsignedp);
if (icode == CODE_FOR_nothing)
- return 0;
+ {
+ if (tcode == EQ_EXPR || tcode == NE_EXPR)
+ icode = get_vec_cmp_eq_icode (vmode, mask_mode);
+ if (icode == CODE_FOR_nothing)
+ return 0;
+ }
comparison = vector_compare_rtx (tcode, op0a, op0b, unsignedp, icode, 2);
create_output_operand (&ops[0], target, mask_mode);
OPTAB_CD(vec_store_lanes_optab, "vec_store_lanes$a$b")
OPTAB_CD(vcond_optab, "vcond$a$b")
OPTAB_CD(vcondu_optab, "vcondu$a$b")
+OPTAB_CD(vcondeq_optab, "vcondeq$a$b")
OPTAB_CD(vcond_mask_optab, "vcond_mask_$a$b")
OPTAB_CD(vec_cmp_optab, "vec_cmp$a$b")
OPTAB_CD(vec_cmpu_optab, "vec_cmpu$a$b")
+OPTAB_CD(vec_cmpeq_optab, "vec_cmpeq$a$b")
OPTAB_CD(maskload_optab, "maskload$a$b")
OPTAB_CD(maskstore_optab, "maskstore$a$b")
+2016-10-25 Jakub Jelinek <jakub@redhat.com>
+
+ PR target/78102
+ * gcc.target/i386/pr78102.c: New test.
+
2016-10-25 Fritz Reese <fritzoreese@gmail.com>
* gfortran.dg/dec_logical_xor_1.f90: New test.
--- /dev/null
+/* PR target/78102 */
+/* { dg-do compile } */
+/* { dg-options "-O2 -mno-sse4.2 -msse4.1" } */
+/* { dg-final { scan-assembler-times "pcmpeqq" 3 } } */
+
+#include <x86intrin.h>
+
+__m128i
+foo (const __m128i x, const __m128i y)
+{
+ return _mm_cmpeq_epi64 (x, y);
+}
+
+__v2di
+bar (const __v2di x, const __v2di y)
+{
+ return x == y;
+}
+
+__v2di
+baz (const __v2di x, const __v2di y)
+{
+ return x != y;
+}
tree op1, enum tree_code code)
{
tree t;
- if (!expand_vec_cmp_expr_p (TREE_TYPE (op0), type)
- && !expand_vec_cond_expr_p (type, TREE_TYPE (op0)))
+ if (!expand_vec_cmp_expr_p (TREE_TYPE (op0), type, code)
+ && !expand_vec_cond_expr_p (type, TREE_TYPE (op0), code))
t = expand_vector_piecewise (gsi, do_compare, type,
TREE_TYPE (TREE_TYPE (op0)), op0, op1, code);
else
}
}
if (addend == NULL_TREE
- && expand_vec_cond_expr_p (type, type))
+ && expand_vec_cond_expr_p (type, type, LT_EXPR))
{
tree zero, cst, cond, mask_type;
gimple *stmt;
comp_inner_type = TREE_TYPE (TREE_TYPE (a1));
}
- if (expand_vec_cond_expr_p (type, TREE_TYPE (a1)))
+ if (expand_vec_cond_expr_p (type, TREE_TYPE (a1), TREE_CODE (a)))
return;
/* TODO: try and find a smaller vector type. */
if (vectype == NULL_TREE)
return NULL;
- if (expand_vec_cond_expr_p (vectype, comp_vectype))
+ if (expand_vec_cond_expr_p (vectype, comp_vectype, TREE_CODE (cond_expr)))
return NULL;
if (itype == NULL_TREE)
if (vecitype == NULL_TREE)
return NULL;
- if (!expand_vec_cond_expr_p (vecitype, comp_vectype))
+ if (!expand_vec_cond_expr_p (vecitype, comp_vectype, TREE_CODE (cond_expr)))
return NULL;
if (GET_MODE_BITSIZE (TYPE_MODE (type)) > cmp_mode_size)
tree mask_type = get_mask_type_for_scalar_type (TREE_TYPE (rhs1));
if (mask_type
- && expand_vec_cmp_expr_p (comp_vectype, mask_type))
+ && expand_vec_cmp_expr_p (comp_vectype, mask_type, rhs_code))
return false;
if (TREE_CODE (TREE_TYPE (rhs1)) != INTEGER_TYPE)
}
else
vecitype = comp_vectype;
- if (! expand_vec_cond_expr_p (vecitype, comp_vectype))
+ if (! expand_vec_cond_expr_p (vecitype, comp_vectype, rhs_code))
return false;
}
else
mask_type = get_mask_type_for_scalar_type (TREE_TYPE (rhs1));
if (!mask_type
- || !expand_vec_cmp_expr_p (comp_vectype, mask_type))
+ || !expand_vec_cmp_expr_p (comp_vectype, mask_type, rhs_code))
{
res = NULL_TREE;
break;
if (!vec_stmt)
{
STMT_VINFO_TYPE (stmt_info) = condition_vec_info_type;
- return expand_vec_cond_expr_p (vectype, comp_vectype);
+ return expand_vec_cond_expr_p (vectype, comp_vectype,
+ TREE_CODE (cond_expr));
}
/* Transform. */
vect_model_simple_cost (stmt_info, ncopies * (1 + (bitop2 != NOP_EXPR)),
dts, NULL, NULL);
if (bitop1 == NOP_EXPR)
- return expand_vec_cmp_expr_p (vectype, mask_type);
+ return expand_vec_cmp_expr_p (vectype, mask_type, code);
else
{
machine_mode mode = TYPE_MODE (vectype);