   return NULL_RTX;
 }
+/* Try to simplify X given that it appears within operand OP of a
+   VEC_MERGE operation whose mask is MASK.  X need not use the same
+   vector mode as the VEC_MERGE, but it must have the same number of
+   elements.
+
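+   For example, if X contains (vec_merge A B MASK) and OP is 0, the
+   nested VEC_MERGE can be replaced by A: the lanes in which B would
+   be chosen are exactly the lanes that the enclosing VEC_MERGE
+   discards from operand 0.
+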
+   Return the simplified X on success, otherwise return NULL_RTX.  */
+
+rtx
+simplify_merge_mask (rtx x, rtx mask, int op)
+{
+  gcc_assert (VECTOR_MODE_P (GET_MODE (x)));
+  poly_uint64 nunits = GET_MODE_NUNITS (GET_MODE (x));
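+  /* If X is itself a VEC_MERGE governed by the same mask, only its
+     operand OP can be live in the lanes that reach operand OP of the
+     enclosing VEC_MERGE, so X reduces to that operand.  Punt if that
+     would discard a side effect in the other operand.  */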
+  if (GET_CODE (x) == VEC_MERGE && rtx_equal_p (XEXP (x, 2), mask))
+    {
+      if (side_effects_p (XEXP (x, 1 - op)))
+        return NULL_RTX;
+
+      return XEXP (x, op);
+    }
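+  /* Otherwise distribute the mask through lane-wise unary, binary and
+     ternary operations, rebuilding the operation if any operand was
+     simplified.  */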
+  if (UNARY_P (x)
+      && VECTOR_MODE_P (GET_MODE (XEXP (x, 0)))
+      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 0))), nunits))
+    {
+      rtx top0 = simplify_merge_mask (XEXP (x, 0), mask, op);
+      if (top0)
+        return simplify_gen_unary (GET_CODE (x), GET_MODE (x), top0,
+                                   GET_MODE (XEXP (x, 0)));
+    }
+  if (BINARY_P (x)
+      && VECTOR_MODE_P (GET_MODE (XEXP (x, 0)))
+      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 0))), nunits)
+      && VECTOR_MODE_P (GET_MODE (XEXP (x, 1)))
+      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 1))), nunits))
+    {
+      rtx top0 = simplify_merge_mask (XEXP (x, 0), mask, op);
+      rtx top1 = simplify_merge_mask (XEXP (x, 1), mask, op);
+      if (top0 || top1)
+        return simplify_gen_binary (GET_CODE (x), GET_MODE (x),
+                                    top0 ? top0 : XEXP (x, 0),
+                                    top1 ? top1 : XEXP (x, 1));
+    }
+  if (GET_RTX_CLASS (GET_CODE (x)) == RTX_TERNARY
+      && VECTOR_MODE_P (GET_MODE (XEXP (x, 0)))
+      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 0))), nunits)
+      && VECTOR_MODE_P (GET_MODE (XEXP (x, 1)))
+      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 1))), nunits)
+      && VECTOR_MODE_P (GET_MODE (XEXP (x, 2)))
+      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 2))), nunits))
+    {
+      rtx top0 = simplify_merge_mask (XEXP (x, 0), mask, op);
+      rtx top1 = simplify_merge_mask (XEXP (x, 1), mask, op);
+      rtx top2 = simplify_merge_mask (XEXP (x, 2), mask, op);
+      if (top0 || top1 || top2)
+        return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
+                                     GET_MODE (XEXP (x, 0)),
+                                     top0 ? top0 : XEXP (x, 0),
+                                     top1 ? top1 : XEXP (x, 1),
+                                     top2 ? top2 : XEXP (x, 2));
+    }
+  return NULL_RTX;
+}
+
\f
 /* Simplify CODE, an operation with result mode MODE and three operands,
    OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
           && !side_effects_p (op2) && !side_effects_p (op1))
         return op0;
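+      /* Try to simplify each operand of the VEC_MERGE given the
+         lanes that the mask selects from it.  */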
+      if (!side_effects_p (op2))
+        {
+          rtx top0 = simplify_merge_mask (op0, op2, 0);
+          rtx top1 = simplify_merge_mask (op1, op2, 1);
+          if (top0 || top1)
+            return simplify_gen_ternary (code, mode, mode,
+                                         top0 ? top0 : op0,
+                                         top1 ? top1 : op1, op2);
+        }
+
       break;
     default:
constm1_rtx));
}
+/* Verify simplify_merge_mask works correctly.  */
+
+static void
+test_vec_merge (machine_mode mode)
+{
+  rtx op0 = make_test_reg (mode);
+  rtx op1 = make_test_reg (mode);
+  rtx op2 = make_test_reg (mode);
+  rtx op3 = make_test_reg (mode);
+  rtx op4 = make_test_reg (mode);
+  rtx op5 = make_test_reg (mode);
+  rtx mask1 = make_test_reg (SImode);
+  rtx mask2 = make_test_reg (SImode);
+  rtx vm1 = gen_rtx_VEC_MERGE (mode, op0, op1, mask1);
+  rtx vm2 = gen_rtx_VEC_MERGE (mode, op2, op3, mask1);
+  rtx vm3 = gen_rtx_VEC_MERGE (mode, op4, op5, mask1);
+
+  /* Simple vec_merge.  */
+  ASSERT_EQ (op0, simplify_merge_mask (vm1, mask1, 0));
+  ASSERT_EQ (op1, simplify_merge_mask (vm1, mask1, 1));
+  ASSERT_EQ (NULL_RTX, simplify_merge_mask (vm1, mask2, 0));
+  ASSERT_EQ (NULL_RTX, simplify_merge_mask (vm1, mask2, 1));
+
+  /* Nested vec_merge.
+     It's tempting to make this simplify right down to opN, but we don't
+     because all the simplify_* functions assume that the operands have
+     already been simplified.  */
+  rtx nvm = gen_rtx_VEC_MERGE (mode, vm1, vm2, mask1);
+  ASSERT_EQ (vm1, simplify_merge_mask (nvm, mask1, 0));
+  ASSERT_EQ (vm2, simplify_merge_mask (nvm, mask1, 1));
+
+  /* Intermediate unary op.  */
+  rtx unop = gen_rtx_NOT (mode, vm1);
+  ASSERT_RTX_EQ (gen_rtx_NOT (mode, op0),
+                 simplify_merge_mask (unop, mask1, 0));
+  ASSERT_RTX_EQ (gen_rtx_NOT (mode, op1),
+                 simplify_merge_mask (unop, mask1, 1));
+
+  /* Intermediate binary op.  */
+  rtx binop = gen_rtx_PLUS (mode, vm1, vm2);
+  ASSERT_RTX_EQ (gen_rtx_PLUS (mode, op0, op2),
+                 simplify_merge_mask (binop, mask1, 0));
+  ASSERT_RTX_EQ (gen_rtx_PLUS (mode, op1, op3),
+                 simplify_merge_mask (binop, mask1, 1));
+
+  /* Intermediate ternary op.  */
+  rtx tenop = gen_rtx_FMA (mode, vm1, vm2, vm3);
+  ASSERT_RTX_EQ (gen_rtx_FMA (mode, op0, op2, op4),
+                 simplify_merge_mask (tenop, mask1, 0));
+  ASSERT_RTX_EQ (gen_rtx_FMA (mode, op1, op3, op5),
+                 simplify_merge_mask (tenop, mask1, 1));
+
+  /* Side effects: the PRE_INC is kept when its own operand is
+     selected, but simplification must fail rather than discard it
+     when the other operand is selected.  */
+  rtx badop0 = gen_rtx_PRE_INC (mode, op0);
+  rtx badvm = gen_rtx_VEC_MERGE (mode, badop0, op1, mask1);
+  ASSERT_EQ (badop0, simplify_merge_mask (badvm, mask1, 0));
+  ASSERT_EQ (NULL_RTX, simplify_merge_mask (badvm, mask1, 1));
+
+  /* Called indirectly via simplify_rtx: the outer VEC_MERGE folds
+     each nested VEC_MERGE in NVM down to a single operand.  */
+  ASSERT_RTX_EQ (gen_rtx_VEC_MERGE (mode, op0, op3, mask1),
+                 simplify_rtx (nvm));
+}
+
 /* Verify some simplifications involving vectors.  */

 static void
         if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
             && maybe_gt (GET_MODE_NUNITS (mode), 2))
           test_vector_ops_series (mode, scalar_reg);
+        test_vec_merge (mode);
       }
 }
}