return gen_vec_series (mode, new_base, new_step);
}
+/* Subroutine of simplify_binary_operation_1. Un-distribute a binary
+ operation CODE with result mode MODE, operating on OP0 and OP1.
+ e.g. simplify (xor (and A C) (and B C)) to (and (xor A B) C).
+ Returns NULL_RTX if no simplification is possible. */
+
+static rtx
+simplify_distributive_operation (enum rtx_code code, machine_mode mode,
+ rtx op0, rtx op1)
+{
+ enum rtx_code op = GET_CODE (op0);
+ gcc_assert (GET_CODE (op1) == op);
+
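+ /* If the second operands of OP0 and OP1 match, distribute CODE over
+    the first operands: (op A C) code (op B C) --> (op (A code B) C).  */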
+ if (rtx_equal_p (XEXP (op0, 1), XEXP (op1, 1))
+ && ! side_effects_p (XEXP (op0, 1)))
+ return simplify_gen_binary (op, mode,
+ simplify_gen_binary (code, mode,
+ XEXP (op0, 0),
+ XEXP (op1, 0)),
+ XEXP (op0, 1));
+
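+ /* For a commutative OP, the shared operand may sit in either position
+    of either input, so also try the remaining three pairings.  */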
+ if (GET_RTX_CLASS (op) == RTX_COMM_ARITH)
+ {
+ if (rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
+ && ! side_effects_p (XEXP (op0, 0)))
+ return simplify_gen_binary (op, mode,
+ simplify_gen_binary (code, mode,
+ XEXP (op0, 1),
+ XEXP (op1, 1)),
+ XEXP (op0, 0));
+ if (rtx_equal_p (XEXP (op0, 0), XEXP (op1, 1))
+ && ! side_effects_p (XEXP (op0, 0)))
+ return simplify_gen_binary (op, mode,
+ simplify_gen_binary (code, mode,
+ XEXP (op0, 1),
+ XEXP (op1, 0)),
+ XEXP (op0, 0));
+ if (rtx_equal_p (XEXP (op0, 1), XEXP (op1, 0))
+ && ! side_effects_p (XEXP (op0, 1)))
+ return simplify_gen_binary (op, mode,
+ simplify_gen_binary (code, mode,
+ XEXP (op0, 0),
+ XEXP (op1, 1)),
+ XEXP (op0, 1));
+ }
+
+ return NULL_RTX;
+}
+
/* Subroutine of simplify_binary_operation. Simplify a binary operation
CODE with result mode MODE, operating on OP0 and OP1. If OP0 and/or
OP1 are constant pool references, TRUEOP0 and TRUEOP1 represent the
}
}
+ /* Convert (ior (and A C) (and B C)) into (and (ior A B) C). */
+ if (GET_CODE (op0) == GET_CODE (op1)
+ && (GET_CODE (op0) == AND
+ || GET_CODE (op0) == IOR
+ || GET_CODE (op0) == LSHIFTRT
+ || GET_CODE (op0) == ASHIFTRT
+ || GET_CODE (op0) == ASHIFT
+ || GET_CODE (op0) == ROTATE
+ || GET_CODE (op0) == ROTATERT))
+ {
+ tem = simplify_distributive_operation (code, mode, op0, op1);
+ if (tem)
+ return tem;
+ }
+
tem = simplify_byte_swapping_operation (code, mode, op0, op1);
if (tem)
return tem;
&& (reversed = reversed_comparison (op0, int_mode)))
return reversed;
+ /* Convert (xor (and A C) (and B C)) into (and (xor A B) C). */
+ if (GET_CODE (op0) == GET_CODE (op1)
+ && (GET_CODE (op0) == AND
+ || GET_CODE (op0) == XOR
+ || GET_CODE (op0) == LSHIFTRT
+ || GET_CODE (op0) == ASHIFTRT
+ || GET_CODE (op0) == ASHIFT
+ || GET_CODE (op0) == ROTATE
+ || GET_CODE (op0) == ROTATERT))
+ {
+ tem = simplify_distributive_operation (code, mode, op0, op1);
+ if (tem)
+ return tem;
+ }
+
tem = simplify_byte_swapping_operation (code, mode, op0, op1);
if (tem)
return tem;
&& rtx_equal_p (op1, XEXP (XEXP (op0, 1), 0)))
return simplify_gen_binary (AND, mode, op1, XEXP (op0, 0));
+ /* Convert (and (ior A C) (ior B C)) into (ior (and A B) C). */
+ if (GET_CODE (op0) == GET_CODE (op1)
+ && (GET_CODE (op0) == AND
+ || GET_CODE (op0) == IOR
+ || GET_CODE (op0) == LSHIFTRT
+ || GET_CODE (op0) == ASHIFTRT
+ || GET_CODE (op0) == ASHIFT
+ || GET_CODE (op0) == ROTATE
+ || GET_CODE (op0) == ROTATERT))
+ {
+ tem = simplify_distributive_operation (code, mode, op0, op1);
+ if (tem)
+ return tem;
+ }
+
tem = simplify_byte_swapping_operation (code, mode, op0, op1);
if (tem)
return tem;
return gen_rtx_REG (mode, test_reg_num++);
}
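+
+/* Verify some simplifications of integer operations in scalar mode MODE,
+   including the distributive simplifications added above.  */
+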
+static void
+test_scalar_int_ops (machine_mode mode)
+{
+ rtx op0 = make_test_reg (mode);
+ rtx op1 = make_test_reg (mode);
+ rtx six = GEN_INT (6);
+
+ rtx neg_op0 = simplify_gen_unary (NEG, mode, op0, mode);
+ rtx not_op0 = simplify_gen_unary (NOT, mode, op0, mode);
+ rtx bswap_op0 = simplify_gen_unary (BSWAP, mode, op0, mode);
+
+ rtx and_op0_op1 = simplify_gen_binary (AND, mode, op0, op1);
+ rtx ior_op0_op1 = simplify_gen_binary (IOR, mode, op0, op1);
+ rtx xor_op0_op1 = simplify_gen_binary (XOR, mode, op0, op1);
+
+ rtx and_op0_6 = simplify_gen_binary (AND, mode, op0, six);
+ rtx and_op1_6 = simplify_gen_binary (AND, mode, op1, six);
+
+ /* Test some binary identities. */
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (PLUS, mode, op0, const0_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (PLUS, mode, const0_rtx, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (MINUS, mode, op0, const0_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (MULT, mode, op0, const1_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (MULT, mode, const1_rtx, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (DIV, mode, op0, const1_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (AND, mode, op0, constm1_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (AND, mode, constm1_rtx, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (IOR, mode, op0, const0_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (IOR, mode, const0_rtx, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (XOR, mode, op0, const0_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (XOR, mode, const0_rtx, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (ASHIFT, mode, op0, const0_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (ROTATE, mode, op0, const0_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (ASHIFTRT, mode, op0, const0_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (LSHIFTRT, mode, op0, const0_rtx));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (ROTATERT, mode, op0, const0_rtx));
+
+ /* Test some self-inverse operations. */
+ ASSERT_RTX_EQ (op0, simplify_gen_unary (NEG, mode, neg_op0, mode));
+ ASSERT_RTX_EQ (op0, simplify_gen_unary (NOT, mode, not_op0, mode));
+ ASSERT_RTX_EQ (op0, simplify_gen_unary (BSWAP, mode, bswap_op0, mode));
+
+ /* Test some reflexive operations. */
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (AND, mode, op0, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (IOR, mode, op0, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (SMIN, mode, op0, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (SMAX, mode, op0, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (UMIN, mode, op0, op0));
+ ASSERT_RTX_EQ (op0, simplify_gen_binary (UMAX, mode, op0, op0));
+
+ ASSERT_RTX_EQ (const0_rtx, simplify_gen_binary (MINUS, mode, op0, op0));
+ ASSERT_RTX_EQ (const0_rtx, simplify_gen_binary (XOR, mode, op0, op0));
+
+ /* Test simplify_distributive_operation. */
+ ASSERT_RTX_EQ (simplify_gen_binary (AND, mode, xor_op0_op1, six),
+ simplify_gen_binary (XOR, mode, and_op0_6, and_op1_6));
+ ASSERT_RTX_EQ (simplify_gen_binary (AND, mode, ior_op0_op1, six),
+ simplify_gen_binary (IOR, mode, and_op0_6, and_op1_6));
+ ASSERT_RTX_EQ (simplify_gen_binary (AND, mode, and_op0_op1, six),
+ simplify_gen_binary (AND, mode, and_op0_6, and_op1_6));
+}
+
+/* Verify some simplifications involving scalar expressions. */
+
+static void
+test_scalar_ops ()
+{
+ for (unsigned int i = 0; i < NUM_MACHINE_MODES; ++i)
+ {
+ machine_mode mode = (machine_mode) i;
+ if (SCALAR_INT_MODE_P (mode) && mode != BImode)
+ test_scalar_int_ops (mode);
+ }
+}
+
/* Test vector simplifications involving VEC_DUPLICATE in which the
operands and result have vector mode MODE. SCALAR_REG is a pseudo
register that holds one element of MODE. */
void
simplify_rtx_c_tests ()
{
+ test_scalar_ops ();
test_vector_ops ();
simplify_const_poly_int_tests<NUM_POLY_INT_COEFFS>::run ();
}