   DONE;
 })
+(define_expand "cstore<mode>4_signed_imm"
+  [(use (match_operator 1 "signed_comparison_operator"
+         [(match_operand:GPR 2 "gpc_reg_operand")
+          (match_operand:GPR 3 "immediate_operand")]))
+   (clobber (match_operand:GPR 0 "register_operand"))]
+  ""
+{
+  bool invert = false;
+
+  enum rtx_code cond_code = GET_CODE (operands[1]);
+
+  rtx op0 = operands[0];
+  rtx op1 = operands[2];
+  HOST_WIDE_INT val = INTVAL (operands[3]);
+
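+  /* Handle GE and GT by computing the reversed condition (LT resp. LE)
+     and inverting the result at the end.  */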
+  if (cond_code == GE || cond_code == GT)
+    {
+      cond_code = reverse_condition (cond_code);
+      invert = true;
+    }
+
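+  /* x <= val is the same as x < val + 1.  */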
+  if (cond_code == LE)
+    val++;
+
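+  /* x < val is the sign bit of x - val, unless that subtraction wraps.
+     For val < 0 it can only wrap when x is non-negative (and then the
+     answer is "false"), so ANDing in x clears the bogus sign bit; for
+     val >= 0 it can only wrap when x is negative (and then the answer
+     is "true"), so ORing in x sets the sign bit.  */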
+  rtx tmp = gen_reg_rtx (<MODE>mode);
+  emit_insn (gen_add<mode>3 (tmp, op1, GEN_INT (-val)));
+  rtx x = gen_reg_rtx (<MODE>mode);
+  if (val < 0)
+    emit_insn (gen_and<mode>3 (x, op1, tmp));
+  else
+    emit_insn (gen_ior<mode>3 (x, op1, tmp));
+
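+  /* For the reversed conditions (GE, GT), flip the sign bit before we
+     extract it below.  */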
+  if (invert)
+    {
+      rtx tmp = gen_reg_rtx (<MODE>mode);
+      emit_insn (gen_one_cmpl<mode>2 (tmp, x));
+      x = tmp;
+    }
+
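+  /* The answer is in the sign bit; shift it down to bit 0.  */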
+  int sh = GET_MODE_BITSIZE (<MODE>mode) - 1;
+  emit_insn (gen_lshr<mode>3 (op0, x, GEN_INT (sh)));
+
+  DONE;
+})
+
+(define_expand "cstore<mode>4_unsigned_imm"
+  [(use (match_operator 1 "unsigned_comparison_operator"
+         [(match_operand:GPR 2 "gpc_reg_operand")
+          (match_operand:GPR 3 "immediate_operand")]))
+   (clobber (match_operand:GPR 0 "register_operand"))]
+  ""
+{
+  bool invert = false;
+
+  enum rtx_code cond_code = GET_CODE (operands[1]);
+
+  rtx op0 = operands[0];
+  rtx op1 = operands[2];
+  HOST_WIDE_INT val = INTVAL (operands[3]);
+
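+  /* As in the signed case, handle GEU and GTU by computing LTU resp. LEU
+     and inverting the result at the end.  */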
+  if (cond_code == GEU || cond_code == GTU)
+    {
+      cond_code = reverse_condition (cond_code);
+      invert = true;
+    }
+
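+  /* x <=u val is the same as x <u val + 1.  */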
+  if (cond_code == LEU)
+    val++;
+
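+  /* x <u val is the sign bit of (x - val) & ~x if the sign bit of val is
+     clear, and of (x - val) | ~x if it is set: when x and val have equal
+     sign bits the subtraction cannot wrap and its sign bit is the answer;
+     when they differ, the sign bit of ~x is the answer, and the AND/OR
+     choice lets it win.  */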
+  rtx tmp = gen_reg_rtx (<MODE>mode);
+  rtx tmp2 = gen_reg_rtx (<MODE>mode);
+  emit_insn (gen_add<mode>3 (tmp, op1, GEN_INT (-val)));
+  emit_insn (gen_one_cmpl<mode>2 (tmp2, op1));
+  rtx x = gen_reg_rtx (<MODE>mode);
+  if (val < 0)
+    emit_insn (gen_ior<mode>3 (x, tmp, tmp2));
+  else
+    emit_insn (gen_and<mode>3 (x, tmp, tmp2));
+
+  if (invert)
+    {
+      rtx tmp = gen_reg_rtx (<MODE>mode);
+      emit_insn (gen_one_cmpl<mode>2 (tmp, x));
+      x = tmp;
+    }
+
+  int sh = GET_MODE_BITSIZE (<MODE>mode) - 1;
+  emit_insn (gen_lshr<mode>3 (op0, x, GEN_INT (sh)));
+
+  DONE;
+})
+
 (define_expand "cstore<mode>4"
   [(use (match_operator 1 "rs6000_cbranch_operator"
-         [(match_operand:GPR 2 "gpc_reg_operand" "")
-          (match_operand:GPR 3 "reg_or_short_operand" "")]))
+         [(match_operand:GPR 2 "gpc_reg_operand")
+          (match_operand:GPR 3 "reg_or_short_operand")]))
    (clobber (match_operand:GPR 0 "register_operand"))]
   ""
 {
     emit_insn (gen_cstore<mode>4_unsigned (operands[0], operands[1],
                                            operands[2], operands[3]));
-  /* The generic code knows tricks to compute signed comparisons against
-     zero.  Let it do its thing.  */
-  else if (operands[3] == const0_rtx
-           && signed_comparison_operator (operands[1], VOIDmode))
-    FAIL;
+  /* For signed comparisons against a constant, we can do some simple
+     bit-twiddling.  */
+  else if (signed_comparison_operator (operands[1], VOIDmode)
+           && CONST_INT_P (operands[3]))
+    emit_insn (gen_cstore<mode>4_signed_imm (operands[0], operands[1],
+                                             operands[2], operands[3]));
+
+  /* And similarly for unsigned comparisons.  */
+  else if (unsigned_comparison_operator (operands[1], VOIDmode)
+           && CONST_INT_P (operands[3]))
+    emit_insn (gen_cstore<mode>4_unsigned_imm (operands[0], operands[1],
+                                               operands[2], operands[3]));
   /* Everything else, use the mfcr brute force.  */
   else