+2014-11-17 Zhenqiang Chen <zhenqiang.chen@linaro.org>
+
+ * config/aarch64/aarch64-protos.h (aarch64_ccmp_mode_to_code): New.
+ * config/aarch64/aarch64.c (aarch64_nzcv_codes): New data.
+ (aarch64_ccmp_mode_to_code): New.
+ (aarch64_print_operand): Output nzcv.
+ * config/aarch64/aarch64.md (cbranchcc4, *ccmp_and, *ccmp_ior, cstorecc4):
+ New patterns.
+ (cstore<mode>4): Handle ccmp_cc_register.
+ * config/aarch64/predicates.md (const0_operand): New.
+
2014-11-17 Zhenqiang Chen <zhenqiang.chen@linaro.org>
* config/aarch64/aarch64-modes.def: Define ccmp CC mode.
extern bool
aarch64_expand_vec_perm_const (rtx target, rtx op0, rtx op1, rtx sel);
void aarch64_atomic_assign_expand_fenv (tree *, tree *, tree *);
+int aarch64_ccmp_mode_to_code (enum machine_mode mode);
#endif /* GCC_AARCH64_PROTOS_H */
return count;
}
+/* N Z C V.  Bit positions of the condition flags within the 4-bit
+   #nzcv immediate of the ccmp/ccmn instructions (V is bit 0, N is
+   bit 3).  */
+#define AARCH64_CC_V 1
+#define AARCH64_CC_C (1 << 1)
+#define AARCH64_CC_Z (1 << 2)
+#define AARCH64_CC_N (1 << 3)
+
+/* N Z C V flags for ccmp.  Indexed by AARCH64_COND_CODE.  Column [0]
+   is printed by the 'K' operand modifier (used by the *ccmp_ior
+   pattern) and column [1] by 'k' (used by *ccmp_and).
+   NOTE(review): the original comment said the first code was for the
+   AND op and the second for IOR, but the patterns use %k (column [1])
+   for AND and %K (column [0]) for IOR.  The row values agree with the
+   patterns, not the comment (e.g. EQ: a failed ccmp in an AND chain
+   must leave Z == 0, i.e. nzcv 0 = column [1]; a short-circuited ccmp
+   in an IOR chain must set Z, i.e. column [0]), so the old comment
+   appears to have had the columns swapped -- confirm with the ccmp
+   expansion code.  */
+static const int aarch64_nzcv_codes[][2] =
+{
+ {AARCH64_CC_Z, 0}, /* EQ, Z == 1. */
+ {0, AARCH64_CC_Z}, /* NE, Z == 0. */
+ {AARCH64_CC_C, 0}, /* CS, C == 1. */
+ {0, AARCH64_CC_C}, /* CC, C == 0. */
+ {AARCH64_CC_N, 0}, /* MI, N == 1. */
+ {0, AARCH64_CC_N}, /* PL, N == 0. */
+ {AARCH64_CC_V, 0}, /* VS, V == 1. */
+ {0, AARCH64_CC_V}, /* VC, V == 0. */
+ {AARCH64_CC_C, 0}, /* HI, C == 1 && Z == 0. */
+ {0, AARCH64_CC_C}, /* LS, !(C == 1 && Z == 0). */
+ {0, AARCH64_CC_V}, /* GE, N == V. */
+ {AARCH64_CC_V, 0}, /* LT, N != V. */
+ {0, AARCH64_CC_Z}, /* GT, Z == 0 && N == V. */
+ {AARCH64_CC_Z, 0}, /* LE, !(Z == 0 && N == V). */
+ {0, 0}, /* AL, Any. */
+ {0, 0}, /* NV, Any. */
+};
+
+/* Map a ccmp CC mode (the CC_Dxx modes added in aarch64-modes.def)
+   to the rtx comparison code it encodes.  Used by the *ccmp_and /
+   *ccmp_ior insn conditions to check that the destination CC mode
+   matches the comparison being combined.  Aborts on any mode that is
+   not one of the ccmp CC modes.  */
+int
+aarch64_ccmp_mode_to_code (enum machine_mode mode)
+{
+ switch (mode)
+ {
+ case CC_DNEmode:
+ return NE;
+
+ case CC_DEQmode:
+ return EQ;
+
+ case CC_DLEmode:
+ return LE;
+
+ case CC_DGTmode:
+ return GT;
+
+ case CC_DLTmode:
+ return LT;
+
+ case CC_DGEmode:
+ return GE;
+
+ case CC_DLEUmode:
+ return LEU;
+
+ case CC_DGTUmode:
+ return GTU;
+
+ case CC_DLTUmode:
+ return LTU;
+
+ case CC_DGEUmode:
+ return GEU;
+
+ default:
+ gcc_unreachable ();
+ }
+}
+
+
void
aarch64_print_operand (FILE *f, rtx x, char code)
{
output_addr_const (asm_out_file, x);
break;
+ case 'K':
+ {
+ int cond_code;
+ /* Print nzcv. */
+
+ if (!COMPARISON_P (x))
+ {
+ output_operand_lossage ("invalid operand for '%%%c'", code);
+ return;
+ }
+
+ cond_code = aarch64_get_condition_code_1 (CCmode, GET_CODE (x));
+ gcc_assert (cond_code >= 0);
+ asm_fprintf (f, "%d", aarch64_nzcv_codes[cond_code][0]);
+ }
+ break;
+
+ case 'k':
+ {
+ int cond_code;
+ /* Print nzcv. */
+
+ if (!COMPARISON_P (x))
+ {
+ output_operand_lossage ("invalid operand for '%%%c'", code);
+ return;
+ }
+
+ cond_code = aarch64_get_condition_code_1 (CCmode, GET_CODE (x));
+ gcc_assert (cond_code >= 0);
+ asm_fprintf (f, "%d", aarch64_nzcv_codes[cond_code][1]);
+ }
+ break;
+
default:
output_operand_lossage ("invalid operand prefix '%%%c'", code);
return;
"
)
+;; Branch on a condition already held in the flags register: operator 0
+;; compares CC register operand 1 against (const_int 0) and the branch
+;; targets label operand 3.  No code needs generating beyond the
+;; pattern itself, hence the empty preparation statement.
+(define_expand "cbranchcc4"
+ [(set (pc) (if_then_else
+ (match_operator 0 "aarch64_comparison_operator"
+ [(match_operand 1 "cc_register" "")
+ (match_operand 2 "const0_operand")])
+ (label_ref (match_operand 3 "" ""))
+ (pc)))]
+ ""
+ "")
+
+;; Conditional compare combining a new comparison (operator 5 on
+;; operands 2/3) with flags already in CC register operand 0, for an
+;; AND chain.  Operator 4 gives the condition (%m4) under which the
+;; compare executes; the insn condition requires the destination CC
+;; mode (operand 1) to encode the same comparison as operator 5.
+;; %k5 emits the nzcv immediate applied when the condition fails; the
+;; third alternative uses ccmn with a negated immediate (Usn).
+(define_insn "*ccmp_and"
+ [(set (match_operand 1 "ccmp_cc_register" "")
+ (compare
+ (and:SI
+ (match_operator 4 "aarch64_comparison_operator"
+ [(match_operand 0 "ccmp_cc_register" "")
+ (const_int 0)])
+ (match_operator 5 "aarch64_comparison_operator"
+ [(match_operand:GPI 2 "register_operand" "r,r,r")
+ (match_operand:GPI 3 "aarch64_ccmp_operand" "r,Uss,Usn")]))
+ (const_int 0)))]
+ "aarch64_ccmp_mode_to_code (GET_MODE (operands[1])) == GET_CODE (operands[5])"
+ "@
+ ccmp\\t%<w>2, %<w>3, %k5, %m4
+ ccmp\\t%<w>2, %<w>3, %k5, %m4
+ ccmn\\t%<w>2, #%n3, %k5, %m4"
+ [(set_attr "type" "alus_sreg,alus_imm,alus_imm")]
+)
+
+;; As *ccmp_and, but combining the comparisons with IOR: uses the
+;; uppercase %K5/%M4 modifiers so the other nzcv column and (presumably
+;; the inverse) condition are printed -- confirm %M against the 'M'
+;; case in aarch64_print_operand.
+(define_insn "*ccmp_ior"
+ [(set (match_operand 1 "ccmp_cc_register" "")
+ (compare
+ (ior:SI
+ (match_operator 4 "aarch64_comparison_operator"
+ [(match_operand 0 "ccmp_cc_register" "")
+ (const_int 0)])
+ (match_operator 5 "aarch64_comparison_operator"
+ [(match_operand:GPI 2 "register_operand" "r,r,r")
+ (match_operand:GPI 3 "aarch64_ccmp_operand" "r,Uss,Usn")]))
+ (const_int 0)))]
+ "aarch64_ccmp_mode_to_code (GET_MODE (operands[1])) == GET_CODE (operands[5])"
+ "@
+ ccmp\\t%<w>2, %<w>3, %K5, %M4
+ ccmp\\t%<w>2, %<w>3, %K5, %M4
+ ccmn\\t%<w>2, #%n3, %K5, %M4"
+ [(set_attr "type" "alus_sreg,alus_imm,alus_imm")]
+)
+
(define_insn "*condjump"
[(set (pc) (if_then_else (match_operator 0 "aarch64_comparison_operator"
[(match_operand 1 "cc_register" "") (const_int 0)])
"
)
+;; Store the boolean result of a comparison whose flags are already in
+;; a ccmp CC register: operator 1 tests CC register operand 2 against
+;; zero and the result is SET directly into operand 0.  The cset insn
+;; is matched by the existing aarch64_cstore pattern.
+(define_expand "cstorecc4"
+ [(set (match_operand:SI 0 "register_operand")
+ (match_operator 1 "aarch64_comparison_operator"
+ [(match_operand 2 "ccmp_cc_register")
+ (match_operand 3 "const0_operand")]))]
+ ""
+"{
+ emit_insn (gen_rtx_SET (SImode, operands[0], operands[1]));
+ DONE;
+}")
+
+
(define_expand "cstore<mode>4"
[(set (match_operand:SI 0 "register_operand" "")
(match_operator:SI 1 "aarch64_comparison_operator"
(match_operand:ALLI 3 "register_operand" "")))]
""
{
- rtx ccreg;
enum rtx_code code = GET_CODE (operands[1]);
if (code == UNEQ || code == LTGT)
FAIL;
- ccreg = aarch64_gen_compare_reg (code, XEXP (operands[1], 0),
- XEXP (operands[1], 1));
- operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
+ if (!ccmp_cc_register (XEXP (operands[1], 0),
+ GET_MODE (XEXP (operands[1], 0))))
+ {
+ rtx ccreg;
+ ccreg = aarch64_gen_compare_reg (code, XEXP (operands[1], 0),
+ XEXP (operands[1], 1));
+ operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
+ }
}
)
(ior (match_code "symbol_ref")
(match_operand 0 "register_operand")))
+;; Return true if OP is constant zero (a const_int or const_double
+;; equal to CONST0_RTX of its mode).
+(define_predicate "const0_operand"
+ (and (match_code "const_int, const_double")
+ (match_test "op == CONST0_RTX (mode)")))
+
(define_predicate "aarch64_ccmp_immediate"
(and (match_code "const_int")
(match_test "IN_RANGE (INTVAL (op), -31, 31)")))