+2020-05-11 Alex Coplan <alex.coplan@arm.com>
+
+ * config/aarch64/aarch64.c (aarch64_if_then_else_costs): Add case
+ to correctly calculate cost for new pattern (*csinv3_uxtw_insn3).
+ * config/aarch64/aarch64.md (*csinv3_uxtw_insn1): New.
+ (*csinv3_uxtw_insn2): New.
+ (*csinv3_uxtw_insn3): New.
+ * config/aarch64/iterators.md (neg_not_cs): New.
+
2020-05-11 Uroš Bizjak <ubizjak@gmail.com>
PR target/95046
op1 = XEXP (op1, 0);
op2 = XEXP (op2, 0);
}
+ else if (GET_CODE (op1) == ZERO_EXTEND && op2 == const0_rtx)
+ {
+ inner = XEXP (op1, 0);
+ if (GET_CODE (inner) == NEG || GET_CODE (inner) == NOT)
+ /* Conditional NEG/NOT of a zero-extended SImode value against
+    const 0 (*csinv3_uxtw_insn3): cost only the register under the
+    NEG/NOT, since the negate/invert is folded into the csel and the
+    zero_extend is free (W-register writes clear bits 63:32).  */
+ op1 = XEXP (inner, 0);
+ }
*cost += rtx_cost (op1, VOIDmode, IF_THEN_ELSE, 1, speed);
*cost += rtx_cost (op2, VOIDmode, IF_THEN_ELSE, 2, speed);
[(set_attr "type" "csel")]
)
+;; DImode select between a zero-extended SImode register and the
+;; zero-extended negate/complement of an SImode register.  A single
+;; W-register CSNEG/CSINV suffices: writing a W register implicitly
+;; zeroes bits 63:32, so no separate extend instruction is needed.
+(define_insn "*csinv3_uxtw_insn1"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (if_then_else:DI
+ (match_operand 1 "aarch64_comparison_operation" "")
+ (zero_extend:DI
+ (match_operand:SI 2 "register_operand" "r"))
+ (zero_extend:DI
+ (NEG_NOT:SI (match_operand:SI 3 "register_operand" "r")))))]
+ ""
+ "cs<neg_not_cs>\\t%w0, %w2, %w3, %m1"
+ [(set_attr "type" "csel")]
+)
+
+;; As *csinv3_uxtw_insn1, but with the NEG_NOT on the true arm of the
+;; if_then_else.  The same CSNEG/CSINV is emitted with the operands
+;; swapped and the condition inverted (%M1 prints the inverse of %m1).
+(define_insn "*csinv3_uxtw_insn2"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (if_then_else:DI
+ (match_operand 1 "aarch64_comparison_operation" "")
+ (zero_extend:DI
+ (NEG_NOT:SI (match_operand:SI 2 "register_operand" "r")))
+ (zero_extend:DI
+ (match_operand:SI 3 "register_operand" "r"))))]
+ ""
+ "cs<neg_not_cs>\\t%w0, %w3, %w2, %M1"
+ [(set_attr "type" "csel")]
+)
+
+;; As *csinv3_uxtw_insn2, but selecting against const 0 instead of a
+;; second register: the zero arm is supplied by WZR.  Matched by the
+;; cost case added to aarch64_if_then_else_costs in this patch.
+(define_insn "*csinv3_uxtw_insn3"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (if_then_else:DI
+ (match_operand 1 "aarch64_comparison_operation" "")
+ (zero_extend:DI
+ (NEG_NOT:SI (match_operand:SI 2 "register_operand" "r")))
+ (const_int 0)))]
+ ""
+ "cs<neg_not_cs>\\t%w0, wzr, %w2, %M1"
+ [(set_attr "type" "csel")]
+)
+
;; If X can be loaded by a single CNT[BHWD] instruction,
;;
;; A = UMAX (B, X)
;; Operation names for negate and bitwise complement.
(define_code_attr neg_not_op [(neg "neg") (not "not")])
+;; CSEL-family mnemonic suffix for a conditional negate or bitwise
+;; complement: neg -> csneg, not -> csinv.
+(define_code_attr neg_not_cs [(neg "neg") (not "inv")])
+
;; Similar, but when the second operand is inverted.
(define_code_attr nlogical [(and "bic") (ior "orn") (xor "eon")])
+2020-05-11 Alex Coplan <alex.coplan@arm.com>
+
+ * gcc.target/aarch64/csinv-neg.c: New test.
+
2020-05-11 Kelvin Nilsen <kelvin@gcc.gnu.org>
* gcc.target/powerpc/dg-future-0.c: New.
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2" } */
+
+/* Check that a 32-bit conditional negate/invert whose result is
+   zero-extended to 64 bits is emitted as a single CSNEG/CSINV --
+   W-register forms already clear the upper 32 bits, so no separate
+   extend or mvn/neg instruction should appear.  The `**' comment
+   blocks below are the expected bodies consumed by
+   check-function-bodies and must match the generated assembly.  */
+
+/*
+** inv1:
+** cmp w0, 0
+** csinv w0, w1, w2, ne
+** ret
+*/
+unsigned long long
+inv1(unsigned a, unsigned b, unsigned c)
+{
+ return a ? b : ~c;
+}
+
+/*
+** inv1_local:
+** cmp w0, 0
+** csinv w0, w1, w2, ne
+** ret
+*/
+unsigned long long
+inv1_local(unsigned a, unsigned b, unsigned c)
+{
+ unsigned d = ~c;
+ return a ? b : d;
+}
+
+/*
+** inv_zero1:
+** cmp w0, 0
+** csinv w0, wzr, w1, ne
+** ret
+*/
+unsigned long long
+inv_zero1(unsigned a, unsigned b)
+{
+ return a ? 0 : ~b;
+}
+
+/*
+** inv_zero2:
+** cmp w0, 0
+** csinv w0, wzr, w1, eq
+** ret
+*/
+unsigned long long
+inv_zero2(unsigned a, unsigned b)
+{
+ return a ? ~b : 0;
+}
+
+
+/*
+** inv2:
+** cmp w0, 0
+** csinv w0, w2, w1, eq
+** ret
+*/
+unsigned long long
+inv2(unsigned a, unsigned b, unsigned c)
+{
+ return a ? ~b : c;
+}
+
+/*
+** inv2_local:
+** cmp w0, 0
+** csinv w0, w2, w1, eq
+** ret
+*/
+unsigned long long
+inv2_local(unsigned a, unsigned b, unsigned c)
+{
+ unsigned d = ~b;
+ return a ? d : c;
+}
+
+/*
+** neg1:
+** cmp w0, 0
+** csneg w0, w1, w2, ne
+** ret
+*/
+unsigned long long
+neg1(unsigned a, unsigned b, unsigned c)
+{
+ return a ? b : -c;
+}
+
+
+/*
+** neg2:
+** cmp w0, 0
+** csneg w0, w2, w1, eq
+** ret
+*/
+unsigned long long
+neg2(unsigned a, unsigned b, unsigned c)
+{
+ return a ? -b : c;
+}
+
+/* { dg-final { check-function-bodies "**" "" "" } } */