+2016-10-26 Thomas Preud'homme <thomas.preudhomme@arm.com>
+
+ * config/arm/sync.md (atomic_compare_and_swap<mode>_1): Add new ARMv8-M
+ Baseline-only alternatives to (i) hold the atomic store's success value
+ in a return register rather than a scratch register, (ii) use a low
+ register for it, and (iii) ensure the cbranchsi insn generated by the
+ split respects the constraints of the Thumb-1 cbranchsi4_insn and
+ cbranchsi4_scratch patterns.
+ * config/arm/thumb1.md (cbranchsi4_insn): Add comment to indicate
+ constraints must match those in atomic_compare_and_swap.
+ (cbranchsi4_scratch): Likewise.
+
2016-10-26 Thomas Preud'homme <thomas.preudhomme@arm.com>
* config/arm/arm.c (arm_expand_compare_and_swap): Add new bdst local
DONE;
})
+;; Constraints of this pattern must be at least as strict as those of the
+;; cbranchsi operations in thumb1.md and aim to be as permissive as possible.
(define_insn_and_split "atomic_compare_and_swap<mode>_1"
- [(set (match_operand 0 "cc_register_operand" "=&c") ;; bool out
+ [(set (match_operand 0 "cc_register_operand" "=&c,&l,&l,&l") ;; bool out
(unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
- (set (match_operand:SI 1 "s_register_operand" "=&r") ;; val out
+ (set (match_operand:SI 1 "s_register_operand" "=&r,&l,&0,&l*h") ;; val out
(zero_extend:SI
- (match_operand:NARROW 2 "mem_noofs_operand" "+Ua"))) ;; memory
+ (match_operand:NARROW 2 "mem_noofs_operand" "+Ua,Ua,Ua,Ua"))) ;; memory
(set (match_dup 2)
(unspec_volatile:NARROW
- [(match_operand:SI 3 "arm_add_operand" "rIL") ;; expected
- (match_operand:NARROW 4 "s_register_operand" "r") ;; desired
+ [(match_operand:SI 3 "arm_add_operand" "rIL,lIL*h,J,*r") ;; expected
+ (match_operand:NARROW 4 "s_register_operand" "r,r,r,r") ;; desired
(match_operand:SI 5 "const_int_operand") ;; is_weak
(match_operand:SI 6 "const_int_operand") ;; mod_s
(match_operand:SI 7 "const_int_operand")] ;; mod_f
VUNSPEC_ATOMIC_CAS))
- (clobber (match_scratch:SI 8 "=&r"))]
+ (clobber (match_scratch:SI 8 "=&r,X,X,X"))]
"<sync_predtab>"
"#"
"&& reload_completed"
{
arm_split_compare_and_swap (operands);
DONE;
- })
+ }
+ [(set_attr "arch" "32,v8mb,v8mb,v8mb")])
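As a rough illustration, and not part of the patch itself, the NARROW-mode pattern above is the kind of insn reached when a sub-word strong compare-and-swap is compiled for ARMv8-M Baseline; the function name and compile flags below are assumptions for the example only.

    #include <stdatomic.h>

    /* Illustrative only; compile with e.g. -mthumb -march=armv8-m.base -O2.
       A strong CAS on an 8-bit object is expanded through the QImode
       variant of atomic_compare_and_swap<mode>_1 above.  */
    _Bool
    cas_byte (atomic_uchar *p, unsigned char expected, unsigned char desired)
    {
      return atomic_compare_exchange_strong (p, &expected, desired);
    }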
(define_mode_attr cas_cmp_operand
[(SI "arm_add_operand") (DI "cmpdi_operand")])
(define_mode_attr cas_cmp_str
[(SI "rIL") (DI "rDi")])
+;; Constraints of this pattern must be at least as strict as those of the
+;; cbranchsi operations in thumb1.md and aim to be as permissive as possible.
(define_insn_and_split "atomic_compare_and_swap<mode>_1"
- [(set (match_operand 0 "cc_register_operand" "=&c") ;; bool out
+ [(set (match_operand 0 "cc_register_operand" "=&c,&l,&l,&l") ;; bool out
(unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
- (set (match_operand:SIDI 1 "s_register_operand" "=&r") ;; val out
- (match_operand:SIDI 2 "mem_noofs_operand" "+Ua")) ;; memory
+ (set (match_operand:SIDI 1 "s_register_operand" "=&r,&l,&0,&l*h") ;; val out
+ (match_operand:SIDI 2 "mem_noofs_operand" "+Ua,Ua,Ua,Ua")) ;; memory
(set (match_dup 2)
(unspec_volatile:SIDI
- [(match_operand:SIDI 3 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect
- (match_operand:SIDI 4 "s_register_operand" "r") ;; desired
+ [(match_operand:SIDI 3 "<cas_cmp_operand>" "<cas_cmp_str>,lIL*h,J,*r") ;; expect
+ (match_operand:SIDI 4 "s_register_operand" "r,r,r,r") ;; desired
(match_operand:SI 5 "const_int_operand") ;; is_weak
(match_operand:SI 6 "const_int_operand") ;; mod_s
(match_operand:SI 7 "const_int_operand")] ;; mod_f
VUNSPEC_ATOMIC_CAS))
- (clobber (match_scratch:SI 8 "=&r"))]
+ (clobber (match_scratch:SI 8 "=&r,X,X,X"))]
"<sync_predtab>"
"#"
"&& reload_completed"
{
arm_split_compare_and_swap (operands);
DONE;
- })
+ }
+ [(set_attr "arch" "32,v8mb,v8mb,v8mb")])
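Similarly, as a hedged sketch rather than part of the patch, a word-sized compare-and-swap written against GCC's __atomic built-ins goes through the SImode variant of this pattern; on ARMv8-M Baseline the new alternatives keep the success value in a low register so that the cbranchsi insn emitted by arm_split_compare_and_swap can satisfy the Thumb-1 constraints noted above.

    /* Illustrative only.  Strong word-sized compare-and-swap using GCC's
       __atomic built-in; is_weak and the success/failure memory orders map
       to operands 5-7 of the pattern above.  */
    int
    cas_word (unsigned int *p, unsigned int expected, unsigned int desired)
    {
      return __atomic_compare_exchange_n (p, &expected, desired,
                                          0 /* is_weak */,
                                          __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    }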
(define_insn_and_split "atomic_exchange<mode>"
[(set (match_operand:QHSD 0 "s_register_operand" "=&r") ;; output