*,*,yes")
])
+; Splitters for loading/storing TLS pointers from/to %a0:DI.
+; Do this only during split2, which runs after reload. When split1 runs, some
+; of the %a0:DI occurrences might still be nested inside other rtxes and thus
+; not matched. As a result, only some occurrences would be split, which would
+; prevent CSE. By the time split2 runs, reload has ensured that no nested
+; references exist.
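+;
+; For illustration only (hand-written RTL; register numbers invented): before
+; reload the thread pointer may still sit nested inside an address
+; calculation, e.g.
+;   (set (reg:DI 60) (plus:DI (reg:DI 36 %a0) (reg:DI 61)))
+; where the inner (reg:DI %a0) is not a top-level move source, so the
+; splitters below would not match it.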
+
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(match_operand:DI 1 "register_operand" ""))]
- "TARGET_ZARCH && ACCESS_REG_P (operands[1])"
+ "TARGET_ZARCH && ACCESS_REG_P (operands[1]) && reload_completed"
[(set (match_dup 2) (match_dup 3))
(set (match_dup 0) (ashift:DI (match_dup 0) (const_int 32)))
(set (strict_low_part (match_dup 2)) (match_dup 4))]
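As a sketch of the effect (the mnemonics and the choice of GPR are
illustrative, and the splitter's operand-setup code is not shown in this
hunk), loading the TLS pointer r = %a0:DI expands to three insns that
correspond roughly to:

	ear	%r2,%a0		# low 32 bits of %r2 <- %a0 (high half of pointer)
	sllg	%r2,%r2,32	# move them into the high half
	ear	%r2,%a1		# low 32 bits of %r2 <- %a1 (low half of pointer)

i.e. the final value is (%a0 << 32) | %a1.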
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(match_operand:DI 1 "register_operand" ""))]
- "TARGET_ZARCH && ACCESS_REG_P (operands[0])
+ "TARGET_ZARCH && ACCESS_REG_P (operands[0]) && reload_completed
&& dead_or_set_p (insn, operands[1])"
[(set (match_dup 3) (match_dup 2))
(set (match_dup 1) (lshiftrt:DI (match_dup 1) (const_int 32)))
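This store variant shifts the source GPR in place, destroying its value,
which is why the guard requires dead_or_set_p on operands[1]. A rough
assembly-level sketch (mnemonics and GPR illustrative; the pattern's tail is
truncated in this hunk):

	sar	%a1,%r2		# %a1 <- low 32 bits of the TLS pointer
	srlg	%r2,%r2,32	# shift the high half down (clobbers %r2)
	sar	%a0,%r2		# %a0 <- high 32 bits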
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(match_operand:DI 1 "register_operand" ""))]
- "TARGET_ZARCH && ACCESS_REG_P (operands[0])
+ "TARGET_ZARCH && ACCESS_REG_P (operands[0]) && reload_completed
&& !dead_or_set_p (insn, operands[1])"
[(set (match_dup 3) (match_dup 2))
(set (match_dup 1) (rotate:DI (match_dup 1) (const_int 32)))
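The final variant handles a source that stays live: a rotate by 32 merely
swaps the two halves, so (presumably, given the truncated tail of this hunk)
a second rotate can restore the original value once both access registers
have been set. Roughly, at the assembly level (illustrative):

	sar	%a1,%r2		# %a1 <- low 32 bits
	rllg	%r2,%r2,32	# swap halves; nothing is lost
	sar	%a0,%r2		# %a0 <- original high 32 bits
	rllg	%r2,%r2,32	# swap back, leaving %r2 intact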