          (zero_extract:DI
            (match_operand:DI 0 "register_operand" "r")
            (const_int 1)
-           (match_operand:DI 1 "register_operand" "r"))
+           (match_operand:DI 1 "nonmemory_operand" "rN"))
          (const_int 0)))]
  "TARGET_64BIT && (TARGET_USE_BT || optimize_size)"
  "bt{q}\t{%1, %0|%0, %1}"
          (zero_extract:SI
            (match_operand:SI 0 "register_operand" "r")
            (const_int 1)
-           (match_operand:SI 1 "register_operand" "r"))
+           (match_operand:SI 1 "nonmemory_operand" "rN"))
          (const_int 0)))]
  "TARGET_USE_BT || optimize_size"
  "bt{l}\t{%1, %0|%0, %1}"
  PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));
})
+;; avoid useless masking of bit offset operand
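+;;
+;; bt{q} with a register operand only uses the bit offset modulo 64,
+;; so a source-level "& 63" on the offset is dead weight by the time
+;; the test reaches this pattern.  As a rough sketch (function names
+;; here are ours, purely illustrative), the C idiom this is meant to
+;; catch is:
+;;
+;;   extern void hit (void);   /* hypothetical callee */
+;;
+;;   void
+;;   test_bit (unsigned long long x, int n)
+;;   {
+;;     if ((x >> (n & 63)) & 1)   /* 63 satisfies the 0x3f check below */
+;;       hit ();
+;;   }
+;;
+;; Combine presents the masked test as the zero_extract below; the
+;; split drops the AND and converts operand 2 to DImode with
+;; simplify_gen_subreg so the plain bt/jcc pair can be emitted.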
+(define_insn_and_split "*jcc_btdi_mask_rex64"
+  [(set (pc)
+       (if_then_else (match_operator 0 "bt_comparison_operator"
+                       [(zero_extract:DI
+                          (match_operand:DI 1 "register_operand" "r")
+                          (const_int 1)
+                          (and:SI
+                            (match_operand:SI 2 "register_operand" "r")
+                            (match_operand:SI 3 "const_int_operand" "n")))])
+                     (label_ref (match_operand 4 "" ""))
+                     (pc)))]
+  "TARGET_64BIT && (TARGET_USE_BT || optimize_size)
+   && (INTVAL (operands[3]) & 0x3f) == 0x3f"
+  "#"
+  "&& 1"
+  [(set (reg:CCC FLAGS_REG)
+       (compare:CCC
+         (zero_extract:DI
+           (match_dup 1)
+           (const_int 1)
+           (match_dup 2))
+         (const_int 0)))
+   (set (pc)
+       (if_then_else (match_op_dup 0 [(reg:CCC FLAGS_REG) (const_int 0)])
+                     (label_ref (match_dup 4))
+                     (pc)))]
+{
+  operands[2] = simplify_gen_subreg (DImode, operands[2], SImode, 0);
+
+  PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));
+})
+
(define_insn_and_split "*jcc_btsi"
  [(set (pc)
        (if_then_else (match_operator 0 "bt_comparison_operator"
  PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));
})
+;; avoid useless masking of bit offset operand
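+;;
+;; Same idea as *jcc_btdi_mask_rex64, but for 32-bit tests: bt{l} with
+;; a register operand only uses the offset modulo 32, so a "& 31" (or
+;; any mask whose low five bits are all set, per the 0x1f check below)
+;; can be discarded.  A minimal sketch of the source idiom, with
+;; illustrative names of our own choosing:
+;;
+;;   extern void hit (void);   /* hypothetical callee */
+;;
+;;   void
+;;   test_bit (unsigned int x, int n)
+;;   {
+;;     if ((x >> (n & 31)) & 1)
+;;       hit ();
+;;   }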
+(define_insn_and_split "*jcc_btsi_mask"
+  [(set (pc)
+       (if_then_else (match_operator 0 "bt_comparison_operator"
+                       [(zero_extract:SI
+                          (match_operand:SI 1 "register_operand" "r")
+                          (const_int 1)
+                          (and:SI
+                            (match_operand:SI 2 "register_operand" "r")
+                            (match_operand:SI 3 "const_int_operand" "n")))])
+                     (label_ref (match_operand 4 "" ""))
+                     (pc)))]
+  "(TARGET_USE_BT || optimize_size)
+   && (INTVAL (operands[3]) & 0x1f) == 0x1f"
+  "#"
+  "&& 1"
+  [(set (reg:CCC FLAGS_REG)
+       (compare:CCC
+         (zero_extract:SI
+           (match_dup 1)
+           (const_int 1)
+           (match_dup 2))
+         (const_int 0)))
+   (set (pc)
+       (if_then_else (match_op_dup 0 [(reg:CCC FLAGS_REG) (const_int 0)])
+                     (label_ref (match_dup 4))
+                     (pc)))]
+  "PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));")
+
(define_insn_and_split "*jcc_btsi_1"
  [(set (pc)
        (if_then_else (match_operator 0 "bt_comparison_operator"
  PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));
})
+;; avoid useless masking of bit offset operand
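+;;
+;; Combine does not always canonicalize the single-bit test into a
+;; zero_extract; depending on the surrounding code it can leave the
+;; explicit (and (lshiftrt ...) (const_int 1)) form, with the shift
+;; count appearing as a QImode subreg of the masked register.  This
+;; variant catches that shape for the same source idiom as
+;; *jcc_btsi_mask, e.g. (names again illustrative only):
+;;
+;;   extern void hit (void);   /* hypothetical callee */
+;;
+;;   void
+;;   test_bit (unsigned int x, unsigned int n)
+;;   {
+;;     if ((x >> (n & 31)) & 1)
+;;       hit ();
+;;   }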
+(define_insn_and_split "*jcc_btsi_mask_1"
+  [(set (pc)
+       (if_then_else
+         (match_operator 0 "bt_comparison_operator"
+           [(and:SI
+              (lshiftrt:SI
+                (match_operand:SI 1 "register_operand" "r")
+                (subreg:QI
+                  (and:SI
+                    (match_operand:SI 2 "register_operand" "r")
+                    (match_operand:SI 3 "const_int_operand" "n")) 0))
+              (const_int 1))
+            (const_int 0)])
+         (label_ref (match_operand 4 "" ""))
+         (pc)))]
+  "(TARGET_USE_BT || optimize_size)
+   && (INTVAL (operands[3]) & 0x1f) == 0x1f"
+  "#"
+  "&& 1"
+  [(set (reg:CCC FLAGS_REG)
+       (compare:CCC
+         (zero_extract:SI
+           (match_dup 1)
+           (const_int 1)
+           (match_dup 2))
+         (const_int 0)))
+   (set (pc)
+       (if_then_else (match_op_dup 0 [(reg:CCC FLAGS_REG) (const_int 0)])
+                     (label_ref (match_dup 4))
+                     (pc)))]
+  "PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));")
+
;; Define combination compare-and-branch fp compare instructions to use
;; during early optimization. Splitting the operation apart early makes
;; for bad code when we want to reverse the operation.