}
+/* Output instructions to insert an inverted bit into OPERANDS[0]:
+ $0.$1 = ~$2.$3 if XBITNO = NULL
+ $0.$1 = ~$2.XBITNO if XBITNO != NULL.
+ If PLEN = NULL then output the respective instruction sequence which
+ is a combination of BST / BLD and some instruction(s) to invert the bit.
+ If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
+ Return "". */
+
+const char*
+avr_out_insert_notbit (rtx_insn *insn, rtx operands[], rtx xbitno, int *plen)
+{
+ rtx op[4] = { operands[0], operands[1], operands[2],
+ xbitno == NULL_RTX ? operands [3] : xbitno };
+
+ if (INTVAL (op[1]) == 7
+ && test_hard_reg_class (LD_REGS, op[0]))
+ {
+ /* If the destination bit position is 7 and the destination is a d-reg,
+ then we can invert the bit after the insertion by means of SUBI *,0x80,
+ which toggles bit 7. */
+
+ if (INTVAL (op[3]) == 7
+ && REGNO (op[0]) == REGNO (op[2]))
+ {
+ avr_asm_len ("subi %0,0x80", op, plen, -1);
+ }
+ else
+ {
+ avr_asm_len ("bst %2,%3" CR_TAB
+ "bld %0,%1" CR_TAB
+ "subi %0,0x80", op, plen, -3);
+ }
+ }
+ else if (test_hard_reg_class (LD_REGS, op[0])
+ && (INTVAL (op[1]) != INTVAL (op[3])
+ || !reg_overlap_mentioned_p (op[0], op[2])))
+ {
+ /* If the destination bit is in a d-reg, we can clear it with ANDI
+ and conditionally set it with ORI, using SBRS to skip the ORI
+ depending on the source bit. This only applies if the ANDI does
+ not clobber the source bit first (no early-clobber situation). */
+
+ avr_asm_len ("andi %0,~(1<<%1)" CR_TAB
+ "sbrs %2,%3" CR_TAB
+ "ori %0,1<<%1", op, plen, -3);
+ }
+ else
+ {
+ /* Otherwise, invert the bit by means of COM before we store it with
+ BST and then undo the COM if needed. */
+
+ avr_asm_len ("com %2" CR_TAB
+ "bst %2,%3", op, plen, -2);
+
+ if (!reg_unused_after (insn, op[2])
+ // A simple 'reg_unused_after' is not enough because that function
+ // assumes that the destination register is overwritten completely,
+ // and hence that its old contents no longer matter. This is not
+ // the case with BLD, which changes just one bit of the destination.
+ || reg_overlap_mentioned_p (op[0], op[2]))
+ {
+ /* Undo the COM from above. */
+ avr_asm_len ("com %2", op, plen, 1);
+ }
+
+ avr_asm_len ("bld %0,%1", op, plen, 1);
+ }
+
+ return "";
+}
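
For illustration only (this example is not part of the patch, and the function name is made up): source code of roughly the following shape is the kind of thing the combiner may turn into one of the insv_notbit forms handled by avr_out_insert_notbit, which then picks the SUBI, ANDI/SBRS/ORI or COM/BST/BLD strategy depending on the registers and bit positions involved.

/* Hypothetical C example, not part of the patch: copy the complement of
   bit 3 of SRC into bit 5 of DST while leaving the other bits of DST
   untouched.  With the patterns below, such code can be emitted as a
   short bit-transfer sequence instead of byte-wide shifting and masking.  */
static unsigned char
copy_inverted_bit (unsigned char dst, unsigned char src)
{
  unsigned char bit = !(src & (1u << 3));   /* ~src.3 as 0 or 1 */
  return (dst & ~(1u << 5)) | (bit << 5);   /* insert it at dst.5 */
}
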
+
+
/* Outputs instructions needed for fixed point type conversion.
This includes converting between any fixed point type, as well
as converting to any integer type. Conversion between integer
case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
+ case ADJUST_LEN_INSV_NOTBIT:
+ avr_out_insert_notbit (insn, op, NULL_RTX, &len);
+ break;
+ case ADJUST_LEN_INSV_NOTBIT_0:
+ avr_out_insert_notbit (insn, op, const0_rtx, &len);
+ break;
+ case ADJUST_LEN_INSV_NOTBIT_7:
+ avr_out_insert_notbit (insn, op, GEN_INT (7), &len);
+ break;
+
default:
gcc_unreachable();
}
ashlhi, ashrhi, lshrhi,
ashlsi, ashrsi, lshrsi,
ashlpsi, ashrpsi, lshrpsi,
- insert_bits,
+ insert_bits, insv_notbit, insv_notbit_0, insv_notbit_7,
no"
(const_string "no"))
;; Define two incarnations so that we can build the cross product.
(define_code_iterator any_extend [sign_extend zero_extend])
(define_code_iterator any_extend2 [sign_extend zero_extend])
+(define_code_iterator any_extract [sign_extract zero_extract])
+(define_code_iterator any_shiftrt [lshiftrt ashiftrt])
(define_code_iterator xior [xor ior])
(define_code_iterator eqne [eq ne])
(match_operand:QI 3 "nonmemory_operand" ""))]
"optimize")
+;; Some more patterns to support moving one bit around, which can be
+;; accomplished by BST + BLD in most situations.  Unfortunately, there
+;; is no canonical RTL representation for such bit moves, so we just
+;; implement some of the cases that are not too complicated.
+
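
As a purely illustrative aside (not part of the patch; the function names are invented), the lack of a canonical representation can already be seen at the source level: the two functions below move the same bit, but depending on how they are simplified they may reach the backend as an extract-based or a shift-based RTL form, which is why both shapes get their own pattern below.

/* Hypothetical examples, not part of the patch: both copy bit 6 of SRC
   into bit 1 of DST.  They compute the bit differently, so combine may
   present them to the backend as different RTL shapes (zero_extract,
   shift, ...), each of which needs its own insn pattern.  */
static unsigned char
move_bit_via_shift (unsigned char dst, unsigned char src)
{
  unsigned char bit = (src >> 6) & 1;
  return (dst & ~(1u << 1)) | (bit << 1);
}

static unsigned char
move_bit_via_mask (unsigned char dst, unsigned char src)
{
  unsigned char bit = (src & (1u << 6)) != 0;
  return (dst & ~(1u << 1)) | (bit << 1);
}
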
;; Insert bit $2.0 into $0.$1
(define_insn "*insv.reg"
[(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r,d,d,l,l")
[(set_attr "length" "2,1,1,2,2")
(set_attr "cc" "none,set_zn,set_zn,none,none")])
+;; Insert bit $2.$3 into $0.$1
+(define_insn "*insv.extract"
+ [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
+ (const_int 1)
+ (match_operand:QI 1 "const_0_to_7_operand" "n"))
+ (any_extract:QI (match_operand:QI 2 "register_operand" "r")
+ (const_int 1)
+ (match_operand:QI 3 "const_0_to_7_operand" "n")))]
+ ""
+ "bst %2,%3\;bld %0,%1"
+ [(set_attr "length" "2")
+ (set_attr "cc" "none")])
+
+;; Insert bit $2.$3 into $0.$1
+(define_insn "*insv.shiftrt"
+ [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
+ (const_int 1)
+ (match_operand:QI 1 "const_0_to_7_operand" "n"))
+ (any_shiftrt:QI (match_operand:QI 2 "register_operand" "r")
+ (match_operand:QI 3 "const_0_to_7_operand" "n")))]
+ ""
+ "bst %2,%3\;bld %0,%1"
+ [(set_attr "length" "2")
+ (set_attr "cc" "none")])
+
+;; Same, but with a NOT inverting the source bit.
+;; Insert bit ~$2.$3 into $0.$1
+(define_insn "*insv.not-shiftrt"
+ [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
+ (const_int 1)
+ (match_operand:QI 1 "const_0_to_7_operand" "n"))
+ (not:QI (any_shiftrt:QI (match_operand:QI 2 "register_operand" "r")
+ (match_operand:QI 3 "const_0_to_7_operand" "n"))))]
+ ""
+ {
+ return avr_out_insert_notbit (insn, operands, NULL_RTX, NULL);
+ }
+ [(set_attr "adjust_len" "insv_notbit")
+ (set_attr "cc" "clobber")])
+
+;; Insert bit ~$2.0 into $0.$1
+(define_insn "*insv.xor1-bit.0"
+ [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
+ (const_int 1)
+ (match_operand:QI 1 "const_0_to_7_operand" "n"))
+ (xor:QI (match_operand:QI 2 "register_operand" "r")
+ (const_int 1)))]
+ ""
+ {
+ return avr_out_insert_notbit (insn, operands, const0_rtx, NULL);
+ }
+ [(set_attr "adjust_len" "insv_notbit_0")
+ (set_attr "cc" "clobber")])
+
+;; Insert bit ~$2.0 into $0.$1
+(define_insn "*insv.not-bit.0"
+ [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
+ (const_int 1)
+ (match_operand:QI 1 "const_0_to_7_operand" "n"))
+ (not:QI (match_operand:QI 2 "register_operand" "r")))]
+ ""
+ {
+ return avr_out_insert_notbit (insn, operands, const0_rtx, NULL);
+ }
+ [(set_attr "adjust_len" "insv_notbit_0")
+ (set_attr "cc" "clobber")])
+
+;; Insert bit ~$2.7 into $0.$1
+(define_insn "*insv.not-bit.7"
+ [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
+ (const_int 1)
+ (match_operand:QI 1 "const_0_to_7_operand" "n"))
+ (ge:QI (match_operand:QI 2 "register_operand" "r")
+ (const_int 0)))]
+ ""
+ {
+ return avr_out_insert_notbit (insn, operands, GEN_INT (7), NULL);
+ }
+ [(set_attr "adjust_len" "insv_notbit_7")
+ (set_attr "cc" "clobber")])
+
+;; Insert bit ~$2.$3 into $0.$1
+(define_insn "*insv.xor-extract"
+ [(set (zero_extract:QI (match_operand:QI 0 "register_operand" "+r")
+ (const_int 1)
+ (match_operand:QI 1 "const_0_to_7_operand" "n"))
+ (any_extract:QI (xor:QI (match_operand:QI 2 "register_operand" "r")
+ (match_operand:QI 4 "const_int_operand" "n"))
+ (const_int 1)
+ (match_operand:QI 3 "const_0_to_7_operand" "n")))]
+ "INTVAL (operands[4]) & (1 << INTVAL (operands[3]))"
+ {
+ return avr_out_insert_notbit (insn, operands, NULL_RTX, NULL);
+ }
+ [(set_attr "adjust_len" "insv_notbit")
+ (set_attr "cc" "clobber")])
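
A short, purely illustrative note on the insn condition above (not part of the patch; the helper is hypothetical): requiring bit $3 of the XOR constant $4 to be set is what makes the extracted bit the inverted source bit.

/* Hypothetical illustration, not part of the patch: when bit B of MASK
   is set, bit B of (X ^ MASK) is the complement of bit B of X.  That is
   exactly what the insn condition of *insv.xor-extract checks, so the
   pattern really does insert ~$2.$3.  */
static unsigned char
extracted_bit (unsigned char x, unsigned char mask, unsigned int b)
{
  return ((x ^ mask) >> b) & 1;   /* equals !((x >> b) & 1) when (mask >> b) & 1 */
}
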
+
\f
;; Some combine patterns that try to fix bad code when a value is composed
;; from byte parts like in PR27663.