"operands[4] = GEN_INT (- INTVAL (operands[2]));"
)
-(define_insn "*thumb2_addsi3_compare0"
+(define_insn "thumb2_addsi3_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV
(plus:SI (match_operand:SI 1 "s_register_operand" "l, 0, r")
"
operands[2] = GEN_INT (32 - INTVAL (operands[2]));
")
+
+;; Define the subtract-one-and-jump insns so loop.c
+;; knows what to generate.
+(define_expand "doloop_end"
+ [(use (match_operand 0 "" "")) ; loop pseudo
+ (use (match_operand 1 "" "")) ; iterations; zero if unknown
+ (use (match_operand 2 "" "")) ; max iterations
+ (use (match_operand 3 "" "")) ; loop level
+ (use (match_operand 4 "" ""))] ; label
+ "TARGET_32BIT"
+ "
+ {
+ /* Currently SMS relies on the do-loop pattern to recognize loops
+ where (1) the control part consists of all insns defining and/or
+ using a certain 'count' register and (2) the loop count can be
+ adjusted by modifying this register prior to the loop.
+ ??? The possible introduction of a new block to initialize the
+ new IV can potentially affect branch optimizations. */
+ if (optimize > 0 && flag_modulo_sched)
+ {
+ rtx s0;
+ rtx bcomp;
+ rtx loc_ref;
+ rtx cc_reg;
+ rtx insn;
+ rtx cmp;
+
+ /* Only use this on innermost loops. */
+ if (INTVAL (operands[3]) > 1)
+ FAIL;
+
+ if (GET_MODE (operands[0]) != SImode)
+ FAIL;
+
+ s0 = operands[0];
+ if (TARGET_THUMB2)
+ insn = emit_insn (gen_thumb2_addsi3_compare0 (s0, s0, GEN_INT (-1)));
+ else
+ insn = emit_insn (gen_addsi3_compare0 (s0, s0, GEN_INT (-1)));
+
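+ /* Both addsi3_compare0 patterns are PARALLELs whose first element
+    sets the condition code register; extract that SET so the branch
+    below can test it against zero.  */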
+ cmp = XVECEXP (PATTERN (insn), 0, 0);
+ cc_reg = SET_DEST (cmp);
+ bcomp = gen_rtx_NE (VOIDmode, cc_reg, const0_rtx);
+ loc_ref = gen_rtx_LABEL_REF (VOIDmode, operands[4]);
+ emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
+ gen_rtx_IF_THEN_ELSE (VOIDmode, bcomp,
+ loc_ref, pc_rtx)));
+ DONE;
+ }
+ else
+   FAIL;
+ }")
+
rtx inc_src;
rtx condition;
rtx pattern;
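+ /* Set only for the third (compare-before-branch) doloop form below:
+    CC_REG is the condition-code register written by the compare and
+    REG_ORIG the counter register being decremented.  */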
+ rtx cc_reg = NULL_RTX;
+ rtx reg_orig = NULL_RTX;
/* The canonical doloop pattern we expect has one of the following
forms:
2) (set (reg) (plus (reg) (const_int -1))
(set (pc) (if_then_else (reg != 0)
(label_ref (label))
- (pc))). */
+ (pc))).
+
+ Some targets (ARM) do the comparison before the branch, as in the
+ following form:
+
+ 3) (parallel [(set (cc) (compare (plus (reg) (const_int -1)) (const_int 0)))
+               (set (reg) (plus (reg) (const_int -1)))])
+    (set (pc) (if_then_else (cc == NE)
+                            (label_ref (label))
+                            (pc))) */
pattern = PATTERN (doloop_pat);
{
rtx cond;
rtx prev_insn = prev_nondebug_insn (doloop_pat);
+ rtx cmp_arg1, cmp_arg2;
+ rtx cmp_orig;
- /* We expect the decrement to immediately precede the branch. */
+ /* If the pattern is not a PARALLEL we expect the two doloop forms
+    shown as cases 2) and 3) above: in case 2) the decrement
+    immediately precedes the branch, while in case 3) the compare
+    and decrement instructions immediately precede the branch.  */
if (prev_insn == NULL_RTX || !INSN_P (prev_insn))
return 0;
cmp = pattern;
- inc = PATTERN (PREV_INSN (doloop_pat));
+ if (GET_CODE (PATTERN (prev_insn)) == PARALLEL)
+ {
+ /* The third case: the compare and decrement instructions
+ immediately precede the branch. */
+ cmp_orig = XVECEXP (PATTERN (prev_insn), 0, 0);
+ if (GET_CODE (cmp_orig) != SET)
+ return 0;
+ if (GET_CODE (SET_SRC (cmp_orig)) != COMPARE)
+ return 0;
+ cmp_arg1 = XEXP (SET_SRC (cmp_orig), 0);
+ cmp_arg2 = XEXP (SET_SRC (cmp_orig), 1);
+ if (cmp_arg2 != const0_rtx
+ || GET_CODE (cmp_arg1) != PLUS)
+ return 0;
+ reg_orig = XEXP (cmp_arg1, 0);
+ if (XEXP (cmp_arg1, 1) != constm1_rtx
+ || !REG_P (reg_orig))
+ return 0;
+ cc_reg = SET_DEST (cmp_orig);
+
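+ /* The decrement is the second element of the PARALLEL.  */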
+ inc = XVECEXP (PATTERN (prev_insn), 0, 1);
+ }
+ else
+ inc = PATTERN (PREV_INSN (doloop_pat));
/* We expect the condition to be of the form (reg != 0) */
cond = XEXP (SET_SRC (cmp), 0);
if (GET_CODE (cond) != NE || XEXP (cond, 1) != const0_rtx)
return 0;
-
}
else
{
return 0;
if ((XEXP (condition, 0) == reg)
+ /* For the third case: the condition tests the CC register set by
+    the compare, and the decremented register must match REG.  */
+ || ((cc_reg != NULL_RTX)
+     && (XEXP (condition, 0) == cc_reg)
+     && (reg_orig == reg))
|| (GET_CODE (XEXP (condition, 0)) == PLUS
- && XEXP (XEXP (condition, 0), 0) == reg))
+ && XEXP (XEXP (condition, 0), 0) == reg))
{
if (GET_CODE (pattern) != PARALLEL)
- /* The second form we expect:
+ /* For the second form we expect:
(set (reg) (plus (reg) (const_int -1))
(set (pc) (if_then_else (reg != 0)
(set (reg) (plus (reg) (const_int -1)))
(additional clobbers and uses)])
- So we return that form instead.
+ For the third form we expect:
+
+ (parallel [(set (cc) (compare (plus (reg) (const_int -1)) (const_int 0)))
+            (set (reg) (plus (reg) (const_int -1)))])
+ (set (pc) (if_then_else (cc == NE)
+                         (label_ref (label))
+                         (pc)))
+
+ which is equivalent to the following:
+
+ (parallel [(set (cc) (compare (reg) (const_int 1)))
+            (set (reg) (plus (reg) (const_int -1)))
+            (set (pc) (if_then_else (cc == NE)
+                                    (label_ref (label))
+                                    (pc)))])
+
+ So we return the second form instead in both cases.
+
*/
condition = gen_rtx_fmt_ee (NE, VOIDmode, inc_src, const1_rtx);
continue;
}
- /* Don't handle BBs with calls or barriers, or !single_set insns,
- or auto-increment insns (to avoid creating invalid reg-moves
- for the auto-increment insns).
+ /* Don't handle BBs with calls or barriers or auto-increment insns
+    (to avoid creating invalid reg-moves for the auto-increment insns),
+    or !single_set insns, with the exception of instructions that
+    include count_reg; such instructions are part of the control
+    part of the loop that the do-loop pattern recognizes.
??? Should handle auto-increment insns.
??? Should handle insns defining subregs. */
for (insn = head; insn != NEXT_INSN (tail); insn = NEXT_INSN (insn))
if (CALL_P (insn)
|| BARRIER_P (insn)
|| (NONDEBUG_INSN_P (insn) && !JUMP_P (insn)
- && !single_set (insn) && GET_CODE (PATTERN (insn)) != USE)
+ && !single_set (insn) && GET_CODE (PATTERN (insn)) != USE
+ && !reg_mentioned_p (count_reg, insn))
|| (FIND_REG_INC_NOTE (insn, NULL_RTX) != 0)
|| (INSN_P (insn) && (set = single_set (insn))
&& GET_CODE (SET_DEST (set)) == SUBREG))