return true;
}
+/* If MEM has a sp address, return 0; if it has a sp + const address,
+   return that const; if it has a reg address where the reg is known to be
+   set to sp + const and FAST is false, return that const; otherwise return
+   INTTYPE_MINIMUM (HOST_WIDE_INT).  */
+
+static HOST_WIDE_INT
+sp_based_mem_offset (rtx_call_insn *call_insn, const_rtx mem, bool fast)
+{
+ HOST_WIDE_INT off = 0;
+ rtx addr = XEXP (mem, 0);
+ if (GET_CODE (addr) == PLUS
+ && REG_P (XEXP (addr, 0))
+ && CONST_INT_P (XEXP (addr, 1)))
+ {
+ off = INTVAL (XEXP (addr, 1));
+ addr = XEXP (addr, 0);
+ }
+ if (addr == stack_pointer_rtx)
+ return off;
+
+ if (!REG_P (addr) || fast)
+ return INTTYPE_MINIMUM (HOST_WIDE_INT);
+
+ /* Use the DF use-def chains to see whether ADDR was set to sp + const.  */
+ df_ref use;
+ FOR_EACH_INSN_USE (use, call_insn)
+ if (rtx_equal_p (addr, DF_REF_REG (use)))
+ break;
+
+ if (use == NULL)
+ return INTTYPE_MINIMUM (HOST_WIDE_INT);
+
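+ /* Find a non-artificial def of ADDR that reaches this use.  */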
+ struct df_link *defs;
+ for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
+ if (! DF_REF_IS_ARTIFICIAL (defs->ref))
+ break;
+
+ if (defs == NULL)
+ return INTTYPE_MINIMUM (HOST_WIDE_INT);
+
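+ /* The reaching def must be a single set of ADDR to sp + const.  */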
+ rtx set = single_set (DF_REF_INSN (defs->ref));
+ if (!set)
+ return INTTYPE_MINIMUM (HOST_WIDE_INT);
+
+ if (GET_CODE (SET_SRC (set)) != PLUS
+ || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
+ || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
+ return INTTYPE_MINIMUM (HOST_WIDE_INT);
+
+ off += INTVAL (XEXP (SET_SRC (set), 1));
+ return off;
+}
/* Try to find all stack stores of CALL_INSN arguments if
ACCUMULATE_OUTGOING_ARGS. If all stack stores have been found
if (GET_CODE (XEXP (p, 0)) == USE
&& MEM_P (XEXP (XEXP (p, 0), 0)))
{
- rtx mem = XEXP (XEXP (p, 0), 0), addr;
- HOST_WIDE_INT off = 0, size;
+ rtx mem = XEXP (XEXP (p, 0), 0);
+ HOST_WIDE_INT size;
if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size))
return false;
- addr = XEXP (mem, 0);
- if (GET_CODE (addr) == PLUS
- && REG_P (XEXP (addr, 0))
- && CONST_INT_P (XEXP (addr, 1)))
- {
- off = INTVAL (XEXP (addr, 1));
- addr = XEXP (addr, 0);
- }
- if (addr != stack_pointer_rtx)
- {
- if (!REG_P (addr))
- return false;
- /* If not fast, use chains to see if addr wasn't set to
- sp + offset. */
- if (!fast)
- {
- df_ref use;
- struct df_link *defs;
- rtx set;
-
- FOR_EACH_INSN_USE (use, call_insn)
- if (rtx_equal_p (addr, DF_REF_REG (use)))
- break;
-
- if (use == NULL)
- return false;
-
- for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
- if (! DF_REF_IS_ARTIFICIAL (defs->ref))
- break;
-
- if (defs == NULL)
- return false;
-
- set = single_set (DF_REF_INSN (defs->ref));
- if (!set)
- return false;
-
- if (GET_CODE (SET_SRC (set)) != PLUS
- || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
- || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
- return false;
-
- off += INTVAL (XEXP (SET_SRC (set), 1));
- }
- else
- return false;
- }
+ HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
+ if (off == INTTYPE_MINIMUM (HOST_WIDE_INT))
+ return false;
min_sp_off = MIN (min_sp_off, off);
max_sp_off = MAX (max_sp_off, off + size);
}
if (GET_CODE (XEXP (p, 0)) == USE
&& MEM_P (XEXP (XEXP (p, 0), 0)))
{
- rtx mem = XEXP (XEXP (p, 0), 0), addr;
- HOST_WIDE_INT off = 0, byte, size;
+ rtx mem = XEXP (XEXP (p, 0), 0);
/* Checked in the previous iteration. */
- size = MEM_SIZE (mem).to_constant ();
- addr = XEXP (mem, 0);
- if (GET_CODE (addr) == PLUS
- && REG_P (XEXP (addr, 0))
- && CONST_INT_P (XEXP (addr, 1)))
- {
- off = INTVAL (XEXP (addr, 1));
- addr = XEXP (addr, 0);
- }
- if (addr != stack_pointer_rtx)
- {
- df_ref use;
- struct df_link *defs;
- rtx set;
-
- FOR_EACH_INSN_USE (use, call_insn)
- if (rtx_equal_p (addr, DF_REF_REG (use)))
- break;
-
- for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
- if (! DF_REF_IS_ARTIFICIAL (defs->ref))
- break;
-
- set = single_set (DF_REF_INSN (defs->ref));
- off += INTVAL (XEXP (SET_SRC (set), 1));
- }
- for (byte = off; byte < off + size; byte++)
- {
- if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
- gcc_unreachable ();
- }
+ HOST_WIDE_INT size = MEM_SIZE (mem).to_constant ();
+ HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
+ gcc_checking_assert (off != INTTYPE_MINIMUM (HOST_WIDE_INT));
+ for (HOST_WIDE_INT byte = off; byte < off + size; byte++)
+ if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
+ gcc_unreachable ();
}
/* Walk backwards, looking for argument stores. The search stops
ret = false;
for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
{
- rtx set, mem, addr;
- HOST_WIDE_INT off;
-
if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
prev_insn = NULL;
else
if (!NONDEBUG_INSN_P (insn))
continue;
- set = single_set (insn);
+ rtx set = single_set (insn);
if (!set || SET_DEST (set) == stack_pointer_rtx)
break;
if (!MEM_P (SET_DEST (set)))
continue;
- mem = SET_DEST (set);
- addr = XEXP (mem, 0);
- off = 0;
- if (GET_CODE (addr) == PLUS
- && REG_P (XEXP (addr, 0))
- && CONST_INT_P (XEXP (addr, 1)))
- {
- off = INTVAL (XEXP (addr, 1));
- addr = XEXP (addr, 0);
- }
- if (addr != stack_pointer_rtx)
- {
- if (!REG_P (addr))
- break;
- if (!fast)
- {
- df_ref use;
- struct df_link *defs;
- rtx set;
-
- FOR_EACH_INSN_USE (use, insn)
- if (rtx_equal_p (addr, DF_REF_REG (use)))
- break;
-
- if (use == NULL)
- break;
-
- for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
- if (! DF_REF_IS_ARTIFICIAL (defs->ref))
- break;
-
- if (defs == NULL)
- break;
-
- set = single_set (DF_REF_INSN (defs->ref));
- if (!set)
- break;
-
- if (GET_CODE (SET_SRC (set)) != PLUS
- || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
- || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
- break;
-
- off += INTVAL (XEXP (SET_SRC (set), 1));
- }
- else
- break;
- }
+ rtx mem = SET_DEST (set);
+ HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast);
+ if (off == INTTYPE_MINIMUM (HOST_WIDE_INT))
+ break;
HOST_WIDE_INT size;
if (!MEM_SIZE_KNOWN_P (mem)