/* Subroutines for manipulating rtx's in semantically interesting ways.
- Copyright (C) 1987-2017 Free Software Foundation, Inc.
+ Copyright (C) 1987-2019 Free Software Foundation, Inc.
This file is part of GCC.
int width = GET_MODE_PRECISION (smode);
/* You want to truncate to a _what_? */
- gcc_assert (SCALAR_INT_MODE_P (mode)
- || POINTER_BOUNDS_MODE_P (mode));
+ gcc_assert (SCALAR_INT_MODE_P (mode));
/* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
if (smode == BImode)
tree type = TREE_TYPE (name);
int unsignedp = TYPE_UNSIGNED (type);
- machine_mode mode = TYPE_MODE (type);
-
- /* Bypass TYPE_MODE when it maps vector modes to BLKmode. */
- if (mode == BLKmode)
- {
- gcc_assert (VECTOR_TYPE_P (type));
- mode = type->type_common.mode;
- }
-
- machine_mode pmode = promote_mode (type, mode, &unsignedp);
+ machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
if (punsignedp)
*punsignedp = unsignedp;
}
if (!suppress_reg_args_size)
- add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
+ add_args_size_note (insn, stack_pointer_delta);
}
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
/* We expect all variable sized adjustments to be a multiple of
   PREFERRED_STACK_BOUNDARY. */
- if (CONST_INT_P (adjust))
- stack_pointer_delta -= INTVAL (adjust);
+ poly_int64 const_adjust;
+ if (poly_int_rtx_p (adjust, &const_adjust))
+ stack_pointer_delta -= const_adjust;
adjust_stack_1 (adjust, false);
}
/* We expect all variable sized adjustments to be a multiple of
   PREFERRED_STACK_BOUNDARY. */
- if (CONST_INT_P (adjust))
- stack_pointer_delta += INTVAL (adjust);
+ poly_int64 const_adjust;
+ if (poly_int_rtx_p (adjust, &const_adjust))
+ stack_pointer_delta += const_adjust;
adjust_stack_1 (adjust, true);
}
if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
update_sjlj_context ();
}
-\f
+
/* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET. */
-static rtx
+
+rtx
align_dynamic_address (rtx target, unsigned required_align)
{
/* CEIL_DIV_EXPR needs to worry about the addition overflowing,
/* We ought to be called always on the toplevel and stack ought to be aligned
properly. */
- gcc_assert (!(stack_pointer_delta
- % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
+ gcc_assert (multiple_p (stack_pointer_delta,
+ PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
/* If needed, check that we have the required amount of stack. Take into
account what has already been checked. */
stack pointer, such as acquiring the space by calling malloc(). */
if (targetm.have_allocate_stack ())
{
- struct expand_operand ops[2];
+ class expand_operand ops[2];
/* We don't have to check against the predicate for operand 0 since
TARGET is known to be a pseudo of the proper mode, which must
be valid for the operand. */
}
else
{
- int saved_stack_pointer_delta;
+ poly_int64 saved_stack_pointer_delta;
if (!STACK_GROWS_DOWNWARD)
emit_move_insn (target, virtual_stack_dynamic_rtx);
of memory. */
rtx
-get_dynamic_stack_base (HOST_WIDE_INT offset, unsigned required_align)
+get_dynamic_stack_base (poly_int64 offset, unsigned required_align)
{
rtx target;
emit_stack_probe (rtx address)
{
if (targetm.have_probe_stack_address ())
- emit_insn (targetm.gen_probe_stack_address (address));
+ {
+ class expand_operand ops[1];
+ insn_code icode = targetm.code_for_probe_stack_address;
+ create_address_operand (ops, address);
+ maybe_legitimize_operands (icode, 0, 1, ops);
+ expand_insn (icode, 1, ops);
+ }
else
{
rtx memref = gen_rtx_MEM (word_mode, address);
MEM_VOLATILE_P (memref) = 1;
+ memref = validize_mem (memref);
/* See if we have an insn to probe the stack. */
if (targetm.have_probe_stack ())
- emit_insn (targetm.gen_probe_stack (memref));
+ emit_insn (targetm.gen_probe_stack (memref));
else
- emit_move_insn (memref, const0_rtx);
+ emit_move_insn (memref, const0_rtx);
}
}
/* Next see if we have an insn to check the stack. */
else if (targetm.have_check_stack ())
{
- struct expand_operand ops[1];
+ class expand_operand ops[1];
rtx addr = memory_address (Pmode,
gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
stack_pointer_rtx,
/* We can get here with a constant size on some targets. */
rtx rounded_size, last_addr, residual;
- HOST_WIDE_INT probe_interval;
+ HOST_WIDE_INT probe_interval, probe_range;
+ bool target_probe_range_p = false;
compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
&residual, &probe_interval, size);
+ /* Get the back-end specific probe ranges. */
+ probe_range = targetm.stack_clash_protection_alloca_probe_range ();
+ target_probe_range_p = probe_range != 0;
+ gcc_assert (probe_range >= 0);
+
+ /* If no back-end specific range defined, default to the top of the newly
+ allocated range. */
+ if (probe_range == 0)
+ probe_range = probe_interval - GET_MODE_SIZE (word_mode);
+
if (rounded_size != CONST0_RTX (Pmode))
{
if (CONST_INT_P (rounded_size)
i += probe_interval)
{
anti_adjust_stack (GEN_INT (probe_interval));
-
/* The prologue does not probe residuals. Thus the offset
here to probe just beyond what the prologue had already
allocated. */
emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
- (probe_interval
- - GET_MODE_SIZE (word_mode))));
+ probe_range));
+
emit_insn (gen_blockage ());
}
}
anti_adjust_stack (GEN_INT (probe_interval));
/* The prologue does not probe residuals. Thus the offset here
- to probe just beyond what the prologue had already allocated. */
+ to probe just beyond what the prologue had already
+ allocated. */
emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
- (probe_interval
- - GET_MODE_SIZE (word_mode))));
+ probe_range));
emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
last_addr, rotate_loop);
if (residual != CONST0_RTX (Pmode))
{
- rtx x = force_reg (Pmode, plus_constant (Pmode, residual,
- -GET_MODE_SIZE (word_mode)));
+ rtx label = NULL_RTX;
+ /* RESIDUAL could be zero at runtime and in that case *sp could
+ hold live data. Furthermore, we do not want to probe into the
+ red zone.
+
+ If TARGET_PROBE_RANGE_P then the target has promised it's safe to
+ probe at offset 0. In which case we no longer have to check for
+ RESIDUAL == 0. However we still need to probe at the right offset
+ when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.
+
+	 If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at *sp
+	 on RESIDUAL != 0 at runtime if RESIDUAL is not a compile-time constant.
+	 */
anti_adjust_stack (residual);
- emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
- emit_insn (gen_blockage ());
- }
- /* Some targets make optimistic assumptions in their prologues about
- how the caller may have probed the stack. Make sure we honor
- those assumptions when needed. */
- if (size != CONST0_RTX (Pmode)
- && targetm.stack_clash_protection_final_dynamic_probe (residual))
- {
- /* SIZE could be zero at runtime and in that case *sp could hold
- live data. Furthermore, we don't want to probe into the red
- zone.
-
- Go ahead and just guard a probe at *sp on SIZE != 0 at runtime
- if SIZE is not a compile time constant. */
-
- /* Ideally we would just probe at *sp. However, if SIZE is not
- a compile-time constant, but is zero at runtime, then *sp
- might hold live data. So probe at *sp if we know that
- an allocation was made, otherwise probe into the red zone
- which is obviously undesirable. */
- if (CONST_INT_P (size))
+ if (!CONST_INT_P (residual))
{
- emit_stack_probe (stack_pointer_rtx);
- emit_insn (gen_blockage ());
+ label = gen_label_rtx ();
+ rtx_code op = target_probe_range_p ? LT : EQ;
+ rtx probe_cmp_value = target_probe_range_p
+ ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
+ : CONST0_RTX (GET_MODE (residual));
+
+ if (target_probe_range_p)
+ emit_stack_probe (stack_pointer_rtx);
+
+ emit_cmp_and_jump_insns (residual, probe_cmp_value,
+ op, NULL_RTX, Pmode, 1, label);
}
- else
+
+ rtx x = NULL_RTX;
+
+ /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
+ by the ABI defined safe value. */
+ if (!CONST_INT_P (residual) && target_probe_range_p)
+ x = GEN_INT (probe_range);
+      /* If RESIDUAL is a constant but smaller than the ABI defined safe value,
+	 we still want to probe up, but the safest amount is a word.  */
+ else if (target_probe_range_p)
{
- rtx label = gen_label_rtx ();
- emit_cmp_and_jump_insns (size, CONST0_RTX (GET_MODE (size)),
- EQ, NULL_RTX, Pmode, 1, label);
- emit_stack_probe (stack_pointer_rtx);
- emit_insn (gen_blockage ());
- emit_label (label);
+ if (INTVAL (residual) <= probe_range)
+ x = GEN_INT (GET_MODE_SIZE (word_mode));
+ else
+ x = GEN_INT (probe_range);
}
+ else
+ /* If nothing else, probe at the top of the new allocation. */
+ x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));
+
+ emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
+
+ emit_insn (gen_blockage ());
+ if (!CONST_INT_P (residual))
+ emit_label (label);
}
}