/* Initially, the size of the variable. Later, the size of the partition,
if this variable becomes its partition's representative. */
- HOST_WIDE_INT size;
+ poly_uint64 size;
/* The *byte* alignment required for this variable. Or, as with the
size, the alignment for this partition. */
Return the frame offset. */
static poly_int64
-alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
+alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
poly_int64 offset, new_frame_offset;
tree size = TREE_CODE (decl) == SSA_NAME
? TYPE_SIZE_UNIT (TREE_TYPE (decl))
: DECL_SIZE_UNIT (decl);
- v->size = tree_to_uhwi (size);
+ v->size = tree_to_poly_uint64 (size);
/* Ensure that all variables have size, so that &a != &b for any two
variables that are simultaneously live. */
- if (v->size == 0)
+ if (known_eq (v->size, 0U))
v->size = 1;
v->alignb = align_local_variable (decl);
/* An alignment of zero can mightily confuse us later. */
size_t ib = *(const size_t *)b;
unsigned int aligna = stack_vars[ia].alignb;
unsigned int alignb = stack_vars[ib].alignb;
- HOST_WIDE_INT sizea = stack_vars[ia].size;
- HOST_WIDE_INT sizeb = stack_vars[ib].size;
+ poly_int64 sizea = stack_vars[ia].size;
+ poly_int64 sizeb = stack_vars[ib].size;
tree decla = stack_vars[ia].decl;
tree declb = stack_vars[ib].decl;
bool largea, largeb;
return (int)largeb - (int)largea;
/* Secondary compare on size, decreasing. */
- if (sizea > sizeb)
- return -1;
- if (sizea < sizeb)
- return 1;
+ int diff = compare_sizes_for_sort (sizeb, sizea);
+ if (diff != 0)
+ return diff;
/* Tertiary compare on true alignment, decreasing. */
if (aligna < alignb)
{
size_t i = stack_vars_sorted[si];
unsigned int ialign = stack_vars[i].alignb;
- HOST_WIDE_INT isize = stack_vars[i].size;
+ poly_int64 isize = stack_vars[i].size;
/* Ignore objects that aren't partition representatives. If we
see a var that is not a partition representative, it must
{
size_t j = stack_vars_sorted[sj];
unsigned int jalign = stack_vars[j].alignb;
- HOST_WIDE_INT jsize = stack_vars[j].size;
+ poly_int64 jsize = stack_vars[j].size;
/* Ignore objects that aren't partition representatives. */
if (stack_vars[j].representative != j)
sizes, as the shorter vars wouldn't be adequately protected.
Don't do that for "large" (unsupported) alignment objects,
those aren't protected anyway. */
- if ((asan_sanitize_stack_p ())
- && isize != jsize
+ if (asan_sanitize_stack_p ()
+ && maybe_ne (isize, jsize)
&& ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
break;
if (stack_vars[i].representative != i)
continue;
- fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
- " align %u\n", (unsigned long) i, stack_vars[i].size,
- stack_vars[i].alignb);
+ fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
+ print_dec (stack_vars[i].size, dump_file);
+ fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
for (j = i; j != EOC; j = stack_vars[j].next)
{
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
size_t si, i, j, n = stack_vars_num;
- HOST_WIDE_INT large_size = 0, large_alloc = 0;
+ poly_uint64 large_size = 0, large_alloc = 0;
rtx large_base = NULL;
unsigned large_align = 0;
bool large_allocation_done = false;
: DECL_RTL (decl) != pc_rtx)
continue;
- large_size += alignb - 1;
- large_size &= -(HOST_WIDE_INT)alignb;
+ large_size = aligned_upper_bound (large_size, alignb);
large_size += stack_vars[i].size;
}
}
HOST_WIDE_INT prev_offset;
if (asan_sanitize_stack_p ()
&& pred
- && frame_offset.is_constant (&prev_offset))
+ && frame_offset.is_constant (&prev_offset)
+ && stack_vars[i].size.is_constant ())
{
prev_offset = align_base (prev_offset,
MAX (alignb, ASAN_RED_ZONE_SIZE),
/* If there were any variables requiring "large" alignment, allocate
space. */
- if (large_size > 0 && ! large_allocation_done)
+ if (maybe_ne (large_size, 0U) && ! large_allocation_done)
{
poly_int64 loffset;
rtx large_allocsize;
- large_allocsize = GEN_INT (large_size);
+ large_allocsize = gen_int_mode (large_size, Pmode);
get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
loffset = alloc_stack_frame_space
- (INTVAL (large_allocsize),
+ (rtx_to_poly_int64 (large_allocsize),
PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
large_base = get_dynamic_stack_base (loffset, large_align);
large_allocation_done = true;
}
gcc_assert (large_base != NULL);
- large_alloc += alignb - 1;
- large_alloc &= -(HOST_WIDE_INT)alignb;
+ large_alloc = aligned_upper_bound (large_alloc, alignb);
offset = large_alloc;
large_alloc += stack_vars[i].size;
}
}
- gcc_assert (large_alloc == large_size);
+ gcc_assert (known_eq (large_alloc, large_size));
}
/* Take into account all sizes of partitions and reset DECL_RTLs. */
-static HOST_WIDE_INT
+static poly_uint64
account_stack_vars (void)
{
size_t si, j, i, n = stack_vars_num;
- HOST_WIDE_INT size = 0;
+ poly_uint64 size = 0;
for (si = 0; si < n; ++si)
{
static void
expand_one_stack_var_1 (tree var)
{
- HOST_WIDE_INT size;
+ poly_uint64 size;
poly_int64 offset;
unsigned byte_align;
if (TREE_CODE (var) == SSA_NAME)
{
tree type = TREE_TYPE (var);
- size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
byte_align = TYPE_ALIGN_UNIT (type);
}
else
{
- size = tree_to_uhwi (DECL_SIZE_UNIT (var));
+ size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
byte_align = align_local_variable (var);
}
tree size_unit = TREE_CODE (var) == SSA_NAME
? TYPE_SIZE_UNIT (TREE_TYPE (var))
: DECL_SIZE_UNIT (var);
+ poly_uint64 size;
/* Whether the variable is small enough for immediate allocation not to be
a problem with regard to the frame size. */
bool smallish
- = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
- < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
+ = (poly_int_tree_p (size_unit, &size)
+ && (estimated_poly_value (size)
+ < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
/* If stack protection is enabled, *all* stack variables must be deferred,
so that we can re-order the strings to the top of the frame.
Return stack usage this variable is supposed to take.
*/
-static HOST_WIDE_INT
+static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand)
{
unsigned int align = BITS_PER_UNIT;
record_alignment_for_reg_var (align);
+ poly_uint64 size;
if (TREE_CODE (origvar) == SSA_NAME)
{
gcc_assert (!VAR_P (var)
if (really_expand)
expand_one_register_var (origvar);
}
- else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
+ else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
+ || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
{
/* Reject variables which cover more than half of the address-space. */
if (really_expand)
expand_one_stack_var (origvar);
}
-
-
- return tree_to_uhwi (DECL_SIZE_UNIT (var));
+ return size;
}
return 0;
}
HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
- HOST_WIDE_INT size = 0;
+ poly_int64 size = 0;
size_t i;
tree var;
struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
fini_vars_expansion ();
pop_cfun ();
- return size;
+ return estimated_poly_value (size);
}
/* Helper routine to check if a record or union contains an array field. */