From 0358d788d238ba7407648962f40026bd8c190308 Mon Sep 17 00:00:00 2001
From: Renlin Li
Date: Wed, 21 Nov 2018 14:29:19 +0000
Subject: [PATCH] [PATCH][PR84877] Dynamically align the address for local
 parameter copy on the stack when required alignment is larger than
 MAX_SUPPORTED_STACK_ALIGNMENT

As described in PR84877 (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=84877),
the local copy of a parameter on the stack is not aligned.

For BLKmode parameters, a local copy is saved on the stack.  There are three
cases:
  1) arguments passed partially on the stack, partially via registers.
  2) arguments passed fully on the stack.
  3) arguments passed via registers.

After this change, in all three cases the stack slot for the local parameter
copy is aligned according to the data type.  The stack slot is the DECL_RTL
of the parameter; all later references in the function use this RTL.

To populate the local copy on the stack: for cases 1) and 2), data is moved
from the caller's stack (the incoming RTL) into the callee's stack; for
case 3), the registers are stored directly into the stack slot.

In all cases the destination address is properly aligned, but for cases 1)
and 2) the source address is not aligned to the type; the layout of the
incoming arguments is defined by the PCS.  The block move is performed by
emit_block_move (), which, as far as I can see, uses the smaller of the
source and destination alignments.  That is fine as long as we do not use
instructions that require a strictly larger alignment than the address
actually has.

This change only affects incoming parameters.  assign_stack_local_1 is
called from various places, and the caller usually constrains the ALIGN
parameter, for example via the STACK_SLOT_ALIGNMENT macro.
assign_parm_setup_block calls assign_stack_local () with the alignment of
the parameter type, which in this case can be larger than
MAX_SUPPORTED_STACK_ALIGNMENT.

The alignment handling for the parameter copy on the stack is similar to
that for stack variables: first, enough space is reserved on the stack (the
size is fixed at compile time), then instructions are emitted to compute an
aligned address within that memory at runtime.

This unavoidably increases stack usage, but only in proportion to how many
over-aligned parameters are passed by value.

gcc/

2018-11-21  Renlin Li

	PR middle-end/84877
	* explow.h (get_dynamic_stack_size): Declare it as external.
	* explow.c (record_new_stack_level): Remove function static attribute.
	* function.c (assign_stack_local_1): Dynamically align the stack slot
	addr for parameter copy on the stack.

gcc/testsuite/

2018-11-21  Renlin Li

	PR middle-end/84877
	* gcc.dg/pr84877.c: New.

From-SVN: r266345
---
 gcc/ChangeLog                  |  8 +++++
 gcc/explow.c                   |  5 +--
 gcc/explow.h                   |  3 ++
 gcc/function.c                 | 61 +++++++++++++++++++++++++++++-----
 gcc/testsuite/ChangeLog        |  5 +++
 gcc/testsuite/gcc.dg/pr84877.c | 38 +++++++++++++++++++++
 6 files changed, 110 insertions(+), 10 deletions(-)
 create mode 100644 gcc/testsuite/gcc.dg/pr84877.c
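
For reference, the runtime rounding that align_dynamic_address emits boils
down to bumping the slot address up to the next multiple of the required
alignment within the over-allocated block.  A minimal stand-alone C sketch of
that arithmetic (illustrative only, not GCC code; align_up and buf are
made-up names):

#include <stdint.h>
#include <stdio.h>

/* Round ADDR up to the next multiple of ALIGN (a power of two).  This
   mirrors the add-and-mask arithmetic performed on the reserved stack
   block at runtime.  */
static uintptr_t
align_up (uintptr_t addr, uintptr_t align)
{
  return (addr + align - 1) & -align;
}

int
main (void)
{
  /* Over-allocate by align - 1 bytes, as the patch reserves extra space
     via get_dynamic_stack_size, then align within the block.  */
  unsigned char buf[64 + 15];
  uintptr_t p = align_up ((uintptr_t) buf, 16);
  printf ("%d\n", (int) (p & 15));  /* Prints 0: 16-byte aligned.  */
  return 0;
}
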
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 2e0be88327f..e0eedf712ba 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+2018-11-21  Renlin Li
+
+	PR middle-end/84877
+	* explow.h (get_dynamic_stack_size): Declare it as external.
+	* explow.c (record_new_stack_level): Remove function static attribute.
+	* function.c (assign_stack_local_1): Dynamically align the stack slot
+	addr for parameter copy on the stack.
+
 2018-11-21  Richard Biener
 
 	PR bootstrap/88133
diff --git a/gcc/explow.c b/gcc/explow.c
index 1dabd6ff9aa..2c5de12dee1 100644
--- a/gcc/explow.c
+++ b/gcc/explow.c
@@ -1176,9 +1176,10 @@ record_new_stack_level (void)
   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
     update_sjlj_context ();
 }
-
+
 /* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.  */
-static rtx
+
+rtx
 align_dynamic_address (rtx target, unsigned required_align)
 {
   /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
diff --git a/gcc/explow.h b/gcc/explow.h
index 18c13804b06..b263d353b84 100644
--- a/gcc/explow.h
+++ b/gcc/explow.h
@@ -104,6 +104,9 @@ extern void get_dynamic_stack_size (rtx *, unsigned, unsigned,
 				     HOST_WIDE_INT *);
 /* Returns the address of the dynamic stack space without allocating it.  */
 extern rtx get_dynamic_stack_base (poly_int64, unsigned);
 
+/* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.  */
+extern rtx align_dynamic_address (rtx, unsigned);
+
 /* Emit one stack probe at ADDRESS, an address within the stack.  */
 extern void emit_stack_probe (rtx);
diff --git a/gcc/function.c b/gcc/function.c
index 302438323c8..954e9468f01 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -377,6 +377,7 @@ assign_stack_local_1 (machine_mode mode, poly_int64 size,
   poly_int64 bigend_correction = 0;
   poly_int64 slot_offset = 0, old_frame_offset;
   unsigned int alignment, alignment_in_bits;
+  bool dynamic_align_addr = false;
 
   if (align == 0)
     {
@@ -395,14 +396,20 @@ assign_stack_local_1 (machine_mode mode, poly_int64 size,
 
   alignment_in_bits = alignment * BITS_PER_UNIT;
 
-  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
   if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
     {
-      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
-      alignment = alignment_in_bits / BITS_PER_UNIT;
+      /* If the required alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT and
+	 it is not OK to reduce it, align the slot dynamically.  */
+      if (mode == BLKmode && (kind & ASLK_REDUCE_ALIGN) == 0)
+	dynamic_align_addr = true;
+      else
+	{
+	  alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
+	  alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
+	}
     }
 
-  if (SUPPORTS_STACK_ALIGNMENT)
+  if (SUPPORTS_STACK_ALIGNMENT && !dynamic_align_addr)
     {
       if (crtl->stack_alignment_estimated < alignment_in_bits)
 	{
@@ -432,10 +439,42 @@ assign_stack_local_1 (machine_mode mode, poly_int64 size,
 	}
     }
 
-  if (crtl->stack_alignment_needed < alignment_in_bits)
-    crtl->stack_alignment_needed = alignment_in_bits;
-  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
-    crtl->max_used_stack_slot_alignment = alignment_in_bits;
+  /* Handle overalignment here for parameter copy on the stack.
+     Reserve enough space for it and dynamically align the address.
+     No free frame_space is added here.  */
+  if (dynamic_align_addr)
+    {
+      rtx allocsize = gen_int_mode (size, Pmode);
+      get_dynamic_stack_size (&allocsize, 0, alignment_in_bits, NULL);
+
+      /* This is the size of space needed to accommodate the required size
+	 of data with the given alignment.  */
+      poly_int64 len = rtx_to_poly_int64 (allocsize);
+      old_frame_offset = frame_offset;
+
+      if (FRAME_GROWS_DOWNWARD)
+	{
+	  frame_offset -= len;
+	  try_fit_stack_local (frame_offset, len, len,
+			       PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT,
+			       &slot_offset);
+	}
+      else
+	{
+	  frame_offset += len;
+	  try_fit_stack_local (old_frame_offset, len, len,
+			       PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT,
+			       &slot_offset);
+	}
+      goto found_space;
+    }
+  else
+    {
+      if (crtl->stack_alignment_needed < alignment_in_bits)
+	crtl->stack_alignment_needed = alignment_in_bits;
+      if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
+	crtl->max_used_stack_slot_alignment = alignment_in_bits;
+    }
 
   if (mode != BLKmode || maybe_ne (size, 0))
     {
@@ -522,6 +561,12 @@ assign_stack_local_1 (machine_mode mode, poly_int64 size,
 	  (slot_offset + bigend_correction,
 	   Pmode));
 
+  if (dynamic_align_addr)
+    {
+      addr = align_dynamic_address (addr, alignment_in_bits);
+      mark_reg_pointer (addr, alignment_in_bits);
+    }
+
   x = gen_rtx_MEM (mode, addr);
   set_mem_align (x, alignment_in_bits);
   MEM_NOTRAP_P (x) = 1;
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 548959361b8..056199612a4 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,8 @@
+2018-11-21  Renlin Li
+
+	PR middle-end/84877
+	* gcc.dg/pr84877.c: New.
+
 2018-11-21  H.J. Lu
 
 	PR target/87317
diff --git a/gcc/testsuite/gcc.dg/pr84877.c b/gcc/testsuite/gcc.dg/pr84877.c
new file mode 100644
index 00000000000..8a34dd4fb66
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/pr84877.c
@@ -0,0 +1,38 @@
+/* { dg-do run } */
+/* { dg-options "-O2" } */
+
+#include <stdint.h>
+
+struct U {
+  int M0;
+  int M1;
+} __attribute ((aligned (16)));
+
+volatile struct U p0 = {1, 0};
+
+void __attribute__ ((noinline))
+foo (struct U p)
+{
+
+  volatile intptr_t mask = 0b1111;
+  volatile int dummy[2];
+  struct U p1 = p;
+  dummy[1] = p.M0;
+
+  if ((intptr_t)(&p1) & mask)
+    __builtin_abort ();
+  if ((intptr_t)(&p) & mask)
+    __builtin_abort ();
+
+  if (p1.M0 != dummy[1])
+    __builtin_abort ();
+  if (p1.M1 != p.M1)
+    __builtin_abort ();
+}
+
+int
+main ()
+{
+  foo (p0);
+  return 0;
+}
-- 
2.30.2
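
P.S. (not part of the patch): with a built GCC tree, the new test can be
exercised through the usual DejaGnu harness from the gcc build directory,
e.g.

  make check-gcc RUNTESTFLAGS="dg.exp=pr84877.c"

Without the change, the address of the parameter copy (&p in foo) is not
guaranteed to be aligned to the type when the required alignment exceeds
MAX_SUPPORTED_STACK_ALIGNMENT, and the test aborts at runtime.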