return sve_abi;
}
+/* If X is an UNSPEC_SALT_ADDR expression, return the address that it
+ wraps, otherwise return X itself. */
+
+static rtx
+strip_salt (rtx x)
+{
+ rtx search = x;
+ if (GET_CODE (search) == CONST)
+ search = XEXP (search, 0);
+ if (GET_CODE (search) == UNSPEC && XINT (search, 1) == UNSPEC_SALT_ADDR)
+ x = XVECEXP (search, 0, 0);
+ return x;
+}
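+
+/* For example (an illustrative sketch; the salt operand is an
+   aarch64_salt_type constant), strip_salt reduces
+     (const (unspec [(symbol_ref "__stack_chk_guard") (const_int 0)]
+	     UNSPEC_SALT_ADDR))
+   to (symbol_ref "__stack_chk_guard").  */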
+
+/* Like strip_offset, but also strip any UNSPEC_SALT_ADDR from the
+ expression. */
+
+static rtx
+strip_offset_and_salt (rtx addr, poly_int64 *offset)
+{
+ return strip_salt (strip_offset (addr, offset));
+}
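+
+/* A sketch of the combined behavior: given
+     (const (plus (unspec [(symbol_ref "x") (const_int 0)] UNSPEC_SALT_ADDR)
+	     (const_int 8)))
+   strip_offset_and_salt returns (symbol_ref "x") and sets *OFFSET to 8.
+   strip_salt alone would not look through the PLUS, which is why the
+   offset must be stripped first.  */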
+
/* Generate code to enable conditional branches in functions over 1 MiB. */
const char *
aarch64_gen_far_branch (rtx * operands, int pos_label, const char * dest,
tls_symbolic_operand_type (rtx addr)
{
enum tls_model tls_kind = TLS_MODEL_NONE;
- if (GET_CODE (addr) == CONST)
- {
- poly_int64 addend;
- rtx sym = strip_offset (addr, &addend);
- if (GET_CODE (sym) == SYMBOL_REF)
- tls_kind = SYMBOL_REF_TLS_MODEL (sym);
- }
- else if (GET_CODE (addr) == SYMBOL_REF)
+ poly_int64 offset;
+ addr = strip_offset_and_salt (addr, &offset);
+ if (GET_CODE (addr) == SYMBOL_REF)
tls_kind = SYMBOL_REF_TLS_MODEL (addr);
return tls_kind;
as_a <scalar_int_mode> (mode));
}
+/* Return the MEM rtx that provides the canary value that should be used
+ for stack-smashing protection. MODE is the mode of the memory.
+ For SSP_GLOBAL, DECL_RTL is the MEM rtx for the canary variable
+ (__stack_chk_guard), otherwise it has no useful value. SALT_TYPE
+ indicates whether the caller is performing a SET or a TEST operation. */
+
+rtx
+aarch64_stack_protect_canary_mem (machine_mode mode, rtx decl_rtl,
+ aarch64_salt_type salt_type)
+{
+ rtx addr;
+ if (aarch64_stack_protector_guard == SSP_GLOBAL)
+ {
+ gcc_assert (MEM_P (decl_rtl));
+ addr = XEXP (decl_rtl, 0);
+ poly_int64 offset;
+ rtx base = strip_offset_and_salt (addr, &offset);
+ if (!SYMBOL_REF_P (base))
+ return decl_rtl;
+
+ rtvec v = gen_rtvec (2, base, GEN_INT (salt_type));
+ addr = gen_rtx_UNSPEC (Pmode, v, UNSPEC_SALT_ADDR);
+ addr = gen_rtx_CONST (Pmode, addr);
+ addr = plus_constant (Pmode, addr, offset);
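+      /* ADDR now has the form
+	   (const (unspec [BASE SALT] UNSPEC_SALT_ADDR))
+	 plus any offset from the original address.  The salt keeps the
+	 canary references for the set and test formally distinct, so that
+	 the rtl optimizers do not unify the two guard loads.  */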
+ }
+ else
+ {
+ /* Calculate the address from the system register. */
+ rtx salt = GEN_INT (salt_type);
+ addr = gen_reg_rtx (mode);
+ if (mode == DImode)
+ emit_insn (gen_reg_stack_protect_address_di (addr, salt));
+ else
+ {
+ emit_insn (gen_reg_stack_protect_address_si (addr, salt));
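+	  /* The sysreg pattern computed a 32-bit address here (e.g. for
+	     ILP32); widen it to Pmode before forming the MEM below.  */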
+ addr = convert_memory_address (Pmode, addr);
+ }
+ addr = plus_constant (Pmode, addr, aarch64_stack_protector_guard_offset);
+ }
+ return gen_rtx_MEM (mode, force_reg (Pmode, addr));
+}
+
/* Emit an SVE predicated move from SRC to DEST. PRED is a predicate
that is known to contain PTRUE. */
static bool
aarch64_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
- rtx base, offset;
-
if (GET_CODE (x) == HIGH)
return true;
if (GET_CODE (*iter) == CONST_POLY_INT)
return true;
- split_const (x, &base, &offset);
+ poly_int64 offset;
+ rtx base = strip_offset_and_salt (x, &offset);
if (GET_CODE (base) == SYMBOL_REF || GET_CODE (base) == LABEL_REF)
{
- if (aarch64_classify_symbol (base, INTVAL (offset))
+ /* We checked for POLY_INT_CST offsets above. */
+ if (aarch64_classify_symbol (base, offset.to_constant ())
!= SYMBOL_FORCE_TO_MEM)
return true;
else
&& GET_MODE_SIZE (mode).is_constant (&const_size)
&& const_size >= 4)
{
- rtx sym, addend;
-
- split_const (x, &sym, &addend);
+ poly_int64 offset;
+ rtx sym = strip_offset_and_salt (x, &offset);
return ((GET_CODE (sym) == LABEL_REF
|| (GET_CODE (sym) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (sym)
if (allow_reg_index_p
&& aarch64_base_register_rtx_p (info->base, strict_p))
{
- rtx sym, offs;
- split_const (info->offset, &sym, &offs);
+ poly_int64 offset;
+ HOST_WIDE_INT const_offset;
+ rtx sym = strip_offset_and_salt (info->offset, &offset);
if (GET_CODE (sym) == SYMBOL_REF
- && (aarch64_classify_symbol (sym, INTVAL (offs))
+ && offset.is_constant (&const_offset)
+ && (aarch64_classify_symbol (sym, const_offset)
== SYMBOL_SMALL_ABSOLUTE))
{
/* The symbol and offset must be aligned to the access size. */
if (known_eq (ref_size, 0))
ref_size = GET_MODE_SIZE (DImode);
- return (multiple_p (INTVAL (offs), ref_size)
+ return (multiple_p (const_offset, ref_size)
&& multiple_p (align / BITS_PER_UNIT, ref_size));
}
}
bool
aarch64_symbolic_address_p (rtx x)
{
- rtx offset;
-
- split_const (x, &x, &offset);
+ poly_int64 offset;
+ x = strip_offset_and_salt (x, &offset);
return GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF;
}
switch (code)
{
case 'c':
- switch (GET_CODE (x))
+ if (CONST_INT_P (x))
+ fprintf (f, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
+ else
{
- case CONST_INT:
- fprintf (f, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
- break;
-
- case SYMBOL_REF:
- output_addr_const (f, x);
- break;
-
- case CONST:
- if (GET_CODE (XEXP (x, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)
- {
- output_addr_const (f, x);
- break;
- }
- /* Fall through. */
-
- default:
- output_operand_lossage ("unsupported operand for code '%c'", code);
+ poly_int64 offset;
+ rtx base = strip_offset_and_salt (x, &offset);
+ if (SYMBOL_REF_P (base))
+ output_addr_const (f, x);
+ else
+ output_operand_lossage ("unsupported operand for code '%c'", code);
}
break;
output_addr_const (f, x);
}
+/* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
+
+static bool
+aarch64_output_addr_const_extra (FILE *file, rtx x)
+{
+ if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_SALT_ADDR)
+ {
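+      /* Print the address that the salt wraps, so that the salt itself
+	 never appears in the assembly output.  */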
+ output_addr_const (file, XVECEXP (x, 0, 0));
+ return true;
+ }
+ return false;
+}
+
bool
aarch64_label_mentioned_p (rtx x)
{
if (! TARGET_HAVE_TLS)
return false;
+ x = strip_salt (x);
if (GET_CODE (x) != SYMBOL_REF)
return false;
enum aarch64_symbol_type
aarch64_classify_symbol (rtx x, HOST_WIDE_INT offset)
{
+ x = strip_salt (x);
+
if (GET_CODE (x) == LABEL_REF)
{
switch (aarch64_cmodel)
bool
aarch64_legitimate_pic_operand_p (rtx x)
{
- if (GET_CODE (x) == SYMBOL_REF
- || (GET_CODE (x) == CONST
- && GET_CODE (XEXP (x, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF))
- return false;
+ poly_int64 offset;
+ x = strip_offset_and_salt (x, &offset);
+ if (GET_CODE (x) == SYMBOL_REF)
+ return false;
return true;
}
/* If an offset is being added to something else, we need to allow the
base to be moved into the destination register, meaning that there
are no free temporaries for the offset. */
- x = strip_offset (x, &offset);
+ x = strip_offset_and_salt (x, &offset);
if (!offset.is_constant () && aarch64_offset_temporaries (true, offset) > 0)
return false;
return aarch64_simd_valid_immediate (x, NULL);
}
+ x = strip_salt (x);
if (GET_CODE (x) == SYMBOL_REF && mode == DImode && CONSTANT_ADDRESS_P (x))
return true;
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS aarch64_print_operand_address
+#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
+#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA aarch64_output_addr_const_extra
+
#undef TARGET_OPTAB_SUPPORTED_P
#define TARGET_OPTAB_SUPPORTED_P aarch64_optab_supported_p
UNSPEC_GEN_TAG_RND ; Generate a random 4-bit MTE tag.
UNSPEC_TAG_SPACE ; Translate address to MTE tag address space.
UNSPEC_LD1RO
+  UNSPEC_SALT_ADDR	; Salt an address so identical references stay distinct.
])
(define_c_enum "unspecv" [
DONE;
})
-;; Named patterns for stack smashing protection.
+;; Defined for -mstack-protector-guard=sysreg, which goes through this
+;; pattern rather than stack_protect_combined_set. Our implementation
+;; of the latter can handle both.
(define_expand "stack_protect_set"
[(match_operand 0 "memory_operand")
- (match_operand 1 "memory_operand")]
+ (match_operand 1 "")]
""
{
- machine_mode mode = GET_MODE (operands[0]);
- if (aarch64_stack_protector_guard != SSP_GLOBAL)
- {
- /* Generate access through the system register. */
- rtx tmp_reg = gen_reg_rtx (mode);
- if (mode == DImode)
- {
- emit_insn (gen_reg_stack_protect_address_di (tmp_reg));
- emit_insn (gen_adddi3 (tmp_reg, tmp_reg,
- GEN_INT (aarch64_stack_protector_guard_offset)));
- }
- else
- {
- emit_insn (gen_reg_stack_protect_address_si (tmp_reg));
- emit_insn (gen_addsi3 (tmp_reg, tmp_reg,
- GEN_INT (aarch64_stack_protector_guard_offset)));
- }
- operands[1] = gen_rtx_MEM (mode, tmp_reg);
- }
-
+ emit_insn (gen_stack_protect_combined_set (operands[0], operands[1]));
+ DONE;
+})
+
+(define_expand "stack_protect_combined_set"
+ [(match_operand 0 "memory_operand")
+ (match_operand 1 "")]
+ ""
+{
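+  /* For SSP_GLOBAL, operands[1] is __stack_chk_guard's DECL_RTL; for
+     -mstack-protector-guard=sysreg it has no useful value.  Either way,
+     replace it with the (possibly salted) canary MEM.  */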
+ machine_mode mode = GET_MODE (operands[0]);
+ operands[1] = aarch64_stack_protect_canary_mem (mode, operands[1],
+ AARCH64_SALT_SSP_SET);
emit_insn ((mode == DImode
? gen_stack_protect_set_di
: gen_stack_protect_set_si) (operands[0], operands[1]));
DONE;
})
+;; Operand 1 is either AARCH64_SALT_SSP_SET or AARCH64_SALT_SSP_TEST.
(define_insn "reg_stack_protect_address_<mode>"
[(set (match_operand:PTR 0 "register_operand" "=r")
- (unspec:PTR [(const_int 0)]
- UNSPEC_SSP_SYSREG))]
+ (unspec:PTR [(match_operand 1 "const_int_operand")]
+ UNSPEC_SSP_SYSREG))]
"aarch64_stack_protector_guard != SSP_GLOBAL"
{
char buf[150];
[(set_attr "length" "12")
(set_attr "type" "multiple")])
+;; Defined for -mstack-protector-guard=sysreg, which goes through this
+;; pattern rather than stack_protect_combined_test. Our implementation
+;; of the latter can handle both.
(define_expand "stack_protect_test"
[(match_operand 0 "memory_operand")
- (match_operand 1 "memory_operand")
+ (match_operand 1 "")
(match_operand 2)]
""
{
- machine_mode mode = GET_MODE (operands[0]);
-
- if (aarch64_stack_protector_guard != SSP_GLOBAL)
- {
- /* Generate access through the system register. The
- sequence we want here is the access
- of the stack offset to come with
- mrs scratch_reg, <system_register>
- add scratch_reg, scratch_reg, :lo12:offset. */
- rtx tmp_reg = gen_reg_rtx (mode);
- if (mode == DImode)
- {
- emit_insn (gen_reg_stack_protect_address_di (tmp_reg));
- emit_insn (gen_adddi3 (tmp_reg, tmp_reg,
- GEN_INT (aarch64_stack_protector_guard_offset)));
- }
- else
- {
- emit_insn (gen_reg_stack_protect_address_si (tmp_reg));
- emit_insn (gen_addsi3 (tmp_reg, tmp_reg,
- GEN_INT (aarch64_stack_protector_guard_offset)));
- }
- operands[1] = gen_rtx_MEM (mode, tmp_reg);
- }
+ emit_insn (gen_stack_protect_combined_test (operands[0], operands[1],
+ operands[2]));
+ DONE;
+})
+
+(define_expand "stack_protect_combined_test"
+ [(match_operand 0 "memory_operand")
+ (match_operand 1 "")
+ (match_operand 2)]
+ ""
+{
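+  /* Using AARCH64_SALT_SSP_TEST gives the test a canary MEM that is
+     formally different from the one created for the set, so the guard
+     value loaded for the test is never reused from the set.  */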
+ machine_mode mode = GET_MODE (operands[0]);
+ operands[1] = aarch64_stack_protect_canary_mem (mode, operands[1],
+ AARCH64_SALT_SSP_TEST);
emit_insn ((mode == DImode
? gen_stack_protect_test_di
: gen_stack_protect_test_si) (operands[0], operands[1]));