+2019-11-19 Dennis Zhang <dennis.zhang@arm.com>
+
+ * config/aarch64/aarch64-builtins.c (enum aarch64_builtins): Add
+ AARCH64_MEMTAG_BUILTIN_START, AARCH64_MEMTAG_BUILTIN_IRG,
+ AARCH64_MEMTAG_BUILTIN_GMI, AARCH64_MEMTAG_BUILTIN_SUBP,
+ AARCH64_MEMTAG_BUILTIN_INC_TAG, AARCH64_MEMTAG_BUILTIN_SET_TAG,
+ AARCH64_MEMTAG_BUILTIN_GET_TAG, and AARCH64_MEMTAG_BUILTIN_END.
+ (aarch64_init_memtag_builtins): New.
+ (AARCH64_INIT_MEMTAG_BUILTINS_DECL): New macro.
+ (aarch64_general_init_builtins): Call aarch64_init_memtag_builtins.
+ (aarch64_expand_builtin_memtag): New.
+ (aarch64_general_expand_builtin): Call aarch64_expand_builtin_memtag.
+ (AARCH64_BUILTIN_SUBCODE): New macro.
+ (aarch64_resolve_overloaded_memtag): New.
+ (aarch64_resolve_overloaded_builtin_general): New. Call
+ aarch64_resolve_overloaded_memtag to handle overloaded MTE builtins.
+ * config/aarch64/aarch64-c.c (aarch64_update_cpp_builtins): Define
+ __ARM_FEATURE_MEMORY_TAGGING when enabled.
+ (aarch64_resolve_overloaded_builtin): Call
+ aarch64_resolve_overloaded_builtin_general.
+ * config/aarch64/aarch64-protos.h
+ (aarch64_resolve_overloaded_builtin_general): New declaration.
+ * config/aarch64/aarch64.h (AARCH64_ISA_MEMTAG): New macro.
+ (TARGET_MEMTAG): Likewise.
+ * config/aarch64/aarch64.md (UNSPEC_GEN_TAG): New unspec.
+ (UNSPEC_GEN_TAG_RND, and UNSPEC_TAG_SPACE): Likewise.
+ (irg, gmi, subp, addg, ldg, stg): New instructions.
+ * config/aarch64/arm_acle.h (__arm_mte_create_random_tag): New macro.
+ (__arm_mte_exclude_tag, __arm_mte_ptrdiff): Likewise.
+ (__arm_mte_increment_tag, __arm_mte_set_tag): Likewise.
+ (__arm_mte_get_tag): Likewise.
+ * config/aarch64/predicates.md (aarch64_memtag_tag_offset): New.
+ (aarch64_granule16_uimm6, aarch64_granule16_simm9): New.
+ * config/arm/types.md (memtag): New.
+ * doc/invoke.texi (-memtag): Update description.
+
2019-11-19 Richard Henderson <richard.henderson@linaro.org>
* config/arm/arm-c.c (arm_cpu_builtins): Use def_or_undef_macro
/* Armv8.5-a RNG instruction builtins. */
AARCH64_BUILTIN_RNG_RNDR,
AARCH64_BUILTIN_RNG_RNDRRS,
+ /* MEMTAG builtins. */
+ AARCH64_MEMTAG_BUILTIN_START,
+ AARCH64_MEMTAG_BUILTIN_IRG,
+ AARCH64_MEMTAG_BUILTIN_GMI,
+ AARCH64_MEMTAG_BUILTIN_SUBP,
+ AARCH64_MEMTAG_BUILTIN_INC_TAG,
+ AARCH64_MEMTAG_BUILTIN_SET_TAG,
+ AARCH64_MEMTAG_BUILTIN_GET_TAG,
+ AARCH64_MEMTAG_BUILTIN_END,
AARCH64_BUILTIN_MAX
};
AARCH64_BUILTIN_RNG_RNDRRS);
}
+/* Memory Tagging Extension (MTE) builtins data: the resolved function
+   type and the underlying instruction code for each MTE builtin,
+   indexed by builtin subcode minus (AARCH64_MEMTAG_BUILTIN_START + 1).
+   Internal to this file, hence static.  */
+static struct
+{
+  tree ftype;
+  enum insn_code icode;
+} aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_END -
+			      AARCH64_MEMTAG_BUILTIN_START - 1];
+
+/* Initialize the memory tagging extension (MTE) builtins, registering
+   each __builtin_aarch64_memtag_* function and recording its type and
+   insn code in aarch64_memtag_builtin_data for later expansion and
+   overload resolution.  */
+static void
+aarch64_init_memtag_builtins (void)
+{
+  tree fntype = NULL;
+
+/* Register builtin __builtin_aarch64_memtag_<N> under subcode
+   AARCH64_MEMTAG_BUILTIN_<F> with type T, implemented by insn
+   CODE_FOR_<I>; also record T and the insn code in the data table.  */
+#define AARCH64_INIT_MEMTAG_BUILTINS_DECL(F, N, I, T) \
+  aarch64_builtin_decls[AARCH64_MEMTAG_BUILTIN_##F] \
+    = aarch64_general_add_builtin ("__builtin_aarch64_memtag_"#N, \
+				   T, AARCH64_MEMTAG_BUILTIN_##F); \
+  aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_##F - \
+			      AARCH64_MEMTAG_BUILTIN_START - 1] = \
+    {T, CODE_FOR_##I};
+
+  /* void *irg (void *, uint64_t) — create random tag.  */
+  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
+				     uint64_type_node, NULL);
+  AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype);
+
+  /* uint64_t gmi (void *, uint64_t) — add tag to exclusion mask.  */
+  fntype = build_function_type_list (uint64_type_node, ptr_type_node,
+				     uint64_type_node, NULL);
+  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GMI, gmi, gmi, fntype);
+
+  /* ptrdiff_t subp (void *, void *) — tag-ignoring pointer difference.  */
+  fntype = build_function_type_list (ptrdiff_type_node, ptr_type_node,
+				     ptr_type_node, NULL);
+  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SUBP, subp, subp, fntype);
+
+  /* void *inc_tag (void *, unsigned) — implemented with ADDG.  */
+  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
+				     unsigned_type_node, NULL);
+  AARCH64_INIT_MEMTAG_BUILTINS_DECL (INC_TAG, inc_tag, addg, fntype);
+
+  /* void set_tag (void *) — implemented with STG.  */
+  fntype = build_function_type_list (void_type_node, ptr_type_node, NULL);
+  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SET_TAG, set_tag, stg, fntype);
+
+  /* void *get_tag (void *) — implemented with LDG.  */
+  fntype = build_function_type_list (ptr_type_node, ptr_type_node, NULL);
+  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GET_TAG, get_tag, ldg, fntype);
+
+#undef AARCH64_INIT_MEMTAG_BUILTINS_DECL
+}
/* Initialize all builtins in the AARCH64_BUILTIN_GENERAL group. */
if (TARGET_TME)
aarch64_init_tme_builtins ();
+
+ if (TARGET_MEMTAG)
+ aarch64_init_memtag_builtins ();
}
/* Implement TARGET_BUILTIN_DECL for the AARCH64_BUILTIN_GENERAL group. */
return target;
}
+/* Expand an expression EXP that calls a MEMTAG built-in FCODE
+   with result going to TARGET.  Returns the result rtx, const0_rtx
+   after reporting an error, or NULL_RTX if no insn was generated.  */
+static rtx
+aarch64_expand_builtin_memtag (int fcode, tree exp, rtx target)
+{
+  /* MTE operates on 64-bit addresses only; reject ILP32 up front.  */
+  if (TARGET_ILP32)
+    {
+      error ("Memory Tagging Extension does not support %<-mabi=ilp32%>");
+      return const0_rtx;
+    }
+
+  rtx pat = NULL;
+  enum insn_code icode = aarch64_memtag_builtin_data[fcode -
+			   AARCH64_MEMTAG_BUILTIN_START - 1].icode;
+
+  /* All MTE builtins take an address as their first argument; force it
+     into a DImode register.  */
+  rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
+  machine_mode mode0 = GET_MODE (op0);
+  op0 = force_reg (mode0 == VOIDmode ? DImode : mode0, op0);
+  op0 = convert_to_mode (DImode, op0, true);
+
+  switch (fcode)
+    {
+    case AARCH64_MEMTAG_BUILTIN_IRG:
+    case AARCH64_MEMTAG_BUILTIN_GMI:
+    case AARCH64_MEMTAG_BUILTIN_SUBP:
+    case AARCH64_MEMTAG_BUILTIN_INC_TAG:
+      {
+	/* These produce a DImode value; reuse TARGET when suitable,
+	   otherwise allocate a fresh register.  */
+	if (! target
+	    || GET_MODE (target) != DImode
+	    || ! (*insn_data[icode].operand[0].predicate) (target, DImode))
+	  target = gen_reg_rtx (DImode);
+
+	if (fcode == AARCH64_MEMTAG_BUILTIN_INC_TAG)
+	  {
+	    rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
+
+	    /* ADDG's tag offset (operand 3) must be a constant
+	       immediate in [0,15].  */
+	    if ((*insn_data[icode].operand[3].predicate) (op1, QImode))
+	      {
+		pat = GEN_FCN (icode) (target, op0, const0_rtx, op1);
+		break;
+	      }
+	    error ("%Kargument %d must be a constant immediate "
+		   "in range [0,15]", exp, 2);
+	    return const0_rtx;
+	  }
+	else
+	  {
+	    /* Second operand is a runtime value; force to DImode reg.  */
+	    rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
+	    machine_mode mode1 = GET_MODE (op1);
+	    op1 = force_reg (mode1 == VOIDmode ? DImode : mode1, op1);
+	    op1 = convert_to_mode (DImode, op1, true);
+	    pat = GEN_FCN (icode) (target, op0, op1);
+	  }
+	break;
+      }
+    case AARCH64_MEMTAG_BUILTIN_GET_TAG:
+      /* LDG updates the address register in place, so the result is
+	 the (retagged) input register itself.  */
+      target = op0;
+      pat = GEN_FCN (icode) (target, op0, const0_rtx);
+      break;
+    case AARCH64_MEMTAG_BUILTIN_SET_TAG:
+      /* STG is a store; there is no result register.  */
+      pat = GEN_FCN (icode) (op0, op0, const0_rtx);
+      break;
+    default:
+      gcc_unreachable ();
+    }
+
+  if (!pat)
+    return NULL_RTX;
+
+  emit_insn (pat);
+  return target;
+}
+
/* Expand an expression EXP that calls built-in function FCODE,
with result going to TARGET if that's convenient. IGNORE is true
if the result of the builtin is ignored. */
|| fcode == AARCH64_TME_BUILTIN_TCANCEL)
return aarch64_expand_builtin_tme (fcode, exp, target);
+ if (fcode >= AARCH64_MEMTAG_BUILTIN_START
+ && fcode <= AARCH64_MEMTAG_BUILTIN_END)
+ return aarch64_expand_builtin_memtag (fcode, exp, target);
+
gcc_unreachable ();
}
reload_fenv, restore_fnenv), update_call);
}
+/* Resolve overloaded MEMTAG built-in functions.  */
+
+/* Extract the target-specific builtin subcode from declaration F.  */
+#define AARCH64_BUILTIN_SUBCODE(F) \
+  (DECL_MD_FUNCTION_CODE (F) >> AARCH64_BUILTIN_SHIFT)
+
+/* Adjust the type of the MEMTAG builtin FNDECL so that its pointer
+   parameter/return types follow the actual argument types in
+   PASS_PARAMS (preserving qualifiers).  Always returns NULL_TREE so
+   the frontend builds the call itself; on any mismatch the declared
+   (initial) type is restored so normal checking reports the error.  */
+static tree
+aarch64_resolve_overloaded_memtag (location_t loc,
+				   tree fndecl, void *pass_params)
+{
+  vec<tree, va_gc> *params = static_cast<vec<tree, va_gc> *> (pass_params);
+  unsigned param_num = params ? params->length () : 0;
+  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (fndecl);
+  tree inittype = aarch64_memtag_builtin_data[
+		  fcode - AARCH64_MEMTAG_BUILTIN_START - 1].ftype;
+  unsigned arg_num = list_length (TYPE_ARG_TYPES (inittype)) - 1;
+
+  /* Wrong arity: fall back to the initial prototype and let the
+     frontend diagnose the call.  */
+  if (param_num != arg_num)
+    {
+      TREE_TYPE (fndecl) = inittype;
+      return NULL_TREE;
+    }
+  tree retype = NULL;
+
+  if (fcode == AARCH64_MEMTAG_BUILTIN_SUBP)
+    {
+      tree t0 = TREE_TYPE ((*params)[0]);
+      tree t1 = TREE_TYPE ((*params)[1]);
+
+      /* Non-pointer arguments degrade to void * so the implicit
+	 conversion warning fires.  */
+      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
+	t0 = ptr_type_node;
+      if (t1 == error_mark_node || TREE_CODE (t1) != POINTER_TYPE)
+	t1 = ptr_type_node;
+
+      /* NOTE(review): DECL_SIZE on an arbitrary argument expression
+	 looks fragile — confirm the argument is a DECL whenever a
+	 non-DImode pointer can reach here (non-LP64 only).  */
+      if (TYPE_MODE (t0) != DImode)
+	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
+		    (int) tree_to_shwi (DECL_SIZE ((*params)[0])));
+
+      if (TYPE_MODE (t1) != DImode)
+	warning_at (loc, 1, "expected 64-bit address but argument 2 is %d-bit",
+		    (int) tree_to_shwi (DECL_SIZE ((*params)[1])));
+
+      retype = build_function_type_list (ptrdiff_type_node, t0, t1, NULL);
+    }
+  else
+    {
+      tree t0 = TREE_TYPE ((*params)[0]);
+
+      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
+	{
+	  TREE_TYPE (fndecl) = inittype;
+	  return NULL_TREE;
+	}
+
+      if (TYPE_MODE (t0) != DImode)
+	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
+		    (int) tree_to_shwi (DECL_SIZE ((*params)[0])));
+
+      /* Propagate the argument's pointer type (with qualifiers) into
+	 the prototype so qualifier-dropping assignments warn.  */
+      switch (fcode)
+	{
+	case AARCH64_MEMTAG_BUILTIN_IRG:
+	  retype = build_function_type_list (t0, t0, uint64_type_node, NULL);
+	  break;
+	case AARCH64_MEMTAG_BUILTIN_GMI:
+	  retype = build_function_type_list (uint64_type_node, t0,
+					     uint64_type_node, NULL);
+	  break;
+	case AARCH64_MEMTAG_BUILTIN_INC_TAG:
+	  retype = build_function_type_list (t0, t0, unsigned_type_node, NULL);
+	  break;
+	case AARCH64_MEMTAG_BUILTIN_SET_TAG:
+	  retype = build_function_type_list (void_type_node, t0, NULL);
+	  break;
+	case AARCH64_MEMTAG_BUILTIN_GET_TAG:
+	  retype = build_function_type_list (t0, t0, NULL);
+	  break;
+	default:
+	  return NULL_TREE;
+	}
+    }
+
+  if (!retype || retype == error_mark_node)
+    TREE_TYPE (fndecl) = inittype;
+  else
+    TREE_TYPE (fndecl) = retype;
+
+  return NULL_TREE;
+}
+
+/* Called at aarch64_resolve_overloaded_builtin in aarch64-c.c.
+   FUNCTION is the builtin declaration, PASS_PARAMS the call argument
+   vector.  Dispatches to the MEMTAG resolver for MTE builtins;
+   returns NULL_TREE otherwise (frontend builds the call itself).  */
+tree
+aarch64_resolve_overloaded_builtin_general (location_t loc, tree function,
+					    void *pass_params)
+{
+  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (function);
+
+  /* Only MEMTAG builtins are overloaded at present.  */
+  if (fcode >= AARCH64_MEMTAG_BUILTIN_START
+      && fcode <= AARCH64_MEMTAG_BUILTIN_END)
+    return aarch64_resolve_overloaded_memtag (loc, function, pass_params);
+
+  return NULL_TREE;
+}
#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
aarch64_def_or_undef (TARGET_FRINT, "__ARM_FEATURE_FRINT", pfile);
aarch64_def_or_undef (TARGET_TME, "__ARM_FEATURE_TME", pfile);
aarch64_def_or_undef (TARGET_RNG, "__ARM_FEATURE_RNG", pfile);
+ aarch64_def_or_undef (TARGET_MEMTAG, "__ARM_FEATURE_MEMORY_TAGGING", pfile);
/* Not for ACLE, but required to keep "float.h" correct if we switch
target between implementations that do or do not support ARMv8.2-A
switch (code & AARCH64_BUILTIN_CLASS)
{
case AARCH64_BUILTIN_GENERAL:
- return NULL_TREE;
-
+ return aarch64_resolve_overloaded_builtin_general (location, fndecl,
+ uncast_arglist);
case AARCH64_BUILTIN_SVE:
new_fndecl = aarch64_sve::resolve_overloaded_builtin (location, subcode,
arglist);
extern const atomic_ool_names aarch64_ool_ldclr_names;
extern const atomic_ool_names aarch64_ool_ldeor_names;
+tree aarch64_resolve_overloaded_builtin_general (location_t, tree, void *);
+
#endif /* GCC_AARCH64_PROTOS_H */
#define AARCH64_ISA_RNG (aarch64_isa_flags & AARCH64_FL_RNG)
#define AARCH64_ISA_V8_5 (aarch64_isa_flags & AARCH64_FL_V8_5)
#define AARCH64_ISA_TME (aarch64_isa_flags & AARCH64_FL_TME)
+#define AARCH64_ISA_MEMTAG (aarch64_isa_flags & AARCH64_FL_MEMTAG)
/* Crypto is an optional extension to AdvSIMD. */
#define TARGET_CRYPTO (TARGET_SIMD && AARCH64_ISA_CRYPTO)
/* Random number instructions from Armv8.5-a. */
#define TARGET_RNG (AARCH64_ISA_RNG)
+/* Memory Tagging instructions optional to Armv8.5 enabled through +memtag. */
+#define TARGET_MEMTAG (AARCH64_ISA_V8_5 && AARCH64_ISA_MEMTAG)
+
/* Make sure this is always defined so we don't have to check for ifdefs
but rather use normal ifs. */
#ifndef TARGET_FIX_ERR_A53_835769_DEFAULT
UNSPEC_SVE_PREFETCH_GATHER
UNSPEC_SVE_COMPACT
UNSPEC_SVE_SPLICE
+ UNSPEC_GEN_TAG ; Generate a 4-bit MTE tag.
+ UNSPEC_GEN_TAG_RND ; Generate a random 4-bit MTE tag.
+ UNSPEC_TAG_SPACE ; Translate address to MTE tag address space.
])
(define_c_enum "unspecv" [
[(set_attr "type" "mrs")]
)
+;; Memory Tagging Extension (MTE) instructions.
+
+;; IRG: copy operand 1 into operand 0 with its tag (bits [59:56])
+;; replaced by a tag generated from operand 2 (UNSPEC_GEN_TAG_RND).
+(define_insn "irg"
+  [(set (match_operand:DI 0 "register_operand" "=rk")
+	(ior:DI
+	 (and:DI (match_operand:DI 1 "register_operand" "rk")
+		 (const_int -1080863910568919041)) ;; 0xf0ff...
+	 (ashift:DI (unspec:QI [(match_operand:DI 2 "register_operand" "r")]
+		     UNSPEC_GEN_TAG_RND)
+		    (const_int 56))))]
+  "TARGET_MEMTAG"
+  "irg\\t%0, %1, %2"
+  [(set_attr "type" "memtag")]
+)
+
+;; GMI: operand 0 = operand 2 with the bit corresponding to the tag
+;; of operand 1 (bits [59:56]) set.
+(define_insn "gmi"
+  [(set (match_operand:DI 0 "register_operand" "=r")
+	(ior:DI (ashift:DI
+		 (const_int 1)
+		 (and:QI (lshiftrt:DI
+			  (match_operand:DI 1 "register_operand" "rk")
+			  (const_int 56)) (const_int 15)))
+		(match_operand:DI 2 "register_operand" "r")))]
+  "TARGET_MEMTAG"
+  "gmi\\t%0, %1, %2"
+  [(set_attr "type" "memtag")]
+)
+
+;; ADDG: add a 16-byte-granule immediate (operand 2) to the address in
+;; operand 1 and generate a new tag from the old tag and the immediate
+;; tag offset in operand 3 (UNSPEC_GEN_TAG).
+(define_insn "addg"
+  [(set (match_operand:DI 0 "register_operand" "=rk")
+	(ior:DI
+	 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "rk")
+			  (match_operand:DI 2 "aarch64_granule16_uimm6" "i"))
+		 (const_int -1080863910568919041)) ;; 0xf0ff...
+	 (ashift:DI
+	  (unspec:QI
+	   [(and:QI (lshiftrt:DI (match_dup 1) (const_int 56)) (const_int 15))
+	    (match_operand:QI 3 "aarch64_memtag_tag_offset" "i")]
+	   UNSPEC_GEN_TAG)
+	  (const_int 56))))]
+  "TARGET_MEMTAG"
+  "addg\\t%0, %1, #%2, #%3"
+  [(set_attr "type" "memtag")]
+)
+
+;; SUBP: difference of the two addresses with the top (tag) byte
+;; masked off both operands.
+(define_insn "subp"
+  [(set (match_operand:DI 0 "register_operand" "=r")
+	(minus:DI
+	  (and:DI (match_operand:DI 1 "register_operand" "rk")
+		  (const_int 72057594037927935)) ;; 0x00ff...
+	  (and:DI (match_operand:DI 2 "register_operand" "rk")
+		  (const_int 72057594037927935))))] ;; 0x00ff...
+  "TARGET_MEMTAG"
+  "subp\\t%0, %1, %2"
+  [(set_attr "type" "memtag")]
+)
+
+;; LDG will use the 16-byte aligned value of the address.
+;; Loads the allocation tag for the granule at operand 1 + operand 2
+;; and inserts it into bits [59:56] of operand 0 (updated in place).
+(define_insn "ldg"
+  [(set (match_operand:DI 0 "register_operand" "+r")
+	(ior:DI
+	 (and:DI (match_dup 0) (const_int -1080863910568919041)) ;; 0xf0ff...
+	 (ashift:DI
+	  (mem:QI (unspec:DI
+	   [(and:DI (plus:DI (match_operand:DI 1 "register_operand" "rk")
+			     (match_operand:DI 2 "aarch64_granule16_simm9" "i"))
+		    (const_int -16))] UNSPEC_TAG_SPACE))
+	  (const_int 56))))]
+  "TARGET_MEMTAG"
+  "ldg\\t%0, [%1, #%2]"
+  [(set_attr "type" "memtag")]
+)
+
+;; STG doesn't align the address but aborts with alignment fault
+;; when the address is not 16-byte aligned.
+;; Stores the tag (bits [59:56]) of operand 0 as the allocation tag of
+;; the granule at operand 1 + operand 2.
+(define_insn "stg"
+  [(set (mem:QI (unspec:DI
+	 [(plus:DI (match_operand:DI 1 "register_operand" "rk")
+		   (match_operand:DI 2 "aarch64_granule16_simm9" "i"))]
+	 UNSPEC_TAG_SPACE))
+	(and:QI (lshiftrt:DI (match_operand:DI 0 "register_operand" "rk")
+			     (const_int 56)) (const_int 15)))]
+  "TARGET_MEMTAG"
+  "stg\\t%0, [%1, #%2]"
+  [(set_attr "type" "memtag")]
+)
+
;; AdvSIMD Stuff
(include "aarch64-simd.md")
#pragma GCC pop_options
+#pragma GCC push_options
+#pragma GCC target ("arch=armv8.5-a+memtag")
+
+/* Memory Tagging Extension (MTE) ACLE intrinsics, mapped onto the
+   __builtin_aarch64_memtag_* builtins; overload resolution happens in
+   the backend.  */
+
+/* Return a copy of the pointer with a newly generated tag; tags set in
+   the mask are excluded (per ACLE — confirm against spec).  */
+#define __arm_mte_create_random_tag(__ptr, __u64_mask) \
+  __builtin_aarch64_memtag_irg(__ptr, __u64_mask)
+
+/* Add the pointer's tag to the exclusion mask; returns the new mask.  */
+#define __arm_mte_exclude_tag(__ptr, __u64_excluded) \
+  __builtin_aarch64_memtag_gmi(__ptr, __u64_excluded)
+
+/* Difference of the two addresses, ignoring their tags.  */
+#define __arm_mte_ptrdiff(__ptr_a, __ptr_b) \
+  __builtin_aarch64_memtag_subp(__ptr_a, __ptr_b)
+
+/* Return a copy of the pointer with its tag offset by the given
+   constant (must be in [0,15]).  */
+#define __arm_mte_increment_tag(__ptr, __u_offset) \
+  __builtin_aarch64_memtag_inc_tag(__ptr, __u_offset)
+
+/* Store the pointer's tag as the allocation tag of its granule.  */
+#define __arm_mte_set_tag(__tagged_address) \
+  __builtin_aarch64_memtag_set_tag(__tagged_address)
+
+/* Load the allocation tag for the address and return the address with
+   that tag inserted.  */
+#define __arm_mte_get_tag(__address) \
+  __builtin_aarch64_memtag_get_tag(__address)
+
+#pragma GCC pop_options
+
#ifdef __cplusplus
}
#endif
(and (match_code "const_int,const_poly_int")
(match_test "known_eq (wi::to_poly_wide (op, mode),
BYTES_PER_SVE_VECTOR)")))
+
+;; A constant usable as an MTE tag offset: 4-bit unsigned immediate.
+(define_predicate "aarch64_memtag_tag_offset"
+  (and (match_code "const_int")
+       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
+
+;; A 16-byte-granule-aligned unsigned immediate for ADDG (0..1008).
+(define_predicate "aarch64_granule16_uimm6"
+  (and (match_code "const_int")
+       (match_test "IN_RANGE (INTVAL (op), 0, 1008)
+		    && !(INTVAL (op) & 0xf)")))
+
+;; A 16-byte-granule-aligned signed immediate for LDG/STG (-4096..4080).
+(define_predicate "aarch64_granule16_simm9"
+  (and (match_code "const_int")
+       (match_test "IN_RANGE (INTVAL (op), -4096, 4080)
+		    && !(INTVAL (op) & 0xf)")))
crypto_sm3,\
crypto_sm4,\
coproc,\
- tme"
+ tme,\
+ memtag"
(const_string "untyped"))
; Is this an (integer side) multiply with a 32-bit (or smaller) result?
enable the extension at the assembler level and does not affect code
generation.
@item memtag
-Enable the Armv8.5-a Memory Tagging Extensions. This option is only to
-enable the extension at the assembler level and does not affect code
-generation.
+Enable the Armv8.5-a Memory Tagging Extensions.
+Use of this option with architectures prior to Armv8.5-A is not supported.
@item sb
Enable the Armv8-a Speculation Barrier instruction. This option is only to
enable the extension at the assembler level and does not affect code
+2019-11-19 Dennis Zhang <dennis.zhang@arm.com>
+
+ * gcc.target/aarch64/acle/memtag_1.c: New test.
+ * gcc.target/aarch64/acle/memtag_2.c: New test.
+ * gcc.target/aarch64/acle/memtag_3.c: New test.
+
2019-11-19 Richard Henderson <richard.henderson@linaro.org>
* testsuite/gcc.target/arm/asm-flag-3.c: Skip for thumb1.
--- /dev/null
+/* Test the MEMTAG ACLE intrinsic. */
+
+/* { dg-do compile } */
+/* { dg-require-effective-target lp64 } */
+/* { dg-options "-O3 -march=armv8.5-a+memtag" } */
+
+#include "arm_acle.h"
+
+/* irg */
+
+void *
+test_memtag_1 (void *p)
+{
+  /* Expands to __builtin_aarch64_memtag_irg; expect a single irg.  */
+  return __arm_mte_create_random_tag (p, 0);
+}
+
+/* gmi */
+
+uint64_t
+test_memtag_2 (void *p)
+{
+  /* Expands to __builtin_aarch64_memtag_gmi; expect a single gmi.  */
+  return __arm_mte_exclude_tag (p, 0);
+}
+
+/* addg */
+
+void *
+test_memtag_3 (void *p)
+{
+  /* Constant offset 1 maps to addg ..., #0, #1.  */
+  return __arm_mte_increment_tag (p, 1);
+}
+
+/* subp */
+
+int64_t
+test_memtag_4 (void *p, void *q)
+{
+  /* Expands to __builtin_aarch64_memtag_subp; expect a single subp.  */
+  return __arm_mte_ptrdiff (p, q);
+}
+
+/* ldg */
+
+void *
+test_memtag_5 (void *p)
+{
+  /* Expands to __builtin_aarch64_memtag_get_tag; expect a single ldg.  */
+  return __arm_mte_get_tag (p);
+}
+
+/* stg */
+
+void
+test_memtag_6 (void *p)
+{
+  /* Expands to __builtin_aarch64_memtag_set_tag; expect a single stg.  */
+  __arm_mte_set_tag (p);
+}
+
+/* { dg-final { scan-assembler-times {irg\tx..?, x..?, x..?\n} 1 } } */
+/* { dg-final { scan-assembler-times {gmi\tx..?, x..?, x..?\n} 1 } } */
+/* { dg-final { scan-assembler-times {subp\tx..?, x..?, x..?\n} 1 } } */
+/* { dg-final { scan-assembler-times {addg\tx..?, x..?, #0, #1\n} 1 } } */
+/* { dg-final { scan-assembler-times {ldg\tx..?, \[x..?, #0\]\n} 1 } } */
+/* { dg-final { scan-assembler-times {stg\tx..?, \[x..?, #0\]\n} 1 } } */
\ No newline at end of file
--- /dev/null
+/* Test the MEMTAG intrinsic qualifier warnings and argument errors. */
+
+/* { dg-do compile } */
+/* { dg-require-effective-target lp64 } */
+/* { dg-options "-O3 -march=armv8.5-a+memtag" } */
+
+#include "arm_acle.h"
+
+void
+test_memtag_warning_return_qualifier (void)
+{
+  /* The resolved return type follows the argument's pointer type, so
+     assignments that drop or change qualifiers must warn.  Compile-only
+     test: the uninitialized pointers are never dereferenced.  */
+  const char *c;
+  volatile char *v;
+  char *n;
+  int *i;
+  int64_t d;
+
+  v = __arm_mte_get_tag(c); /* { dg-warning {assignment} } */
+  n = __arm_mte_get_tag(c); /* { dg-warning {assignment} } */
+  i = __arm_mte_get_tag(c); /* { dg-warning {assignment} } */
+  c = __arm_mte_get_tag(v); /* { dg-warning {assignment} } */
+  n = __arm_mte_get_tag(v); /* { dg-warning {assignment} } */
+
+  i = __arm_mte_create_random_tag (c, 0); /* { dg-warning {assignment} } */
+  i = __arm_mte_increment_tag (c, 0); /* { dg-warning {assignment} } */
+
+  c = __arm_mte_get_tag(n); /* No warning. */
+  d = __arm_mte_ptrdiff(c, i); /* No warning. */
+}
+
+void
+test_memtag_warning_argument (void)
+{
+  /* Non-pointer arguments fall back to the declared void * prototype,
+     so the implicit pointer-conversion warning fires; a literal 0 is a
+     valid null pointer constant and converts silently.  */
+  const char *c;
+  uint64_t i;
+  __arm_mte_exclude_tag(i, 0); /* { dg-warning {argument} } */
+  __arm_mte_create_random_tag (i, 0); /* { dg-warning {argument} } */
+  __arm_mte_set_tag(i); /* { dg-warning {argument} } */
+  __arm_mte_get_tag(i); /* { dg-warning {argument} } */
+  __arm_mte_increment_tag (i, 15); /* { dg-warning {argument} } */
+  __arm_mte_ptrdiff(c, i); /* { dg-warning {argument} } */
+  __arm_mte_ptrdiff(i, c); /* { dg-warning {argument} } */
+
+  __arm_mte_exclude_tag(1, 0); /* { dg-warning {argument} } */
+  __arm_mte_create_random_tag (1, 0); /* { dg-warning {argument} } */
+  __arm_mte_set_tag(1); /* { dg-warning {argument} } */
+  __arm_mte_get_tag(1); /* { dg-warning {argument} } */
+  __arm_mte_increment_tag (1, 15); /* { dg-warning {argument} } */
+  __arm_mte_ptrdiff(c, 1); /* { dg-warning {argument} } */
+  __arm_mte_ptrdiff(1, c); /* { dg-warning {argument} } */
+
+  __arm_mte_exclude_tag(0, 0); /* No warning. */
+  __arm_mte_create_random_tag (0, 0); /* No warning. */
+  __arm_mte_set_tag(0); /* No warning. */
+  __arm_mte_get_tag(0); /* No warning. */
+  __arm_mte_increment_tag (0, 15); /* No warning. */
+  __arm_mte_ptrdiff(c, 0); /* No warning. */
+  __arm_mte_ptrdiff(0, c); /* No warning. */
+}
+
+void
+test_memtag_error_argument (void)
+{
+  /* Produce errors properly for invalid arguments.  Undeclared
+     identifiers and wrong arity must be diagnosed by the frontend
+     (the resolver restores the declared prototype), not ICE.  */
+  __arm_mte_exclude_tag(no_decl, 0); /* { dg-error {} } */
+  __arm_mte_exclude_tag(); /* { dg-error {} } */
+  __arm_mte_ptrdiff(no_decl2, 0); /* { dg-error {} } */
+  __arm_mte_ptrdiff(0); /* { dg-error {} } */
+  __arm_mte_ptrdiff(); /* { dg-error {} } */
+}
\ No newline at end of file
--- /dev/null
+/* Test the MEMTAG intrinsic expanding errors. */
+
+/* { dg-do compile } */
+/* { dg-require-effective-target lp64 } */
+/* { dg-options "-O3 -march=armv8.5-a+memtag" } */
+
+#include "arm_acle.h"
+
+void
+test_memtag_error_expand (int i)
+{
+  /* The tag offset of __arm_mte_increment_tag must be a constant
+     immediate in [0,15]; checked at builtin-expansion time.  */
+  const char *p;
+  p = __arm_mte_increment_tag (p, -1); /* { dg-error {in range \[0,15\]} } */
+  p = __arm_mte_increment_tag (p, 16); /* { dg-error {in range \[0,15\]} } */
+  p = __arm_mte_increment_tag (p, i); /* { dg-error {constant immediate} } */
+}
\ No newline at end of file