aboutsummaryrefslogtreecommitdiff
path: root/gcc/config
diff options
context:
space:
mode:
authorDennis Zhang <dennis.zhang@arm.com>2019-11-19 13:43:39 +0000
committerKyrylo Tkachov <ktkachov@gcc.gnu.org>2019-11-19 13:43:39 +0000
commitef01e6bbeb7e7e3fb33ca501764a1dc6a04d5210 (patch)
treef84eb2d6cf761406fa4d455369c59a225df727f7 /gcc/config
parent35ba842f23769782d1b12f31aa2cdfce70ece031 (diff)
downloadgcc-ef01e6bbeb7e7e3fb33ca501764a1dc6a04d5210.zip
gcc-ef01e6bbeb7e7e3fb33ca501764a1dc6a04d5210.tar.gz
gcc-ef01e6bbeb7e7e3fb33ca501764a1dc6a04d5210.tar.bz2
[AArch64] Implement Armv8.5-A memory tagging (MTE) intrinsics
2019-11-19 Dennis Zhang <dennis.zhang@arm.com> * config/aarch64/aarch64-builtins.c (enum aarch64_builtins): Add AARCH64_MEMTAG_BUILTIN_START, AARCH64_MEMTAG_BUILTIN_IRG, AARCH64_MEMTAG_BUILTIN_GMI, AARCH64_MEMTAG_BUILTIN_SUBP, AARCH64_MEMTAG_BUILTIN_INC_TAG, AARCH64_MEMTAG_BUILTIN_SET_TAG, AARCH64_MEMTAG_BUILTIN_GET_TAG, and AARCH64_MEMTAG_BUILTIN_END. (aarch64_init_memtag_builtins): New. (AARCH64_INIT_MEMTAG_BUILTINS_DECL): New macro. (aarch64_general_init_builtins): Call aarch64_init_memtag_builtins. (aarch64_expand_builtin_memtag): New. (aarch64_general_expand_builtin): Call aarch64_expand_builtin_memtag. (AARCH64_BUILTIN_SUBCODE): New macro. (aarch64_resolve_overloaded_memtag): New. (aarch64_resolve_overloaded_builtin_general): New. Call aarch64_resolve_overloaded_memtag to handle overloaded MTE builtins. * config/aarch64/aarch64-c.c (aarch64_update_cpp_builtins): Define __ARM_FEATURE_MEMORY_TAGGING when enabled. (aarch64_resolve_overloaded_builtin): Call aarch64_resolve_overloaded_builtin_general. * config/aarch64/aarch64-protos.h (aarch64_resolve_overloaded_builtin_general): New declaration. * config/aarch64/aarch64.h (AARCH64_ISA_MEMTAG): New macro. (TARGET_MEMTAG): Likewise. * config/aarch64/aarch64.md (UNSPEC_GEN_TAG): New unspec. (UNSPEC_GEN_TAG_RND, and UNSPEC_TAG_SPACE): Likewise. (irg, gmi, subp, addg, ldg, stg): New instructions. * config/aarch64/arm_acle.h (__arm_mte_create_random_tag): New macro. (__arm_mte_exclude_tag, __arm_mte_ptrdiff): Likewise. (__arm_mte_increment_tag, __arm_mte_set_tag): Likewise. (__arm_mte_get_tag): Likewise. * config/aarch64/predicates.md (aarch64_memtag_tag_offset): New. (aarch64_granule16_uimm6, aarch64_granule16_simm9): New. * config/arm/types.md (memtag): New. * doc/invoke.texi (-memtag): Update description. 2019-11-19 Dennis Zhang <dennis.zhang@arm.com> * gcc.target/aarch64/acle/memtag_1.c: New test. * gcc.target/aarch64/acle/memtag_2.c: New test. * gcc.target/aarch64/acle/memtag_3.c: New test. From-SVN: r278444
Diffstat (limited to 'gcc/config')
-rw-r--r--gcc/config/aarch64/aarch64-builtins.c234
-rw-r--r--gcc/config/aarch64/aarch64-c.c5
-rw-r--r--gcc/config/aarch64/aarch64-protos.h2
-rw-r--r--gcc/config/aarch64/aarch64.h4
-rw-r--r--gcc/config/aarch64/aarch64.md90
-rw-r--r--gcc/config/aarch64/arm_acle.h23
-rw-r--r--gcc/config/aarch64/predicates.md14
-rw-r--r--gcc/config/arm/types.md3
8 files changed, 372 insertions, 3 deletions
diff --git a/gcc/config/aarch64/aarch64-builtins.c b/gcc/config/aarch64/aarch64-builtins.c
index 83d6f75..c35a1b1 100644
--- a/gcc/config/aarch64/aarch64-builtins.c
+++ b/gcc/config/aarch64/aarch64-builtins.c
@@ -448,6 +448,15 @@ enum aarch64_builtins
/* Armv8.5-a RNG instruction builtins. */
AARCH64_BUILTIN_RNG_RNDR,
AARCH64_BUILTIN_RNG_RNDRRS,
+ /* MEMTAG builtins. */
+ AARCH64_MEMTAG_BUILTIN_START,
+ AARCH64_MEMTAG_BUILTIN_IRG,
+ AARCH64_MEMTAG_BUILTIN_GMI,
+ AARCH64_MEMTAG_BUILTIN_SUBP,
+ AARCH64_MEMTAG_BUILTIN_INC_TAG,
+ AARCH64_MEMTAG_BUILTIN_SET_TAG,
+ AARCH64_MEMTAG_BUILTIN_GET_TAG,
+ AARCH64_MEMTAG_BUILTIN_END,
AARCH64_BUILTIN_MAX
};
@@ -1130,6 +1139,51 @@ aarch64_init_rng_builtins (void)
AARCH64_BUILTIN_RNG_RNDRRS);
}
+/* Initialize the memory tagging extension (MTE) builtins. */
+struct
+{
+ tree ftype;
+ enum insn_code icode;
+} aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_END -
+ AARCH64_MEMTAG_BUILTIN_START - 1];
+
+static void
+aarch64_init_memtag_builtins (void)
+{
+ tree fntype = NULL;
+
+#define AARCH64_INIT_MEMTAG_BUILTINS_DECL(F, N, I, T) \
+ aarch64_builtin_decls[AARCH64_MEMTAG_BUILTIN_##F] \
+ = aarch64_general_add_builtin ("__builtin_aarch64_memtag_"#N, \
+ T, AARCH64_MEMTAG_BUILTIN_##F); \
+ aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_##F - \
+ AARCH64_MEMTAG_BUILTIN_START - 1] = \
+ {T, CODE_FOR_##I};
+
+ fntype = build_function_type_list (ptr_type_node, ptr_type_node,
+ uint64_type_node, NULL);
+ AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype);
+
+ fntype = build_function_type_list (uint64_type_node, ptr_type_node,
+ uint64_type_node, NULL);
+ AARCH64_INIT_MEMTAG_BUILTINS_DECL (GMI, gmi, gmi, fntype);
+
+ fntype = build_function_type_list (ptrdiff_type_node, ptr_type_node,
+ ptr_type_node, NULL);
+ AARCH64_INIT_MEMTAG_BUILTINS_DECL (SUBP, subp, subp, fntype);
+
+ fntype = build_function_type_list (ptr_type_node, ptr_type_node,
+ unsigned_type_node, NULL);
+ AARCH64_INIT_MEMTAG_BUILTINS_DECL (INC_TAG, inc_tag, addg, fntype);
+
+ fntype = build_function_type_list (void_type_node, ptr_type_node, NULL);
+ AARCH64_INIT_MEMTAG_BUILTINS_DECL (SET_TAG, set_tag, stg, fntype);
+
+ fntype = build_function_type_list (ptr_type_node, ptr_type_node, NULL);
+ AARCH64_INIT_MEMTAG_BUILTINS_DECL (GET_TAG, get_tag, ldg, fntype);
+
+#undef AARCH64_INIT_MEMTAG_BUILTINS_DECL
+}
/* Initialize all builtins in the AARCH64_BUILTIN_GENERAL group. */
@@ -1184,6 +1238,9 @@ aarch64_general_init_builtins (void)
if (TARGET_TME)
aarch64_init_tme_builtins ();
+
+ if (TARGET_MEMTAG)
+ aarch64_init_memtag_builtins ();
}
/* Implement TARGET_BUILTIN_DECL for the AARCH64_BUILTIN_GENERAL group. */
@@ -1665,6 +1722,79 @@ aarch64_expand_rng_builtin (tree exp, rtx target, int fcode, int ignore)
return target;
}
+/* Expand an expression EXP that calls a MEMTAG built-in FCODE
+ with result going to TARGET. */
+static rtx
+aarch64_expand_builtin_memtag (int fcode, tree exp, rtx target)
+{
+ if (TARGET_ILP32)
+ {
+ error ("Memory Tagging Extension does not support %<-mabi=ilp32%>");
+ return const0_rtx;
+ }
+
+ rtx pat = NULL;
+ enum insn_code icode = aarch64_memtag_builtin_data[fcode -
+ AARCH64_MEMTAG_BUILTIN_START - 1].icode;
+
+ rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
+ machine_mode mode0 = GET_MODE (op0);
+ op0 = force_reg (mode0 == VOIDmode ? DImode : mode0, op0);
+ op0 = convert_to_mode (DImode, op0, true);
+
+ switch (fcode)
+ {
+ case AARCH64_MEMTAG_BUILTIN_IRG:
+ case AARCH64_MEMTAG_BUILTIN_GMI:
+ case AARCH64_MEMTAG_BUILTIN_SUBP:
+ case AARCH64_MEMTAG_BUILTIN_INC_TAG:
+ {
+ if (! target
+ || GET_MODE (target) != DImode
+ || ! (*insn_data[icode].operand[0].predicate) (target, DImode))
+ target = gen_reg_rtx (DImode);
+
+ if (fcode == AARCH64_MEMTAG_BUILTIN_INC_TAG)
+ {
+ rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
+
+ if ((*insn_data[icode].operand[3].predicate) (op1, QImode))
+ {
+ pat = GEN_FCN (icode) (target, op0, const0_rtx, op1);
+ break;
+ }
+ error ("%Kargument %d must be a constant immediate "
+ "in range [0,15]", exp, 2);
+ return const0_rtx;
+ }
+ else
+ {
+ rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
+ machine_mode mode1 = GET_MODE (op1);
+ op1 = force_reg (mode1 == VOIDmode ? DImode : mode1, op1);
+ op1 = convert_to_mode (DImode, op1, true);
+ pat = GEN_FCN (icode) (target, op0, op1);
+ }
+ break;
+ }
+ case AARCH64_MEMTAG_BUILTIN_GET_TAG:
+ target = op0;
+ pat = GEN_FCN (icode) (target, op0, const0_rtx);
+ break;
+ case AARCH64_MEMTAG_BUILTIN_SET_TAG:
+ pat = GEN_FCN (icode) (op0, op0, const0_rtx);
+ break;
+ default:
+ gcc_unreachable();
+ }
+
+ if (!pat)
+ return NULL_RTX;
+
+ emit_insn (pat);
+ return target;
+}
+
/* Expand an expression EXP that calls built-in function FCODE,
with result going to TARGET if that's convenient. IGNORE is true
if the result of the builtin is ignored. */
@@ -1800,6 +1930,10 @@ aarch64_general_expand_builtin (unsigned int fcode, tree exp, rtx target,
|| fcode == AARCH64_TME_BUILTIN_TCANCEL)
return aarch64_expand_builtin_tme (fcode, exp, target);
+ if (fcode >= AARCH64_MEMTAG_BUILTIN_START
+ && fcode <= AARCH64_MEMTAG_BUILTIN_END)
+ return aarch64_expand_builtin_memtag (fcode, exp, target);
+
gcc_unreachable ();
}
@@ -2151,6 +2285,106 @@ aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
reload_fenv, restore_fnenv), update_call);
}
+/* Resolve overloaded MEMTAG built-in functions. */
+#define AARCH64_BUILTIN_SUBCODE(F) \
+ (DECL_MD_FUNCTION_CODE (F) >> AARCH64_BUILTIN_SHIFT)
+
+static tree
+aarch64_resolve_overloaded_memtag (location_t loc,
+ tree fndecl, void *pass_params)
+{
+ vec<tree, va_gc> *params = static_cast<vec<tree, va_gc> *> (pass_params);
+ unsigned param_num = params ? params->length() : 0;
+ unsigned int fcode = AARCH64_BUILTIN_SUBCODE (fndecl);
+ tree inittype = aarch64_memtag_builtin_data[
+ fcode - AARCH64_MEMTAG_BUILTIN_START - 1].ftype;
+ unsigned arg_num = list_length (TYPE_ARG_TYPES (inittype)) - 1;
+
+ if (param_num != arg_num)
+ {
+ TREE_TYPE (fndecl) = inittype;
+ return NULL_TREE;
+ }
+ tree retype = NULL;
+
+ if (fcode == AARCH64_MEMTAG_BUILTIN_SUBP)
+ {
+ tree t0 = TREE_TYPE ((*params)[0]);
+ tree t1 = TREE_TYPE ((*params)[1]);
+
+ if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
+ t0 = ptr_type_node;
+ if (t1 == error_mark_node || TREE_CODE (t1) != POINTER_TYPE)
+ t1 = ptr_type_node;
+
+ if (TYPE_MODE (t0) != DImode)
+ warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
+ (int)tree_to_shwi (DECL_SIZE ((*params)[0])));
+
+ if (TYPE_MODE (t1) != DImode)
+ warning_at (loc, 1, "expected 64-bit address but argument 2 is %d-bit",
+ (int)tree_to_shwi (DECL_SIZE ((*params)[1])));
+
+ retype = build_function_type_list (ptrdiff_type_node, t0, t1, NULL);
+ }
+ else
+ {
+ tree t0 = TREE_TYPE ((*params)[0]);
+
+ if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
+ {
+ TREE_TYPE (fndecl) = inittype;
+ return NULL_TREE;
+ }
+
+ if (TYPE_MODE (t0) != DImode)
+ warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
+ (int)tree_to_shwi (DECL_SIZE ((*params)[0])));
+
+ switch (fcode)
+ {
+ case AARCH64_MEMTAG_BUILTIN_IRG:
+ retype = build_function_type_list (t0, t0, uint64_type_node, NULL);
+ break;
+ case AARCH64_MEMTAG_BUILTIN_GMI:
+ retype = build_function_type_list (uint64_type_node, t0,
+ uint64_type_node, NULL);
+ break;
+ case AARCH64_MEMTAG_BUILTIN_INC_TAG:
+ retype = build_function_type_list (t0, t0, unsigned_type_node, NULL);
+ break;
+ case AARCH64_MEMTAG_BUILTIN_SET_TAG:
+ retype = build_function_type_list (void_type_node, t0, NULL);
+ break;
+ case AARCH64_MEMTAG_BUILTIN_GET_TAG:
+ retype = build_function_type_list (t0, t0, NULL);
+ break;
+ default:
+ return NULL_TREE;
+ }
+ }
+
+ if (!retype || retype == error_mark_node)
+ TREE_TYPE (fndecl) = inittype;
+ else
+ TREE_TYPE (fndecl) = retype;
+
+ return NULL_TREE;
+}
+
+/* Called at aarch64_resolve_overloaded_builtin in aarch64-c.c. */
+tree
+aarch64_resolve_overloaded_builtin_general (location_t loc, tree function,
+ void *pass_params)
+{
+ unsigned int fcode = AARCH64_BUILTIN_SUBCODE (function);
+
+ if (fcode >= AARCH64_MEMTAG_BUILTIN_START
+ && fcode <= AARCH64_MEMTAG_BUILTIN_END)
+ return aarch64_resolve_overloaded_memtag(loc, function, pass_params);
+
+ return NULL_TREE;
+}
#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
diff --git a/gcc/config/aarch64/aarch64-c.c b/gcc/config/aarch64/aarch64-c.c
index 0af859f..f3da07f 100644
--- a/gcc/config/aarch64/aarch64-c.c
+++ b/gcc/config/aarch64/aarch64-c.c
@@ -163,6 +163,7 @@ aarch64_update_cpp_builtins (cpp_reader *pfile)
aarch64_def_or_undef (TARGET_FRINT, "__ARM_FEATURE_FRINT", pfile);
aarch64_def_or_undef (TARGET_TME, "__ARM_FEATURE_TME", pfile);
aarch64_def_or_undef (TARGET_RNG, "__ARM_FEATURE_RNG", pfile);
+ aarch64_def_or_undef (TARGET_MEMTAG, "__ARM_FEATURE_MEMORY_TAGGING", pfile);
/* Not for ACLE, but required to keep "float.h" correct if we switch
target between implementations that do or do not support ARMv8.2-A
@@ -279,8 +280,8 @@ aarch64_resolve_overloaded_builtin (unsigned int uncast_location,
switch (code & AARCH64_BUILTIN_CLASS)
{
case AARCH64_BUILTIN_GENERAL:
- return NULL_TREE;
-
+ return aarch64_resolve_overloaded_builtin_general (location, fndecl,
+ uncast_arglist);
case AARCH64_BUILTIN_SVE:
new_fndecl = aarch64_sve::resolve_overloaded_builtin (location, subcode,
arglist);
diff --git a/gcc/config/aarch64/aarch64-protos.h b/gcc/config/aarch64/aarch64-protos.h
index bcb3fd4..5b1fc7b 100644
--- a/gcc/config/aarch64/aarch64-protos.h
+++ b/gcc/config/aarch64/aarch64-protos.h
@@ -773,4 +773,6 @@ extern const atomic_ool_names aarch64_ool_ldset_names;
extern const atomic_ool_names aarch64_ool_ldclr_names;
extern const atomic_ool_names aarch64_ool_ldeor_names;
+tree aarch64_resolve_overloaded_builtin_general (location_t, tree, void *);
+
#endif /* GCC_AARCH64_PROTOS_H */
diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
index 425a363..ee01909 100644
--- a/gcc/config/aarch64/aarch64.h
+++ b/gcc/config/aarch64/aarch64.h
@@ -248,6 +248,7 @@ extern unsigned aarch64_architecture_version;
#define AARCH64_ISA_RNG (aarch64_isa_flags & AARCH64_FL_RNG)
#define AARCH64_ISA_V8_5 (aarch64_isa_flags & AARCH64_FL_V8_5)
#define AARCH64_ISA_TME (aarch64_isa_flags & AARCH64_FL_TME)
+#define AARCH64_ISA_MEMTAG (aarch64_isa_flags & AARCH64_FL_MEMTAG)
/* Crypto is an optional extension to AdvSIMD. */
#define TARGET_CRYPTO (TARGET_SIMD && AARCH64_ISA_CRYPTO)
@@ -304,6 +305,9 @@ extern unsigned aarch64_architecture_version;
/* Random number instructions from Armv8.5-a. */
#define TARGET_RNG (AARCH64_ISA_RNG)
+/* Memory Tagging instructions optional to Armv8.5 enabled through +memtag. */
+#define TARGET_MEMTAG (AARCH64_ISA_V8_5 && AARCH64_ISA_MEMTAG)
+
/* Make sure this is always defined so we don't have to check for ifdefs
but rather use normal ifs. */
#ifndef TARGET_FIX_ERR_A53_835769_DEFAULT
diff --git a/gcc/config/aarch64/aarch64.md b/gcc/config/aarch64/aarch64.md
index 87e9b936..b11ead7 100644
--- a/gcc/config/aarch64/aarch64.md
+++ b/gcc/config/aarch64/aarch64.md
@@ -270,6 +270,9 @@
UNSPEC_SVE_PREFETCH_GATHER
UNSPEC_SVE_COMPACT
UNSPEC_SVE_SPLICE
+ UNSPEC_GEN_TAG ; Generate a 4-bit MTE tag.
+ UNSPEC_GEN_TAG_RND ; Generate a random 4-bit MTE tag.
+ UNSPEC_TAG_SPACE ; Translate address to MTE tag address space.
])
(define_c_enum "unspecv" [
@@ -7386,6 +7389,93 @@
[(set_attr "type" "mrs")]
)
+;; Memory Tagging Extension (MTE) instructions.
+
+(define_insn "irg"
+ [(set (match_operand:DI 0 "register_operand" "=rk")
+ (ior:DI
+ (and:DI (match_operand:DI 1 "register_operand" "rk")
+ (const_int -1080863910568919041)) ;; 0xf0ff...
+ (ashift:DI (unspec:QI [(match_operand:DI 2 "register_operand" "r")]
+ UNSPEC_GEN_TAG_RND)
+ (const_int 56))))]
+ "TARGET_MEMTAG"
+ "irg\\t%0, %1, %2"
+ [(set_attr "type" "memtag")]
+)
+
+(define_insn "gmi"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (ior:DI (ashift:DI
+ (const_int 1)
+ (and:QI (lshiftrt:DI
+ (match_operand:DI 1 "register_operand" "rk")
+ (const_int 56)) (const_int 15)))
+ (match_operand:DI 2 "register_operand" "r")))]
+ "TARGET_MEMTAG"
+ "gmi\\t%0, %1, %2"
+ [(set_attr "type" "memtag")]
+)
+
+(define_insn "addg"
+ [(set (match_operand:DI 0 "register_operand" "=rk")
+ (ior:DI
+ (and:DI (plus:DI (match_operand:DI 1 "register_operand" "rk")
+ (match_operand:DI 2 "aarch64_granule16_uimm6" "i"))
+ (const_int -1080863910568919041)) ;; 0xf0ff...
+ (ashift:DI
+ (unspec:QI
+ [(and:QI (lshiftrt:DI (match_dup 1) (const_int 56)) (const_int 15))
+ (match_operand:QI 3 "aarch64_memtag_tag_offset" "i")]
+ UNSPEC_GEN_TAG)
+ (const_int 56))))]
+ "TARGET_MEMTAG"
+ "addg\\t%0, %1, #%2, #%3"
+ [(set_attr "type" "memtag")]
+)
+
+(define_insn "subp"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (minus:DI
+ (and:DI (match_operand:DI 1 "register_operand" "rk")
+ (const_int 72057594037927935)) ;; 0x00ff...
+ (and:DI (match_operand:DI 2 "register_operand" "rk")
+ (const_int 72057594037927935))))] ;; 0x00ff...
+ "TARGET_MEMTAG"
+ "subp\\t%0, %1, %2"
+ [(set_attr "type" "memtag")]
+)
+
+;; LDG will use the 16-byte aligned value of the address.
+(define_insn "ldg"
+ [(set (match_operand:DI 0 "register_operand" "+r")
+ (ior:DI
+ (and:DI (match_dup 0) (const_int -1080863910568919041)) ;; 0xf0ff...
+ (ashift:DI
+ (mem:QI (unspec:DI
+ [(and:DI (plus:DI (match_operand:DI 1 "register_operand" "rk")
+ (match_operand:DI 2 "aarch64_granule16_simm9" "i"))
+ (const_int -16))] UNSPEC_TAG_SPACE))
+ (const_int 56))))]
+ "TARGET_MEMTAG"
+ "ldg\\t%0, [%1, #%2]"
+ [(set_attr "type" "memtag")]
+)
+
+;; STG doesn't align the address but aborts with alignment fault
+;; when the address is not 16-byte aligned.
+(define_insn "stg"
+ [(set (mem:QI (unspec:DI
+ [(plus:DI (match_operand:DI 1 "register_operand" "rk")
+ (match_operand:DI 2 "aarch64_granule16_simm9" "i"))]
+ UNSPEC_TAG_SPACE))
+ (and:QI (lshiftrt:DI (match_operand:DI 0 "register_operand" "rk")
+ (const_int 56)) (const_int 15)))]
+ "TARGET_MEMTAG"
+ "stg\\t%0, [%1, #%2]"
+ [(set_attr "type" "memtag")]
+)
+
;; AdvSIMD Stuff
(include "aarch64-simd.md")
diff --git a/gcc/config/aarch64/arm_acle.h b/gcc/config/aarch64/arm_acle.h
index 2284e71..1dfac86 100644
--- a/gcc/config/aarch64/arm_acle.h
+++ b/gcc/config/aarch64/arm_acle.h
@@ -209,6 +209,29 @@ __rndrrs (uint64_t *__res)
#pragma GCC pop_options
+#pragma GCC push_options
+#pragma GCC target ("arch=armv8.5-a+memtag")
+
+#define __arm_mte_create_random_tag(__ptr, __u64_mask) \
+ __builtin_aarch64_memtag_irg(__ptr, __u64_mask)
+
+#define __arm_mte_exclude_tag(__ptr, __u64_excluded) \
+ __builtin_aarch64_memtag_gmi(__ptr, __u64_excluded)
+
+#define __arm_mte_ptrdiff(__ptr_a, __ptr_b) \
+ __builtin_aarch64_memtag_subp(__ptr_a, __ptr_b)
+
+#define __arm_mte_increment_tag(__ptr, __u_offset) \
+ __builtin_aarch64_memtag_inc_tag(__ptr, __u_offset)
+
+#define __arm_mte_set_tag(__tagged_address) \
+ __builtin_aarch64_memtag_set_tag(__tagged_address)
+
+#define __arm_mte_get_tag(__address) \
+ __builtin_aarch64_memtag_get_tag(__address)
+
+#pragma GCC pop_options
+
#ifdef __cplusplus
}
#endif
diff --git a/gcc/config/aarch64/predicates.md b/gcc/config/aarch64/predicates.md
index 2323612..da6779e 100644
--- a/gcc/config/aarch64/predicates.md
+++ b/gcc/config/aarch64/predicates.md
@@ -874,3 +874,17 @@
(and (match_code "const_int,const_poly_int")
(match_test "known_eq (wi::to_poly_wide (op, mode),
BYTES_PER_SVE_VECTOR)")))
+
+(define_predicate "aarch64_memtag_tag_offset"
+ (and (match_code "const_int")
+ (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
+
+(define_predicate "aarch64_granule16_uimm6"
+ (and (match_code "const_int")
+ (match_test "IN_RANGE (INTVAL (op), 0, 1008)
+ && !(INTVAL (op) & 0xf)")))
+
+(define_predicate "aarch64_granule16_simm9"
+ (and (match_code "const_int")
+ (match_test "IN_RANGE (INTVAL (op), -4096, 4080)
+ && !(INTVAL (op) & 0xf)")))
diff --git a/gcc/config/arm/types.md b/gcc/config/arm/types.md
index 60faad6..df39522 100644
--- a/gcc/config/arm/types.md
+++ b/gcc/config/arm/types.md
@@ -1096,7 +1096,8 @@
crypto_sm3,\
crypto_sm4,\
coproc,\
- tme"
+ tme,\
+ memtag"
(const_string "untyped"))
; Is this an (integer side) multiply with a 32-bit (or smaller) result?