-rw-r--r--  gas/NEWS                   |  2
-rw-r--r--  gas/config/tc-aarch64.c    | 11
-rw-r--r--  gas/doc/c-aarch64.texi     |  6
-rw-r--r--  include/opcode/aarch64.h   |  3
-rw-r--r--  opcodes/aarch64-tbl.h      | 11
5 files changed, 33 insertions, 0 deletions
@@ -11,6 +11,8 @@
 
 * Add support for Cortex-A710 for Arm.
 
+* Add support for Scalable Matrix Extension (SME) for AArch64.
+
 * Outputs of .ds.x directive and .tfloat directive with hex input from x86
   assembler have been reduced from 12 bytes to 10 bytes to match the output
   of .tfloat directive.
diff --git a/gas/config/tc-aarch64.c b/gas/config/tc-aarch64.c
index 2ea55d8..b592e80 100644
--- a/gas/config/tc-aarch64.c
+++ b/gas/config/tc-aarch64.c
@@ -9244,6 +9244,17 @@ static const struct aarch64_option_cpu_value_table aarch64_features[] = {
                                         | AARCH64_FEATURE_SHA3, 0)},
   {"sve2-bitperm",     AARCH64_FEATURE (AARCH64_FEATURE_SVE2_BITPERM, 0),
                        AARCH64_FEATURE (AARCH64_FEATURE_SVE2, 0)},
+  {"sme",              AARCH64_FEATURE (AARCH64_FEATURE_SME, 0),
+                       AARCH64_FEATURE (AARCH64_FEATURE_SVE2
+                                        | AARCH64_FEATURE_BFLOAT16, 0)},
+  {"sme-f64",          AARCH64_FEATURE (AARCH64_FEATURE_SME_F64, 0),
+                       AARCH64_FEATURE (AARCH64_FEATURE_SME
+                                        | AARCH64_FEATURE_SVE2
+                                        | AARCH64_FEATURE_BFLOAT16, 0)},
+  {"sme-i64",          AARCH64_FEATURE (AARCH64_FEATURE_SME_I64, 0),
+                       AARCH64_FEATURE (AARCH64_FEATURE_SME
+                                        | AARCH64_FEATURE_SVE2
+                                        | AARCH64_FEATURE_BFLOAT16, 0)},
   {"bf16",             AARCH64_FEATURE (AARCH64_FEATURE_BFLOAT16, 0),
                        AARCH64_ARCH_NONE},
   {"i8mm",             AARCH64_FEATURE (AARCH64_FEATURE_I8MM, 0),
diff --git a/gas/doc/c-aarch64.texi b/gas/doc/c-aarch64.texi
index 8263c1a..6c39963 100644
--- a/gas/doc/c-aarch64.texi
+++ b/gas/doc/c-aarch64.texi
@@ -222,6 +222,12 @@ automatically cause those extensions to be disabled.
 @item @code{sm4} @tab ARMv8.2-A @tab No
  @tab Enable the ARMv8.2-A SM3 and SM4 cryptographic extensions.  This
  implies @code{fp} and @code{simd}.
+@item @code{sme} @tab Armv9-A @tab No
+ @tab Enable SME Extension.
+@item @code{sme-f64} @tab Armv9-A @tab No
+ @tab Enable SME F64 Extension.
+@item @code{sme-i64} @tab Armv9-A @tab No
+ @tab Enable SME I64 Extension.
 @item @code{ssbs} @tab ARMv8-A @tab ARMv8.5-A or later
  @tab Enable Speculative Store Bypassing Safe state read and write.
 @item @code{sve} @tab ARMv8.2-A @tab Armv9-A or later
diff --git a/include/opcode/aarch64.h b/include/opcode/aarch64.h
index 420b211..e361920 100644
--- a/include/opcode/aarch64.h
+++ b/include/opcode/aarch64.h
@@ -51,6 +51,7 @@ typedef uint32_t aarch64_insn;
 #define AARCH64_FEATURE_V8_4    (1ULL << 11) /* ARMv8.4 processors.  */
 #define AARCH64_FEATURE_V8_R    (1ULL << 12) /* Armv8-R processors.  */
 #define AARCH64_FEATURE_V8_7    (1ULL << 13) /* Armv8.7 processors.  */
+#define AARCH64_FEATURE_SME     (1ULL << 14) /* Scalable Matrix Extension.  */
 #define AARCH64_FEATURE_LS64    (1ULL << 15) /* Atomic 64-byte load/store.  */
 #define AARCH64_FEATURE_PAC     (1ULL << 16) /* v8.3 Pointer Authentication.  */
 #define AARCH64_FEATURE_FP      (1ULL << 17) /* FP instructions.  */
@@ -91,6 +92,8 @@ typedef uint32_t aarch64_insn;
 #define AARCH64_FEATURE_F64MM   (1ULL << 54)
 #define AARCH64_FEATURE_FLAGM   (1ULL << 55) /* v8.4 Flag Manipulation.  */
 #define AARCH64_FEATURE_V9      (1ULL << 56) /* Armv9.0-A processors.  */
+#define AARCH64_FEATURE_SME_F64 (1ULL << 57) /* SME F64.  */
+#define AARCH64_FEATURE_SME_I64 (1ULL << 58) /* SME I64.  */
 
 /* Crypto instructions are the combination of AES and SHA2.  */
 #define AARCH64_FEATURE_CRYPTO  (AARCH64_FEATURE_SHA2 | AARCH64_FEATURE_AES)
diff --git a/opcodes/aarch64-tbl.h b/opcodes/aarch64-tbl.h
index 17ea851..d63b081 100644
--- a/opcodes/aarch64-tbl.h
+++ b/opcodes/aarch64-tbl.h
@@ -2401,6 +2401,14 @@ static const aarch64_feature_set aarch64_feature_sve2sm4 =
   AARCH64_FEATURE (AARCH64_FEATURE_SVE2 | AARCH64_FEATURE_SVE2_SM4, 0);
 static const aarch64_feature_set aarch64_feature_sve2bitperm =
   AARCH64_FEATURE (AARCH64_FEATURE_SVE2 | AARCH64_FEATURE_SVE2_BITPERM, 0);
+static const aarch64_feature_set aarch64_feature_sme =
+  AARCH64_FEATURE (AARCH64_FEATURE_SVE2 | AARCH64_FEATURE_SME, 0);
+static const aarch64_feature_set aarch64_feature_sme_f64 =
+  AARCH64_FEATURE (AARCH64_FEATURE_SVE2 | AARCH64_FEATURE_SME
+                   | AARCH64_FEATURE_SME_F64, 0);
+static const aarch64_feature_set aarch64_feature_sme_i64 =
+  AARCH64_FEATURE (AARCH64_FEATURE_SVE2 | AARCH64_FEATURE_SME
+                   | AARCH64_FEATURE_SME_I64, 0);
 static const aarch64_feature_set aarch64_feature_v8_6 =
   AARCH64_FEATURE (AARCH64_FEATURE_V8_6, 0);
 static const aarch64_feature_set aarch64_feature_v8_7 =
@@ -2458,6 +2466,9 @@ static const aarch64_feature_set aarch64_feature_flagm =
 #define SVE2_SHA3 &aarch64_feature_sve2sha3
 #define SVE2_SM4 &aarch64_feature_sve2sm4
 #define SVE2_BITPERM &aarch64_feature_sve2bitperm
+#define SME &aarch64_feature_sme
+#define SME_F64 &aarch64_feature_sme_f64
+#define SME_I64 &aarch64_feature_sme_i64
 #define ARMV8_6 &aarch64_feature_v8_6
 #define ARMV8_6_SVE &aarch64_feature_v8_6
 #define BFLOAT16_SVE &aarch64_feature_bfloat16_sve
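The new table entries in tc-aarch64.c pair each SME extension name with the features it implies: plain "sme" depends on SVE2 and BFloat16, while "sme-f64" and "sme-i64" additionally depend on SME itself. Below is a minimal standalone C sketch of that dependency scheme, not binutils source: only the SME bit positions (14, 57, 58) and the dependency lists come from the patch; the SVE2/BF16 bit values and the enable_extension helper are illustrative assumptions.

/* Standalone sketch (assumption, not binutils code): models how a
   value/require table like gas's aarch64_features resolves "+ext".  */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef uint64_t feature_set;

#define FEAT_SME      (1ULL << 14)  /* Bit position from the patch.  */
#define FEAT_SME_F64  (1ULL << 57)  /* Bit position from the patch.  */
#define FEAT_SME_I64  (1ULL << 58)  /* Bit position from the patch.  */
#define FEAT_SVE2     (1ULL << 1)   /* Placeholder bit position.  */
#define FEAT_BF16     (1ULL << 2)   /* Placeholder bit position.  */

struct ext_entry
{
  const char *name;
  feature_set value;    /* Feature the extension enables.  */
  feature_set require;  /* Features it depends on.  */
};

/* Same dependency structure as the new SME entries in tc-aarch64.c.  */
static const struct ext_entry extensions[] =
{
  { "sme",     FEAT_SME,     FEAT_SVE2 | FEAT_BF16 },
  { "sme-f64", FEAT_SME_F64, FEAT_SME | FEAT_SVE2 | FEAT_BF16 },
  { "sme-i64", FEAT_SME_I64, FEAT_SME | FEAT_SVE2 | FEAT_BF16 },
};

/* Enabling "+name" ORs in the extension's own bit plus everything it
   requires, so selecting "sme-f64" also yields SME, SVE2 and BF16.  */
static feature_set
enable_extension (const char *name, feature_set current)
{
  for (size_t i = 0; i < sizeof extensions / sizeof extensions[0]; i++)
    if (strcmp (extensions[i].name, name) == 0)
      return current | extensions[i].value | extensions[i].require;
  return current;  /* Unknown extension: leave the set unchanged.  */
}

int
main (void)
{
  feature_set cpu = 0;
  cpu = enable_extension ("sme-f64", cpu);
  printf ("sme:  %d\n", (cpu & FEAT_SME) != 0);
  printf ("sve2: %d\n", (cpu & FEAT_SVE2) != 0);
  printf ("bf16: %d\n", (cpu & FEAT_BF16) != 0);
  return 0;
}

With the c-aarch64.texi hunk documenting the new names as Armv9-A extensions, they would be selected in gas's usual "+extension" form on the -march/-mcpu option (for example something like -march=armv9-a+sme-f64); the exact command line is a usage assumption, not part of this patch.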