author    | Tamar Christina <tamar.christina@arm.com> | 2017-11-09 11:21:31 +0000
committer | Nick Clifton <nickc@redhat.com>           | 2017-11-09 11:21:31 +0000
commit    | b6b9ca0c3ec9589d0dd40b2b86ba748a361b48eb (patch)
tree      | 2c4abe3fa1daf67b9c26fb249689cd78b2b7d8df /opcodes
parent    | 21b81e67c73368dbbc9f6356ac5a7eb274789a32 (diff)
Split the ARM Crypto ISA extensions for AES and SHA1+2 into their own options (+aes and +sha2). The reason for the split is that, with the introduction of Armv8.4-A, the implementation of AES has explicitly been made independent of the implementation of the other crypto extensions.
gas * config/tc-aarch64.c (aarch64_arch_option_table): Add armv8.4-a.
(aarch64_features): Add SM4 and SHA3.
include * opcode/aarch64.h:
(AARCH64_FEATURE_V8_4, AARCH64_FEATURE_SM4): New.
(AARCH64_ARCH_V8_4, AARCH64_FEATURE_SHA3): New.
opcodes * aarch64-tbl.h
(aarch64_feature_v8_4, aarch64_feature_crypto_v8_2): New.
(aarch64_feature_sm4, aarch64_feature_sha3): New.
(aarch64_feature_fp_16_v8_2): New.
(ARMV8_4, SHA3, SM4, CRYPTO_V8_2, FP_F16_V8_2): New.
(V8_4_INSN, CRYPTO_V8_2_INSN): New.
(SHA3_INSN, SM4_INSN, FP16_V8_2_INSN): New.
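
The aarch64_feature_* sets named above act as requirement masks: broadly, an opcode table entry gated on one of them is only available when all of the required feature bits are enabled for the target. The following standalone sketch models that check; the type, names and bit values are placeholders for illustration, not the definitions from include/opcode/aarch64.h. Note how the SHA3 requirement also pulls in SHA2, mirroring aarch64_feature_sha3 in the diff below.

/* Standalone model of feature-set gating; all names and bit values here are
   placeholders, not the real definitions from include/opcode/aarch64.h.  */
#include <stdint.h>
#include <stdio.h>

typedef uint64_t feature_set;        /* stands in for aarch64_feature_set */

#define FEAT_V8_2 (1ULL << 0)        /* placeholder bits */
#define FEAT_AES  (1ULL << 1)
#define FEAT_SHA2 (1ULL << 2)
#define FEAT_SHA3 (1ULL << 3)
#define FEAT_SM4  (1ULL << 4)

/* Analogue of aarch64_feature_sha3: SHA3 instructions also require SHA2.  */
static const feature_set need_sha3 = FEAT_V8_2 | FEAT_SHA2 | FEAT_SHA3;

/* An entry is available only if every required bit is present.  */
static int
has_all (feature_set cpu, feature_set need)
{
  return (cpu & need) == need;
}

int
main (void)
{
  feature_set cpu = FEAT_V8_2 | FEAT_SHA2;                      /* e.g. +sha2 only */
  printf ("sha3 available: %d\n", has_all (cpu, need_sha3));    /* 0 */

  cpu |= FEAT_SHA3;                                             /* e.g. add +sha3 */
  printf ("sha3 available: %d\n", has_all (cpu, need_sha3));    /* 1 */
  return 0;
}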
Diffstat (limited to 'opcodes')
-rw-r--r-- | opcodes/ChangeLog     | 10
-rw-r--r-- | opcodes/aarch64-tbl.h | 27
2 files changed, 37 insertions, 0 deletions
diff --git a/opcodes/ChangeLog b/opcodes/ChangeLog
index ffa56bb..410df9c 100644
--- a/opcodes/ChangeLog
+++ b/opcodes/ChangeLog
@@ -1,3 +1,13 @@
+2017-11-09  Tamar Christina  <tamar.christina@arm.com>
+
+	* aarch64-tbl.h
+	(aarch64_feature_v8_4, aarch64_feature_crypto_v8_2): New.
+	(aarch64_feature_sm4, aarch64_feature_sha3): New.
+	(aarch64_feature_fp_16_v8_2): New.
+	(ARMV8_4, SHA3, SM4, CRYPTO_V8_2, FP_F16_V8_2): New.
+	(V8_4_INSN, CRYPTO_V8_2_INSN): New.
+	(SHA3_INSN, SM4_INSN, FP16_V8_2_INSN): New.
+
 2017-11-08  Tamar Christina  <tamar.christina@arm.com>
 
 	* aarch64-tbl.h (aarch64_feature_crypto): Add AES and SHA2.
diff --git a/opcodes/aarch64-tbl.h b/opcodes/aarch64-tbl.h
index 9d4f3a4..a99f5f5 100644
--- a/opcodes/aarch64-tbl.h
+++ b/opcodes/aarch64-tbl.h
@@ -2041,6 +2041,18 @@ static const aarch64_feature_set aarch64_feature_sha2 =
   AARCH64_FEATURE (AARCH64_FEATURE_V8 | AARCH64_FEATURE_SHA2, 0);
 static const aarch64_feature_set aarch64_feature_aes =
   AARCH64_FEATURE (AARCH64_FEATURE_V8 | AARCH64_FEATURE_AES, 0);
+static const aarch64_feature_set aarch64_feature_v8_4 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_4, 0);
+static const aarch64_feature_set aarch64_feature_crypto_v8_2 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_CRYPTO, 0);
+static const aarch64_feature_set aarch64_feature_sm4 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_SM4, 0);
+static const aarch64_feature_set aarch64_feature_sha3 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_SHA2
+                   | AARCH64_FEATURE_SHA3, 0);
+static const aarch64_feature_set aarch64_feature_fp_16_v8_2 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_F16
+                   | AARCH64_FEATURE_FP, 0);
 
 #define CORE &aarch64_feature_v8
 #define FP &aarch64_feature_fp
@@ -2062,6 +2074,11 @@ static const aarch64_feature_set aarch64_feature_aes =
 #define RCPC &aarch64_feature_rcpc
 #define SHA2 &aarch64_feature_sha2
 #define AES &aarch64_feature_aes
+#define ARMV8_4 &aarch64_feature_v8_4
+#define SHA3 &aarch64_feature_sha3
+#define SM4 &aarch64_feature_sm4
+#define CRYPTO_V8_2 &aarch64_feature_crypto_v8_2
+#define FP_F16_V8_2 &aarch64_feature_fp_16_v8_2
 #define DOTPROD &aarch64_feature_dotprod
 
 #define CORE_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS) \
@@ -2099,6 +2116,16 @@ static const aarch64_feature_set aarch64_feature_aes =
   { NAME, OPCODE, MASK, CLASS, 0, SHA2, OPS, QUALS, FLAGS, 0, NULL }
 #define AES_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, AES, OPS, QUALS, FLAGS, 0, NULL }
+#define V8_4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, ARMV8_4, OPS, QUALS, FLAGS, 0, NULL }
+#define CRYPTO_V8_2_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, CRYPTO_V8_2, OPS, QUALS, FLAGS, 0, NULL }
+#define SHA3_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, SHA3, OPS, QUALS, FLAGS, 0, NULL }
+#define SM4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, SM4, OPS, QUALS, FLAGS, 0, NULL }
+#define FP16_V8_2_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, FP_F16_V8_2, OPS, QUALS, FLAGS, 0, NULL }
 #define DOT_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, DOTPROD, OPS, QUALS, FLAGS, 0, NULL }
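
The new *_INSN wrappers defined at the end of the diff differ only in which feature set they plug into the table entry. A minimal standalone analogue of that pattern (the struct layout, field names, and all values are placeholders, not the real aarch64_opcode entry) might look like this:

/* Standalone sketch of the *_INSN wrapper pattern; the struct and every
   value below are placeholders, not the real aarch64_opcode table entry.  */
typedef unsigned long long feature_set;

struct entry
{
  const char *name;               /* mnemonic */
  unsigned int opcode, mask;      /* encoding and fixed-bit mask */
  const feature_set *avariant;    /* required features, cf. SM4 / SHA3 above */
};

static const feature_set feature_sm4  = 0x1;   /* placeholder bits */
static const feature_set feature_sha3 = 0x2;

#define SM4  (&feature_sm4)
#define SHA3 (&feature_sha3)

/* Same shape as SM4_INSN / SHA3_INSN above: identical initializer, different
   gating feature set.  */
#define SM4_INSN(NAME, OPCODE, MASK)  { NAME, OPCODE, MASK, SM4 }
#define SHA3_INSN(NAME, OPCODE, MASK) { NAME, OPCODE, MASK, SHA3 }

static const struct entry table[] =
{
  SM4_INSN ("sm4e", 0u, 0u),      /* placeholder encodings */
  SHA3_INSN ("bcax", 0u, 0u),
};

This patch only introduces the macros; the actual SM4 and SHA3 instruction entries are expected to be added by later patches in the series using these wrappers.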