From 21458a27101aeda7abd498f4c48a2192b0fef62f Mon Sep 17 00:00:00 2001
From: Chih-Min Chao <48193236+chihminchao@users.noreply.github.com>
Date: Thu, 3 Dec 2020 09:57:31 +0800
Subject: rvv: index load/store have been separated into ordered and unordered
 parts (#611)

ref: https://github.com/riscv/riscv-v-spec/commit/511d0b84a3848de437fd01990d078feaa2871b11

Signed-off-by: Chih-Min Chao
---
 riscv/encoding.h | 168 +++++++++++++++++++++++++++++++------------------------
 1 file changed, 96 insertions(+), 72 deletions(-)

(limited to 'riscv/encoding.h')

diff --git a/riscv/encoding.h b/riscv/encoding.h
index 784b6fe..042b7b2 100644
--- a/riscv/encoding.h
+++ b/riscv/encoding.h
@@ -1215,6 +1215,38 @@
 #define MASK_VSE512_V 0x1df0707f
 #define MATCH_VSE1024_V 0x10007027
 #define MASK_VSE1024_V 0x1df0707f
+#define MATCH_VLUXEI8_V 0x4000007
+#define MASK_VLUXEI8_V 0x1c00707f
+#define MATCH_VLUXEI16_V 0x4005007
+#define MASK_VLUXEI16_V 0x1c00707f
+#define MATCH_VLUXEI32_V 0x4006007
+#define MASK_VLUXEI32_V 0x1c00707f
+#define MATCH_VLUXEI64_V 0x4007007
+#define MASK_VLUXEI64_V 0x1c00707f
+#define MATCH_VLUXEI128_V 0x14000007
+#define MASK_VLUXEI128_V 0x1c00707f
+#define MATCH_VLUXEI256_V 0x14005007
+#define MASK_VLUXEI256_V 0x1c00707f
+#define MATCH_VLUXEI512_V 0x14006007
+#define MASK_VLUXEI512_V 0x1c00707f
+#define MATCH_VLUXEI1024_V 0x14007007
+#define MASK_VLUXEI1024_V 0x1c00707f
+#define MATCH_VSUXEI8_V 0x4000027
+#define MASK_VSUXEI8_V 0x1c00707f
+#define MATCH_VSUXEI16_V 0x4005027
+#define MASK_VSUXEI16_V 0x1c00707f
+#define MATCH_VSUXEI32_V 0x4006027
+#define MASK_VSUXEI32_V 0x1c00707f
+#define MATCH_VSUXEI64_V 0x4007027
+#define MASK_VSUXEI64_V 0x1c00707f
+#define MATCH_VSUXEI128_V 0x14000027
+#define MASK_VSUXEI128_V 0x1c00707f
+#define MATCH_VSUXEI256_V 0x14005027
+#define MASK_VSUXEI256_V 0x1c00707f
+#define MATCH_VSUXEI512_V 0x14006027
+#define MASK_VSUXEI512_V 0x1c00707f
+#define MATCH_VSUXEI1024_V 0x14007027
+#define MASK_VSUXEI1024_V 0x1c00707f
 #define MATCH_VLSE8_V 0x8000007
 #define MASK_VLSE8_V 0x1c00707f
 #define MATCH_VLSE16_V 0x8005007
@@ -1247,54 +1279,38 @@
 #define MASK_VSSE512_V 0x1c00707f
 #define MATCH_VSSE1024_V 0x18007027
 #define MASK_VSSE1024_V 0x1c00707f
-#define MATCH_VLXEI8_V 0xc000007
-#define MASK_VLXEI8_V 0x1c00707f
-#define MATCH_VLXEI16_V 0xc005007
-#define MASK_VLXEI16_V 0x1c00707f
-#define MATCH_VLXEI32_V 0xc006007
-#define MASK_VLXEI32_V 0x1c00707f
-#define MATCH_VLXEI64_V 0xc007007
-#define MASK_VLXEI64_V 0x1c00707f
-#define MATCH_VLXEI128_V 0x1c000007
-#define MASK_VLXEI128_V 0x1c00707f
-#define MATCH_VLXEI256_V 0x1c005007
-#define MASK_VLXEI256_V 0x1c00707f
-#define MATCH_VLXEI512_V 0x1c006007
-#define MASK_VLXEI512_V 0x1c00707f
-#define MATCH_VLXEI1024_V 0x1c007007
-#define MASK_VLXEI1024_V 0x1c00707f
-#define MATCH_VSXEI8_V 0xc000027
-#define MASK_VSXEI8_V 0x1c00707f
-#define MATCH_VSXEI16_V 0xc005027
-#define MASK_VSXEI16_V 0x1c00707f
-#define MATCH_VSXEI32_V 0xc006027
-#define MASK_VSXEI32_V 0x1c00707f
-#define MATCH_VSXEI64_V 0xc007027
-#define MASK_VSXEI64_V 0x1c00707f
-#define MATCH_VSXEI128_V 0x1c000027
-#define MASK_VSXEI128_V 0x1c00707f
-#define MATCH_VSXEI256_V 0x1c005027
-#define MASK_VSXEI256_V 0x1c00707f
-#define MATCH_VSXEI512_V 0x1c006027
-#define MASK_VSXEI512_V 0x1c00707f
-#define MATCH_VSXEI1024_V 0x1c007027
-#define MASK_VSXEI1024_V 0x1c00707f
-#define MATCH_VSUXEI8_V 0x4000027
-#define MASK_VSUXEI8_V 0x1c00707f
-#define MATCH_VSUXEI16_V 0x4005027
-#define MASK_VSUXEI16_V 0x1c00707f
-#define MATCH_VSUXEI32_V 0x4006027
-#define MASK_VSUXEI32_V 0x1c00707f
-#define MATCH_VSUXEI64_V 0x4007027
-#define MASK_VSUXEI64_V 0x1c00707f
-#define MATCH_VSUXEI128_V 0x14000027
-#define MASK_VSUXEI128_V 0x1c00707f
-#define MATCH_VSUXEI256_V 0x14005027
-#define MASK_VSUXEI256_V 0x1c00707f
-#define MATCH_VSUXEI512_V 0x14006027
-#define MASK_VSUXEI512_V 0x1c00707f
-#define MATCH_VSUXEI1024_V 0x14007027
-#define MASK_VSUXEI1024_V 0x1c00707f
+#define MATCH_VLOXEI8_V 0xc000007
+#define MASK_VLOXEI8_V 0x1c00707f
+#define MATCH_VLOXEI16_V 0xc005007
+#define MASK_VLOXEI16_V 0x1c00707f
+#define MATCH_VLOXEI32_V 0xc006007
+#define MASK_VLOXEI32_V 0x1c00707f
+#define MATCH_VLOXEI64_V 0xc007007
+#define MASK_VLOXEI64_V 0x1c00707f
+#define MATCH_VLOXEI128_V 0x1c000007
+#define MASK_VLOXEI128_V 0x1c00707f
+#define MATCH_VLOXEI256_V 0x1c005007
+#define MASK_VLOXEI256_V 0x1c00707f
+#define MATCH_VLOXEI512_V 0x1c006007
+#define MASK_VLOXEI512_V 0x1c00707f
+#define MATCH_VLOXEI1024_V 0x1c007007
+#define MASK_VLOXEI1024_V 0x1c00707f
+#define MATCH_VSOXEI8_V 0xc000027
+#define MASK_VSOXEI8_V 0x1c00707f
+#define MATCH_VSOXEI16_V 0xc005027
+#define MASK_VSOXEI16_V 0x1c00707f
+#define MATCH_VSOXEI32_V 0xc006027
+#define MASK_VSOXEI32_V 0x1c00707f
+#define MATCH_VSOXEI64_V 0xc007027
+#define MASK_VSOXEI64_V 0x1c00707f
+#define MATCH_VSOXEI128_V 0x1c000027
+#define MASK_VSOXEI128_V 0x1c00707f
+#define MATCH_VSOXEI256_V 0x1c005027
+#define MASK_VSOXEI256_V 0x1c00707f
+#define MATCH_VSOXEI512_V 0x1c006027
+#define MASK_VSOXEI512_V 0x1c00707f
+#define MATCH_VSOXEI1024_V 0x1c007027
+#define MASK_VSOXEI1024_V 0x1c00707f
 #define MATCH_VLE8FF_V 0x1000007
 #define MASK_VLE8FF_V 0x1df0707f
 #define MATCH_VLE16FF_V 0x1005007
@@ -2822,6 +2838,22 @@ DECLARE_INSN(vse128_v, MATCH_VSE128_V, MASK_VSE128_V)
 DECLARE_INSN(vse256_v, MATCH_VSE256_V, MASK_VSE256_V)
 DECLARE_INSN(vse512_v, MATCH_VSE512_V, MASK_VSE512_V)
 DECLARE_INSN(vse1024_v, MATCH_VSE1024_V, MASK_VSE1024_V)
+DECLARE_INSN(vluxei8_v, MATCH_VLUXEI8_V, MASK_VLUXEI8_V)
+DECLARE_INSN(vluxei16_v, MATCH_VLUXEI16_V, MASK_VLUXEI16_V)
+DECLARE_INSN(vluxei32_v, MATCH_VLUXEI32_V, MASK_VLUXEI32_V)
+DECLARE_INSN(vluxei64_v, MATCH_VLUXEI64_V, MASK_VLUXEI64_V)
+DECLARE_INSN(vluxei128_v, MATCH_VLUXEI128_V, MASK_VLUXEI128_V)
+DECLARE_INSN(vluxei256_v, MATCH_VLUXEI256_V, MASK_VLUXEI256_V)
+DECLARE_INSN(vluxei512_v, MATCH_VLUXEI512_V, MASK_VLUXEI512_V)
+DECLARE_INSN(vluxei1024_v, MATCH_VLUXEI1024_V, MASK_VLUXEI1024_V)
+DECLARE_INSN(vsuxei8_v, MATCH_VSUXEI8_V, MASK_VSUXEI8_V)
+DECLARE_INSN(vsuxei16_v, MATCH_VSUXEI16_V, MASK_VSUXEI16_V)
+DECLARE_INSN(vsuxei32_v, MATCH_VSUXEI32_V, MASK_VSUXEI32_V)
+DECLARE_INSN(vsuxei64_v, MATCH_VSUXEI64_V, MASK_VSUXEI64_V)
+DECLARE_INSN(vsuxei128_v, MATCH_VSUXEI128_V, MASK_VSUXEI128_V)
+DECLARE_INSN(vsuxei256_v, MATCH_VSUXEI256_V, MASK_VSUXEI256_V)
+DECLARE_INSN(vsuxei512_v, MATCH_VSUXEI512_V, MASK_VSUXEI512_V)
+DECLARE_INSN(vsuxei1024_v, MATCH_VSUXEI1024_V, MASK_VSUXEI1024_V)
 DECLARE_INSN(vlse8_v, MATCH_VLSE8_V, MASK_VLSE8_V)
 DECLARE_INSN(vlse16_v, MATCH_VLSE16_V, MASK_VLSE16_V)
 DECLARE_INSN(vlse32_v, MATCH_VLSE32_V, MASK_VLSE32_V)
@@ -2838,30 +2870,22 @@ DECLARE_INSN(vsse128_v, MATCH_VSSE128_V, MASK_VSSE128_V)
 DECLARE_INSN(vsse256_v, MATCH_VSSE256_V, MASK_VSSE256_V)
 DECLARE_INSN(vsse512_v, MATCH_VSSE512_V, MASK_VSSE512_V)
 DECLARE_INSN(vsse1024_v, MATCH_VSSE1024_V, MASK_VSSE1024_V)
-DECLARE_INSN(vlxei8_v, MATCH_VLXEI8_V, MASK_VLXEI8_V)
-DECLARE_INSN(vlxei16_v, MATCH_VLXEI16_V, MASK_VLXEI16_V)
-DECLARE_INSN(vlxei32_v, MATCH_VLXEI32_V, MASK_VLXEI32_V)
-DECLARE_INSN(vlxei64_v, MATCH_VLXEI64_V, MASK_VLXEI64_V)
-DECLARE_INSN(vlxei128_v, MATCH_VLXEI128_V, MASK_VLXEI128_V)
-DECLARE_INSN(vlxei256_v, MATCH_VLXEI256_V, MASK_VLXEI256_V)
-DECLARE_INSN(vlxei512_v, MATCH_VLXEI512_V, MASK_VLXEI512_V)
-DECLARE_INSN(vlxei1024_v, MATCH_VLXEI1024_V, MASK_VLXEI1024_V)
-DECLARE_INSN(vsxei8_v, MATCH_VSXEI8_V, MASK_VSXEI8_V)
-DECLARE_INSN(vsxei16_v, MATCH_VSXEI16_V, MASK_VSXEI16_V)
-DECLARE_INSN(vsxei32_v, MATCH_VSXEI32_V, MASK_VSXEI32_V)
-DECLARE_INSN(vsxei64_v, MATCH_VSXEI64_V, MASK_VSXEI64_V)
-DECLARE_INSN(vsxei128_v, MATCH_VSXEI128_V, MASK_VSXEI128_V)
-DECLARE_INSN(vsxei256_v, MATCH_VSXEI256_V, MASK_VSXEI256_V)
-DECLARE_INSN(vsxei512_v, MATCH_VSXEI512_V, MASK_VSXEI512_V)
-DECLARE_INSN(vsxei1024_v, MATCH_VSXEI1024_V, MASK_VSXEI1024_V)
-DECLARE_INSN(vsuxei8_v, MATCH_VSUXEI8_V, MASK_VSUXEI8_V)
-DECLARE_INSN(vsuxei16_v, MATCH_VSUXEI16_V, MASK_VSUXEI16_V)
-DECLARE_INSN(vsuxei32_v, MATCH_VSUXEI32_V, MASK_VSUXEI32_V)
-DECLARE_INSN(vsuxei64_v, MATCH_VSUXEI64_V, MASK_VSUXEI64_V)
-DECLARE_INSN(vsuxei128_v, MATCH_VSUXEI128_V, MASK_VSUXEI128_V)
-DECLARE_INSN(vsuxei256_v, MATCH_VSUXEI256_V, MASK_VSUXEI256_V)
-DECLARE_INSN(vsuxei512_v, MATCH_VSUXEI512_V, MASK_VSUXEI512_V)
-DECLARE_INSN(vsuxei1024_v, MATCH_VSUXEI1024_V, MASK_VSUXEI1024_V)
+DECLARE_INSN(vloxei8_v, MATCH_VLOXEI8_V, MASK_VLOXEI8_V)
+DECLARE_INSN(vloxei16_v, MATCH_VLOXEI16_V, MASK_VLOXEI16_V)
+DECLARE_INSN(vloxei32_v, MATCH_VLOXEI32_V, MASK_VLOXEI32_V)
+DECLARE_INSN(vloxei64_v, MATCH_VLOXEI64_V, MASK_VLOXEI64_V)
+DECLARE_INSN(vloxei128_v, MATCH_VLOXEI128_V, MASK_VLOXEI128_V)
+DECLARE_INSN(vloxei256_v, MATCH_VLOXEI256_V, MASK_VLOXEI256_V)
+DECLARE_INSN(vloxei512_v, MATCH_VLOXEI512_V, MASK_VLOXEI512_V)
+DECLARE_INSN(vloxei1024_v, MATCH_VLOXEI1024_V, MASK_VLOXEI1024_V)
+DECLARE_INSN(vsoxei8_v, MATCH_VSOXEI8_V, MASK_VSOXEI8_V)
+DECLARE_INSN(vsoxei16_v, MATCH_VSOXEI16_V, MASK_VSOXEI16_V)
+DECLARE_INSN(vsoxei32_v, MATCH_VSOXEI32_V, MASK_VSOXEI32_V)
+DECLARE_INSN(vsoxei64_v, MATCH_VSOXEI64_V, MASK_VSOXEI64_V)
+DECLARE_INSN(vsoxei128_v, MATCH_VSOXEI128_V, MASK_VSOXEI128_V)
+DECLARE_INSN(vsoxei256_v, MATCH_VSOXEI256_V, MASK_VSOXEI256_V)
+DECLARE_INSN(vsoxei512_v, MATCH_VSOXEI512_V, MASK_VSOXEI512_V)
+DECLARE_INSN(vsoxei1024_v, MATCH_VSOXEI1024_V, MASK_VSOXEI1024_V)
 DECLARE_INSN(vle8ff_v, MATCH_VLE8FF_V, MASK_VLE8FF_V)
 DECLARE_INSN(vle16ff_v, MATCH_VLE16FF_V, MASK_VLE16FF_V)
 DECLARE_INSN(vle32ff_v, MATCH_VLE32FF_V, MASK_VLE32FF_V)
--
cgit v1.1
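Note (not part of the patch): each MATCH_/MASK_ pair identifies an instruction when the instruction word, ANDed with the mask, equals the match value; the new unordered (vluxei*/vsuxei*) and ordered (vloxei*/vsoxei*) encodings share the same mask and differ only in the mop bits it covers. Below is a minimal standalone C sketch using two values copied from this header; the matches() helper and the hand-assembled instruction word are illustrative assumptions, not code from the Spike sources.

#include <stdint.h>
#include <stdio.h>

/* Values copied from riscv/encoding.h above; only the helper and the
 * example instruction word below are hypothetical. */
#define MATCH_VLUXEI8_V 0x4000007
#define MASK_VLUXEI8_V  0x1c00707f
#define MATCH_VLOXEI8_V 0xc000007
#define MASK_VLOXEI8_V  0x1c00707f

/* An instruction word matches when its bits under the mask equal the match value. */
static int matches(uint32_t insn, uint32_t match, uint32_t mask)
{
    return (insn & mask) == match;
}

int main(void)
{
    /* Hand-assembled vluxei8.v v1, (x2), v3 (unmasked), for illustration:
     * match bits | vd=1 | rs1=2 | vs2=3 | vm=1 */
    uint32_t insn = MATCH_VLUXEI8_V | (1u << 7) | (2u << 15) | (3u << 20) | (1u << 25);

    printf("vluxei8.v? %d\n", matches(insn, MATCH_VLUXEI8_V, MASK_VLUXEI8_V)); /* prints 1 */
    printf("vloxei8.v? %d\n", matches(insn, MATCH_VLOXEI8_V, MASK_VLOXEI8_V)); /* prints 0 */
    return 0;
}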