From b8cf978cfe9c2cc65a925c97cb2694b114de3637 Mon Sep 17 00:00:00 2001
From: Chih-Min Chao
Date: Tue, 4 Jun 2019 00:25:24 -0700
Subject: rvv: change vseq.?? to vmseq.?? and related insns

Signed-off-by: Chih-Min Chao
---
 riscv/encoding.h | 120 +++++++++++++++++++++++++++----------------------------
 1 file changed, 60 insertions(+), 60 deletions(-)

diff --git a/riscv/encoding.h b/riscv/encoding.h
index 9ea069e..7d28ee6 100644
--- a/riscv/encoding.h
+++ b/riscv/encoding.h
@@ -1040,22 +1040,22 @@
 #define MASK_VSBC_VX 0xfe00707f
 #define MATCH_VMERGE_VX 0x5c004057
 #define MASK_VMERGE_VX 0xfc00707f
-#define MATCH_VSEQ_VX 0x60004057
-#define MASK_VSEQ_VX 0xfc00707f
-#define MATCH_VSNE_VX 0x64004057
-#define MASK_VSNE_VX 0xfc00707f
-#define MATCH_VSLTU_VX 0x68004057
-#define MASK_VSLTU_VX 0xfc00707f
-#define MATCH_VSLT_VX 0x6c004057
-#define MASK_VSLT_VX 0xfc00707f
-#define MATCH_VSLEU_VX 0x70004057
-#define MASK_VSLEU_VX 0xfc00707f
-#define MATCH_VSLE_VX 0x74004057
-#define MASK_VSLE_VX 0xfc00707f
-#define MATCH_VSGTU_VX 0x78004057
-#define MASK_VSGTU_VX 0xfc00707f
-#define MATCH_VSGT_VX 0x7c004057
-#define MASK_VSGT_VX 0xfc00707f
+#define MATCH_VMSEQ_VX 0x60004057
+#define MASK_VMSEQ_VX 0xfc00707f
+#define MATCH_VMSNE_VX 0x64004057
+#define MASK_VMSNE_VX 0xfc00707f
+#define MATCH_VMSLTU_VX 0x68004057
+#define MASK_VMSLTU_VX 0xfc00707f
+#define MATCH_VMSLT_VX 0x6c004057
+#define MASK_VMSLT_VX 0xfc00707f
+#define MATCH_VMSLEU_VX 0x70004057
+#define MASK_VMSLEU_VX 0xfc00707f
+#define MATCH_VMSLE_VX 0x74004057
+#define MASK_VMSLE_VX 0xfc00707f
+#define MATCH_VMSGTU_VX 0x78004057
+#define MASK_VMSGTU_VX 0xfc00707f
+#define MATCH_VMSGT_VX 0x7c004057
+#define MASK_VMSGT_VX 0xfc00707f
 #define MATCH_VSADDU_VX 0x80004057
 #define MASK_VSADDU_VX 0xfc00707f
 #define MATCH_VSADD_VX 0x84004057
@@ -1122,18 +1122,18 @@
 #define MASK_VSBC_VV 0xfe00707f
 #define MATCH_VMERGE_VV 0x5c000057
 #define MASK_VMERGE_VV 0xfc00707f
-#define MATCH_VSEQ_VV 0x60000057
-#define MASK_VSEQ_VV 0xfc00707f
-#define MATCH_VSNE_VV 0x64000057
-#define MASK_VSNE_VV 0xfc00707f
-#define MATCH_VSLTU_VV 0x68000057
-#define MASK_VSLTU_VV 0xfc00707f
-#define MATCH_VSLT_VV 0x6c000057
-#define MASK_VSLT_VV 0xfc00707f
-#define MATCH_VSLEU_VV 0x70000057
-#define MASK_VSLEU_VV 0xfc00707f
-#define MATCH_VSLE_VV 0x74000057
-#define MASK_VSLE_VV 0xfc00707f
+#define MATCH_VMSEQ_VV 0x60000057
+#define MASK_VMSEQ_VV 0xfc00707f
+#define MATCH_VMSNE_VV 0x64000057
+#define MASK_VMSNE_VV 0xfc00707f
+#define MATCH_VMSLTU_VV 0x68000057
+#define MASK_VMSLTU_VV 0xfc00707f
+#define MATCH_VMSLT_VV 0x6c000057
+#define MASK_VMSLT_VV 0xfc00707f
+#define MATCH_VMSLEU_VV 0x70000057
+#define MASK_VMSLEU_VV 0xfc00707f
+#define MATCH_VMSLE_VV 0x74000057
+#define MASK_VMSLE_VV 0xfc00707f
 #define MATCH_VSADDU_VV 0x80000057
 #define MASK_VSADDU_VV 0xfc00707f
 #define MATCH_VSADD_VV 0x84000057
@@ -1202,18 +1202,18 @@
 #define MASK_VADC_VI 0xfe00707f
 #define MATCH_VMERGE_VI 0x5c003057
 #define MASK_VMERGE_VI 0xfc00707f
-#define MATCH_VSEQ_VI 0x60003057
-#define MASK_VSEQ_VI 0xfc00707f
-#define MATCH_VSNE_VI 0x64003057
-#define MASK_VSNE_VI 0xfc00707f
-#define MATCH_VSLEU_VI 0x70003057
-#define MASK_VSLEU_VI 0xfc00707f
-#define MATCH_VSLE_VI 0x74003057
-#define MASK_VSLE_VI 0xfc00707f
-#define MATCH_VSGTU_VI 0x78003057
-#define MASK_VSGTU_VI 0xfc00707f
-#define MATCH_VSGT_VI 0x7c003057
-#define MASK_VSGT_VI 0xfc00707f
+#define MATCH_VMSEQ_VI 0x60003057
+#define MASK_VMSEQ_VI 0xfc00707f
+#define MATCH_VMSNE_VI 0x64003057
+#define MASK_VMSNE_VI 0xfc00707f
+#define MATCH_VMSLEU_VI 0x70003057
+#define MASK_VMSLEU_VI 0xfc00707f
+#define MATCH_VMSLE_VI 0x74003057
+#define MASK_VMSLE_VI 0xfc00707f
+#define MATCH_VMSGTU_VI 0x78003057
+#define MASK_VMSGTU_VI 0xfc00707f
+#define MATCH_VMSGT_VI 0x7c003057
+#define MASK_VMSGT_VI 0xfc00707f
 #define MATCH_VSADDU_VI 0x80003057
 #define MASK_VSADDU_VI 0xfc00707f
 #define MATCH_VSADD_VI 0x84003057
@@ -2133,14 +2133,14 @@ DECLARE_INSN(vslidedown_vx, MATCH_VSLIDEDOWN_VX, MASK_VSLIDEDOWN_VX)
 DECLARE_INSN(vadc_vx, MATCH_VADC_VX, MASK_VADC_VX)
 DECLARE_INSN(vsbc_vx, MATCH_VSBC_VX, MASK_VSBC_VX)
 DECLARE_INSN(vmerge_vx, MATCH_VMERGE_VX, MASK_VMERGE_VX)
-DECLARE_INSN(vseq_vx, MATCH_VSEQ_VX, MASK_VSEQ_VX)
-DECLARE_INSN(vsne_vx, MATCH_VSNE_VX, MASK_VSNE_VX)
-DECLARE_INSN(vsltu_vx, MATCH_VSLTU_VX, MASK_VSLTU_VX)
-DECLARE_INSN(vslt_vx, MATCH_VSLT_VX, MASK_VSLT_VX)
-DECLARE_INSN(vsleu_vx, MATCH_VSLEU_VX, MASK_VSLEU_VX)
-DECLARE_INSN(vsle_vx, MATCH_VSLE_VX, MASK_VSLE_VX)
-DECLARE_INSN(vsgtu_vx, MATCH_VSGTU_VX, MASK_VSGTU_VX)
-DECLARE_INSN(vsgt_vx, MATCH_VSGT_VX, MASK_VSGT_VX)
+DECLARE_INSN(vmseq_vx, MATCH_VMSEQ_VX, MASK_VMSEQ_VX)
+DECLARE_INSN(vmsne_vx, MATCH_VMSNE_VX, MASK_VMSNE_VX)
+DECLARE_INSN(vmsltu_vx, MATCH_VMSLTU_VX, MASK_VMSLTU_VX)
+DECLARE_INSN(vmslt_vx, MATCH_VMSLT_VX, MASK_VMSLT_VX)
+DECLARE_INSN(vmsleu_vx, MATCH_VMSLEU_VX, MASK_VMSLEU_VX)
+DECLARE_INSN(vmsle_vx, MATCH_VMSLE_VX, MASK_VMSLE_VX)
+DECLARE_INSN(vmsgtu_vx, MATCH_VMSGTU_VX, MASK_VMSGTU_VX)
+DECLARE_INSN(vmsgt_vx, MATCH_VMSGT_VX, MASK_VMSGT_VX)
 DECLARE_INSN(vsaddu_vx, MATCH_VSADDU_VX, MASK_VSADDU_VX)
 DECLARE_INSN(vsadd_vx, MATCH_VSADD_VX, MASK_VSADD_VX)
 DECLARE_INSN(vssubu_vx, MATCH_VSSUBU_VX, MASK_VSSUBU_VX)
@@ -2174,12 +2174,12 @@ DECLARE_INSN(vrgather_vv, MATCH_VRGATHER_VV, MASK_VRGATHER_VV)
 DECLARE_INSN(vadc_vv, MATCH_VADC_VV, MASK_VADC_VV)
 DECLARE_INSN(vsbc_vv, MATCH_VSBC_VV, MASK_VSBC_VV)
 DECLARE_INSN(vmerge_vv, MATCH_VMERGE_VV, MASK_VMERGE_VV)
-DECLARE_INSN(vseq_vv, MATCH_VSEQ_VV, MASK_VSEQ_VV)
-DECLARE_INSN(vsne_vv, MATCH_VSNE_VV, MASK_VSNE_VV)
-DECLARE_INSN(vsltu_vv, MATCH_VSLTU_VV, MASK_VSLTU_VV)
-DECLARE_INSN(vslt_vv, MATCH_VSLT_VV, MASK_VSLT_VV)
-DECLARE_INSN(vsleu_vv, MATCH_VSLEU_VV, MASK_VSLEU_VV)
-DECLARE_INSN(vsle_vv, MATCH_VSLE_VV, MASK_VSLE_VV)
+DECLARE_INSN(vmseq_vv, MATCH_VMSEQ_VV, MASK_VMSEQ_VV)
+DECLARE_INSN(vmsne_vv, MATCH_VMSNE_VV, MASK_VMSNE_VV)
+DECLARE_INSN(vmsltu_vv, MATCH_VMSLTU_VV, MASK_VMSLTU_VV)
+DECLARE_INSN(vmslt_vv, MATCH_VMSLT_VV, MASK_VMSLT_VV)
+DECLARE_INSN(vmsleu_vv, MATCH_VMSLEU_VV, MASK_VMSLEU_VV)
+DECLARE_INSN(vmsle_vv, MATCH_VMSLE_VV, MASK_VMSLE_VV)
 DECLARE_INSN(vsaddu_vv, MATCH_VSADDU_VV, MASK_VSADDU_VV)
 DECLARE_INSN(vsadd_vv, MATCH_VSADD_VV, MASK_VSADD_VV)
 DECLARE_INSN(vssubu_vv, MATCH_VSSUBU_VV, MASK_VSSUBU_VV)
@@ -2214,12 +2214,12 @@ DECLARE_INSN(vslideup_vi, MATCH_VSLIDEUP_VI, MASK_VSLIDEUP_VI)
 DECLARE_INSN(vslidedown_vi, MATCH_VSLIDEDOWN_VI, MASK_VSLIDEDOWN_VI)
 DECLARE_INSN(vadc_vi, MATCH_VADC_VI, MASK_VADC_VI)
 DECLARE_INSN(vmerge_vi, MATCH_VMERGE_VI, MASK_VMERGE_VI)
-DECLARE_INSN(vseq_vi, MATCH_VSEQ_VI, MASK_VSEQ_VI)
-DECLARE_INSN(vsne_vi, MATCH_VSNE_VI, MASK_VSNE_VI)
-DECLARE_INSN(vsleu_vi, MATCH_VSLEU_VI, MASK_VSLEU_VI)
-DECLARE_INSN(vsle_vi, MATCH_VSLE_VI, MASK_VSLE_VI)
-DECLARE_INSN(vsgtu_vi, MATCH_VSGTU_VI, MASK_VSGTU_VI)
-DECLARE_INSN(vsgt_vi, MATCH_VSGT_VI, MASK_VSGT_VI)
+DECLARE_INSN(vmseq_vi, MATCH_VMSEQ_VI, MASK_VMSEQ_VI)
+DECLARE_INSN(vmsne_vi, MATCH_VMSNE_VI, MASK_VMSNE_VI)
+DECLARE_INSN(vmsleu_vi, MATCH_VMSLEU_VI, MASK_VMSLEU_VI)
+DECLARE_INSN(vmsle_vi, MATCH_VMSLE_VI, MASK_VMSLE_VI)
+DECLARE_INSN(vmsgtu_vi, MATCH_VMSGTU_VI, MASK_VMSGTU_VI)
+DECLARE_INSN(vmsgt_vi, MATCH_VMSGT_VI, MASK_VMSGT_VI)
 DECLARE_INSN(vsaddu_vi, MATCH_VSADDU_VI, MASK_VSADDU_VI)
 DECLARE_INSN(vsadd_vi, MATCH_VSADD_VI, MASK_VSADD_VI)
 DECLARE_INSN(vaadd_vi, MATCH_VAADD_VI, MASK_VAADD_VI)
--
cgit v1.1
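For context, the rename only changes the mnemonic-derived identifiers; every MATCH/MASK pair keeps its encoding, and the vm* prefix marks these compares as mask-producing instructions in the vector spec. A decoder uses such a pair by masking off the variable register and vm fields of an instruction word and comparing the remainder against the MATCH value. The C sketch below is illustrative only, assuming a hypothetical is_vmseq_vv helper and a hand-built sample encoding that are not part of this patch or of any particular decoder:

#include <stdint.h>
#include <stdio.h>

/* Values taken from the patched riscv/encoding.h. */
#define MATCH_VMSEQ_VV 0x60000057
#define MASK_VMSEQ_VV  0xfc00707f

/* Hypothetical helper (not part of this patch): an instruction word decodes
 * as vmseq.vv when its fixed opcode/funct bits equal the MATCH pattern once
 * the variable register and vm fields are masked off. */
static int is_vmseq_vv(uint32_t insn)
{
    return (insn & MASK_VMSEQ_VV) == MATCH_VMSEQ_VV;
}

int main(void)
{
    /* Assumed example encoding: the MATCH bits plus arbitrary register and
     * vm fields, all of which lie outside MASK_VMSEQ_VV. */
    uint32_t insn = MATCH_VMSEQ_VV
                  | (1u << 25)   /* vm  */
                  | (2u << 20)   /* vs2 */
                  | (3u << 15)   /* vs1 */
                  | (1u << 7);   /* vd  */
    printf("decodes as vmseq.vv: %d\n", is_vmseq_vv(insn));  /* prints 1 */
    return 0;
}

Because the register fields are excluded from the mask, the same MATCH/MASK pair recognizes every register combination of the instruction, which is why only the names needed to change here.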