path: root/riscv/encoding.h
author    Andrew Waterman <andrew@sifive.com>  2019-11-15 14:33:28 -0800
committer Andrew Waterman <andrew@sifive.com>  2019-11-15 14:41:19 -0800
commit    4679a2c9815f74d6f38b4ab3bbd10499bcf86501 (patch)
tree      a815b299182478dcb7801d66980b9639d761c343 /riscv/encoding.h
parent    2704790df5d16868571bacf4c521df4bac87f452 (diff)
Re-encode vaadd/vasub; remove vaadd.vi; add vaaddu/vasubu
Diffstat (limited to 'riscv/encoding.h')
-rw-r--r--  riscv/encoding.h  98
1 file changed, 55 insertions(+), 43 deletions(-)
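
A note on how these constants are consumed (an editorial aside, not part of the commit): each instruction gets a MATCH/MASK pair, and a decoder recognizes an instruction word when the bits selected by the mask equal the match value; the mask covers only the fixed fields (major opcode, funct3, funct6), leaving the register operands and the vm bit free. A minimal C sketch of that idiom using the new vaadd.vv pair from this patch; the is_vaadd_vv helper and the hand-packed operand fields are illustrative, not Spike code:

#include <stdint.h>
#include <stdio.h>

#define MATCH_VAADD_VV 0x24002057
#define MASK_VAADD_VV  0xfc00707f

/* An instruction word matches when its fixed bits, selected by the
   mask, equal the match constant; operand fields fall outside MASK. */
static int is_vaadd_vv(uint32_t insn) {
  return (insn & MASK_VAADD_VV) == MATCH_VAADD_VV;
}

int main(void) {
  /* Hand-assemble vaadd.vv v1, v2, v3, v0.t: vs2=2 (bits 24:20),
     vs1=3 (bits 19:15), vd=1 (bits 11:7), vm=0 (masked). */
  uint32_t insn = MATCH_VAADD_VV | (2u << 20) | (3u << 15) | (1u << 7);
  printf("%d\n", is_vaadd_vv(insn)); /* prints 1 */
  return 0;
}
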
diff --git a/riscv/encoding.h b/riscv/encoding.h
index 17ba2d3..8620092 100644
--- a/riscv/encoding.h
+++ b/riscv/encoding.h
@@ -221,7 +221,7 @@
#endif
#endif
-/* Automatically generated by parse-opcodes. */
+/* Automatically generated by parse_opcodes. */
#ifndef RISCV_ENCODING_H
#define RISCV_ENCODING_H
#define MATCH_BEQ 0x63
@@ -1012,6 +1012,8 @@
#define MASK_VFNCVT_F_X_V 0xfc0ff07f
#define MATCH_VFNCVT_F_F_V 0x880a1057
#define MASK_VFNCVT_F_F_V 0xfc0ff07f
+#define MATCH_VFNCVT_ROD_F_F_V 0x880a9057
+#define MASK_VFNCVT_ROD_F_F_V 0xfc0ff07f
#define MATCH_VFSQRT_V 0x8c001057
#define MASK_VFSQRT_V 0xfc0ff07f
#define MATCH_VFCLASS_V 0x8c081057
@@ -1066,14 +1068,14 @@
#define MASK_VSLIDEUP_VX 0xfc00707f
#define MATCH_VSLIDEDOWN_VX 0x3c004057
#define MASK_VSLIDEDOWN_VX 0xfc00707f
-#define MATCH_VADC_VXM 0x42004057
+#define MATCH_VADC_VXM 0x40004057
#define MASK_VADC_VXM 0xfe00707f
-#define MATCH_VMADC_VXM 0x46004057
-#define MASK_VMADC_VXM 0xfe00707f
-#define MATCH_VSBC_VXM 0x4a004057
+#define MATCH_VMADC_VXM 0x44004057
+#define MASK_VMADC_VXM 0xfc00707f
+#define MATCH_VSBC_VXM 0x48004057
#define MASK_VSBC_VXM 0xfe00707f
-#define MATCH_VMSBC_VXM 0x4e004057
-#define MASK_VMSBC_VXM 0xfe00707f
+#define MATCH_VMSBC_VXM 0x4c004057
+#define MASK_VMSBC_VXM 0xfc00707f
#define MATCH_VMERGE_VXM 0x5c004057
#define MASK_VMERGE_VXM 0xfe00707f
#define MATCH_VMV_V_X 0x5e004057
@@ -1102,12 +1104,8 @@
#define MASK_VSSUBU_VX 0xfc00707f
#define MATCH_VSSUB_VX 0x8c004057
#define MASK_VSSUB_VX 0xfc00707f
-#define MATCH_VAADD_VX 0x90004057
-#define MASK_VAADD_VX 0xfc00707f
#define MATCH_VSLL_VX 0x94004057
#define MASK_VSLL_VX 0xfc00707f
-#define MATCH_VASUB_VX 0x98004057
-#define MASK_VASUB_VX 0xfc00707f
#define MATCH_VSMUL_VX 0x9c004057
#define MASK_VSMUL_VX 0xfc00707f
#define MATCH_VSRL_VX 0xa0004057
@@ -1130,10 +1128,10 @@
#define MASK_VWSMACCU_VX 0xfc00707f
#define MATCH_VWSMACC_VX 0xf4004057
#define MASK_VWSMACC_VX 0xfc00707f
-#define MATCH_VWSMACCSU_VX 0xf8004057
-#define MASK_VWSMACCSU_VX 0xfc00707f
-#define MATCH_VWSMACCUS_VX 0xfc004057
+#define MATCH_VWSMACCUS_VX 0xf8004057
#define MASK_VWSMACCUS_VX 0xfc00707f
+#define MATCH_VWSMACCSU_VX 0xfc004057
+#define MASK_VWSMACCSU_VX 0xfc00707f
#define MATCH_VADD_VV 0x57
#define MASK_VADD_VV 0xfc00707f
#define MATCH_VSUB_VV 0x8000057
@@ -1154,14 +1152,14 @@
#define MASK_VXOR_VV 0xfc00707f
#define MATCH_VRGATHER_VV 0x30000057
#define MASK_VRGATHER_VV 0xfc00707f
-#define MATCH_VADC_VVM 0x42000057
+#define MATCH_VADC_VVM 0x40000057
#define MASK_VADC_VVM 0xfe00707f
-#define MATCH_VMADC_VVM 0x46000057
-#define MASK_VMADC_VVM 0xfe00707f
-#define MATCH_VSBC_VVM 0x4a000057
+#define MATCH_VMADC_VVM 0x44000057
+#define MASK_VMADC_VVM 0xfc00707f
+#define MATCH_VSBC_VVM 0x48000057
#define MASK_VSBC_VVM 0xfe00707f
-#define MATCH_VMSBC_VVM 0x4e000057
-#define MASK_VMSBC_VVM 0xfe00707f
+#define MATCH_VMSBC_VVM 0x4c000057
+#define MASK_VMSBC_VVM 0xfc00707f
#define MATCH_VMERGE_VVM 0x5c000057
#define MASK_VMERGE_VVM 0xfe00707f
#define MATCH_VMV_V_V 0x5e000057
@@ -1186,12 +1184,8 @@
#define MASK_VSSUBU_VV 0xfc00707f
#define MATCH_VSSUB_VV 0x8c000057
#define MASK_VSSUB_VV 0xfc00707f
-#define MATCH_VAADD_VV 0x90000057
-#define MASK_VAADD_VV 0xfc00707f
#define MATCH_VSLL_VV 0x94000057
#define MASK_VSLL_VV 0xfc00707f
-#define MATCH_VASUB_VV 0x98000057
-#define MASK_VASUB_VV 0xfc00707f
#define MATCH_VSMUL_VV 0x9c000057
#define MASK_VSMUL_VV 0xfc00707f
#define MATCH_VSRL_VV 0xa0000057
@@ -1222,7 +1216,7 @@
#define MASK_VWSMACCU_VV 0xfc00707f
#define MATCH_VWSMACC_VV 0xf4000057
#define MASK_VWSMACC_VV 0xfc00707f
-#define MATCH_VWSMACCSU_VV 0xf8000057
+#define MATCH_VWSMACCSU_VV 0xfc000057
#define MASK_VWSMACCSU_VV 0xfc00707f
#define MATCH_VADD_VI 0x3057
#define MASK_VADD_VI 0xfc00707f
@@ -1240,10 +1234,10 @@
#define MASK_VSLIDEUP_VI 0xfc00707f
#define MATCH_VSLIDEDOWN_VI 0x3c003057
#define MASK_VSLIDEDOWN_VI 0xfc00707f
-#define MATCH_VADC_VIM 0x42003057
+#define MATCH_VADC_VIM 0x40003057
#define MASK_VADC_VIM 0xfe00707f
-#define MATCH_VMADC_VIM 0x46003057
-#define MASK_VMADC_VIM 0xfe00707f
+#define MATCH_VMADC_VIM 0x44003057
+#define MASK_VMADC_VIM 0xfc00707f
#define MATCH_VMERGE_VIM 0x5c003057
#define MASK_VMERGE_VIM 0xfe00707f
#define MATCH_VMV_V_I 0x5e003057
@@ -1264,8 +1258,6 @@
#define MASK_VSADDU_VI 0xfc00707f
#define MATCH_VSADD_VI 0x84003057
#define MASK_VSADD_VI 0xfc00707f
-#define MATCH_VAADD_VI 0x90003057
-#define MASK_VAADD_VI 0xfc00707f
#define MATCH_VSLL_VI 0x94003057
#define MASK_VSLL_VI 0xfc00707f
#define MATCH_VSRL_VI 0xa0003057
@@ -1300,10 +1292,18 @@
#define MASK_VREDMAXU_VS 0xfc00707f
#define MATCH_VREDMAX_VS 0x1c002057
#define MASK_VREDMAX_VS 0xfc00707f
+#define MATCH_VAADDU_VV 0x20002057
+#define MASK_VAADDU_VV 0xfc00707f
+#define MATCH_VAADD_VV 0x24002057
+#define MASK_VAADD_VV 0xfc00707f
+#define MATCH_VASUBU_VV 0x28002057
+#define MASK_VASUBU_VV 0xfc00707f
+#define MATCH_VASUB_VV 0x2c002057
+#define MASK_VASUB_VV 0xfc00707f
#define MATCH_VMV_X_S 0x42002057
#define MASK_VMV_X_S 0xfe0ff07f
-#define MATCH_VCOMPRESS_VM 0x5c002057
-#define MASK_VCOMPRESS_VM 0xfc00707f
+#define MATCH_VCOMPRESS_VM 0x5e002057
+#define MASK_VCOMPRESS_VM 0xfe00707f
#define MATCH_VMANDNOT_MM 0x60002057
#define MASK_VMANDNOT_MM 0xfc00707f
#define MATCH_VMAND_MM 0x64002057
@@ -1384,8 +1384,16 @@
#define MASK_VWMACCU_VV 0xfc00707f
#define MATCH_VWMACC_VV 0xf4002057
#define MASK_VWMACC_VV 0xfc00707f
-#define MATCH_VWMACCSU_VV 0xf8002057
+#define MATCH_VWMACCSU_VV 0xfc002057
#define MASK_VWMACCSU_VV 0xfc00707f
+#define MATCH_VAADDU_VX 0x20006057
+#define MASK_VAADDU_VX 0xfc00707f
+#define MATCH_VAADD_VX 0x24006057
+#define MASK_VAADD_VX 0xfc00707f
+#define MATCH_VASUBU_VX 0x28006057
+#define MASK_VASUBU_VX 0xfc00707f
+#define MATCH_VASUB_VX 0x2c006057
+#define MASK_VASUB_VX 0xfc00707f
#define MATCH_VMV_S_X 0x42006057
#define MASK_VMV_S_X 0xfff0707f
#define MATCH_VSLIDE1UP_VX 0x38006057
@@ -1442,10 +1450,10 @@
#define MASK_VWMACCU_VX 0xfc00707f
#define MATCH_VWMACC_VX 0xf4006057
#define MASK_VWMACC_VX 0xfc00707f
-#define MATCH_VWMACCSU_VX 0xf8006057
-#define MASK_VWMACCSU_VX 0xfc00707f
-#define MATCH_VWMACCUS_VX 0xfc006057
+#define MATCH_VWMACCUS_VX 0xf8006057
#define MASK_VWMACCUS_VX 0xfc00707f
+#define MATCH_VWMACCSU_VX 0xfc006057
+#define MASK_VWMACCSU_VX 0xfc00707f
#define MATCH_VAMOSWAPW_V 0x800602f
#define MASK_VAMOSWAPW_V 0xf800707f
#define MATCH_VAMOADDW_V 0x602f
@@ -2164,6 +2172,7 @@ DECLARE_INSN(vfncvt_x_f_v, MATCH_VFNCVT_X_F_V, MASK_VFNCVT_X_F_V)
DECLARE_INSN(vfncvt_f_xu_v, MATCH_VFNCVT_F_XU_V, MASK_VFNCVT_F_XU_V)
DECLARE_INSN(vfncvt_f_x_v, MATCH_VFNCVT_F_X_V, MASK_VFNCVT_F_X_V)
DECLARE_INSN(vfncvt_f_f_v, MATCH_VFNCVT_F_F_V, MASK_VFNCVT_F_F_V)
+DECLARE_INSN(vfncvt_rod_f_f_v, MATCH_VFNCVT_ROD_F_F_V, MASK_VFNCVT_ROD_F_F_V)
DECLARE_INSN(vfsqrt_v, MATCH_VFSQRT_V, MASK_VFSQRT_V)
DECLARE_INSN(vfclass_v, MATCH_VFCLASS_V, MASK_VFCLASS_V)
DECLARE_INSN(vfwadd_vv, MATCH_VFWADD_VV, MASK_VFWADD_VV)
@@ -2209,9 +2218,7 @@ DECLARE_INSN(vsaddu_vx, MATCH_VSADDU_VX, MASK_VSADDU_VX)
DECLARE_INSN(vsadd_vx, MATCH_VSADD_VX, MASK_VSADD_VX)
DECLARE_INSN(vssubu_vx, MATCH_VSSUBU_VX, MASK_VSSUBU_VX)
DECLARE_INSN(vssub_vx, MATCH_VSSUB_VX, MASK_VSSUB_VX)
-DECLARE_INSN(vaadd_vx, MATCH_VAADD_VX, MASK_VAADD_VX)
DECLARE_INSN(vsll_vx, MATCH_VSLL_VX, MASK_VSLL_VX)
-DECLARE_INSN(vasub_vx, MATCH_VASUB_VX, MASK_VASUB_VX)
DECLARE_INSN(vsmul_vx, MATCH_VSMUL_VX, MASK_VSMUL_VX)
DECLARE_INSN(vsrl_vx, MATCH_VSRL_VX, MASK_VSRL_VX)
DECLARE_INSN(vsra_vx, MATCH_VSRA_VX, MASK_VSRA_VX)
@@ -2223,8 +2230,8 @@ DECLARE_INSN(vnclipu_vx, MATCH_VNCLIPU_VX, MASK_VNCLIPU_VX)
DECLARE_INSN(vnclip_vx, MATCH_VNCLIP_VX, MASK_VNCLIP_VX)
DECLARE_INSN(vwsmaccu_vx, MATCH_VWSMACCU_VX, MASK_VWSMACCU_VX)
DECLARE_INSN(vwsmacc_vx, MATCH_VWSMACC_VX, MASK_VWSMACC_VX)
-DECLARE_INSN(vwsmaccsu_vx, MATCH_VWSMACCSU_VX, MASK_VWSMACCSU_VX)
DECLARE_INSN(vwsmaccus_vx, MATCH_VWSMACCUS_VX, MASK_VWSMACCUS_VX)
+DECLARE_INSN(vwsmaccsu_vx, MATCH_VWSMACCSU_VX, MASK_VWSMACCSU_VX)
DECLARE_INSN(vadd_vv, MATCH_VADD_VV, MASK_VADD_VV)
DECLARE_INSN(vsub_vv, MATCH_VSUB_VV, MASK_VSUB_VV)
DECLARE_INSN(vminu_vv, MATCH_VMINU_VV, MASK_VMINU_VV)
@@ -2251,9 +2258,7 @@ DECLARE_INSN(vsaddu_vv, MATCH_VSADDU_VV, MASK_VSADDU_VV)
DECLARE_INSN(vsadd_vv, MATCH_VSADD_VV, MASK_VSADD_VV)
DECLARE_INSN(vssubu_vv, MATCH_VSSUBU_VV, MASK_VSSUBU_VV)
DECLARE_INSN(vssub_vv, MATCH_VSSUB_VV, MASK_VSSUB_VV)
-DECLARE_INSN(vaadd_vv, MATCH_VAADD_VV, MASK_VAADD_VV)
DECLARE_INSN(vsll_vv, MATCH_VSLL_VV, MASK_VSLL_VV)
-DECLARE_INSN(vasub_vv, MATCH_VASUB_VV, MASK_VASUB_VV)
DECLARE_INSN(vsmul_vv, MATCH_VSMUL_VV, MASK_VSMUL_VV)
DECLARE_INSN(vsrl_vv, MATCH_VSRL_VV, MASK_VSRL_VV)
DECLARE_INSN(vsra_vv, MATCH_VSRA_VV, MASK_VSRA_VV)
@@ -2290,7 +2295,6 @@ DECLARE_INSN(vmsgtu_vi, MATCH_VMSGTU_VI, MASK_VMSGTU_VI)
DECLARE_INSN(vmsgt_vi, MATCH_VMSGT_VI, MASK_VMSGT_VI)
DECLARE_INSN(vsaddu_vi, MATCH_VSADDU_VI, MASK_VSADDU_VI)
DECLARE_INSN(vsadd_vi, MATCH_VSADD_VI, MASK_VSADD_VI)
-DECLARE_INSN(vaadd_vi, MATCH_VAADD_VI, MASK_VAADD_VI)
DECLARE_INSN(vsll_vi, MATCH_VSLL_VI, MASK_VSLL_VI)
DECLARE_INSN(vsrl_vi, MATCH_VSRL_VI, MASK_VSRL_VI)
DECLARE_INSN(vsra_vi, MATCH_VSRA_VI, MASK_VSRA_VI)
@@ -2308,6 +2312,10 @@ DECLARE_INSN(vredminu_vs, MATCH_VREDMINU_VS, MASK_VREDMINU_VS)
DECLARE_INSN(vredmin_vs, MATCH_VREDMIN_VS, MASK_VREDMIN_VS)
DECLARE_INSN(vredmaxu_vs, MATCH_VREDMAXU_VS, MASK_VREDMAXU_VS)
DECLARE_INSN(vredmax_vs, MATCH_VREDMAX_VS, MASK_VREDMAX_VS)
+DECLARE_INSN(vaaddu_vv, MATCH_VAADDU_VV, MASK_VAADDU_VV)
+DECLARE_INSN(vaadd_vv, MATCH_VAADD_VV, MASK_VAADD_VV)
+DECLARE_INSN(vasubu_vv, MATCH_VASUBU_VV, MASK_VASUBU_VV)
+DECLARE_INSN(vasub_vv, MATCH_VASUB_VV, MASK_VASUB_VV)
DECLARE_INSN(vmv_x_s, MATCH_VMV_X_S, MASK_VMV_X_S)
DECLARE_INSN(vcompress_vm, MATCH_VCOMPRESS_VM, MASK_VCOMPRESS_VM)
DECLARE_INSN(vmandnot_mm, MATCH_VMANDNOT_MM, MASK_VMANDNOT_MM)
@@ -2351,6 +2359,10 @@ DECLARE_INSN(vwmul_vv, MATCH_VWMUL_VV, MASK_VWMUL_VV)
DECLARE_INSN(vwmaccu_vv, MATCH_VWMACCU_VV, MASK_VWMACCU_VV)
DECLARE_INSN(vwmacc_vv, MATCH_VWMACC_VV, MASK_VWMACC_VV)
DECLARE_INSN(vwmaccsu_vv, MATCH_VWMACCSU_VV, MASK_VWMACCSU_VV)
+DECLARE_INSN(vaaddu_vx, MATCH_VAADDU_VX, MASK_VAADDU_VX)
+DECLARE_INSN(vaadd_vx, MATCH_VAADD_VX, MASK_VAADD_VX)
+DECLARE_INSN(vasubu_vx, MATCH_VASUBU_VX, MASK_VASUBU_VX)
+DECLARE_INSN(vasub_vx, MATCH_VASUB_VX, MASK_VASUB_VX)
DECLARE_INSN(vmv_s_x, MATCH_VMV_S_X, MASK_VMV_S_X)
DECLARE_INSN(vslide1up_vx, MATCH_VSLIDE1UP_VX, MASK_VSLIDE1UP_VX)
DECLARE_INSN(vslide1down_vx, MATCH_VSLIDE1DOWN_VX, MASK_VSLIDE1DOWN_VX)
@@ -2379,8 +2391,8 @@ DECLARE_INSN(vwmulsu_vx, MATCH_VWMULSU_VX, MASK_VWMULSU_VX)
DECLARE_INSN(vwmul_vx, MATCH_VWMUL_VX, MASK_VWMUL_VX)
DECLARE_INSN(vwmaccu_vx, MATCH_VWMACCU_VX, MASK_VWMACCU_VX)
DECLARE_INSN(vwmacc_vx, MATCH_VWMACC_VX, MASK_VWMACC_VX)
-DECLARE_INSN(vwmaccsu_vx, MATCH_VWMACCSU_VX, MASK_VWMACCSU_VX)
DECLARE_INSN(vwmaccus_vx, MATCH_VWMACCUS_VX, MASK_VWMACCUS_VX)
+DECLARE_INSN(vwmaccsu_vx, MATCH_VWMACCSU_VX, MASK_VWMACCSU_VX)
DECLARE_INSN(vamoswapw_v, MATCH_VAMOSWAPW_V, MASK_VAMOSWAPW_V)
DECLARE_INSN(vamoaddw_v, MATCH_VAMOADDW_V, MASK_VAMOADDW_V)
DECLARE_INSN(vamoxorw_v, MATCH_VAMOXORW_V, MASK_VAMOXORW_V)
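
A closing observation on what the re-encoding actually moves (derived from the constants above, assuming the standard RVV field layout with the major opcode in bits [6:0], funct3 in bits [14:12], and funct6 in bits [31:26]): the removed vaadd/vasub constants sat in the OPIVV/OPIVX/OPIVI spaces (funct3 values 0, 4, and 3), while the replacement vaaddu/vaadd/vasubu/vasub constants carry funct3 2 (OPMVV) and 6 (OPMVX). OPM encodings have no immediate form, which is consistent with vaadd.vi being dropped. A small C check of that reading, not taken from the repo:

#include <stdint.h>
#include <stdio.h>

/* Extract the standard RVV fields from a MATCH constant. */
static uint32_t funct3(uint32_t insn) { return (insn >> 12) & 0x7; }
static uint32_t funct6(uint32_t insn) { return insn >> 26; }

int main(void) {
  printf("old vaadd.vv: funct3=%u funct6=0x%02x\n",
         funct3(0x90000057), funct6(0x90000057)); /* 0 (OPIVV), 0x24 */
  printf("new vaadd.vv: funct3=%u funct6=0x%02x\n",
         funct3(0x24002057), funct6(0x24002057)); /* 2 (OPMVV), 0x09 */
  printf("new vaadd.vx: funct3=%u funct6=0x%02x\n",
         funct3(0x24006057), funct6(0x24006057)); /* 6 (OPMVX), 0x09 */
  return 0;
}
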