about summary refs log tree commit diff
path: root/pk/fp.c
diff options
context:
space:
mode:
Diffstat (limited to 'pk/fp.c')
-rw-r--r--  pk/fp.c | 104
1 file changed, 52 insertions, 52 deletions
diff --git a/pk/fp.c b/pk/fp.c
index d9eb427..55e416d 100644
--- a/pk/fp.c
+++ b/pk/fp.c
@@ -61,22 +61,22 @@ int emulate_fp(trapframe_t* tf)
#define IS_INSN(x) ((tf->insn & MASK_ ## x) == MATCH_ ## x)
- if(IS_INSN(L_S))
+ if(IS_INSN(LF_W))
{
validate_address(tf, effective_address_load, 4, 0);
set_fp_reg(RRD, 0, *(uint32_t*)effective_address_load);
}
- else if(IS_INSN(L_D))
+ else if(IS_INSN(LF_D))
{
validate_address(tf, effective_address_load, 8, 0);
set_fp_reg(RRD, 1, *(uint64_t*)effective_address_load);
}
- else if(IS_INSN(S_S))
+ else if(IS_INSN(SF_W))
{
validate_address(tf, effective_address_store, 4, 1);
*(uint32_t*)effective_address_store = frs2s;
}
- else if(IS_INSN(S_D))
+ else if(IS_INSN(SF_D))
{
validate_address(tf, effective_address_store, 8, 1);
*(uint64_t*)effective_address_store = frs2d;
@@ -95,101 +95,101 @@ int emulate_fp(trapframe_t* tf)
set_fp_reg(RRD, 1, XRS1);
else if(IS_INSN(MTFLH_D))
set_fp_reg(RRD, 1, (uint32_t)XRS1 | (XRS2 << 32));
- else if(IS_INSN(SGNINJ_S))
+ else if(IS_INSN(FSINJ_S))
set_fp_reg(RRD, 0, (frs1s &~ (uint32_t)INT32_MIN) | (frs2s & (uint32_t)INT32_MIN));
- else if(IS_INSN(SGNINJ_D))
+ else if(IS_INSN(FSINJ_D))
set_fp_reg(RRD, 1, (frs1d &~ INT64_MIN) | (frs2d & INT64_MIN));
- else if(IS_INSN(SGNINJN_S))
+ else if(IS_INSN(FSINJN_S))
set_fp_reg(RRD, 0, (frs1s &~ (uint32_t)INT32_MIN) | ((~frs2s) & (uint32_t)INT32_MIN));
- else if(IS_INSN(SGNINJN_D))
+ else if(IS_INSN(FSINJN_D))
set_fp_reg(RRD, 1, (frs1d &~ INT64_MIN) | ((~frs2d) & INT64_MIN));
- else if(IS_INSN(SGNMUL_S))
+ else if(IS_INSN(FSMUL_S))
set_fp_reg(RRD, 0, frs1s ^ (frs2s & (uint32_t)INT32_MIN));
- else if(IS_INSN(SGNMUL_D))
+ else if(IS_INSN(FSMUL_D))
set_fp_reg(RRD, 1, frs1d ^ (frs2d & INT64_MIN));
- else if(IS_INSN(C_EQ_S))
+ else if(IS_INSN(FC_EQ_S))
XRDR = f32_eq(frs1s, frs2s);
- else if(IS_INSN(C_EQ_D))
+ else if(IS_INSN(FC_EQ_D))
XRDR = f64_eq(frs1d, frs2d);
- else if(IS_INSN(C_LE_S))
+ else if(IS_INSN(FC_LE_S))
XRDR = f32_le(frs1s, frs2s);
- else if(IS_INSN(C_LE_D))
+ else if(IS_INSN(FC_LE_D))
XRDR = f64_le(frs1d, frs2d);
- else if(IS_INSN(C_LT_S))
+ else if(IS_INSN(FC_LT_S))
XRDR = f32_lt(frs1s, frs2s);
- else if(IS_INSN(C_LT_D))
+ else if(IS_INSN(FC_LT_D))
XRDR = f64_lt(frs1d, frs2d);
- else if(IS_INSN(CVT_S_W))
+ else if(IS_INSN(FCVT_S_W))
set_fp_reg(RRD, 0, i32_to_f32(XRS1));
- else if(IS_INSN(CVT_S_L))
+ else if(IS_INSN(FCVT_S_L))
set_fp_reg(RRD, 0, i64_to_f32(XRS1));
- else if(IS_INSN(CVT_S_D))
+ else if(IS_INSN(FCVT_S_D))
set_fp_reg(RRD, 0, f64_to_f32(frs1d));
- else if(IS_INSN(CVT_D_W))
+ else if(IS_INSN(FCVT_D_W))
set_fp_reg(RRD, 1, i32_to_f64(XRS1));
- else if(IS_INSN(CVT_D_L))
+ else if(IS_INSN(FCVT_D_L))
set_fp_reg(RRD, 1, i64_to_f64(XRS1));
- else if(IS_INSN(CVT_D_S))
+ else if(IS_INSN(FCVT_D_S))
set_fp_reg(RRD, 1, f32_to_f64(frs1s));
- else if(IS_INSN(CVTU_S_W))
+ else if(IS_INSN(FCVTU_S_W))
set_fp_reg(RRD, 0, ui32_to_f32(XRS1));
- else if(IS_INSN(CVTU_S_L))
+ else if(IS_INSN(FCVTU_S_L))
set_fp_reg(RRD, 0, ui64_to_f32(XRS1));
- else if(IS_INSN(CVTU_D_W))
+ else if(IS_INSN(FCVTU_D_W))
set_fp_reg(RRD, 1, ui32_to_f64(XRS1));
- else if(IS_INSN(CVTU_D_L))
+ else if(IS_INSN(FCVTU_D_L))
set_fp_reg(RRD, 1, ui64_to_f64(XRS1));
- else if(IS_INSN(ADD_S))
+ else if(IS_INSN(FADD_S))
set_fp_reg(RRD, 0, f32_add(frs1s, frs2s));
- else if(IS_INSN(ADD_D))
+ else if(IS_INSN(FADD_D))
set_fp_reg(RRD, 1, f64_add(frs1d, frs2d));
- else if(IS_INSN(SUB_S))
+ else if(IS_INSN(FSUB_S))
set_fp_reg(RRD, 0, f32_sub(frs1s, frs2s));
- else if(IS_INSN(SUB_D))
+ else if(IS_INSN(FSUB_D))
set_fp_reg(RRD, 1, f64_sub(frs1d, frs2d));
- else if(IS_INSN(MUL_S))
+ else if(IS_INSN(FMUL_S))
set_fp_reg(RRD, 0, f32_mul(frs1s, frs2s));
- else if(IS_INSN(MUL_D))
+ else if(IS_INSN(FMUL_D))
set_fp_reg(RRD, 1, f64_mul(frs1d, frs2d));
- else if(IS_INSN(MADD_S))
+ else if(IS_INSN(FMADD_S))
set_fp_reg(RRD, 0, f32_mulAdd(frs1s, frs2s, frs3s));
- else if(IS_INSN(MADD_D))
+ else if(IS_INSN(FMADD_D))
set_fp_reg(RRD, 1, f64_mulAdd(frs1d, frs2d, frs3d));
- else if(IS_INSN(MSUB_S))
+ else if(IS_INSN(FMSUB_S))
set_fp_reg(RRD, 0, f32_mulAdd(frs1s, frs2s, frs3s ^ (uint32_t)INT32_MIN));
- else if(IS_INSN(MSUB_D))
+ else if(IS_INSN(FMSUB_D))
set_fp_reg(RRD, 1, f64_mulAdd(frs1d, frs2d, frs3d ^ INT64_MIN));
- else if(IS_INSN(NMADD_S))
+ else if(IS_INSN(FNMADD_S))
set_fp_reg(RRD, 0, f32_mulAdd(frs1s, frs2s, frs3s) ^ (uint32_t)INT32_MIN);
- else if(IS_INSN(NMADD_D))
+ else if(IS_INSN(FNMADD_D))
set_fp_reg(RRD, 1, f64_mulAdd(frs1d, frs2d, frs3d) ^ INT64_MIN);
- else if(IS_INSN(NMSUB_S))
+ else if(IS_INSN(FNMSUB_S))
set_fp_reg(RRD, 0, f32_mulAdd(frs1s, frs2s, frs3s ^ (uint32_t)INT32_MIN) ^ (uint32_t)INT32_MIN);
- else if(IS_INSN(NMSUB_D))
+ else if(IS_INSN(FNMSUB_D))
set_fp_reg(RRD, 1, f64_mulAdd(frs1d, frs2d, frs3d ^ INT64_MIN) ^ INT64_MIN);
- else if(IS_INSN(DIV_S))
+ else if(IS_INSN(FDIV_S))
set_fp_reg(RRD, 0, f32_div(frs1s, frs2s));
- else if(IS_INSN(DIV_D))
+ else if(IS_INSN(FDIV_D))
set_fp_reg(RRD, 1, f64_div(frs1d, frs2d));
- else if(IS_INSN(SQRT_S))
+ else if(IS_INSN(FSQRT_S))
set_fp_reg(RRD, 0, f32_sqrt(frs1s));
- else if(IS_INSN(SQRT_D))
+ else if(IS_INSN(FSQRT_D))
set_fp_reg(RRD, 1, f64_sqrt(frs1d));
- else if(IS_INSN(CVT_W_S))
+ else if(IS_INSN(FCVT_W_S))
XRDR = f32_to_i32_r_minMag(frs1s,true);
- else if(IS_INSN(CVT_W_D))
+ else if(IS_INSN(FCVT_W_D))
XRDR = f64_to_i32_r_minMag(frs1d,true);
- else if(IS_INSN(CVT_L_S))
+ else if(IS_INSN(FCVT_L_S))
XRDR = f32_to_i64_r_minMag(frs1s,true);
- else if(IS_INSN(CVT_L_D))
+ else if(IS_INSN(FCVT_L_D))
XRDR = f64_to_i64_r_minMag(frs1d,true);
- else if(IS_INSN(CVTU_W_S))
+ else if(IS_INSN(FCVTU_W_S))
XRDR = f32_to_ui32_r_minMag(frs1s,true);
- else if(IS_INSN(CVTU_W_D))
+ else if(IS_INSN(FCVTU_W_D))
XRDR = f64_to_ui32_r_minMag(frs1d,true);
- else if(IS_INSN(CVTU_L_S))
+ else if(IS_INSN(FCVTU_L_S))
XRDR = f32_to_ui64_r_minMag(frs1s,true);
- else if(IS_INSN(CVTU_L_D))
+ else if(IS_INSN(FCVTU_L_D))
XRDR = f64_to_ui64_r_minMag(frs1d,true);
else
return -1;