Diffstat (limited to 'env')
-rw-r--r--  env/pcr.h          |  4
-rw-r--r--  env/v/entry.S      | 22
-rw-r--r--  env/v/riscv_test.h | 57
-rw-r--r--  env/v/vm.c         | 41
4 files changed, 83 insertions(+), 41 deletions(-)
diff --git a/env/pcr.h b/env/pcr.h
index 75a349f..b90884b 100644
--- a/env/pcr.h
+++ b/env/pcr.h
@@ -11,10 +11,10 @@
#define SR_U64 0x00000020
#define SR_S64 0x00000040
#define SR_VM 0x00000080
-#define SR_EV 0x00000100
+#define SR_EA 0x00000100
#define SR_IM 0x00FF0000
#define SR_IP 0xFF000000
-#define SR_ZERO ~(SR_S|SR_PS|SR_EI|SR_PEI|SR_EF|SR_U64|SR_S64|SR_VM|SR_EV|SR_IM|SR_IP)
+#define SR_ZERO ~(SR_S|SR_PS|SR_EI|SR_PEI|SR_EF|SR_U64|SR_S64|SR_VM|SR_EA|SR_IM|SR_IP)
#define SR_IM_SHIFT 16
#define SR_IP_SHIFT 24
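Note: SR_EV is renamed to SR_EA here; it is the same bit position (0x00000100), now read as "enable accelerator" rather than "enable vector unit", and SR_ZERO is rebuilt so the reserved-bit mask still covers it. As a minimal sketch of how the bit gets tested (the helper name below is illustrative only, not part of this tree):

    /* Illustrative: does a saved status value have the accelerator enabled? */
    static inline int accel_enabled(long sr)
    {
        return (sr & SR_EA) != 0;   /* same bit formerly called SR_EV */
    }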
diff --git a/env/v/entry.S b/env/v/entry.S
index 7f54f3d..77107ff 100644
--- a/env/v/entry.S
+++ b/env/v/entry.S
@@ -71,16 +71,16 @@ save_tf: # write the trap frame onto the stack
li x6, CAUSE_FAULT_FETCH
beq x3, x5, 1f
beq x3, x6, 1f
- lh x3,0(x4)
- lh x4,2(x4)
- sh x3, 36*REGBYTES(x2)
- sh x4,2+36*REGBYTES(x2)
+ lh x5,0(x4)
+ lh x6,2(x4)
+ sh x5, 36*REGBYTES(x2)
+ sh x6,2+36*REGBYTES(x2)
1:
- #mfpcr x3,ASM_CR(PCR_VECBANK) # vecbank
- #STORE x3,37*REGBYTES(x2)
- #mfpcr x3,ASM_CR(PCR_VECCFG) # veccfg
- #STORE x3,38*REGBYTES(x2)
+ bge x3, x0, 1f
+ vxcptcause x3
+ STORE x3,37*REGBYTES(x2)
+1:
ret
@@ -148,13 +148,11 @@ trap_entry:
move sp,x2
setpcr status, SR_EI
move a0,x2
-#if 0
mfpcr ra,status
- and ra,ra,SR_EV
+ and ra,ra,SR_EA
beqz ra, 2f
- addi x2,x2,39*REGBYTES
+ addi x2,x2,38*REGBYTES
vxcptsave x2
-#endif
2:jal handle_trap
# when coming from kernel, continue below its stack
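Note on the entry.S changes: save_tf now loads the trapping instruction's halfwords through x5/x6 so the cause held in x3 survives the copy, and instead of dumping vecbank/veccfg it reads the accelerator cause register only when the trap is an interrupt (bge x3, x0, 1f skips the read whenever the cause's sign bit is clear). In trap_entry, the formerly #if 0'd block is enabled, tests the renamed SR_EA bit, and evacuates accelerator state at 38*REGBYTES now that the trapframe is one word shorter. A C-level sketch of the save path, assuming the x5 comparison set up just above the hunk is CAUSE_MISALIGNED_FETCH and that vxcptcause() is an inline-asm wrapper in the style of the helpers added to riscv_test.h:

    /* Sketch only; the real code is the assembly above. */
    void save_tf_sketch(trapframe_t* tf)
    {
        if (tf->cause != CAUSE_MISALIGNED_FETCH && tf->cause != CAUSE_FAULT_FETCH)
            tf->insn = *(int*)tf->epc;        /* the two lh/sh pairs, 36*REGBYTES slot */
        if (tf->cause < 0)                    /* interrupt: sign bit of the cause is set */
            tf->hwacha_cause = vxcptcause();  /* 37*REGBYTES slot */
    }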
diff --git a/env/v/riscv_test.h b/env/v/riscv_test.h
index bfd5197..75a1388 100644
--- a/env/v/riscv_test.h
+++ b/env/v/riscv_test.h
@@ -53,22 +53,14 @@ userstart: \
#include "../pcr.h"
#include "../hwacha_xcpt.h"
-#define vvcfg(nxregs, nfregs) ({ \
- asm volatile ("vvcfg %0,%1" : : "r"(nxregs), "r"(nfregs)); })
-
-#define vsetvl(vl) ({ long __tmp; \
- asm volatile ("vsetvl %0,%1" : "=r"(__tmp) : "r"(vl)); })
-
-#define vcfg(word) ({ vvcfg((word)>>12, (word)>>18); vsetvl((word)); })
-
#define dword_bit_cmd(dw) ((dw >> 32) & 0x1)
#define dword_bit_cnt(dw) (!dword_bit_cmd(dw))
#define dword_bit_imm1(dw) ((dw >> 35) & 0x1)
#define dword_bit_imm2(dw) ((dw >> 34) & 0x1)
#define dword_bit_pf(dw) ((dw >> 36) & 0x1)
-#define fencevl() ({ \
- asm volatile ("fence.v.l" ::: "memory"); })
+#define fence() ({ \
+ asm volatile ("fence" ::: "memory"); })
#define vxcptkill() ({ \
asm volatile ("vxcptkill"); })
@@ -94,10 +86,50 @@ userstart: \
#define PGSHIFT 13
#define PGSIZE (1 << PGSHIFT)
-#define SIZEOF_TRAPFRAME_T 1336
+#define SIZEOF_TRAPFRAME_T 1328
#ifndef __ASSEMBLER__
+static inline void vsetcfg(long cfg)
+{
+ asm volatile ("vsetcfg %0" : : "r"(cfg));
+}
+
+static inline void vsetvl(long vl)
+{
+ long __tmp;
+ asm volatile ("vsetvl %0,%1" : "=r"(__tmp) : "r"(vl));
+}
+
+static inline long vgetcfg()
+{
+ int cfg;
+ asm volatile ("vgetcfg %0" : "=r"(cfg) :);
+ return cfg;
+}
+
+static inline long vgetvl()
+{
+ int vl;
+ asm volatile ("vgetvl %0" : "=r"(vl) :);
+ return vl;
+}
+
+static inline long vxcptaux()
+{
+ int aux;
+ asm volatile ("vxcptaux %0" : "=r"(aux) :);
+ return aux;
+}
+
+static inline void vxcptrestore(long* mem)
+{
+ asm volatile("vxcptrestore %0" : : "r"(mem) : "memory");
+}
+
+static inline void vxcptevac(long* mem)
+{
+ asm volatile ("vxcptevac %0" : : "r"(mem));
+}
typedef unsigned long pte_t;
#define LEVELS (sizeof(pte_t) == sizeof(uint64_t) ? 3 : 2)
@@ -114,8 +146,7 @@ typedef struct
long badvaddr;
long cause;
long insn;
- long vecbank;
- long veccfg;
+ long hwacha_cause;
long evac[128];
} trapframe_t;
#endif
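Note on riscv_test.h: the old vvcfg/vsetvl/vcfg macros and fence.v.l give way to inline wrappers for the current Hwacha instructions plus a plain fence, and the trapframe trades vecbank/veccfg for a single hwacha_cause word. That one-word difference is exactly why SIZEOF_TRAPFRAME_T drops from 1336 to 1328 bytes: 166 longs, i.e. 32 GPRs, sr, epc, badvaddr, cause, insn, hwacha_cause, and 128 evac words, matching the 36/37/38*REGBYTES offsets used in entry.S. A usage sketch of the new helpers, following the buffer layout that emulate_vxcptsave() in vm.c uses (cfg word first, vector length second; that layout comes from this code, not from a spec):

    /* Sketch: save accelerator state into a buffer, then restore it later. */
    static void save_and_restore_accel_state(void)
    {
        long buf[2 + 128];
        buf[0] = vgetcfg();    /* word 0: vector configuration */
        buf[1] = vgetvl();     /* word 1: current vector length */
        vxcptevac(&buf[2]);    /* evacuated command/count words follow */
        fence();

        /* ...later... */
        vsetcfg(buf[0]);
        vsetvl(buf[1]);
        /* replay buf[2..] by hand (vxcpthold() plus decode) as do_vxcptrestore()
           does, or pass the buffer to vxcptrestore() where hardware supports it */
    }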
diff --git a/env/v/vm.c b/env/v/vm.c
index 3552124..6b61c02 100644
--- a/env/v/vm.c
+++ b/env/v/vm.c
@@ -101,17 +101,22 @@ void handle_fault(unsigned long addr)
static void emulate_vxcptsave(trapframe_t* tf)
{
- long where = tf->gpr[(tf->insn >> 22) & 0x1F];
+ long* where = (long*)tf->gpr[(tf->insn >> 15) & 0x1F];
- asm volatile ("vxcptevac %0" : : "r"(where));
- fencevl();
+ where[0] = vgetcfg();
+ where[1] = vgetvl();
+ vxcptevac(&where[2]);
+ fence();
}
static void do_vxcptrestore(long* where)
{
+ vsetcfg(where[0]);
+ vsetvl(where[1]);
+
vxcpthold();
- int idx = 0;
+ int idx = 2;
long dword, cmd, pf;
int first = 1;
@@ -154,21 +159,17 @@ static void do_vxcptrestore(long* where)
static void emulate_vxcptrestore(trapframe_t* tf)
{
- long* where = (long*)tf->gpr[(tf->insn >> 22) & 0x1F];
+ long* where = (long*)tf->gpr[(tf->insn >> 15) & 0x1F];
vxcptkill();
- //vcfg(tf->veccfg);
do_vxcptrestore(where);
}
static void restore_vector(trapframe_t* tf)
{
- mtpcr(PCR_VECBANK, tf->vecbank);
- //vcfg(tf->veccfg);
-
if (mfpcr(PCR_IMPL) == IMPL_ROCKET)
do_vxcptrestore(tf->evac);
else
- asm volatile("vxcptrestore %0" : : "r"(tf->evac) : "memory");
+ vxcptrestore(tf->evac);
}
void handle_trap(trapframe_t* tf)
@@ -203,14 +204,26 @@ void handle_trap(trapframe_t* tf)
}
else if (tf->cause == CAUSE_FAULT_LOAD || tf->cause == CAUSE_FAULT_STORE)
handle_fault(tf->badvaddr);
+ else if ((tf->cause << 1) == (IRQ_COP << 1))
+ {
+ if (tf->hwacha_cause == HWACHA_CAUSE_VF_FAULT_FETCH ||
+ tf->hwacha_cause == HWACHA_CAUSE_FAULT_LOAD ||
+ tf->hwacha_cause == HWACHA_CAUSE_FAULT_STORE)
+ {
+ long badvaddr = vxcptaux();
+ handle_fault(badvaddr);
+ }
+ else
+ assert(0);
+ }
else
assert(0);
out:
-#if 0
- if (!(tf->sr & SR_PS) && (tf->sr & SR_EV))
+ if (!(tf->sr & SR_PS) && (tf->sr & SR_EA)) {
restore_vector(tf);
-#endif
+ tf->sr |= SR_PEI;
+ }
pop_tf(tf);
}
@@ -255,7 +268,7 @@ void vm_boot(long test_addr, long seed)
trapframe_t tf;
memset(&tf, 0, sizeof(tf));
- tf.sr = SR_EF | SR_EV | SR_S | SR_U64 | SR_S64 | SR_VM;
+ tf.sr = SR_PEI | ((1 << IRQ_COP) << SR_IM_SHIFT) | SR_EF | SR_EA | SR_S | SR_U64 | SR_S64 | SR_VM;
tf.epc = test_addr;
pop_tf(&tf);
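Note on vm.c: handle_trap gains a coprocessor-interrupt path; the comparison shifts both sides left by one to discard the interrupt flag held in the cause's sign bit, hwacha_cause (captured by save_tf) distinguishes the fault kinds, and vxcptaux() supplies the faulting address. vm_boot accordingly unmasks IRQ_COP in the SR_IM field, sets SR_PEI so interrupts come back on once pop_tf returns to user mode, and enables the accelerator via SR_EA. A small sketch of reading that mask back out of a status value (IRQ_COP and SR_IM_SHIFT are the constants already used above; the helper name is illustrative only):

    /* Illustrative: is the coprocessor interrupt unmasked in this status value? */
    static inline int cop_irq_unmasked(long sr)
    {
        return ((sr >> SR_IM_SHIFT) & (1 << IRQ_COP)) != 0;
    }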