aboutsummaryrefslogtreecommitdiff
path: root/riscv
diff options
context:
space:
mode:
authorAndrew Waterman <andrew@sifive.com>2020-06-08 14:12:36 -0700
committerAndrew Waterman <andrew@sifive.com>2020-06-08 14:12:36 -0700
commit090a083f0d6499b830622bb10d4486afa1f2b448 (patch)
tree72ab1f980281027656a2f2b96e85c89021f7788c /riscv
parent33a6eb57564c257037780ddd2691ca621c44a55b (diff)
downloadspike-090a083f0d6499b830622bb10d4486afa1f2b448.zip
spike-090a083f0d6499b830622bb10d4486afa1f2b448.tar.gz
spike-090a083f0d6499b830622bb10d4486afa1f2b448.tar.bz2
Fix priority of misaligned exceptions for store-conditional
Previously, we unintentionally prioritized access faults and page faults over address-misaligned exceptions. Resolves #431
Diffstat (limited to 'riscv')
-rw-r--r--riscv/insns/sc_d.h7
-rw-r--r--riscv/insns/sc_w.h7
-rw-r--r--riscv/mmu.h5
3 files changed, 14 insertions, 5 deletions
diff --git a/riscv/insns/sc_d.h b/riscv/insns/sc_d.h
index f44d873..54023ed 100644
--- a/riscv/insns/sc_d.h
+++ b/riscv/insns/sc_d.h
@@ -1,8 +1,11 @@
require_extension('A');
require_rv64;
-bool have_reservation = MMU.check_load_reservation(RS1);
-MMU.amo_uint64(RS1, [&](uint64_t lhs) { return have_reservation ? RS2 : lhs; });
+bool have_reservation = MMU.check_load_reservation(RS1, 8);
+
+if (have_reservation)
+ MMU.store_uint64(RS1, RS2);
+
MMU.yield_load_reservation();
WRITE_RD(!have_reservation);
diff --git a/riscv/insns/sc_w.h b/riscv/insns/sc_w.h
index fe4fcdc..e430dcb 100644
--- a/riscv/insns/sc_w.h
+++ b/riscv/insns/sc_w.h
@@ -1,7 +1,10 @@
require_extension('A');
-bool have_reservation = MMU.check_load_reservation(RS1);
-MMU.amo_uint32(RS1, [&](uint32_t lhs) { return have_reservation ? RS2 : lhs; });
+bool have_reservation = MMU.check_load_reservation(RS1, 4);
+
+if (have_reservation)
+ MMU.store_uint32(RS1, RS2);
+
MMU.yield_load_reservation();
WRITE_RD(!have_reservation);
diff --git a/riscv/mmu.h b/riscv/mmu.h
index b84fd4a..f89d139 100644
--- a/riscv/mmu.h
+++ b/riscv/mmu.h
@@ -222,8 +222,11 @@ public:
throw trap_load_access_fault(vaddr); // disallow LR to I/O space
}
- inline bool check_load_reservation(reg_t vaddr)
+ inline bool check_load_reservation(reg_t vaddr, size_t size)
{
+ if (vaddr & (size-1))
+ throw trap_store_address_misaligned(vaddr);
+
reg_t paddr = translate(vaddr, 1, STORE);
if (auto host_addr = sim->addr_to_mem(paddr))
return load_reservation_address == refill_tlb(vaddr, paddr, host_addr, STORE).target_offset + vaddr;