Diffstat (limited to 'include')
-rw-r--r--  include/qemu/atomic.h  11
1 file changed, 11 insertions, 0 deletions
diff --git a/include/qemu/atomic.h b/include/qemu/atomic.h
index 5bc4d6c..96db6e9 100644
--- a/include/qemu/atomic.h
+++ b/include/qemu/atomic.h
@@ -36,7 +36,18 @@
 #define smp_wmb() ({ barrier(); __atomic_thread_fence(__ATOMIC_RELEASE); barrier(); })
 #define smp_rmb() ({ barrier(); __atomic_thread_fence(__ATOMIC_ACQUIRE); barrier(); })
 
+/* Most compilers currently treat consume and acquire the same, but really
+ * no processors except Alpha need a barrier here. Leave it in if
+ * using Thread Sanitizer to avoid warnings, otherwise optimize it away.
+ */
+#if defined(__SANITIZE_THREAD__)
 #define smp_read_barrier_depends() ({ barrier(); __atomic_thread_fence(__ATOMIC_CONSUME); barrier(); })
+#elif defined(__alpha__)
+#define smp_read_barrier_depends() asm volatile("mb":::"memory")
+#else
+#define smp_read_barrier_depends() barrier()
+#endif
+
 
 /* Weak atomic operations prevent the compiler moving other
  * loads/stores past the atomic operation load/store. However there is
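
For readers unfamiliar with the primitive, here is a minimal, self-contained sketch (not part of the patch) of the classic dependent-load pattern that smp_read_barrier_depends() exists for. The struct node, shared, and read_dependent names are hypothetical; barrier() is redefined locally only so the snippet compiles on its own (QEMU defines it earlier in this header), and a GCC/Clang toolchain with the __atomic builtins is assumed.

/* Illustrative sketch, not QEMU code: local stand-in for the header's
 * compiler barrier. */
#define barrier()   asm volatile("" ::: "memory")

/* Same logic as the patch: a real fence only for TSan or Alpha. */
#if defined(__SANITIZE_THREAD__)
#define smp_read_barrier_depends() \
    ({ barrier(); __atomic_thread_fence(__ATOMIC_CONSUME); barrier(); })
#elif defined(__alpha__)
#define smp_read_barrier_depends() asm volatile("mb":::"memory")
#else
#define smp_read_barrier_depends() barrier()
#endif

struct node { int value; };              /* hypothetical shared structure */
struct node *shared;                     /* published by a writer thread  */

/* The load of p->value is data-dependent on the load of p, so every CPU
 * except Alpha orders the two loads by itself; the macro then only needs
 * to stop the compiler from reordering, which is exactly why the patch
 * reduces it to barrier() in the common case. */
int read_dependent(void)
{
    struct node *p = __atomic_load_n(&shared, __ATOMIC_RELAXED);
    smp_read_barrier_depends();          /* order p's load before p->value */
    return p ? p->value : -1;
}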