author    Richard Henderson <richard.henderson@linaro.org>    2023-05-19 21:12:34 -0700
committer Richard Henderson <richard.henderson@linaro.org>    2023-05-30 09:51:11 -0700
commit    480dfba2c9fe4e67f0d1f01a20c9a0fd09587ece (patch)
tree      5480842fe5618540bd1ad5f18fcfe1a3c62721ee /host/include
parent    6479dd74f14d51f207509aa1b77b039b7bd32bff (diff)
qemu/atomic128: Add x86_64 atomic128-ldst.h
With CPUINFO_ATOMIC_VMOVDQA, we can perform proper atomic
load/store without cmpxchg16b.

Reviewed-by: Alex Bennée <alex.bennee@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
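The difference the commit message refers to, as an illustrative sketch (not part of the patch; both variants need a 16-byte-aligned pointer, the cmpxchg16b variant needs -mcx16 and writable memory, and the vmovdqa variant relies on the vendor guarantee that aligned 16-byte VMOVDQA accesses are atomic on AVX-capable CPUs):

/* Illustrative only: the two 16-byte load strategies the new header selects between. */
static inline __int128 load16_via_vmovdqa(const __int128 *p)
{
    __int128 r;
    /* A true load: works on read-only mappings and does not dirty the cache line. */
    asm("vmovdqa %1, %0" : "=x"(r) : "m"(*p));
    return r;
}

static inline __int128 load16_via_cmpxchg16b(__int128 *p)
{
    /*
     * Locked cmpxchg16b writes the destination even when the comparison
     * fails, so this "load" requires writable memory and behaves as a
     * read-modify-write operation.
     */
    return __sync_val_compare_and_swap_16(p, 0, 0);
}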
Diffstat (limited to 'host/include')
-rw-r--r--    host/include/x86_64/host/atomic128-ldst.h    68
1 file changed, 68 insertions, 0 deletions
diff --git a/host/include/x86_64/host/atomic128-ldst.h b/host/include/x86_64/host/atomic128-ldst.h
new file mode 100644
index 0000000..adc9332
--- /dev/null
+++ b/host/include/x86_64/host/atomic128-ldst.h
@@ -0,0 +1,68 @@
+/*
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ * Load/store for 128-bit atomic operations, x86_64 version.
+ *
+ * Copyright (C) 2023 Linaro, Ltd.
+ *
+ * See docs/devel/atomics.rst for discussion about the guarantees each
+ * atomic primitive is meant to provide.
+ */
+
+#ifndef X86_64_ATOMIC128_LDST_H
+#define X86_64_ATOMIC128_LDST_H
+
+#ifdef CONFIG_INT128_TYPE
+#include "host/cpuinfo.h"
+#include "tcg/debug-assert.h"
+
+/*
+ * Through clang 16, with -mcx16, __atomic_load_n is incorrectly
+ * expanded to a read-write operation: lock cmpxchg16b.
+ */
+
+#define HAVE_ATOMIC128_RO likely(cpuinfo & CPUINFO_ATOMIC_VMOVDQA)
+#define HAVE_ATOMIC128_RW 1
+
+static inline Int128 atomic16_read_ro(const Int128 *ptr)
+{
+ Int128Alias r;
+
+ tcg_debug_assert(HAVE_ATOMIC128_RO);
+ asm("vmovdqa %1, %0" : "=x" (r.i) : "m" (*ptr));
+
+ return r.s;
+}
+
+static inline Int128 atomic16_read_rw(Int128 *ptr)
+{
+ __int128_t *ptr_align = __builtin_assume_aligned(ptr, 16);
+ Int128Alias r;
+
+ if (HAVE_ATOMIC128_RO) {
+ asm("vmovdqa %1, %0" : "=x" (r.i) : "m" (*ptr_align));
+ } else {
+ r.i = __sync_val_compare_and_swap_16(ptr_align, 0, 0);
+ }
+ return r.s;
+}
+
+static inline void atomic16_set(Int128 *ptr, Int128 val)
+{
+ __int128_t *ptr_align = __builtin_assume_aligned(ptr, 16);
+ Int128Alias new = { .s = val };
+
+ if (HAVE_ATOMIC128_RO) {
+ asm("vmovdqa %1, %0" : "=m"(*ptr_align) : "x" (new.i));
+ } else {
+ __int128_t old;
+ do {
+ old = *ptr_align;
+ } while (!__sync_bool_compare_and_swap_16(ptr_align, old, new.i));
+ }
+}
+#else
+/* Provide QEMU_ERROR stubs. */
+#include "host/include/generic/host/atomic128-ldst.h"
+#endif
+
+#endif /* X86_64_ATOMIC128_LDST_H */
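
A hypothetical caller-side sketch (not from the QEMU tree; load_pair() and store_pair() are made-up names, while int128_make128() and tcg_debug_assert() are existing QEMU helpers) showing how the RO/RW split is meant to be used: gate the read-only load on HAVE_ATOMIC128_RO, and use the read-write load and the store unconditionally, since HAVE_ATOMIC128_RW is always 1 on x86_64:

/* Hypothetical usage example, not part of this patch. */
static Int128 load_pair(Int128 *hostp, bool page_writable)
{
    if (!page_writable) {
        /* Legal only when vmovdqa is known-atomic on this CPU. */
        tcg_debug_assert(HAVE_ATOMIC128_RO);
        return atomic16_read_ro(hostp);
    }
    return atomic16_read_rw(hostp);      /* vmovdqa, or a cmpxchg16b "load" */
}

static void store_pair(Int128 *hostp, uint64_t lo, uint64_t hi)
{
    /* Single atomic 16-byte store: vmovdqa, or a cmpxchg16b loop. */
    atomic16_set(hostp, int128_make128(lo, hi));
}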