aboutsummaryrefslogtreecommitdiff
path: root/include/lock.h
diff options
context:
space:
mode:
authorNicholas Piggin <npiggin@gmail.com>2017-11-29 15:36:56 +1000
committerStewart Smith <stewart@linux.vnet.ibm.com>2017-12-03 22:08:53 -0600
commit1486a08de557b8f237a066a57cc2c74961ba36e0 (patch)
tree493986a01191b9d5449fb43dc421a8ea7a1e1dc5 /include/lock.h
parent1e85912b921028bafa3a68fa286682a5d21a1223 (diff)
downloadskiboot-1486a08de557b8f237a066a57cc2c74961ba36e0.zip
skiboot-1486a08de557b8f237a066a57cc2c74961ba36e0.tar.gz
skiboot-1486a08de557b8f237a066a57cc2c74961ba36e0.tar.bz2
core/lock: Introduce atomic cmpxchg and implement try_lock with it
cmpxchg will be used in a subsequent change, and this reduces the amount of asm code. Signed-off-by: Nicholas Piggin <npiggin@gmail.com> [stewart: fix some ifdef __TEST__ foo to ensure unittests work] Signed-off-by: Stewart Smith <stewart@linux.vnet.ibm.com>
Diffstat (limited to 'include/lock.h')
-rw-r--r--include/lock.h61
1 file changed, 59 insertions, 2 deletions
diff --git a/include/lock.h b/include/lock.h
index 0ac943d..4b0b29d 100644
--- a/include/lock.h
+++ b/include/lock.h
@@ -18,12 +18,13 @@
#define __LOCK_H
#include <stdbool.h>
+#include <processor.h>
struct lock {
/* Lock value has bit 63 as lock bit and the PIR of the owner
* in the top 32-bit
*/
- unsigned long lock_val;
+ uint64_t lock_val;
/*
* Set to true if lock is involved in the console flush path
@@ -63,7 +64,63 @@ static inline void init_lock(struct lock *l)
*l = (struct lock)LOCK_UNLOCKED;
}
-extern bool __try_lock(struct lock *l);
+#ifndef __TEST__
+/*
+ * Bare cmpxchg, no barriers.
+ */
+static inline uint32_t __cmpxchg32(uint32_t *mem, uint32_t old, uint32_t new)
+{
+ uint32_t prev;
+
+ /*
+ * Load-reserve / store-conditional loop: lwarx loads *mem with a
+ * reservation; if the loaded value differs from 'old' we bail out
+ * without storing, otherwise stwcx. attempts to store 'new' and we
+ * retry from the load if the reservation was lost. The value
+ * observed in *mem is returned either way, so the caller detects
+ * success with (prev == old). No memory barriers here (see the
+ * comment above); use a barriered wrapper for ordered accesses.
+ */
+ asm volatile(
+ "# __cmpxchg32 \n"
+ "1: lwarx %0,0,%2 \n"
+ " cmpw %0,%3 \n"
+ " bne- 2f \n"
+ " stwcx. %4,0,%2 \n"
+ " bne- 1b \n"
+ "2: \n"
+
+ : "=&r"(prev), "+m"(*mem)
+ : "r"(mem), "r"(old), "r"(new)
+ : "cr0");
+
+ return prev;
+}
+
+static inline uint64_t __cmpxchg64(uint64_t *mem, uint64_t old, uint64_t new)
+{
+ uint64_t prev;
+
+ /*
+ * 64-bit variant of __cmpxchg32: ldarx/stdcx. load-reserve /
+ * store-conditional loop. Stores 'new' only when the current value
+ * equals 'old'; returns the observed value of *mem so the caller
+ * checks success with (prev == old). No barriers.
+ */
+ asm volatile(
+ "# __cmpxchg64 \n"
+ "1: ldarx %0,0,%2 \n"
+ " cmpd %0,%3 \n"
+ " bne- 2f \n"
+ " stdcx. %4,0,%2 \n"
+ " bne- 1b \n"
+ "2: \n"
+
+ : "=&r"(prev), "+m"(*mem)
+ : "r"(mem), "r"(old), "r"(new)
+ : "cr0");
+
+ return prev;
+}
+
+/*
+ * Fully barriered cmpxchg: a sync() on each side of the bare
+ * __cmpxchg32 orders the exchange against surrounding accesses.
+ * Returns the value observed in *mem; success is (prev == old).
+ */
+static inline uint32_t cmpxchg32(uint32_t *mem, uint32_t old, uint32_t new)
+{
+ uint32_t prev;
+
+ sync();
+ prev = __cmpxchg32(mem, old, new);
+ sync();
+
+ return prev;
+}
+
+#endif /* __TEST__ */
+
extern bool try_lock(struct lock *l);
extern void lock(struct lock *l);
extern void unlock(struct lock *l);