aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAdhemerval Zanella <azanella@linux.vnet.ibm.com>2014-11-25 14:32:54 -0500
committerAdhemerval Zanella <azanella@linux.vnet.ibm.com>2014-11-26 07:06:28 -0500
commit704f794714704ba430d84d10d6809acaf7ca59bf (patch)
tree753816581f7d1f54d4af4921d776077b31a9a4e4
parentcdcb42d7f786fe5ee1ca60065924d0b5c6649dd0 (diff)
downloadglibc-704f794714704ba430d84d10d6809acaf7ca59bf.zip
glibc-704f794714704ba430d84d10d6809acaf7ca59bf.tar.gz
glibc-704f794714704ba430d84d10d6809acaf7ca59bf.tar.bz2
powerpc: Fix missing barriers in atomic_exchange_and_add_{acq,rel}
On powerpc, atomic_exchange_and_add is implemented without any barriers. This patch adds the missing instruction and memory barrier for acquire and release semantics.
-rw-r--r--ChangeLog16
-rw-r--r--csu/tst-atomic.c16
-rw-r--r--sysdeps/powerpc/bits/atomic.h50
-rw-r--r--sysdeps/powerpc/powerpc32/bits/atomic.h6
-rw-r--r--sysdeps/powerpc/powerpc64/bits/atomic.h28
5 files changed, 116 insertions, 0 deletions
diff --git a/ChangeLog b/ChangeLog
index 50be79d..efd18ee 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,19 @@
+2014-11-26 Adhemerval Zanella <azanella@linux.ibm.com>
+
+ * csu/tst-atomic.c (do_test): Add atomic_exchange_and_add_{acq,rel}
+ tests.
+ * sysdeps/powerpc/bits/atomic.h
+ (__arch_atomic_exchange_and_add_32_acq): Add definition.
+ (__arch_atomic_exchange_and_add_32_rel): Likewise.
+ (atomic_exchange_and_add_acq): Likewise.
+ (atomic_exchange_and_add_rel): Likewise.
+ * sysdeps/powerpc/powerpc32/bits/atomic.h
+ (__arch_atomic_exchange_and_add_64_acq): Add definition.
+ (__arch_atomic_exchange_and_add_64_rel): Likewise.
+ * sysdeps/powerpc/powerpc64/bits/atomic.h
+ (__arch_atomic_exchange_and_add_64_acq): Add definition.
+ (__arch_atomic_exchange_and_add_64_rel): Likewise.
+
2014-11-26 Torvald Riegel <triegel@redhat.com>
* nptl/tpp.c (__init_sched_fifo_prio, __pthread_tpp_change_priority):
diff --git a/csu/tst-atomic.c b/csu/tst-atomic.c
index c6e786d..5ab651e 100644
--- a/csu/tst-atomic.c
+++ b/csu/tst-atomic.c
@@ -113,6 +113,22 @@ do_test (void)
ret = 1;
}
+ mem = 2;
+ if (atomic_exchange_and_add_acq (&mem, 11) != 2
+ || mem != 13)
+ {
+ puts ("atomic_exchange_and_add test failed");
+ ret = 1;
+ }
+
+ mem = 2;
+ if (atomic_exchange_and_add_rel (&mem, 11) != 2
+ || mem != 13)
+ {
+ puts ("atomic_exchange_and_add test failed");
+ ret = 1;
+ }
+
mem = -21;
atomic_add (&mem, 22);
if (mem != 1)
diff --git a/sysdeps/powerpc/bits/atomic.h b/sysdeps/powerpc/bits/atomic.h
index f312676..b05b0f7 100644
--- a/sysdeps/powerpc/bits/atomic.h
+++ b/sysdeps/powerpc/bits/atomic.h
@@ -152,6 +152,34 @@ typedef uintmax_t uatomic_max_t;
__val; \
})
+#define __arch_atomic_exchange_and_add_32_acq(mem, value) \
+ ({ \
+ __typeof (*mem) __val, __tmp; \
+ __asm __volatile ("1: lwarx %0,0,%3" MUTEX_HINT_ACQ "\n" \
+ " add %1,%0,%4\n" \
+ " stwcx. %1,0,%3\n" \
+ " bne- 1b\n" \
+ __ARCH_ACQ_INSTR \
+ : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
+ : "b" (mem), "r" (value), "m" (*mem) \
+ : "cr0", "memory"); \
+ __val; \
+ })
+
+#define __arch_atomic_exchange_and_add_32_rel(mem, value) \
+ ({ \
+ __typeof (*mem) __val, __tmp; \
+ __asm __volatile (__ARCH_REL_INSTR "\n" \
+ "1: lwarx %0,0,%3" MUTEX_HINT_REL "\n" \
+ " add %1,%0,%4\n" \
+ " stwcx. %1,0,%3\n" \
+ " bne- 1b" \
+ : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
+ : "b" (mem), "r" (value), "m" (*mem) \
+ : "cr0", "memory"); \
+ __val; \
+ })
+
#define __arch_atomic_increment_val_32(mem) \
({ \
__typeof (*(mem)) __val; \
@@ -252,6 +280,28 @@ typedef uintmax_t uatomic_max_t;
abort (); \
__result; \
})
+#define atomic_exchange_and_add_acq(mem, value) \
+ ({ \
+ __typeof (*(mem)) __result; \
+ if (sizeof (*mem) == 4) \
+ __result = __arch_atomic_exchange_and_add_32_acq (mem, value); \
+ else if (sizeof (*mem) == 8) \
+ __result = __arch_atomic_exchange_and_add_64_acq (mem, value); \
+ else \
+ abort (); \
+ __result; \
+ })
+#define atomic_exchange_and_add_rel(mem, value) \
+ ({ \
+ __typeof (*(mem)) __result; \
+ if (sizeof (*mem) == 4) \
+ __result = __arch_atomic_exchange_and_add_32_rel (mem, value); \
+ else if (sizeof (*mem) == 8) \
+ __result = __arch_atomic_exchange_and_add_64_rel (mem, value); \
+ else \
+ abort (); \
+ __result; \
+ })
#define atomic_increment_val(mem) \
({ \
diff --git a/sysdeps/powerpc/powerpc32/bits/atomic.h b/sysdeps/powerpc/powerpc32/bits/atomic.h
index 117b5a0..e2a1bf4 100644
--- a/sysdeps/powerpc/powerpc32/bits/atomic.h
+++ b/sysdeps/powerpc/powerpc32/bits/atomic.h
@@ -98,6 +98,12 @@
#define __arch_atomic_exchange_and_add_64(mem, value) \
({ abort (); (*mem) = (value); })
+#define __arch_atomic_exchange_and_add_64_acq(mem, value) \
+ ({ abort (); (*mem) = (value); })
+
+#define __arch_atomic_exchange_and_add_64_rel(mem, value) \
+ ({ abort (); (*mem) = (value); })
+
#define __arch_atomic_increment_val_64(mem) \
({ abort (); (*mem)++; })
diff --git a/sysdeps/powerpc/powerpc64/bits/atomic.h b/sysdeps/powerpc/powerpc64/bits/atomic.h
index 83b5dfe..46117b0 100644
--- a/sysdeps/powerpc/powerpc64/bits/atomic.h
+++ b/sysdeps/powerpc/powerpc64/bits/atomic.h
@@ -186,6 +186,34 @@
__val; \
})
+#define __arch_atomic_exchange_and_add_64_acq(mem, value) \
+ ({ \
+ __typeof (*mem) __val, __tmp; \
+ __asm __volatile ("1: ldarx %0,0,%3" MUTEX_HINT_ACQ "\n" \
+ " add %1,%0,%4\n" \
+ " stdcx. %1,0,%3\n" \
+ " bne- 1b\n" \
+ __ARCH_ACQ_INSTR \
+ : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
+ : "b" (mem), "r" (value), "m" (*mem) \
+ : "cr0", "memory"); \
+ __val; \
+ })
+
+#define __arch_atomic_exchange_and_add_64_rel(mem, value) \
+ ({ \
+ __typeof (*mem) __val, __tmp; \
+ __asm __volatile (__ARCH_REL_INSTR "\n" \
+ "1: ldarx %0,0,%3" MUTEX_HINT_REL "\n" \
+ " add %1,%0,%4\n" \
+ " stdcx. %1,0,%3\n" \
+ " bne- 1b" \
+ : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
+ : "b" (mem), "r" (value), "m" (*mem) \
+ : "cr0", "memory"); \
+ __val; \
+ })
+
#define __arch_atomic_increment_val_64(mem) \
({ \
__typeof (*(mem)) __val; \