-rw-r--r--   ChangeLog                                    5
-rw-r--r--   linuxthreads/spinlock.h                      4
-rw-r--r--   linuxthreads/sysdeps/powerpc/pt-machine.h    8
-rw-r--r--   sysdeps/powerpc/strchr.S                    12
4 files changed, 20 insertions, 9 deletions
diff --git a/ChangeLog b/ChangeLog
index 342c1fe..c81ec9a 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,8 @@
+2000-07-21 Ulrich Drepper <drepper@redhat.com>
+
+ * sysdeps/powerpc/strchr.S: Correct bugs introduced in BP-ification.
+ Patch by Franz Sirl <Franz.Sirl-kernel@lauterbach.com>.
+
2000-07-21 Wolfram Gloger <wg@malloc.de>
* malloc/malloc.c (chunk_alloc): Try mmap_chunk() for smaller
diff --git a/linuxthreads/spinlock.h b/linuxthreads/spinlock.h
index 703b72d..6d3d343 100644
--- a/linuxthreads/spinlock.h
+++ b/linuxthreads/spinlock.h
@@ -48,6 +48,10 @@ static inline int compare_and_swap(long * ptr, long oldval, long newval,
#elif defined(HAS_COMPARE_AND_SWAP)
+#ifdef IMPLEMENT_TAS_WITH_CAS
+#define testandset(p) !__compare_and_swap(p, 0, 1)
+#endif
+
#ifdef HAS_COMPARE_AND_SWAP_WITH_RELEASE_SEMANTICS
static inline int
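The new IMPLEMENT_TAS_WITH_CAS path builds testandset() out of __compare_and_swap(). The negation matters: testandset() is expected to report the previous lock state (0 when the lock was free and has just been taken, nonzero when it was already held), while __compare_and_swap() returns nonzero on success. A minimal C sketch of that mapping, using the GCC builtin __sync_bool_compare_and_swap as a stand-in for the per-architecture __compare_and_swap() (cas_stub and testandset_sketch are illustrative names, not glibc code):

/* Stand-in for the per-architecture __compare_and_swap(): nonzero iff
   *p held `oldval' and has been atomically replaced by `newval'.  */
static int
cas_stub (long *p, long oldval, long newval)
{
  return __sync_bool_compare_and_swap (p, oldval, newval);
}

/* testandset() must report the previous lock state: 0 means the lock was
   free and is now ours, nonzero means it was already held.  A successful
   CAS therefore maps to 0 and a failed one to 1 -- hence the `!'.  */
static int
testandset_sketch (long *p)
{
  return !cas_stub (p, 0, 1);
}

If the CAS fails because the word already holds 1, testandset() reports the lock as taken without modifying it, which is exactly the semantics the spinlock code expects.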
diff --git a/linuxthreads/sysdeps/powerpc/pt-machine.h b/linuxthreads/sysdeps/powerpc/pt-machine.h
index c4af484..39defcd 100644
--- a/linuxthreads/sysdeps/powerpc/pt-machine.h
+++ b/linuxthreads/sysdeps/powerpc/pt-machine.h
@@ -28,7 +28,7 @@
/* For multiprocessor systems, we want to ensure all memory accesses
are completed before we reset a lock. On other systems, we still
need to make sure that the compiler has flushed everything to memory. */
-#define MEMORY_BARRIER() __asm__ ("sync" : : : "memory")
+#define MEMORY_BARRIER() __asm__ __volatile__ ("sync" : : : "memory")
/* Get some notion of the current stack. Need not be exactly the top
of the stack, just something somewhere in the current frame. */
@@ -36,9 +36,11 @@
register char * stack_pointer __asm__ ("r1");
/* Compare-and-swap for semaphores. */
-/* note that test-and-set(x) is the same as compare-and-swap(x, 0, 1) */
+/* note that test-and-set(x) is the same as !compare-and-swap(x, 0, 1) */
#define HAS_COMPARE_AND_SWAP
+#define IMPLEMENT_TAS_WITH_CAS
+
#if BROKEN_PPC_ASM_CR0
static
#else
@@ -50,7 +52,7 @@ __compare_and_swap (long int *p, long int oldval, long int newval)
int ret;
MEMORY_BARRIER ();
- __asm__ (
+ __asm__ __volatile__ (
"0: lwarx %0,0,%1 ;"
" xor. %0,%3,%0;"
" bne 1f;"
diff --git a/sysdeps/powerpc/strchr.S b/sysdeps/powerpc/strchr.S
index fd67c30..1556dea 100644
--- a/sysdeps/powerpc/strchr.S
+++ b/sysdeps/powerpc/strchr.S
@@ -34,7 +34,7 @@ ENTRY (BP_SYM (strchr))
# define rCHR r5 /* byte we're looking for, spread over the whole word */
# define rWORD r8 /* the current word */
#else
-# define rSTR r3 /* current word pointer */
+# define rSTR r8 /* current word pointer */
# define rCHR r4 /* byte we're looking for, spread over the whole word */
# define rWORD r5 /* the current word */
#endif
@@ -52,10 +52,10 @@ ENTRY (BP_SYM (strchr))
rlwimi rCHR, rCHR, 8, 16, 23
li rMASK, -1
rlwimi rCHR, rCHR, 16, 0, 15
- rlwinm rIGN, rSTR, 3, 27, 28
+ rlwinm rIGN, rRTN, 3, 27, 28
lis rFEFE, -0x101
lis r7F7F, 0x7f7f
- clrrwi rSTR, rSTR, 2
+ clrrwi rSTR, rRTN, 2
addi rFEFE, rFEFE, -0x101
addi r7F7F, r7F7F, 0x7f7f
/* Test the first (partial?) word. */
@@ -90,7 +90,7 @@ L(loopentry):
happened, though. */
L(missed):
and. rTMP1, rTMP1, rTMP2
- li rSTR, 0
+ li rRTN, 0
STORE_RETURN_VALUE (rSTR)
beqlr
/* It did happen. Decide which one was first...
@@ -108,7 +108,7 @@ L(missed):
bgtlr
cntlzw rCLZB, rTMP2
srwi rCLZB, rCLZB, 3
- add rSTR, rSTR, rCLZB
+ add rRTN, rSTR, rCLZB
CHECK_BOUNDS_HIGH_RTN (rSTR, rTMP2, twlge)
STORE_RETURN_VALUE (rSTR)
blr
@@ -121,7 +121,7 @@ L(foundit):
cntlzw rCLZB, rTMP2
subi rSTR, rSTR, 4
srwi rCLZB, rCLZB, 3
- add rSTR, rSTR, rCLZB
+ add rRTN, rSTR, rCLZB
CHECK_BOUNDS_HIGH_RTN (rSTR, rTMP2, twlge)
STORE_RETURN_VALUE (rSTR)
blr
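The strchr.S hunks all fix the same class of bug from the BP-ification: the defines hunk moves the non-bounded-pointer rSTR from r3 (the PowerPC argument/return register) to r8, and the remaining hunks route every return-value update (the li ...,0 for "not found" and the add of the byte offset computed by cntlzw) through rRTN instead of rSTR, so the working word pointer no longer aliases the return value. The rFEFE/r7F7F constants set up the usual word-at-a-time search: 0xfefefeff is -0x01010101, so adding it performs the subtraction in the classic zero-byte test. A hedged C sketch of the technique follows (an illustration with hypothetical names, not a transcription of the assembly; alignment handling and the bounds checks are omitted):

#include <stddef.h>
#include <stdint.h>

/* Classic zero-byte test the assembly sets up with rFEFE and r7F7F:
   w + 0xfefefeff is w - 0x01010101, and ANDing with ~w & 0x80808080 keeps
   only bytes whose top bit was clear.  The result is nonzero exactly when
   w contains a zero byte.  */
static inline uint32_t
has_zero_byte (uint32_t w)
{
  return (w + 0xfefefeffU) & ~w & 0x80808080U;
}

/* Sketch of the search loop only.  Assumes s is word-aligned, so reading
   whole words never crosses past the page holding the terminator.  */
char *
strchr_sketch (const char *s, int c)
{
  /* Spread the target byte across the word, as the rlwimi pair does.  */
  uint32_t cccc = (unsigned char) c;
  cccc |= cccc << 8;
  cccc |= cccc << 16;

  const uint32_t *p = (const uint32_t *) s;
  for (;;)
    {
      uint32_t w = *p;
      /* w ^ cccc has a zero byte wherever w matches c; w itself has a
	 zero byte at the terminator.  Scan this word bytewise to decide
	 which comes first.  */
      if (has_zero_byte (w ^ cccc) || has_zero_byte (w))
	{
	  const char *b = (const char *) p;
	  for (size_t i = 0; i < sizeof w; i++)
	    {
	      if (b[i] == (char) c)
		return (char *) &b[i];
	      if (b[i] == '\0')
		return NULL;
	    }
	}
      p++;
    }
}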