author    Ulrich Drepper <drepper@redhat.com>  2006-10-10 00:51:29 +0000
committer Ulrich Drepper <drepper@redhat.com>  2006-10-10 00:51:29 +0000
commit    1100f84983f22e570a5081cbe79b0ef8fe4952d7 (patch)
tree      3472df1372abf7816fb10f02573ba114c5b5a003 /sysdeps/i386/i486
parent    7484f797e4d4f9c174d4391f59d208e83027b285 (diff)
Jakub Jelinek <jakub@redhat.com>

Implement reference counting of scope records.
* elf/dl-close.c (_dl_close): Remove all scopes from removed objects
  from the list in objects which remain.  Always allocate new scope
  record.
* elf/dl-open.c (dl_open_worker): When growing array for scopes,
  don't resize, allocate a new one.
* elf/dl-runtime.c: Update reference counters before using a scope
  array.
* elf/dl-sym.c: Likewise.
* elf/dl-libc.c: Adjust for l_scope name change.
* elf/dl-load.c: Likewise.
* elf/dl-object.c: Likewise.
* elf/rtld.c: Likewise.
* include/link.h: Include <rtld-lowlevel.h>.  Define struct
  r_scoperec.  Replace r_scope with pointer to r_scoperec structure.
  Add l_scoperec_lock.
* sysdeps/generic/ldsodefs.h: Include <rtld-lowlevel.h>.
* sysdeps/generic/rtld-lowlevel.h: New file.
* include/atomic.h: Rename atomic_and to atomic_and_val and atomic_or
  to atomic_or_val.  Define new macros atomic_and and atomic_or which
  do not return values.
* sysdeps/x86_64/bits/atomic.h: Define atomic_and and atomic_or.
  Various cleanups.
* sysdeps/i386/i486/bits/atomic.h: Likewise.
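As a caller-side illustration of the renamed interfaces (this sketch is not part of the commit, and it assumes the *_val forms keep the old behavior of returning the previous memory contents, as the pre-rename atomic_and/atomic_or did):

    /* Sketch: statement forms vs. value-returning forms.  */
    static unsigned int flags;

    void
    update_flags (void)
    {
      atomic_or (&flags, 1U << 2);      /* new macro: sets the bit, no result */
      atomic_and (&flags, ~(1U << 0));  /* new macro: clears the bit, no result */

      /* Renamed macros; assumed to return the prior value of *mem.  */
      unsigned int old = atomic_or_val (&flags, 1U << 4);
      (void) old;
    }
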
Diffstat (limited to 'sysdeps/i386/i486')
-rw-r--r--  sysdeps/i386/i486/bits/atomic.h | 222
1 file changed, 132 insertions(+), 90 deletions(-)
diff --git a/sysdeps/i386/i486/bits/atomic.h b/sysdeps/i386/i486/bits/atomic.h
index c748761..541d7a5 100644
--- a/sysdeps/i386/i486/bits/atomic.h
+++ b/sysdeps/i386/i486/bits/atomic.h
@@ -1,4 +1,4 @@
-/* Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
+/* Copyright (C) 2002, 2003, 2004, 2006 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.
@@ -168,34 +168,35 @@ typedef uintmax_t uatomic_max_t;
#define atomic_add(mem, value) \
- (void) ({ if (__builtin_constant_p (value) && (value) == 1) \
- atomic_increment (mem); \
- else if (__builtin_constant_p (value) && (value) == -1) \
- atomic_decrement (mem); \
- else if (sizeof (*mem) == 1) \
- __asm __volatile (LOCK_PREFIX "addb %b1, %0" \
- : "=m" (*mem) \
- : "ir" (value), "m" (*mem)); \
- else if (sizeof (*mem) == 2) \
- __asm __volatile (LOCK_PREFIX "addw %w1, %0" \
- : "=m" (*mem) \
- : "ir" (value), "m" (*mem)); \
- else if (sizeof (*mem) == 4) \
- __asm __volatile (LOCK_PREFIX "addl %1, %0" \
- : "=m" (*mem) \
- : "ir" (value), "m" (*mem)); \
- else \
- { \
- __typeof (value) __addval = (value); \
- __typeof (mem) __memp = (mem); \
- __typeof (*mem) __oldval = *__memp; \
- __typeof (*mem) __tmpval; \
- do \
- __tmpval = __oldval; \
- while ((__oldval = __arch_compare_and_exchange_val_64_acq \
- (__memp, __oldval + __addval, __oldval)) == __tmpval); \
- } \
- })
+ do { \
+ if (__builtin_constant_p (value) && (value) == 1) \
+ atomic_increment (mem); \
+ else if (__builtin_constant_p (value) && (value) == -1) \
+ atomic_decrement (mem); \
+ else if (sizeof (*mem) == 1) \
+ __asm __volatile (LOCK_PREFIX "addb %b1, %0" \
+ : "=m" (*mem) \
+ : "ir" (value), "m" (*mem)); \
+ else if (sizeof (*mem) == 2) \
+ __asm __volatile (LOCK_PREFIX "addw %w1, %0" \
+ : "=m" (*mem) \
+ : "ir" (value), "m" (*mem)); \
+ else if (sizeof (*mem) == 4) \
+ __asm __volatile (LOCK_PREFIX "addl %1, %0" \
+ : "=m" (*mem) \
+ : "ir" (value), "m" (*mem)); \
+ else \
+ { \
+ __typeof (value) __addval = (value); \
+ __typeof (mem) __memp = (mem); \
+ __typeof (*mem) __oldval = *__memp; \
+ __typeof (*mem) __tmpval; \
+ do \
+ __tmpval = __oldval; \
+ while ((__oldval = __arch_compare_and_exchange_val_64_acq \
+ (__memp, __oldval + __addval, __oldval)) == __tmpval); \
+ } \
+ } while (0)
#define atomic_add_negative(mem, value) \
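
The rewrite above, repeated for each macro in the rest of the patch, replaces the GNU statement expression `(void) ({ ... })` with the plain `do { ... } while (0)` idiom: since none of these macros produce a value, the statement form is sufficient and expands to exactly one C statement. A minimal sketch of why that shape matters for macros used like function calls (the macro and caller here are illustrative, not from glibc):

    /* A braces-only body breaks if/else: the ';' the caller writes after
       the '}' terminates the if, leaving a dangling else.  */
    #define INC2_BAD(p)  { ++(p)->a; ++(p)->b; }

    /* do { ... } while (0) absorbs the trailing ';' and stays one statement.  */
    #define INC2(p)      do { ++(p)->a; ++(p)->b; } while (0)

    struct pair { int a, b; };

    void
    demo (struct pair *p, int ready)
    {
      if (ready)
        INC2 (p);        /* with INC2_BAD this would not compile */
      else
        p->a = 0;
    }
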
@@ -237,29 +238,30 @@ typedef uintmax_t uatomic_max_t;
#define atomic_increment(mem) \
- (void) ({ if (sizeof (*mem) == 1) \
- __asm __volatile (LOCK_PREFIX "incb %b0" \
- : "=m" (*mem) \
- : "m" (*mem)); \
- else if (sizeof (*mem) == 2) \
- __asm __volatile (LOCK_PREFIX "incw %w0" \
- : "=m" (*mem) \
- : "m" (*mem)); \
- else if (sizeof (*mem) == 4) \
- __asm __volatile (LOCK_PREFIX "incl %0" \
- : "=m" (*mem) \
- : "m" (*mem)); \
- else \
- { \
- __typeof (mem) __memp = (mem); \
- __typeof (*mem) __oldval = *__memp; \
- __typeof (*mem) __tmpval; \
- do \
- __tmpval = __oldval; \
- while ((__oldval = __arch_compare_and_exchange_val_64_acq \
- (__memp, __oldval + 1, __oldval)) == __tmpval); \
- } \
- })
+ do { \
+ if (sizeof (*mem) == 1) \
+ __asm __volatile (LOCK_PREFIX "incb %b0" \
+ : "=m" (*mem) \
+ : "m" (*mem)); \
+ else if (sizeof (*mem) == 2) \
+ __asm __volatile (LOCK_PREFIX "incw %w0" \
+ : "=m" (*mem) \
+ : "m" (*mem)); \
+ else if (sizeof (*mem) == 4) \
+ __asm __volatile (LOCK_PREFIX "incl %0" \
+ : "=m" (*mem) \
+ : "m" (*mem)); \
+ else \
+ { \
+ __typeof (mem) __memp = (mem); \
+ __typeof (*mem) __oldval = *__memp; \
+ __typeof (*mem) __tmpval; \
+ do \
+ __tmpval = __oldval; \
+ while ((__oldval = __arch_compare_and_exchange_val_64_acq \
+ (__memp, __oldval + 1, __oldval)) == __tmpval); \
+ } \
+ } while (0)
#define atomic_increment_and_test(mem) \
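
For 8-byte operands, where the i486 has no single locked add/inc/dec instruction, these macros fall back on a compare-and-exchange loop around __arch_compare_and_exchange_val_64_acq. A conventional form of that retry pattern, sketched with GCC's __sync builtin standing in for the internal primitive (the builtin is an assumption of this sketch, not what the header uses):

    /* Retry until the CAS installs our increment without interference.
       __sync_val_compare_and_swap returns the value *mem held before
       the attempt; it equals 'prev' exactly when the swap succeeded.  */
    static inline void
    cas_increment64 (unsigned long long *mem)
    {
      unsigned long long prev, old = *mem;
      do
        {
          prev = old;
          old = __sync_val_compare_and_swap (mem, prev, prev + 1);
        }
      while (old != prev);
    }
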
@@ -282,29 +284,30 @@ typedef uintmax_t uatomic_max_t;
#define atomic_decrement(mem) \
- (void) ({ if (sizeof (*mem) == 1) \
- __asm __volatile (LOCK_PREFIX "decb %b0" \
- : "=m" (*mem) \
- : "m" (*mem)); \
- else if (sizeof (*mem) == 2) \
- __asm __volatile (LOCK_PREFIX "decw %w0" \
- : "=m" (*mem) \
- : "m" (*mem)); \
- else if (sizeof (*mem) == 4) \
- __asm __volatile (LOCK_PREFIX "decl %0" \
- : "=m" (*mem) \
- : "m" (*mem)); \
- else \
- { \
- __typeof (mem) __memp = (mem); \
- __typeof (*mem) __oldval = *__memp; \
- __typeof (*mem) __tmpval; \
- do \
- __tmpval = __oldval; \
- while ((__oldval = __arch_compare_and_exchange_val_64_acq \
- (__memp, __oldval - 1, __oldval)) == __tmpval); \
- } \
- })
+ do { \
+ if (sizeof (*mem) == 1) \
+ __asm __volatile (LOCK_PREFIX "decb %b0" \
+ : "=m" (*mem) \
+ : "m" (*mem)); \
+ else if (sizeof (*mem) == 2) \
+ __asm __volatile (LOCK_PREFIX "decw %w0" \
+ : "=m" (*mem) \
+ : "m" (*mem)); \
+ else if (sizeof (*mem) == 4) \
+ __asm __volatile (LOCK_PREFIX "decl %0" \
+ : "=m" (*mem) \
+ : "m" (*mem)); \
+ else \
+ { \
+ __typeof (mem) __memp = (mem); \
+ __typeof (*mem) __oldval = *__memp; \
+ __typeof (*mem) __tmpval; \
+ do \
+ __tmpval = __oldval; \
+ while ((__oldval = __arch_compare_and_exchange_val_64_acq \
+ (__memp, __oldval - 1, __oldval)) == __tmpval); \
+ } \
+ } while (0)
#define atomic_decrement_and_test(mem) \
@@ -327,21 +330,22 @@ typedef uintmax_t uatomic_max_t;
#define atomic_bit_set(mem, bit) \
- (void) ({ if (sizeof (*mem) == 1) \
- __asm __volatile (LOCK_PREFIX "orb %b2, %0" \
- : "=m" (*mem) \
- : "m" (*mem), "ir" (1 << (bit))); \
- else if (sizeof (*mem) == 2) \
- __asm __volatile (LOCK_PREFIX "orw %w2, %0" \
- : "=m" (*mem) \
- : "m" (*mem), "ir" (1 << (bit))); \
- else if (sizeof (*mem) == 4) \
- __asm __volatile (LOCK_PREFIX "orl %2, %0" \
- : "=m" (*mem) \
- : "m" (*mem), "ir" (1 << (bit))); \
- else \
- abort (); \
- })
+ do { \
+ if (sizeof (*mem) == 1) \
+ __asm __volatile (LOCK_PREFIX "orb %b2, %0" \
+ : "=m" (*mem) \
+ : "m" (*mem), "ir" (1 << (bit))); \
+ else if (sizeof (*mem) == 2) \
+ __asm __volatile (LOCK_PREFIX "orw %w2, %0" \
+ : "=m" (*mem) \
+ : "m" (*mem), "ir" (1 << (bit))); \
+ else if (sizeof (*mem) == 4) \
+ __asm __volatile (LOCK_PREFIX "orl %2, %0" \
+ : "=m" (*mem) \
+ : "m" (*mem), "ir" (1 << (bit))); \
+ else \
+ abort (); \
+ } while (0)
#define atomic_bit_test_set(mem, bit) \
@@ -364,3 +368,41 @@ typedef uintmax_t uatomic_max_t;
#define atomic_delay() asm ("rep; nop")
+
+
+#define atomic_and(mem, mask) \
+ do { \
+ if (sizeof (*mem) == 1) \
+ __asm __volatile (LOCK_PREFIX "andb %1, %b0" \
+ : "=m" (*mem) \
+ : "ir" (mask), "m" (*mem)); \
+ else if (sizeof (*mem) == 2) \
+ __asm __volatile (LOCK_PREFIX "andw %1, %w0" \
+ : "=m" (*mem) \
+ : "ir" (mask), "m" (*mem)); \
+ else if (sizeof (*mem) == 4) \
+ __asm __volatile (LOCK_PREFIX "andl %1, %0" \
+ : "=m" (*mem) \
+ : "ir" (mask), "m" (*mem)); \
+ else \
+ abort (); \
+ } while (0)
+
+
+#define atomic_or(mem, mask) \
+ do { \
+ if (sizeof (*mem) == 1) \
+ __asm __volatile (LOCK_PREFIX "orb %1, %b0" \
+ : "=m" (*mem) \
+ : "ir" (mask), "m" (*mem)); \
+ else if (sizeof (*mem) == 2) \
+ __asm __volatile (LOCK_PREFIX "orw %1, %w0" \
+ : "=m" (*mem) \
+ : "ir" (mask), "m" (*mem)); \
+ else if (sizeof (*mem) == 4) \
+ __asm __volatile (LOCK_PREFIX "orl %1, %0" \
+ : "=m" (*mem) \
+ : "ir" (mask), "m" (*mem)); \
+ else \
+ abort (); \
+ } while (0)
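
Because sizeof (*mem) is a compile-time constant, the if/else chains in atomic_and and atomic_or collapse to a single locked instruction of the matching width at each call site; operand sizes other than 1, 2, and 4 bytes reach the abort () branch at run time. A short illustration (caller code is a sketch, not from the commit):

    /* Sketch: the width dispatch happens at compile time.  */
    static unsigned char      c;  /* atomic_or (&c, 1) -> lock orb */
    static unsigned short     s;  /* atomic_or (&s, 1) -> lock orw */
    static unsigned int       u;  /* atomic_or (&u, 1) -> lock orl */

    void
    touch_all (void)
    {
      atomic_or (&c, 1);
      atomic_or (&s, 1);
      atomic_or (&u, 1);
      /* An 8-byte operand would compile but abort () when executed.  */
    }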