-rw-r--r--  sysdeps/aarch64/__longjmp.S  2
-rw-r--r--  sysdeps/aarch64/dl-tlsdesc.S  6
-rw-r--r--  sysdeps/aarch64/memchr.S  4
-rw-r--r--  sysdeps/aarch64/memcmp.S  6
-rw-r--r--  sysdeps/aarch64/memcpy.S  12
-rw-r--r--  sysdeps/aarch64/memrchr.S  4
-rw-r--r--  sysdeps/aarch64/memset.S  4
-rw-r--r--  sysdeps/aarch64/multiarch/memchr_nosimd.S  4
-rw-r--r--  sysdeps/aarch64/multiarch/memcpy_advsimd.S  12
-rw-r--r--  sysdeps/aarch64/multiarch/memcpy_falkor.S  12
-rw-r--r--  sysdeps/aarch64/multiarch/memcpy_thunderx.S  12
-rw-r--r--  sysdeps/aarch64/multiarch/memcpy_thunderx2.S  12
-rw-r--r--  sysdeps/aarch64/multiarch/memset_base64.S  4
-rw-r--r--  sysdeps/aarch64/multiarch/memset_kunpeng.S  4
-rw-r--r--  sysdeps/aarch64/multiarch/strlen_asimd.S  2
-rw-r--r--  sysdeps/aarch64/setjmp.S  2
-rw-r--r--  sysdeps/aarch64/strchr.S  2
-rw-r--r--  sysdeps/aarch64/strchrnul.S  2
-rw-r--r--  sysdeps/aarch64/strcmp.S  4
-rw-r--r--  sysdeps/aarch64/strcpy.S  4
-rw-r--r--  sysdeps/aarch64/strlen.S  3
-rw-r--r--  sysdeps/aarch64/strnlen.S  5
-rw-r--r--  sysdeps/aarch64/strrchr.S  2
-rw-r--r--  sysdeps/aarch64/sysdep.h  6
-rw-r--r--  sysdeps/unix/sysv/linux/aarch64/clone.S  13
-rw-r--r--  sysdeps/unix/sysv/linux/aarch64/getcontext.S  2
-rw-r--r--  sysdeps/unix/sysv/linux/aarch64/setcontext.S  2
-rw-r--r--  sysdeps/unix/sysv/linux/aarch64/swapcontext.S  2
28 files changed, 74 insertions(+), 75 deletions(-)
diff --git a/sysdeps/aarch64/__longjmp.S b/sysdeps/aarch64/__longjmp.S
index f906077..601dbff 100644
--- a/sysdeps/aarch64/__longjmp.S
+++ b/sysdeps/aarch64/__longjmp.S
@@ -46,7 +46,7 @@ ENTRY (__longjmp)
cfi_offset(d14, JB_D14<<3)
cfi_offset(d15, JB_D15<<3)
- DELOUSE (0)
+ PTR_ARG (0)
ldp x19, x20, [x0, #JB_X19<<3]
ldp x21, x22, [x0, #JB_X21<<3]
diff --git a/sysdeps/aarch64/dl-tlsdesc.S b/sysdeps/aarch64/dl-tlsdesc.S
index db8a064..1666a1d 100644
--- a/sysdeps/aarch64/dl-tlsdesc.S
+++ b/sysdeps/aarch64/dl-tlsdesc.S
@@ -75,7 +75,7 @@
.align 2
_dl_tlsdesc_return:
BTI_C
- DELOUSE (0)
+ PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
RET
cfi_endproc
@@ -99,7 +99,7 @@ _dl_tlsdesc_undefweak:
BTI_C
str x1, [sp, #-16]!
cfi_adjust_cfa_offset (16)
- DELOUSE (0)
+ PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE]
mrs x1, tpidr_el0
sub PTR_REG (0), PTR_REG (0), PTR_REG (1)
@@ -145,7 +145,7 @@ _dl_tlsdesc_undefweak:
.align 2
_dl_tlsdesc_dynamic:
BTI_C
- DELOUSE (0)
+ PTR_ARG (0)
/* Save just enough registers to support fast path, if we fall
into slow path we will save additional registers. */
diff --git a/sysdeps/aarch64/memchr.S b/sysdeps/aarch64/memchr.S
index 23f30f6..13db282 100644
--- a/sysdeps/aarch64/memchr.S
+++ b/sysdeps/aarch64/memchr.S
@@ -61,8 +61,8 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (MEMCHR)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
bic src, srcin, 15
cbz cntin, L(nomatch)
ld1 {vdata.16b}, [src]
diff --git a/sysdeps/aarch64/memcmp.S b/sysdeps/aarch64/memcmp.S
index 827f54f..5176aea 100644
--- a/sysdeps/aarch64/memcmp.S
+++ b/sysdeps/aarch64/memcmp.S
@@ -42,9 +42,9 @@
#define tmp2 x8
ENTRY_ALIGN (memcmp, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
subs limit, limit, 16
b.lo L(less16)
diff --git a/sysdeps/aarch64/memcpy.S b/sysdeps/aarch64/memcpy.S
index e0b4c45..cfc1c75 100644
--- a/sysdeps/aarch64/memcpy.S
+++ b/sysdeps/aarch64/memcpy.S
@@ -73,9 +73,9 @@
*/
ENTRY_ALIGN (MEMCPY, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
@@ -209,9 +209,9 @@ END (MEMCPY)
libc_hidden_builtin_def (MEMCPY)
ENTRY_ALIGN (MEMMOVE, 4)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
diff --git a/sysdeps/aarch64/memrchr.S b/sysdeps/aarch64/memrchr.S
index c25f430..bdd899f 100644
--- a/sysdeps/aarch64/memrchr.S
+++ b/sysdeps/aarch64/memrchr.S
@@ -59,8 +59,8 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (__memrchr)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
add end, srcin, cntin
sub endm1, end, 1
bic src, endm1, 15
diff --git a/sysdeps/aarch64/memset.S b/sysdeps/aarch64/memset.S
index ac577f1..7a472fd 100644
--- a/sysdeps/aarch64/memset.S
+++ b/sysdeps/aarch64/memset.S
@@ -31,8 +31,8 @@
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
dup v0.16B, valw
add dstend, dstin, count
diff --git a/sysdeps/aarch64/multiarch/memchr_nosimd.S b/sysdeps/aarch64/multiarch/memchr_nosimd.S
index 41ce10e..3045b49 100644
--- a/sysdeps/aarch64/multiarch/memchr_nosimd.S
+++ b/sysdeps/aarch64/multiarch/memchr_nosimd.S
@@ -64,8 +64,8 @@
ENTRY_ALIGN (MEMCHR, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
/* Do not dereference srcin if no bytes to compare. */
cbz cntin, L(none_chr)
diff --git a/sysdeps/aarch64/multiarch/memcpy_advsimd.S b/sysdeps/aarch64/multiarch/memcpy_advsimd.S
index 48bb6d7..9d39ad8 100644
--- a/sysdeps/aarch64/multiarch/memcpy_advsimd.S
+++ b/sysdeps/aarch64/multiarch/memcpy_advsimd.S
@@ -64,9 +64,9 @@
from the end. */
ENTRY (__memcpy_simd)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
@@ -181,9 +181,9 @@ libc_hidden_builtin_def (__memcpy_simd)
ENTRY (__memmove_simd)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
add dstend, dstin, count
diff --git a/sysdeps/aarch64/multiarch/memcpy_falkor.S b/sysdeps/aarch64/multiarch/memcpy_falkor.S
index 8dfc2c7..bebc16b 100644
--- a/sysdeps/aarch64/multiarch/memcpy_falkor.S
+++ b/sysdeps/aarch64/multiarch/memcpy_falkor.S
@@ -73,9 +73,9 @@
#if IS_IN (libc)
ENTRY_ALIGN (__memcpy_falkor, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
cmp count, 32
add srcend, src, count
@@ -218,9 +218,9 @@ libc_hidden_builtin_def (__memcpy_falkor)
ENTRY_ALIGN (__memmove_falkor, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
cmp count, 32
add srcend, src, count
diff --git a/sysdeps/aarch64/multiarch/memcpy_thunderx.S b/sysdeps/aarch64/multiarch/memcpy_thunderx.S
index e940757..c04b173 100644
--- a/sysdeps/aarch64/multiarch/memcpy_thunderx.S
+++ b/sysdeps/aarch64/multiarch/memcpy_thunderx.S
@@ -81,9 +81,9 @@
ENTRY_ALIGN (MEMMOVE, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
sub tmp1, dstin, src
cmp count, 96
@@ -95,9 +95,9 @@ END (MEMMOVE)
libc_hidden_builtin_def (MEMMOVE)
ENTRY (MEMCPY)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
prfm PLDL1KEEP, [src]
add srcend, src, count
diff --git a/sysdeps/aarch64/multiarch/memcpy_thunderx2.S b/sysdeps/aarch64/multiarch/memcpy_thunderx2.S
index 68e9945..0096c4c 100644
--- a/sysdeps/aarch64/multiarch/memcpy_thunderx2.S
+++ b/sysdeps/aarch64/multiarch/memcpy_thunderx2.S
@@ -97,9 +97,9 @@
ENTRY_ALIGN (MEMMOVE, 6)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
cmp count, 16
@@ -127,9 +127,9 @@ libc_hidden_builtin_def (MEMMOVE)
.p2align 4
ENTRY (MEMCPY)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ SIZE_ARG (2)
add srcend, src, count
cmp count, 16
diff --git a/sysdeps/aarch64/multiarch/memset_base64.S b/sysdeps/aarch64/multiarch/memset_base64.S
index 8f85cd1..aac05be 100644
--- a/sysdeps/aarch64/multiarch/memset_base64.S
+++ b/sysdeps/aarch64/multiarch/memset_base64.S
@@ -36,8 +36,8 @@
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
bfi valw, valw, 8, 8
bfi valw, valw, 16, 16
diff --git a/sysdeps/aarch64/multiarch/memset_kunpeng.S b/sysdeps/aarch64/multiarch/memset_kunpeng.S
index 8e051d4..793a659 100644
--- a/sysdeps/aarch64/multiarch/memset_kunpeng.S
+++ b/sysdeps/aarch64/multiarch/memset_kunpeng.S
@@ -31,8 +31,8 @@
ENTRY_ALIGN (MEMSET, 6)
- DELOUSE (0)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (2)
dup v0.16B, valw
add dstend, dstin, count
diff --git a/sysdeps/aarch64/multiarch/strlen_asimd.S b/sysdeps/aarch64/multiarch/strlen_asimd.S
index bc5a4ea..154a288 100644
--- a/sysdeps/aarch64/multiarch/strlen_asimd.S
+++ b/sysdeps/aarch64/multiarch/strlen_asimd.S
@@ -86,7 +86,7 @@
character, return the length, if not, continue in the main loop. */
ENTRY (__strlen_asimd)
- DELOUSE (0)
+ PTR_ARG (0)
and tmp1, srcin, MIN_PAGE_SIZE - 1
cmp tmp1, MIN_PAGE_SIZE - 32
diff --git a/sysdeps/aarch64/setjmp.S b/sysdeps/aarch64/setjmp.S
index 28fdd3f..57e5982 100644
--- a/sysdeps/aarch64/setjmp.S
+++ b/sysdeps/aarch64/setjmp.S
@@ -33,7 +33,7 @@ END (_setjmp)
libc_hidden_def (_setjmp)
ENTRY (__sigsetjmp)
- DELOUSE (0)
+ PTR_ARG (0)
1:
stp x19, x20, [x0, #JB_X19<<3]
diff --git a/sysdeps/aarch64/strchr.S b/sysdeps/aarch64/strchr.S
index fd1b941..5d21ca7 100644
--- a/sysdeps/aarch64/strchr.S
+++ b/sysdeps/aarch64/strchr.S
@@ -56,7 +56,7 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (strchr)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]
diff --git a/sysdeps/aarch64/strchrnul.S b/sysdeps/aarch64/strchrnul.S
index 1ae4598..cc9d2b9 100644
--- a/sysdeps/aarch64/strchrnul.S
+++ b/sysdeps/aarch64/strchrnul.S
@@ -54,7 +54,7 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (__strchrnul)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src]
diff --git a/sysdeps/aarch64/strcmp.S b/sysdeps/aarch64/strcmp.S
index 77d7218..63b8390 100644
--- a/sysdeps/aarch64/strcmp.S
+++ b/sysdeps/aarch64/strcmp.S
@@ -62,8 +62,8 @@
NUL too in big-endian, byte-reverse the data before the NUL check. */
ENTRY(strcmp)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
+ PTR_ARG (1)
sub off2, src2, src1
mov zeroones, REP8_01
and tmp, src1, 7
diff --git a/sysdeps/aarch64/strcpy.S b/sysdeps/aarch64/strcpy.S
index 80b16a0..2926b6d 100644
--- a/sysdeps/aarch64/strcpy.S
+++ b/sysdeps/aarch64/strcpy.S
@@ -73,8 +73,8 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (STRCPY)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
+ PTR_ARG (1)
bic src, srcin, 15
mov wtmp, 0xf00f
ld1 {vdata.16b}, [src]
diff --git a/sysdeps/aarch64/strlen.S b/sysdeps/aarch64/strlen.S
index e314fff..a4a3092 100644
--- a/sysdeps/aarch64/strlen.S
+++ b/sysdeps/aarch64/strlen.S
@@ -54,8 +54,7 @@
string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (STRLEN)
- DELOUSE (0)
- DELOUSE (1)
+ PTR_ARG (0)
bic src, srcin, 15
mov wtmp, 0xf00f
ld1 {vdata.16b}, [src]
diff --git a/sysdeps/aarch64/strnlen.S b/sysdeps/aarch64/strnlen.S
index 086a5c7..5699105 100644
--- a/sysdeps/aarch64/strnlen.S
+++ b/sysdeps/aarch64/strnlen.S
@@ -55,9 +55,8 @@
#define REP8_80 0x8080808080808080
ENTRY_ALIGN_AND_PAD (__strnlen, 6, 9)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
+ PTR_ARG (0)
+ SIZE_ARG (1)
cbz limit, L(hit_limit)
mov zeroones, #REP8_01
bic src, srcin, #15
diff --git a/sysdeps/aarch64/strrchr.S b/sysdeps/aarch64/strrchr.S
index a9b2bf4..878fa16 100644
--- a/sysdeps/aarch64/strrchr.S
+++ b/sysdeps/aarch64/strrchr.S
@@ -59,7 +59,7 @@
if the relevant byte matched the NUL end of string. */
ENTRY(strrchr)
- DELOUSE (0)
+ PTR_ARG (0)
bic src, srcin, 15
dup vrepchr.16b, chrin
mov wtmp, 0x3003
diff --git a/sysdeps/aarch64/sysdep.h b/sysdeps/aarch64/sysdep.h
index 2d802db..e3e2f5f 100644
--- a/sysdeps/aarch64/sysdep.h
+++ b/sysdeps/aarch64/sysdep.h
@@ -25,12 +25,14 @@
# define AARCH64_R(NAME) R_AARCH64_ ## NAME
# define PTR_REG(n) x##n
# define PTR_LOG_SIZE 3
-# define DELOUSE(n)
+# define PTR_ARG(n)
+# define SIZE_ARG(n)
#else
# define AARCH64_R(NAME) R_AARCH64_P32_ ## NAME
# define PTR_REG(n) w##n
# define PTR_LOG_SIZE 2
-# define DELOUSE(n) mov w##n, w##n
+# define PTR_ARG(n) mov w##n, w##n
+# define SIZE_ARG(n) mov w##n, w##n
#endif
#define PTR_SIZE (1<<PTR_LOG_SIZE)
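
This sysdep.h hunk is the substance of the rename: in the LP64 branch both new macros expand to nothing, while in the ILP32 (#else) branch they expand to a self-move of the W register, which on AArch64 zero-extends into the full X register and so clears the upper 32 bits that the ILP32 ABI does not guarantee for pointer and size_t arguments. A minimal sketch of what the memcpy.S entry above becomes in an ILP32 build (illustrative expansion only, using the definitions from this hunk):

    ENTRY_ALIGN (MEMCPY, 6)
        PTR_ARG (0)     /* dstin: expands to  mov w0, w0  */
        PTR_ARG (1)     /* src:   expands to  mov w1, w1  */
        SIZE_ARG (2)    /* count: expands to  mov w2, w2  */

In an LP64 build all three lines expand to nothing; the two names only document whether the zero-extension protects a pointer or a size argument, even though both macros expand identically in the ILP32 branch.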
diff --git a/sysdeps/unix/sysv/linux/aarch64/clone.S b/sysdeps/unix/sysv/linux/aarch64/clone.S
index 2b14106..802b238 100644
--- a/sysdeps/unix/sysv/linux/aarch64/clone.S
+++ b/sysdeps/unix/sysv/linux/aarch64/clone.S
@@ -33,13 +33,12 @@
*/
.text
ENTRY(__clone)
- DELOUSE (0)
- DELOUSE (1)
- DELOUSE (2)
- DELOUSE (3)
- DELOUSE (4)
- DELOUSE (5)
- DELOUSE (6)
+ PTR_ARG (0)
+ PTR_ARG (1)
+ PTR_ARG (3)
+ PTR_ARG (4)
+ PTR_ARG (5)
+ PTR_ARG (6)
/* Save args for the child. */
mov x10, x0
mov x11, x2
diff --git a/sysdeps/unix/sysv/linux/aarch64/getcontext.S b/sysdeps/unix/sysv/linux/aarch64/getcontext.S
index 8571556..1fe119a 100644
--- a/sysdeps/unix/sysv/linux/aarch64/getcontext.S
+++ b/sysdeps/unix/sysv/linux/aarch64/getcontext.S
@@ -30,7 +30,7 @@
.text
ENTRY(__getcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* The saved context will return to the getcontext() call point
with a return value of 0 */
str xzr, [x0, oX0 + 0 * SZREG]
diff --git a/sysdeps/unix/sysv/linux/aarch64/setcontext.S b/sysdeps/unix/sysv/linux/aarch64/setcontext.S
index 61fb813..01b9981 100644
--- a/sysdeps/unix/sysv/linux/aarch64/setcontext.S
+++ b/sysdeps/unix/sysv/linux/aarch64/setcontext.S
@@ -34,7 +34,7 @@
.text
ENTRY (__setcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* Save a copy of UCP. */
mov x9, x0
diff --git a/sysdeps/unix/sysv/linux/aarch64/swapcontext.S b/sysdeps/unix/sysv/linux/aarch64/swapcontext.S
index f8c66f0..2c1a4af 100644
--- a/sysdeps/unix/sysv/linux/aarch64/swapcontext.S
+++ b/sysdeps/unix/sysv/linux/aarch64/swapcontext.S
@@ -27,7 +27,7 @@
.text
ENTRY(__swapcontext)
- DELOUSE (0)
+ PTR_ARG (0)
/* Set the value returned when swapcontext() returns in this context.
And set up x1 to become the return address of the caller, so we
can return there with a normal RET instead of an indirect jump. */