about summary refs log tree commit diff
path: root/sysdeps
diff options
context:
space:
mode:
Diffstat (limited to 'sysdeps')
-rw-r--r--  sysdeps/aarch64/multiarch/memset_oryon1.S | 26
-rw-r--r--  sysdeps/x86_64/multiarch/strncat-evex.S   |  4
2 files changed, 2 insertions, 28 deletions
diff --git a/sysdeps/aarch64/multiarch/memset_oryon1.S b/sysdeps/aarch64/multiarch/memset_oryon1.S
index 6fa28a9..b63c16e 100644
--- a/sysdeps/aarch64/multiarch/memset_oryon1.S
+++ b/sysdeps/aarch64/multiarch/memset_oryon1.S
@@ -93,8 +93,6 @@ L(set_long):
cmp count, 256
ccmp valw, 0, 0, cs
b.eq L(try_zva)
- cmp count, #32768
- b.hi L(set_long_with_nontemp)
/* Small-size or non-zero memset does not use DC ZVA. */
sub count, dstend, dst
@@ -117,30 +115,6 @@ L(set_long):
stp val, val, [dstend, -16]
ret
-L(set_long_with_nontemp):
- /* Small-size or non-zero memset does not use DC ZVA. */
- sub count, dstend, dst
-
- /* Adjust count and bias for loop. By subtracting extra 1 from count,
- it is easy to use tbz instruction to check whether loop tailing
- count is less than 33 bytes, so as to bypass 2 unnecessary stps. */
- sub count, count, 64+16+1
-
-1: stnp val, val, [dst, 16]
- stnp val, val, [dst, 32]
- stnp val, val, [dst, 48]
- stnp val, val, [dst, 64]
- add dst, dst, #64
- subs count, count, 64
- b.hs 1b
-
- tbz count, 5, 1f /* Remaining count is less than 33 bytes? */
- stnp val, val, [dst, 16]
- stnp val, val, [dst, 32]
-1: stnp val, val, [dstend, -32]
- stnp val, val, [dstend, -16]
- ret
-
L(try_zva):
/* Write the first and last 64 byte aligned block using stp rather
than using DC ZVA as it is faster. */
diff --git a/sysdeps/x86_64/multiarch/strncat-evex.S b/sysdeps/x86_64/multiarch/strncat-evex.S
index e6717bd..fbec351 100644
--- a/sysdeps/x86_64/multiarch/strncat-evex.S
+++ b/sysdeps/x86_64/multiarch/strncat-evex.S
@@ -44,7 +44,7 @@
# define VPCMPEQ vpcmpeqd
# define CHAR_SIZE 4
-# define REP_MOVS rep movsd
+# define REP_MOVS rep movsl
# define VMASK_REG VR10
# define FIND_FIRST_ONE(src, dst) movl $CHAR_PER_VEC, %dst; bsf %src, %dst
@@ -514,7 +514,7 @@ L(page_cross_small):
cmova %edx, %ecx
# ifdef USE_AS_WCSCPY
- rep movsd
+ rep movsl
# else
rep movsb
# endif