diff options
author | H.J. Lu <hjl.tools@gmail.com> | 2018-05-23 11:25:20 -0700 |
---|---|---|
committer | H.J. Lu <hjl.tools@gmail.com> | 2018-05-23 11:25:42 -0700 |
commit | 727b38df054b26d7410c275930f72135061ab9e1 (patch) | |
tree | fb74b6f63c81f68de29c0438c0666719a556a6a2 | |
parent | 2834fb46109a8a7c9161eb8d0d9ddc707a717144 (diff) | |
download | glibc-727b38df054b26d7410c275930f72135061ab9e1.zip glibc-727b38df054b26d7410c275930f72135061ab9e1.tar.gz glibc-727b38df054b26d7410c275930f72135061ab9e1.tar.bz2 |
x86-64: Skip zero length in __mem[pcpy|move|set]_erms
This patch skips zero length in __mempcpy_erms, __memmove_erms and
__memset_erms.
Tested on x86-64.
* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
(__mempcpy_erms): Skip zero length.
(__memmove_erms): Likewise.
* sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
(__memset_erms): Likewise.
-rw-r--r-- | ChangeLog | 8 | ||||
-rw-r--r-- | sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S | 6 | ||||
-rw-r--r-- | sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S | 5 |
3 files changed, 19 insertions, 0 deletions
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,11 @@
+2018-05-23  H.J. Lu  <hongjiu.lu@intel.com>
+
+	* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
+	(__mempcpy_erms): Skip zero length.
+	(__memmove_erms): Likewise.
+	* sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
+	(__memset_erms): Likewise.
+
 2018-05-23  Joseph Myers  <joseph@codesourcery.com>
 
 	* sysdeps/alpha/Implies: Remove alpha/soft-fp.
diff --git a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
index 1404131..e2ede45 100644
--- a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
@@ -156,6 +156,9 @@ END (__mempcpy_chk_erms)
 /* Only used to measure performance of REP MOVSB.  */
 ENTRY (__mempcpy_erms)
 	movq	%rdi, %rax
+	/* Skip zero length.  */
+	testq	%rdx, %rdx
+	jz	2f
 	addq	%rdx, %rax
 	jmp	L(start_movsb)
 END (__mempcpy_erms)
@@ -167,6 +170,9 @@ END (__memmove_chk_erms)
 
 ENTRY (__memmove_erms)
 	movq	%rdi, %rax
+	/* Skip zero length.  */
+	testq	%rdx, %rdx
+	jz	2f
 L(start_movsb):
 	movq	%rdx, %rcx
 	cmpq	%rsi, %rdi
diff --git a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
index 0a827f3..dc9cb88 100644
--- a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
@@ -128,6 +128,11 @@ END (__memset_chk_erms)
 
 /* Only used to measure performance of REP STOSB.  */
 ENTRY (__memset_erms)
+	/* Skip zero length.  */
+	testq	%rdx, %rdx
+	jnz	L(stosb)
+	movq	%rdi, %rax
+	ret
 # else
 /* Provide a hidden symbol to debugger.  */
 	.hidden	MEMSET_SYMBOL (__memset, erms)