author     H.J. Lu <hjl.tools@gmail.com>    2017-08-04 12:27:00 -0700
committer  H.J. Lu <hjl.tools@gmail.com>    2017-08-04 12:27:18 -0700
commit     fc11ff8d0acebbf0802854156d45cb1d8bb7c079
tree       f8cdc12b7c6a47541b3afcf1b26394de8cfd199f
parent     19f1a11e7ea2a5082bae9d9a079338c5658ba954
x86-64: Use IFUNC memcpy and mempcpy in libc.a
Since apply_irel is called before memcpy and mempcpy are first used in a
static executable, the IFUNC memcpy and mempcpy can also be used in libc.a.
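
Background on the mechanism this relies on: an IFUNC symbol is bound through a
resolver function when its IRELATIVE relocation is applied, and in a statically
linked program apply_irel runs during early startup, so by the time ordinary
code calls memcpy or mempcpy the chosen implementation is already in place.
Below is a minimal sketch of a GNU indirect function in C, assuming GCC on
x86-64; the my_memcpy* names and the resolver logic are hypothetical and only
illustrate the mechanism, not glibc's actual selector in
sysdeps/x86_64/multiarch/memcpy.c.

    /* Sketch: GNU indirect function (IFUNC) dispatch.  */
    #include <stddef.h>
    #include <string.h>

    /* Stand-ins for two implementations; glibc's real variants are the
       __memcpy_*_unaligned_erms, SSSE3 and AVX-512 functions.  */
    static void *
    my_memcpy_baseline (void *dst, const void *src, size_t n)
    {
      return memcpy (dst, src, n);	/* generic fallback */
    }

    static void *
    my_memcpy_avx2 (void *dst, const void *src, size_t n)
    {
      return memcpy (dst, src, n);	/* stand-in for a CPU-specific version */
    }

    /* The resolver runs once, when the IRELATIVE relocation for my_memcpy
       is applied: by ld.so for a shared link, by apply_irel during early
       startup for a static link, i.e. before any ordinary call.  */
    static void *
    (*resolve_my_memcpy (void)) (void *, const void *, size_t)
    {
      __builtin_cpu_init ();
      return __builtin_cpu_supports ("avx2")
	     ? my_memcpy_avx2 : my_memcpy_baseline;
    }

    /* Calls to my_memcpy go through the address the resolver returned.  */
    void *my_memcpy (void *dst, const void *src, size_t n)
	 __attribute__ ((ifunc ("resolve_my_memcpy")));

glibc's real resolvers pick among the variants registered in
ifunc-impl-list.c; the sketch only shows why the binding is already done
before the first memcpy/mempcpy call in a static binary.
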
* sysdeps/x86_64/memmove.S (MEMCPY_SYMBOL): Don't check SHARED.
(MEMPCPY_SYMBOL): Likewise.
* sysdeps/x86_64/multiarch/ifunc-impl-list.c
(__libc_ifunc_impl_list): Test memcpy and mempcpy in libc.a.
* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Also include
in libc.a.
* sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise.
* sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S:
Likewise.
* sysdeps/x86_64/multiarch/memcpy.c: Also include in libc.a.
(__hidden_ver1): Don't use in libc.a.
* sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
(__mempcpy): Don't create a weak alias in libc.a.
* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S: Support
libc.a.
* sysdeps/x86_64/multiarch/mempcpy.c: Also include in libc.a.
(__hidden_ver1): Don't use in libc.a.
-rw-r--r--  ChangeLog                                                |  20
-rw-r--r--  sysdeps/x86_64/memmove.S                                 |   4
-rw-r--r--  sysdeps/x86_64/multiarch/ifunc-impl-list.c               |   4
-rw-r--r--  sysdeps/x86_64/multiarch/memcpy-ssse3-back.S             |   5
-rw-r--r--  sysdeps/x86_64/multiarch/memcpy-ssse3.S                  |   5
-rw-r--r--  sysdeps/x86_64/multiarch/memcpy.c                        |   8
-rw-r--r--  sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S  |   6
-rw-r--r--  sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S   |   4
-rw-r--r--  sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S    |  18
-rw-r--r--  sysdeps/x86_64/multiarch/mempcpy.c                       |   8
10 files changed, 44 insertions, 38 deletions
diff --git a/ChangeLog b/ChangeLog
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,25 @@
 2017-08-04  H.J. Lu  <hongjiu.lu@intel.com>
 
+	* sysdeps/x86_64/memmove.S (MEMCPY_SYMBOL): Don't check SHARED.
+	(MEMPCPY_SYMBOL): Likewise.
+	* sysdeps/x86_64/multiarch/ifunc-impl-list.c
+	(__libc_ifunc_impl_list): Test memcpy and mempcpy in libc.a.
+	* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Also include
+	in libc.a.
+	* sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise.
+	* sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S:
+	Likewise.
+	* sysdeps/x86_64/multiarch/memcpy.c: Also include in libc.a.
+	(__hidden_ver1): Don't use in libc.a.
+	* sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
+	(__mempcpy): Don't create a weak alias in libc.a.
+	* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S: Support
+	libc.a.
+	* sysdeps/x86_64/multiarch/mempcpy.c: Also include in libc.a.
+	(__hidden_ver1): Don't use in libc.a.
+
+2017-08-04  H.J. Lu  <hongjiu.lu@intel.com>
+
 	* config.make.in (have-insert): New.
 	* configure.ac (libc_cv_insert): New.  Set to yes if linker
 	supports INSERT in linker script.
diff --git a/sysdeps/x86_64/memmove.S b/sysdeps/x86_64/memmove.S
index 5bbae990..24efe83 100644
--- a/sysdeps/x86_64/memmove.S
+++ b/sysdeps/x86_64/memmove.S
@@ -29,7 +29,7 @@
 #define SECTION(p)		p
 
 #ifdef USE_MULTIARCH
-# if !defined SHARED || !IS_IN (libc)
+# if !IS_IN (libc)
 #  define MEMCPY_SYMBOL(p,s)		memcpy
 # endif
 #else
@@ -39,7 +39,7 @@
 #  define MEMCPY_SYMBOL(p,s)		memcpy
 # endif
 #endif
-#if !defined SHARED || !defined USE_MULTIARCH || !IS_IN (libc)
+#if !defined USE_MULTIARCH || !IS_IN (libc)
 # define MEMPCPY_SYMBOL(p,s)		__mempcpy
 #endif
 #ifndef MEMMOVE_SYMBOL
diff --git a/sysdeps/x86_64/multiarch/ifunc-impl-list.c b/sysdeps/x86_64/multiarch/ifunc-impl-list.c
index b761e70..df5ca4c 100644
--- a/sysdeps/x86_64/multiarch/ifunc-impl-list.c
+++ b/sysdeps/x86_64/multiarch/ifunc-impl-list.c
@@ -447,6 +447,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 			      __memcpy_chk_sse2_unaligned_erms)
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk, 1,
 			      __memcpy_chk_erms))
+#endif
 
   /* Support sysdeps/x86_64/multiarch/memcpy.c.  */
   IFUNC_IMPL (i, name, memcpy,
@@ -474,6 +475,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 			      __memcpy_sse2_unaligned_erms)
 	      IFUNC_IMPL_ADD (array, i, memcpy, 1, __memcpy_erms))
 
+#ifdef SHARED
   /* Support sysdeps/x86_64/multiarch/mempcpy_chk.c.  */
   IFUNC_IMPL (i, name, __mempcpy_chk,
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
@@ -503,6 +505,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 			      __mempcpy_chk_sse2_unaligned_erms)
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk, 1,
 			      __mempcpy_chk_erms))
+#endif
 
   /* Support sysdeps/x86_64/multiarch/mempcpy.c.  */
   IFUNC_IMPL (i, name, mempcpy,
@@ -539,6 +542,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 			      __strncmp_ssse3)
 	      IFUNC_IMPL_ADD (array, i, strncmp, 1, __strncmp_sse2))
 
+#ifdef SHARED
   /* Support sysdeps/x86_64/multiarch/wmemset_chk.c.  */
   IFUNC_IMPL (i, name, __wmemset_chk,
 	      IFUNC_IMPL_ADD (array, i, __wmemset_chk, 1,
diff --git a/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S b/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
index 4e060a2..ce53993 100644
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
@@ -19,10 +19,7 @@
 
 #include <sysdep.h>
 
-#if IS_IN (libc) \
-    && (defined SHARED \
-	|| defined USE_AS_MEMMOVE \
-	|| !defined USE_MULTIARCH)
+#if IS_IN (libc)
 
 #include "asm-syntax.h"
 
diff --git a/sysdeps/x86_64/multiarch/memcpy-ssse3.S b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
index f3ea52a..0ac4c21 100644
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3.S
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
@@ -19,10 +19,7 @@
 
 #include <sysdep.h>
 
-#if IS_IN (libc) \
-    && (defined SHARED \
-	|| defined USE_AS_MEMMOVE \
-	|| !defined USE_MULTIARCH)
+#if IS_IN (libc)
 
 #include "asm-syntax.h"
 
diff --git a/sysdeps/x86_64/multiarch/memcpy.c b/sysdeps/x86_64/multiarch/memcpy.c
index 6a2d353..273bc61 100644
--- a/sysdeps/x86_64/multiarch/memcpy.c
+++ b/sysdeps/x86_64/multiarch/memcpy.c
@@ -17,10 +17,8 @@
    License along with the GNU C Library; if not, see
    <http://www.gnu.org/licenses/>.  */
 
-/* Define multiple versions only for the definition in lib and for
-   DSO.  In static binaries we need memcpy before the initialization
-   happened.  */
-#if defined SHARED && IS_IN (libc)
+/* Define multiple versions only for the definition in libc.  */
+#if IS_IN (libc)
 # define memcpy __redirect_memcpy
 # include <string.h>
 # undef memcpy
@@ -31,8 +29,10 @@
 libc_ifunc_redirected (__redirect_memcpy, __new_memcpy,
 		       IFUNC_SELECTOR ());
 
+# ifdef SHARED
 __hidden_ver1 (__new_memcpy, __GI_memcpy, __redirect_memcpy)
   __attribute__ ((visibility ("hidden")));
+# endif
 
 # include <shlib-compat.h>
 versioned_symbol (libc, __new_memcpy, memcpy, GLIBC_2_14);
diff --git a/sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S b/sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S
index f3ef105..7ca365a 100644
--- a/sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S
+++ b/sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S
@@ -23,7 +23,6 @@
 # include "asm-syntax.h"
 
 	.section .text.avx512,"ax",@progbits
-# if defined SHARED && !defined USE_AS_MEMPCPY && !defined USE_AS_MEMMOVE
 ENTRY (__mempcpy_chk_avx512_no_vzeroupper)
 	cmpq	%rdx, %rcx
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
@@ -34,14 +33,11 @@ ENTRY (__mempcpy_avx512_no_vzeroupper)
 	addq	%rdx, %rax
 	jmp	L(start)
 END (__mempcpy_avx512_no_vzeroupper)
-# endif
 
-# ifdef SHARED
 ENTRY (__memmove_chk_avx512_no_vzeroupper)
 	cmpq	%rdx, %rcx
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
 END (__memmove_chk_avx512_no_vzeroupper)
-# endif
 
 ENTRY (__memmove_avx512_no_vzeroupper)
 	mov	%rdi, %rax
@@ -413,8 +409,6 @@ L(gobble_256bytes_nt_loop_bkw):
 	jmp	L(check)
 END (__memmove_avx512_no_vzeroupper)
 
-# ifdef SHARED
 strong_alias (__memmove_avx512_no_vzeroupper, __memcpy_avx512_no_vzeroupper)
 strong_alias (__memmove_chk_avx512_no_vzeroupper, __memcpy_chk_avx512_no_vzeroupper)
-# endif
 #endif
diff --git a/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
index 743064b..cfb604d 100644
--- a/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
@@ -18,9 +18,7 @@
 
 #if IS_IN (libc)
 # define MEMMOVE_SYMBOL(p,s)	p##_sse2_##s
-#endif
-
-#if !defined SHARED || !IS_IN (libc)
+#else
 weak_alias (__mempcpy, mempcpy)
 #endif
 
diff --git a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
index d694e8b..2b476d6 100644
--- a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
@@ -112,13 +112,11 @@ ENTRY (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned))
 END (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned))
 #endif
 
-#if VEC_SIZE == 16 || defined SHARED
 ENTRY (MEMPCPY_SYMBOL (__mempcpy, unaligned))
 	movq	%rdi, %rax
 	addq	%rdx, %rax
 	jmp	L(start)
 END (MEMPCPY_SYMBOL (__mempcpy, unaligned))
-#endif
 
 #if defined SHARED && IS_IN (libc)
 ENTRY (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
@@ -151,7 +149,6 @@ L(nop):
 END (MEMMOVE_SYMBOL (__memmove, unaligned))
 
 # if VEC_SIZE == 16
-#  if defined SHARED
 ENTRY (__mempcpy_chk_erms)
 	cmpq	%rdx, %rcx
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
@@ -163,7 +160,6 @@ ENTRY (__mempcpy_erms)
 	addq	%rdx, %rax
 	jmp	L(start_movsb)
 END (__mempcpy_erms)
-#  endif
 
 ENTRY (__memmove_chk_erms)
 	cmpq	%rdx, %rcx
@@ -193,10 +189,8 @@ L(movsb_backward):
 	cld
 	ret
 END (__memmove_erms)
-#  if defined SHARED
 strong_alias (__memmove_erms, __memcpy_erms)
 strong_alias (__memmove_chk_erms, __memcpy_chk_erms)
-#  endif
 # endif
 
 # ifdef SHARED
@@ -204,6 +198,7 @@ ENTRY (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned_erms))
 	cmpq	%rdx, %rcx
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
 END (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned_erms))
+# endif
 
 ENTRY (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
 	movq	%rdi, %rax
@@ -211,6 +206,7 @@ ENTRY (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
 	jmp	L(start_erms)
 END (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
 
+# ifdef SHARED
 ENTRY (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
 	cmpq	%rdx, %rcx
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
@@ -546,19 +542,19 @@ L(loop_large_backward):
 #endif
 END (MEMMOVE_SYMBOL (__memmove, unaligned_erms))
 
-#ifdef SHARED
-# if IS_IN (libc)
-#  ifdef USE_MULTIARCH
+#if IS_IN (libc)
+# ifdef USE_MULTIARCH
 strong_alias (MEMMOVE_SYMBOL (__memmove, unaligned_erms),
	      MEMMOVE_SYMBOL (__memcpy, unaligned_erms))
+#  ifdef SHARED
 strong_alias (MEMMOVE_SYMBOL (__memmove_chk, unaligned_erms),
	      MEMMOVE_SYMBOL (__memcpy_chk, unaligned_erms))
 #  endif
+# endif
+# ifdef SHARED
 strong_alias (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned),
	      MEMMOVE_CHK_SYMBOL (__memcpy_chk, unaligned))
 # endif
 #endif
-#if VEC_SIZE == 16 || defined SHARED
 strong_alias (MEMMOVE_SYMBOL (__memmove, unaligned),
	      MEMCPY_SYMBOL (__memcpy, unaligned))
-#endif
diff --git a/sysdeps/x86_64/multiarch/mempcpy.c b/sysdeps/x86_64/multiarch/mempcpy.c
index e627b00..49e9896 100644
--- a/sysdeps/x86_64/multiarch/mempcpy.c
+++ b/sysdeps/x86_64/multiarch/mempcpy.c
@@ -17,10 +17,8 @@
    License along with the GNU C Library; if not, see
    <http://www.gnu.org/licenses/>.  */
 
-/* Define multiple versions only for the definition in lib and for
-   DSO.  In static binaries we need mempcpy before the initialization
-   happened.  */
-#if defined SHARED && IS_IN (libc)
+/* Define multiple versions only for the definition in libc.  */
+#if IS_IN (libc)
 # define mempcpy __redirect_mempcpy
 # define __mempcpy __redirect___mempcpy
 # define NO_MEMPCPY_STPCPY_REDIRECT
@@ -35,8 +33,10 @@
 libc_ifunc_redirected (__redirect_mempcpy, __mempcpy, IFUNC_SELECTOR ());
 
 weak_alias (__mempcpy, mempcpy)
+# ifdef SHARED
 __hidden_ver1 (__mempcpy, __GI___mempcpy, __redirect___mempcpy)
   __attribute__ ((visibility ("hidden")));
 __hidden_ver1 (mempcpy, __GI_mempcpy, __redirect_mempcpy)
   __attribute__ ((visibility ("hidden")));
+# endif
 #endif