-rw-r--r--  ChangeLog                      |  6 ++++++
-rw-r--r--  NEWS                           |  1 +
-rw-r--r--  sysdeps/x86_64/dl-trampoline.h | 12 ++++++++++--
3 files changed, 17 insertions(+), 2 deletions(-)
diff --git a/ChangeLog b/ChangeLog
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,9 @@
+2018-01-19  H.J. Lu  <hongjiu.lu@intel.com>
+
+	[BZ #22715]
+	* sysdeps/x86_64/dl-trampoline.h (_dl_runtime_profile): Properly
+	align La_x86_64_retval to VEC_SIZE.
+
 2018-01-11  Florian Weimer  <fweimer@redhat.com>
 
 	* sysdeps/x86/cpu-features.c (init_cpu_features): Move check for
diff --git a/NEWS b/NEWS
--- a/NEWS
+++ b/NEWS
@@ -48,6 +48,7 @@ The following bugs are resolved with this release:
   [21624] Unsafe alloca allows local attackers to alias stack and heap
     (CVE-2017-1000366)
   [21666] Avoid .symver on common symbols
   [22641] x86: Fix mis-merge of XSAVE ld.so trampoline selection
+  [22715] x86-64: Properly align La_x86_64_retval to VEC_SIZE
 
 Version 2.23
diff --git a/sysdeps/x86_64/dl-trampoline.h b/sysdeps/x86_64/dl-trampoline.h
index aadc91d..c5f9237 100644
--- a/sysdeps/x86_64/dl-trampoline.h
+++ b/sysdeps/x86_64/dl-trampoline.h
@@ -446,8 +446,16 @@ _dl_runtime_profile:
 # ifdef RESTORE_AVX
 	/* sizeof(La_x86_64_retval).  Need extra space for 2 SSE
 	   registers to detect if xmm0/xmm1 registers are changed
-	   by audit module.  */
-	sub $(LRV_SIZE + XMM_SIZE*2), %RSP_LP
+	   by audit module.  Since rsp is aligned to VEC_SIZE, we
+	   need to make sure that the address of La_x86_64_retval +
+	   LRV_VECTOR0_OFFSET is aligned to VEC_SIZE.  */
+#  define LRV_SPACE (LRV_SIZE + XMM_SIZE*2)
+#  define LRV_MISALIGNED ((LRV_SIZE + LRV_VECTOR0_OFFSET) & (VEC_SIZE - 1))
+#  if LRV_MISALIGNED == 0
+	sub $LRV_SPACE, %RSP_LP
+#  else
+	sub $(LRV_SPACE + VEC_SIZE - LRV_MISALIGNED), %RSP_LP
+#  endif
 # else
 	sub $LRV_SIZE, %RSP_LP	# sizeof(La_x86_64_retval)
 # endif
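
To see what the new LRV_SPACE/LRV_MISALIGNED rounding buys, the stand-alone C sketch below reproduces the same arithmetic. The numeric constants are made-up stand-ins (the real VEC_SIZE, XMM_SIZE, LRV_SIZE and LRV_VECTOR0_OFFSET come from glibc's generated headers and depend on the vector width selected at run time); only the two macro formulas are copied from the patch.

/* align-sketch.c: illustration of the rounding added in the hunk above.
   All numeric values are hypothetical stand-ins, not glibc's real ones.  */
#include <assert.h>
#include <stdio.h>

#define VEC_SIZE            64   /* assume 64-byte (AVX-512) vectors */
#define XMM_SIZE            16
#define LRV_SIZE            224  /* stand-in for sizeof (La_x86_64_retval) */
#define LRV_VECTOR0_OFFSET  48   /* stand-in for the lrv_vector0 offset */

/* The two formulas introduced by the patch.  */
#define LRV_SPACE      (LRV_SIZE + XMM_SIZE * 2)
#define LRV_MISALIGNED ((LRV_SIZE + LRV_VECTOR0_OFFSET) & (VEC_SIZE - 1))

int
main (void)
{
  /* _dl_runtime_profile reaches this point with %rsp already aligned to
     VEC_SIZE; model that with an arbitrary aligned value.  */
  unsigned long rsp = 0x7fffffffe000UL;
  assert (rsp % VEC_SIZE == 0);

  /* Old behaviour: always subtract LRV_SPACE.  */
  unsigned long old_vector0 = (rsp - LRV_SPACE) + LRV_VECTOR0_OFFSET;

  /* New behaviour: pad the allocation unless lrv_vector0 would already
     land on a VEC_SIZE boundary.  */
  unsigned long space = LRV_SPACE;
#if LRV_MISALIGNED != 0
  space += VEC_SIZE - LRV_MISALIGNED;
#endif
  unsigned long new_vector0 = (rsp - space) + LRV_VECTOR0_OFFSET;

  printf ("old lrv_vector0 %% VEC_SIZE = %lu\n", old_vector0 % VEC_SIZE);
  printf ("new lrv_vector0 %% VEC_SIZE = %lu\n", new_vector0 % VEC_SIZE);
  assert (new_vector0 % VEC_SIZE == 0);
  return 0;
}

With these stand-in values the unpadded allocation leaves lrv_vector0 48 bytes past a 64-byte boundary, while the padded one puts it exactly on one, which is the property the patch comment says the trampoline needs once %rsp is aligned to VEC_SIZE.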