author     Uros Bizjak <ubizjak@gmail.com>    2025-08-25 09:50:37 +0200
committer  H.J. Lu <hjl.tools@gmail.com>      2025-08-28 04:57:24 -0700
commit     9a1cb8f783cb98d4c5fd180c43855fdbb74fbe71 (patch)
tree       788c9423ac7c738370fedde3b6b60a4e196f1e23
parent     c49a32d7eb0466544cce089cd2affab15694dc5a (diff)
x32: Fix, optimize and cleanup RSEQ_* accessors
Add missing "memory" clobber to accessors. The "memory" clobber
tells the compiler that the assembly code performs memory reads
or writes to items other than those listed in the input and output
operands (for example, accessing the memory pointed to by one of
the input parameters).
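
For illustration, a minimal standalone sketch (not glibc code; x86-64 AT&T assembly under GCC/Clang) of why the clobber matters: without "memory", the compiler may assume *p is unchanged across the asm and reuse the first load.

#include <stdio.h>

static int
read_twice (int *p)
{
  int a = *p;
  /* The asm writes memory not named in its operands, so it must
     declare the "memory" clobber; this forces the compiler to
     discard cached values and reload *p afterwards.  */
  asm volatile ("movl $42, (%0)" : : "r" (p) : "memory");
  int b = *p;   /* Reloaded from memory because of the clobber.  */
  return a + b;
}

int
main (void)
{
  int x = 1;
  printf ("%d\n", read_twice (&x));   /* Prints 43 (1 + 42).  */
  return 0;
}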
Use MOVZBL instead of MOVB when reading a 1-byte memory location
into a register.  MOVB to a register merges the byte into the low
byte of the destination and leaves the upper bits unchanged, making
the result dependent on the previous register value.  MOVZBL
zero-extends the byte to the full register width and avoids this
false dependency.
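
A minimal sketch (not glibc code) contrasting the two forms; the second variant mirrors the old accessor, which had to pre-zero the destination via a tied "0" (0) input because MOVB only merges into the low byte.

#include <stdint.h>

/* MOVZBL writes the full 32-bit register with zero extension, so the
   result does not depend on the register's previous contents.  */
static unsigned int
load_byte_new (const uint8_t *p)
{
  unsigned int v;
  asm ("movzbl %1, %0" : "=r" (v) : "m" (*p));
  return v;
}

/* MOVB writes only the low 8 bits; the old code worked around this
   by tying in a zeroed input, at the cost of an extra instruction.  */
static unsigned int
load_byte_old (const uint8_t *p)
{
  unsigned int v;
  asm ("movb %1, %b0" : "=q" (v) : "m" (*p), "0" (0));
  return v;
}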
Change %P asm operand modifiers to %c. The "c" modifier is a
generic asm operand modifier that requires a constant operand and
prints the constant expression without punctuation.
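
A minimal sketch (not glibc code; the struct and function are made up for illustration): plain "%1" would print an "i" operand with the AT&T immediate prefix ("$4"), which is invalid as a displacement, while "%c1" prints the bare constant, yielding e.g. "movl 4(%rsi),%eax".

#include <stddef.h>

struct point { int x; int y; };

static int
load_y (const struct point *p)
{
  int v;
  /* "%c1" emits the offsetof constant without punctuation so it can
     serve as the displacement of the addressing mode.  */
  asm ("movl %c1(%2), %0"
       : "=r" (v)
       : "i" (offsetof (struct point, y)), "r" (p));
  return v;
}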
Replace %b asm operand modifiers with explicit casts. Explicit
casts inform the compiler which part of the register value is used,
allowing it to perform additional optimizations (e.g. narrowing the
preceding operation).
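
A minimal sketch (not glibc code) of the difference: "%b0" only changes how the register is printed, whereas casting the operand to uint8_t tells the compiler that only the low byte is consumed, so it may narrow or drop work that computes the upper bits. For a QImode operand, plain "%1" already prints the byte register name.

#include <stdint.h>

static void
store_byte (uint8_t *p, unsigned int value)
{
  /* The (uint8_t) cast makes the operand QImode: "%1" prints a byte
     register (or a byte immediate via the "i" alternative), and the
     compiler knows the upper 24 bits of VALUE are dead.  */
  asm volatile ("movb %1, %0" : "=m" (*p) : "iq" ((uint8_t) value));
}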
Remove %q asm operand modifiers. Since the value is already cast
to 'long long int', the compiler will emit a 64-bit register name
in the assembly without needing %q.
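
A minimal sketch (not glibc code): on x32, __rseq_offset is a 32-bit ptrdiff_t, hence the cast to 'long long int' in the accessors; once the operand has 64-bit mode, plain "%1" already prints the 64-bit register name.

static long long int
add_offset (long long int off)
{
  long long int r;
  /* OFF has 64-bit mode, so "%1" prints e.g. "%rdi", not "%edi";
     no "%q" modifier is needed.  */
  asm ("leaq 8(%1), %0" : "=r" (r) : "r" (off));
  return r;
}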
No functional changes intended.
Tested-by: H.J. Lu <hjl.tools@gmail.com>
Co-Authored-By: H.J. Lu <hjl.tools@gmail.com>
Signed-off-by: H.J. Lu <hjl.tools@gmail.com>
Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
Cc: Florian Weimer <fweimer@redhat.com>
Cc: Carlos O'Donell <carlos@redhat.com>
Reviewed-by: H.J. Lu <hjl.tools@gmail.com>
-rw-r--r--   sysdeps/x86_64/x32/nptl/rseq-access.h | 70
1 file changed, 39 insertions(+), 31 deletions(-)
diff --git a/sysdeps/x86_64/x32/nptl/rseq-access.h b/sysdeps/x86_64/x32/nptl/rseq-access.h
index 8386ebd..d9bb215 100644
--- a/sysdeps/x86_64/x32/nptl/rseq-access.h
+++ b/sysdeps/x86_64/x32/nptl/rseq-access.h
@@ -18,29 +18,32 @@
 /* Read member of the RSEQ area directly, with single-copy atomicity
    semantics.  */
 #define RSEQ_GETMEM_ONCE(member) \
-  ({ __typeof (RSEQ_SELF()->member) __value; \
+  ({ \
+    __typeof (RSEQ_SELF()->member) __value; \
     _Static_assert (sizeof (__value) == 1 \
                     || sizeof (__value) == 4 \
                     || sizeof (__value) == 8, \
-                    "size of rseq data"); \
+                     "size of rseq data"); \
     if (sizeof (__value) == 1) \
-      asm volatile ("movb %%fs:%P2(%q3),%b0" \
-                    : "=q" (__value) \
-                    : "0" (0), "i" (offsetof (struct rseq_area, member)), \
-                      "r" ((long long int) __rseq_offset)); \
+      asm volatile ("movzbl %%fs:%c1(%2),%k0" \
+                    : "=r" (__value) \
+                    : "i" (offsetof (struct rseq_area, member)), \
+                      "r" ((long long int) __rseq_offset) \
+                    : "memory"); \
     else if (sizeof (__value) == 4) \
-      asm volatile ("movl %%fs:%P1(%q2),%0" \
+      asm volatile ("movl %%fs:%c1(%2),%0" \
                     : "=r" (__value) \
                     : "i" (offsetof (struct rseq_area, member)), \
-                      "r" ((long long int) __rseq_offset)); \
+                      "r" ((long long int) __rseq_offset) \
+                    : "memory"); \
     else /* 8 */ \
-      { \
-        asm volatile ("movq %%fs:%P1(%q2),%q0" \
-                      : "=r" (__value) \
-                      : "i" (offsetof (struct rseq_area, member)), \
-                        "r" ((long long int) __rseq_offset)); \
-      } \
-    __value; })
+      asm volatile ("movq %%fs:%c1(%2),%0" \
+                    : "=r" (__value) \
+                    : "i" (offsetof (struct rseq_area, member)), \
+                      "r" ((long long int) __rseq_offset) \
+                    : "memory"); \
+    __value; \
+  })
 
 /* Read member of the RSEQ area directly.  */
 #define RSEQ_GETMEM(member) RSEQ_GETMEM_ONCE(member)
@@ -59,27 +62,32 @@
   _Static_assert (sizeof (RSEQ_SELF()->member) == 1 \
                   || sizeof (RSEQ_SELF()->member) == 4 \
                   || sizeof (RSEQ_SELF()->member) == 8, \
-                  "size of rseq data"); \
+                   "size of rseq data"); \
   if (sizeof (RSEQ_SELF()->member) == 1) \
-    asm volatile ("movb %b0,%%fs:%P1(%q2)" : \
-                  : "iq" (value), \
+    asm volatile ("movb %0,%%fs:%c1(%2)" \
+                  : \
+                  : "iq" ((uint8_t) cast_to_integer (value)), \
                     "i" (offsetof (struct rseq_area, member)), \
-                    "r" ((long long int) __rseq_offset)); \
+                    "r" ((long long int) __rseq_offset) \
+                  : "memory"); \
   else if (sizeof (RSEQ_SELF()->member) == 4) \
-    asm volatile ("movl %0,%%fs:%P1(%q2)" : \
-                  : IMM_MODE (value), \
+    asm volatile ("movl %0,%%fs:%c1(%2)" \
+                  : \
+                  : IMM_MODE ((uint32_t) cast_to_integer (value)), \
                     "i" (offsetof (struct rseq_area, member)), \
-                    "r" ((long long int) __rseq_offset)); \
+                    "r" ((long long int) __rseq_offset) \
+                  : "memory"); \
   else /* 8 */ \
-    { \
-      /* Since movq takes a signed 32-bit immediate or a register source \
-         operand, use "er" constraint for 32-bit signed integer constant \
-         or register.  */ \
-      asm volatile ("movq %q0,%%fs:%P1(%q2)" : \
-                    : "er" ((uint64_t) cast_to_integer (value)), \
-                      "i" (offsetof (struct rseq_area, member)), \
-                      "r" ((long long int) __rseq_offset)); \
-    }})
+    /* Since movq takes a signed 32-bit immediate or a register source \
+       operand, use "er" constraint for 32-bit signed integer constant \
+       or register.  */ \
+    asm volatile ("movq %0,%%fs:%c1(%2)" \
+                  : \
+                  : "er" ((uint64_t) cast_to_integer (value)), \
+                    "i" (offsetof (struct rseq_area, member)), \
+                    "r" ((long long int) __rseq_offset) \
+                  : "memory"); \
+  })
 
 /* Set member of the RSEQ area directly.  */
 #define RSEQ_SETMEM(member, value) RSEQ_SETMEM_ONCE(member, value)
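
For illustration, hypothetical call sites (these macros are glibc-internal; the member names here assume the Linux rseq ABI's cpu_id fields). Each expands to a single %fs-relative MOV with single-copy atomicity:

uint32_t cpu = RSEQ_GETMEM_ONCE (cpu_id);   /* movl %fs:OFF(%reg),%eax  */
RSEQ_SETMEM_ONCE (cpu_id_start, 0);         /* movl $0,%fs:OFF(%reg)    */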