aboutsummaryrefslogtreecommitdiff
path: root/libgomp/work.c
diff options
context:
space:
mode:
author    Jakub Jelinek <jakub@redhat.com>  2021-10-20 09:34:51 +0200
committer Jakub Jelinek <jakub@redhat.com>  2021-10-20 09:34:51 +0200
commit c7abdf46fb7ac9a0c37f120feff3fcc3a752584f (patch)
tree   d44e8ae64477a9dc5975549901a8e0ec5210f614 /libgomp/work.c
parent d4044db034b40c275b5f287d5854a102d22e07c0 (diff)
downloadgcc-c7abdf46fb7ac9a0c37f120feff3fcc3a752584f.zip
gcc-c7abdf46fb7ac9a0c37f120feff3fcc3a752584f.tar.gz
gcc-c7abdf46fb7ac9a0c37f120feff3fcc3a752584f.tar.bz2
openmp: Fix up struct gomp_work_share handling [PR102838]
If GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC is not defined, the intent was to treat
the split of the structure between the first cacheline (64 bytes), which is
mostly write-once and read afterwards, and the second cacheline, which is
read-write, just as an optimization.  But as has been reported, with
vectorization enabled at -O2 it can now result in aligned vector 16-byte or
larger stores.

When not having posix_memalign/aligned_alloc/memalign or another similar API,
alloc.c emulates it, but it needs to allocate extra memory for the dynamic
realignment.

So, for the case where GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC is not defined, this
patch stops using the aligned (64) attribute in the middle of the structure
and instead inserts padding that puts the second half of the structure at
offset 64 bytes.

And when GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC is defined, usually the structure
was allocated as aligned, but for the orphaned case it could still be
allocated just with gomp_malloc without guaranteed proper alignment.

2021-10-20  Jakub Jelinek  <jakub@redhat.com>

	PR libgomp/102838
	* libgomp.h (struct gomp_work_share_1st_cacheline): New type.
	(struct gomp_work_share): Only use aligned(64) attribute if
	GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC is defined, otherwise just add
	padding before lock to ensure lock is at offset 64 bytes into the
	structure.
	(gomp_workshare_struct_check1, gomp_workshare_struct_check2): New
	poor man's static assertions.
	* work.c (gomp_work_share_start): Use gomp_aligned_alloc instead of
	gomp_malloc if GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC.
Diffstat (limited to 'libgomp/work.c')
-rw-r--r--  libgomp/work.c | 5 +++++
1 file changed, 5 insertions(+), 0 deletions(-)
diff --git a/libgomp/work.c b/libgomp/work.c
index 9d63eec..bf25591 100644
--- a/libgomp/work.c
+++ b/libgomp/work.c
@@ -191,7 +191,12 @@ gomp_work_share_start (size_t ordered)
/* Work sharing constructs can be orphaned. */
if (team == NULL)
{
+#ifdef GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC
+ ws = gomp_aligned_alloc (__alignof (struct gomp_work_share),
+ sizeof (*ws));
+#else
ws = gomp_malloc (sizeof (*ws));
+#endif
gomp_init_work_share (ws, ordered, 1);
thr->ts.work_share = ws;
return true;