about summary refs log tree commit diff
path: root/libgomp/work.c
diff options
context:
space:
mode:
author    Jakub Jelinek <jakub@redhat.com>    2021-11-18 09:10:40 +0100
committer Jakub Jelinek <jakub@redhat.com>    2021-11-18 09:10:40 +0100
commit    17da2c7425ea1f5bf417b954f444dbe1f1618a1c (patch)
tree      9ce2dd3deabea300a014828072e33f3abfa6c277 /libgomp/work.c
parent    7a2aa63fad06a72d9770b08491f1a7809eac7c50 (diff)
download  gcc-17da2c7425ea1f5bf417b954f444dbe1f1618a1c.zip
          gcc-17da2c7425ea1f5bf417b954f444dbe1f1618a1c.tar.gz
          gcc-17da2c7425ea1f5bf417b954f444dbe1f1618a1c.tar.bz2
libgomp: Ensure that either gomp_team is properly aligned [PR102838]
struct gomp_team has struct gomp_work_share array inside of it. If that latter structure has 64-byte aligned member in the middle, the whole struct gomp_team needs to be 64-byte aligned, but we weren't allocating it using gomp_aligned_alloc. This patch fixes that, except that on gcn team_malloc is special, so I've instead decided at least for now to avoid using aligned member and use the padding instead on gcn. 2021-11-18 Jakub Jelinek <jakub@redhat.com> PR libgomp/102838 * libgomp.h (GOMP_USE_ALIGNED_WORK_SHARES): Define if GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC is defined and __AMDGCN__ is not. (struct gomp_work_share): Use GOMP_USE_ALIGNED_WORK_SHARES instead of GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC. * work.c (alloc_work_share, gomp_work_share_start): Likewise. * team.c (gomp_new_team): If GOMP_USE_ALIGNED_WORK_SHARES, use gomp_aligned_alloc instead of team_malloc.
Diffstat (limited to 'libgomp/work.c')
-rw-r--r--  libgomp/work.c | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/libgomp/work.c b/libgomp/work.c
index bf25591..b75ba48 100644
--- a/libgomp/work.c
+++ b/libgomp/work.c
@@ -78,7 +78,7 @@ alloc_work_share (struct gomp_team *team)
team->work_share_chunk *= 2;
/* Allocating gomp_work_share structures aligned is just an
optimization, don't do it when using the fallback method. */
-#ifdef GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC
+#ifdef GOMP_USE_ALIGNED_WORK_SHARES
ws = gomp_aligned_alloc (__alignof (struct gomp_work_share),
team->work_share_chunk
* sizeof (struct gomp_work_share));
@@ -191,7 +191,7 @@ gomp_work_share_start (size_t ordered)
/* Work sharing constructs can be orphaned. */
if (team == NULL)
{
-#ifdef GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC
+#ifdef GOMP_USE_ALIGNED_WORK_SHARES
ws = gomp_aligned_alloc (__alignof (struct gomp_work_share),
sizeof (*ws));
#else