Diffstat (limited to 'libgomp')
-rw-r--r--  libgomp/libgomp.h  6
-rw-r--r--  libgomp/team.c     5
-rw-r--r--  libgomp/work.c     4
3 files changed, 12 insertions(+), 3 deletions(-)
diff --git a/libgomp/libgomp.h b/libgomp/libgomp.h
index ceef643..299cf42 100644
--- a/libgomp/libgomp.h
+++ b/libgomp/libgomp.h
@@ -95,6 +95,10 @@ enum memmodel
 #define GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC 1
 #endif
 
+#if defined(GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC) && !defined(__AMDGCN__)
+#define GOMP_USE_ALIGNED_WORK_SHARES 1
+#endif
+
 extern void *gomp_malloc (size_t) __attribute__((malloc));
 extern void *gomp_malloc_cleared (size_t) __attribute__((malloc));
 extern void *gomp_realloc (void *, size_t);
@@ -348,7 +352,7 @@ struct gomp_work_share
      are in a different cache line.  */
 
   /* This lock protects the update of the following members.  */
-#ifdef GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC
+#ifdef GOMP_USE_ALIGNED_WORK_SHARES
   gomp_mutex_t lock __attribute__((aligned (64)));
 #else
   char pad[64 - offsetof (struct gomp_work_share_1st_cacheline, pad)];
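The hunk above only renames the guard around the lock placement. As a minimal sketch (not the real libgomp definitions; ws_sketch, mutex_sketch, and hot_fields are invented names), the two layouts it selects between look like this:

/* Sketch of the layout choice guarded above.  */
typedef int mutex_sketch;	/* stands in for gomp_mutex_t */

struct ws_sketch
{
  long hot_fields[4];		/* members touched on the fast path */
#ifdef GOMP_USE_ALIGNED_WORK_SHARES
  /* gomp_aligned_alloc returns 64-byte-aligned blocks here, so an
     aligned member attribute is enough to give the lock its own
     cache line.  */
  mutex_sketch lock __attribute__((aligned (64)));
#else
  /* The fallback allocator guarantees no such alignment; pad so the
     lock at least starts 64 bytes past the hot members.  */
  char pad[64 - sizeof (long) * 4];
  mutex_sketch lock;
#endif
};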
diff --git a/libgomp/team.c b/libgomp/team.c
index 3bcc817..19cc392 100644
--- a/libgomp/team.c
+++ b/libgomp/team.c
@@ -177,7 +177,12 @@ gomp_new_team (unsigned nthreads)
     {
       size_t extra = sizeof (team->ordered_release[0])
                     + sizeof (team->implicit_task[0]);
+#ifdef GOMP_USE_ALIGNED_WORK_SHARES
+      team = gomp_aligned_alloc (__alignof (struct gomp_team),
+                                 sizeof (*team) + nthreads * extra);
+#else
       team = team_malloc (sizeof (*team) + nthreads * extra);
+#endif
 
 #ifndef HAVE_SYNC_BUILTINS
       gomp_mutex_init (&team->work_share_list_free_lock);
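One detail worth noting about this hunk: per its comment in libgomp.h, GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC means gomp_aligned_alloc takes a native path whose result may be released with plain free, and the new GOMP_USE_ALIGNED_WORK_SHARES is only ever defined under that macro, so the existing release path does not need to change. A caller-side sketch of that pairing, under those assumptions (demo_new_team and team_sketch are hypothetical names, not libgomp's):

/* Sketch of the alloc/free pairing the guard relies on.  */
#include <stdlib.h>

struct team_sketch { long header[8]; };

static struct team_sketch *
demo_new_team (size_t nthreads, size_t extra)
{
  struct team_sketch *team;
#ifdef GOMP_USE_ALIGNED_WORK_SHARES
  /* posix_memalign is one native allocator that can back
     gomp_aligned_alloc; its result is valid to pass to free ().  */
  if (posix_memalign ((void **) &team, 64,
                      sizeof (*team) + nthreads * extra))
    return NULL;
#else
  team = malloc (sizeof (*team) + nthreads * extra);
#endif
  return team;	/* in both branches, free (team) releases it */
}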
diff --git a/libgomp/work.c b/libgomp/work.c
index bf25591..b75ba48 100644
--- a/libgomp/work.c
+++ b/libgomp/work.c
@@ -78,7 +78,7 @@ alloc_work_share (struct gomp_team *team)
       team->work_share_chunk *= 2;
       /* Allocating gomp_work_share structures aligned is just an
	 optimization, don't do it when using the fallback method.  */
-#ifdef GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC
+#ifdef GOMP_USE_ALIGNED_WORK_SHARES
       ws = gomp_aligned_alloc (__alignof (struct gomp_work_share),
                               team->work_share_chunk
                               * sizeof (struct gomp_work_share));
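The "fallback method" that the comment in this hunk refers to is the kind of aligned allocator that over-allocates and stashes the pointer to the true block just below the aligned one. The sketch below illustrates that technique in general; it is not libgomp's implementation, and it assumes a power-of-two alignment:

/* Illustrative fallback aligned allocator (not libgomp's code).
   Blocks from this path must not be handed to free () directly,
   which is one reason the aligned path above is taken only when a
   native, efficient aligned allocator is available.  */
#include <stdint.h>
#include <stdlib.h>

static void *
aligned_alloc_fallback_sketch (size_t al, size_t size)	/* al: power of 2 */
{
  void *base = malloc (size + al - 1 + sizeof (void *));
  if (base == NULL)
    return NULL;
  uintptr_t p = ((uintptr_t) base + sizeof (void *) + al - 1)
		& ~(uintptr_t) (al - 1);
  ((void **) p)[-1] = base;	/* remember what to free later */
  return (void *) p;
}

static void
aligned_free_fallback_sketch (void *p)
{
  if (p)
    free (((void **) p)[-1]);	/* release the original block */
}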
@@ -191,7 +191,7 @@ gomp_work_share_start (size_t ordered)
   /* Work sharing constructs can be orphaned.  */
   if (team == NULL)
     {
-#ifdef GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC
+#ifdef GOMP_USE_ALIGNED_WORK_SHARES
       ws = gomp_aligned_alloc (__alignof (struct gomp_work_share),
                               sizeof (*ws));
 #else
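Taken together, the rename preserves one invariant: GOMP_USE_ALIGNED_WORK_SHARES is strictly narrower than GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC, and additionally excludes AMD GCN, so every gomp_aligned_alloc call gated above stays on the efficient path. A compile-time restatement of that relationship, as a sketch rather than anything present in libgomp:

/* Sketch: compile-time restatement of the guard added in libgomp.h.
   Not part of libgomp; it only documents the implication.  */
#if defined(GOMP_USE_ALIGNED_WORK_SHARES) \
    && (!defined(GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC) || defined(__AMDGCN__))
#error "GOMP_USE_ALIGNED_WORK_SHARES implies efficient aligned alloc, non-GCN"
#endif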