author    | Jakub Jelinek <jakub@redhat.com> | 2021-11-18 09:07:31 +0100
committer | Jakub Jelinek <jakub@redhat.com> | 2021-11-18 09:07:31 +0100
commit    | 7a2aa63fad06a72d9770b08491f1a7809eac7c50 (patch)
tree      | c251c62b7511133488cbaf393625f35042911603
parent    | a72b7a455c144b29609c8ecf4dff12dc9040cf2c (diff)
libgomp: Fix up aligned_alloc arguments [PR102838]
C says that aligned_alloc size must be an integral multiple of alignment.
While glibc doesn't care about it, apparently Solaris does.
So, this patch decreases the priority of aligned_alloc among the other
variants (because it needs more work and can waste more memory) and
rounds the requested size up to a multiple of the alignment.
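The rounding the patch introduces can be shown in isolation. Below is a minimal sketch (not libgomp code; the helper name is made up for illustration) of the same adjustment, assuming the alignment is a power of two, as gomp_aligned_alloc does:

```c
#include <stdlib.h>

/* Round size up to the next multiple of al (al must be a power of two),
   so the aligned_alloc call satisfies the C requirement that size be an
   integral multiple of the alignment.  */
static void *
aligned_alloc_rounded (size_t al, size_t size)
{
  size_t sz = (size + al - 1) & ~(al - 1);   /* e.g. al = 64, size = 100 -> sz = 128 */
  if (sz < size)                             /* size + al - 1 wrapped around */
    return NULL;
  return aligned_alloc (al, sz);
}
```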
2021-11-18 Jakub Jelinek <jakub@redhat.com>
PR libgomp/102838
* alloc.c (gomp_aligned_alloc): Prefer _aligned_malloc over
memalign over posix_memalign over aligned_alloc over fallback
with malloc instead of aligned_alloc over _aligned_malloc over
posix_memalign over memalign over fallback with malloc.  For
aligned_alloc, round size up to a multiple of al.
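For context, the allocators ranked above differ in argument order and preconditions. The sketch below is illustrative only; the function name and the #if conditions are assumptions, not how libgomp's configure-driven HAVE_* macros select a variant:

```c
#define _POSIX_C_SOURCE 200112L   /* for posix_memalign on POSIX systems */
#include <stdlib.h>
#if defined(_WIN32)
# include <malloc.h>              /* _aligned_malloc / _aligned_free */
#endif

static void *
alloc_aligned (size_t al, size_t size)
{
  void *p = NULL;
#if defined(_WIN32)
  p = _aligned_malloc (size, al);            /* size first; free with _aligned_free */
#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L
  size_t sz = (size + al - 1) & ~(al - 1);   /* C11: size must be a multiple of al */
  p = sz >= size ? aligned_alloc (al, sz) : NULL;
#else
  if (posix_memalign (&p, al, size) != 0)    /* al: power of two, multiple of sizeof (void *) */
    p = NULL;
#endif
  return p;
}
```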
-rw-r--r-- | libgomp/alloc.c | 18
1 file changed, 12 insertions(+), 6 deletions(-)
diff --git a/libgomp/alloc.c b/libgomp/alloc.c
index 6ff9cb9..3109b86 100644
--- a/libgomp/alloc.c
+++ b/libgomp/alloc.c
@@ -65,18 +65,24 @@ gomp_aligned_alloc (size_t al, size_t size)
   void *ret;
   if (al < sizeof (void *))
     al = sizeof (void *);
-#ifdef HAVE_ALIGNED_ALLOC
-  ret = aligned_alloc (al, size);
-#elif defined(HAVE__ALIGNED_MALLOC)
+#ifdef HAVE__ALIGNED_MALLOC
   ret = _aligned_malloc (size, al);
-#elif defined(HAVE_POSIX_MEMALIGN)
-  if (posix_memalign (&ret, al, size) != 0)
-    ret = NULL;
 #elif defined(HAVE_MEMALIGN)
   {
     extern void *memalign (size_t, size_t);
     ret = memalign (al, size);
   }
+#elif defined(HAVE_POSIX_MEMALIGN)
+  if (posix_memalign (&ret, al, size) != 0)
+    ret = NULL;
+#elif defined(HAVE_ALIGNED_ALLOC)
+  {
+    size_t sz = (size + al - 1) & ~(al - 1);
+    if (__builtin_expect (sz >= size, 1))
+      ret = aligned_alloc (al, sz);
+    else
+      ret = NULL;
+  }
 #else
   ret = NULL;
   if ((al & (al - 1)) == 0 && size)
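A hedged usage example of the patched behaviour: on strict C libraries such as Solaris libc, aligned_alloc (64, 100) may be rejected because 100 is not a multiple of 64, while the rounded request stays well defined everywhere. This standalone program is not part of the patch and only shows the concrete numbers:

```c
#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  size_t al = 64, size = 100;
  size_t sz = (size + al - 1) & ~(al - 1);   /* rounds 100 up to 128 */

  void *p = aligned_alloc (al, sz);          /* valid: 128 is a multiple of 64 */
  if (p != NULL)
    printf ("requested %zu bytes, allocated %zu at %p\n", size, sz, p);
  free (p);
  return 0;
}
```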