author    Jonathan Wakely <jwakely@redhat.com>   2018-07-23 20:40:28 +0100
committer Jonathan Wakely <redi@gcc.gnu.org>     2018-07-23 20:40:28 +0100
commit    2d6c8eea1d7ae5b3b2b3c2508f78de918c73f4c4 (patch)
tree      2db36a97ddc96b86ee3c32f89c5cfb365cbe2c5a
parent    651a79532c8e87ac096419d9852a295f9e284fe4 (diff)
PR libstdc++/70940 optimize pmr::resource_adaptor for allocators using malloc
pmr::resource_adaptor can avoid allocating an oversized buffer and doing
manual alignment within that buffer when the wrapped allocator is known to
always meet the requested alignment. Specifically, if the allocator is known
to use malloc or new directly, then we can call the allocator directly for
any fundamental alignment.

	PR libstdc++/70940
	* include/experimental/memory_resource
	(__resource_adaptor_common::_AlignMgr::_M_unadjust): Add assertion.
	(__resource_adaptor_common::__guaranteed_alignment): New helper to
	give maximum alignment an allocator guarantees. Specialize for known
	allocators using new and malloc.
	(__resource_adaptor_imp::do_allocate): Use __guaranteed_alignment.
	(__resource_adaptor_imp::do_deallocate): Likewise.
	* testsuite/experimental/memory_resource/new_delete_resource.cc:
	Check that new and delete are called with expected sizes.

From-SVN: r262935
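For context, a minimal usage sketch (not part of the commit) of the case this
optimization targets: a resource_adaptor wrapping std::allocator<char>, which
allocates with operator new and therefore already guarantees
alignof(std::max_align_t). After this change, requests at or below that
alignment go straight to the wrapped allocator; extended alignments still take
the over-allocate-and-adjust path.

    // Usage sketch only; assumes GCC's <experimental/memory_resource>.
    #include <experimental/memory_resource>
    #include <memory>
    #include <cstddef>

    int main()
    {
      namespace pmr = std::experimental::pmr;

      // std::allocator<char> allocates with operator new, which already
      // guarantees alignof(std::max_align_t) for fundamental alignments.
      pmr::resource_adaptor<std::allocator<char>> adaptor;
      pmr::memory_resource* r = &adaptor;

      // Fundamental alignment: now served directly by the wrapped allocator.
      void* p = r->allocate(64, alignof(double));
      r->deallocate(p, 64, alignof(double));

      // Extended alignment: still over-allocates and aligns within the buffer.
      void* q = r->allocate(64, 4 * alignof(std::max_align_t));
      r->deallocate(q, 64, 4 * alignof(std::max_align_t));
    }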
-rw-r--r--  libstdc++-v3/ChangeLog                                                      | 13
-rw-r--r--  libstdc++-v3/include/experimental/memory_resource                           | 32
-rw-r--r--  libstdc++-v3/testsuite/experimental/memory_resource/new_delete_resource.cc  | 69
3 files changed, 94 insertions(+), 20 deletions(-)
diff --git a/libstdc++-v3/ChangeLog b/libstdc++-v3/ChangeLog
index c9cd62a..ba78ab8 100644
--- a/libstdc++-v3/ChangeLog
+++ b/libstdc++-v3/ChangeLog
@@ -1,3 +1,16 @@
+2018-07-23 Jonathan Wakely <jwakely@redhat.com>
+
+ PR libstdc++/70940
+ * include/experimental/memory_resource
+ (__resource_adaptor_common::_AlignMgr::_M_unadjust): Add assertion.
+ (__resource_adaptor_common::__guaranteed_alignment): New helper to
+ give maximum alignment an allocator guarantees. Specialize for known
+ allocators using new and malloc.
+ (__resource_adaptor_imp::do_allocate): Use __guaranteed_alignment.
+ (__resource_adaptor_imp::do_deallocate): Likewise.
+ * testsuite/experimental/memory_resource/new_delete_resource.cc:
+ Check that new and delete are called with expected sizes.
+
2018-07-20 Jonathan Wakely <jwakely@redhat.com>
PR libstdc++/86595
diff --git a/libstdc++-v3/include/experimental/memory_resource b/libstdc++-v3/include/experimental/memory_resource
index 1965fdc..61273fc 100644
--- a/libstdc++-v3/include/experimental/memory_resource
+++ b/libstdc++-v3/include/experimental/memory_resource
@@ -36,6 +36,13 @@
#include <ext/new_allocator.h>
#include <experimental/bits/lfts_config.h>
+namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
+{
+_GLIBCXX_BEGIN_NAMESPACE_VERSION
+ template<typename _Tp> class malloc_allocator;
+_GLIBCXX_END_NAMESPACE_VERSION
+} // namespace __gnu_cxx
+
namespace std {
_GLIBCXX_BEGIN_NAMESPACE_VERSION
@@ -307,6 +314,10 @@ namespace pmr {
__orig_ptr = __ptr - _S_read<unsigned int>(__end);
else // (__token_size == sizeof(char*))
__orig_ptr = _S_read<char*>(__end);
+ // The adjustment is always less than the requested alignment,
+ // so if that isn't true now then either the wrong size was passed
+ // to deallocate or the token was overwritten by a buffer overflow:
+ __glibcxx_assert(static_cast<size_t>(__ptr - __orig_ptr) < _M_align);
return __orig_ptr;
}
@@ -345,6 +356,23 @@ namespace pmr {
return __val;
}
};
+
+ template<typename _Alloc>
+ struct __guaranteed_alignment : std::integral_constant<size_t, 1> { };
+
+ template<typename _Tp>
+ struct __guaranteed_alignment<__gnu_cxx::new_allocator<_Tp>>
+ : std::alignment_of<std::max_align_t>::type { };
+
+ template<typename _Tp>
+ struct __guaranteed_alignment<__gnu_cxx::malloc_allocator<_Tp>>
+ : std::alignment_of<std::max_align_t>::type { };
+
+#if _GLIBCXX_USE_ALLOCATOR_NEW
+ template<typename _Tp>
+ struct __guaranteed_alignment<std::allocator<_Tp>>
+ : std::alignment_of<std::max_align_t>::type { };
+#endif
};
// 8.7.1 __resource_adaptor_imp
@@ -392,7 +420,7 @@ namespace pmr {
virtual void*
do_allocate(size_t __bytes, size_t __alignment) override
{
- if (__alignment == 1)
+ if (__alignment <= __guaranteed_alignment<_Alloc>::value)
return _M_alloc.allocate(__bytes);
const _AlignMgr __mgr(__bytes, __alignment);
@@ -407,7 +435,7 @@ namespace pmr {
override
{
auto __ptr = static_cast<char*>(__p);
- if (__alignment == 1)
+ if (__alignment <= __guaranteed_alignment<_Alloc>::value)
{
_M_alloc.deallocate(__ptr, __bytes);
return;
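The two hunks above boil down to a compile-time dispatch: a trait reports the
alignment the wrapped allocator is known to guarantee, and do_allocate and
do_deallocate forward directly whenever the request does not exceed it. The
following stand-alone sketch uses simplified, hypothetical names
(guaranteed_alignment, do_allocate_sketch), not the libstdc++ internals, and
assumes a char allocator, mirroring how resource_adaptor rebinds its allocator
to char.

    #include <cstddef>
    #include <memory>
    #include <type_traits>

    // Conservative default: an arbitrary allocator only promises alignment 1.
    template<typename Alloc>
    struct guaranteed_alignment : std::integral_constant<std::size_t, 1> { };

    // std::allocator uses operator new, so fundamental alignments come free.
    template<typename T>
    struct guaranteed_alignment<std::allocator<T>>
      : std::integral_constant<std::size_t, alignof(std::max_align_t)> { };

    template<typename Alloc>
    void* do_allocate_sketch(Alloc& alloc, std::size_t bytes, std::size_t alignment)
    {
      if (alignment <= guaranteed_alignment<Alloc>::value)
        return alloc.allocate(bytes);   // fast path: no over-allocation needed

      // Slow path (simplified): over-allocate and align within the buffer.
      // The real code also stores an adjustment token so deallocation can
      // recover the original pointer; that bookkeeping is elided here.
      std::size_t space = bytes + alignment;
      void* p = alloc.allocate(space);
      void* aligned = p;
      std::align(alignment, bytes, aligned, space);
      return aligned;
    }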
diff --git a/libstdc++-v3/testsuite/experimental/memory_resource/new_delete_resource.cc b/libstdc++-v3/testsuite/experimental/memory_resource/new_delete_resource.cc
index 692e520..a7c4b37 100644
--- a/libstdc++-v3/testsuite/experimental/memory_resource/new_delete_resource.cc
+++ b/libstdc++-v3/testsuite/experimental/memory_resource/new_delete_resource.cc
@@ -18,16 +18,21 @@
// { dg-do run { target c++14 } }
#include <experimental/memory_resource>
+#include <cstdlib>
#include <testsuite_hooks.h>
bool new_called = false;
bool delete_called = false;
+std::size_t bytes_allocated = 0;
void* operator new(std::size_t n)
{
new_called = true;
if (void* p = malloc(n))
+ {
+ bytes_allocated += n;
return p;
+ }
throw std::bad_alloc();
}
@@ -35,13 +40,17 @@ void operator delete(void* p)
{
delete_called = true;
std::free(p);
+ bytes_allocated = 0; // assume everything is being deleted
}
-void operator delete(void* p, std::size_t)
+void operator delete(void* p, std::size_t n)
{
- ::operator delete(p);
+ delete_called = true;
+ std::free(p);
+ bytes_allocated -= n;
}
+
template<std::size_t A>
bool aligned(void* p)
{
@@ -92,36 +101,60 @@ test02()
void
test03()
+
{
using std::max_align_t;
using std::size_t;
void* p = nullptr;
+ bytes_allocated = 0;
+
memory_resource* r1 = new_delete_resource();
p = r1->allocate(1);
+ VERIFY( bytes_allocated == 1 );
VERIFY( aligned<max_align_t>(p) );
r1->deallocate(p, 1);
- p = r1->allocate(1, alignof(short));
+ VERIFY( bytes_allocated == 0 );
+
+ p = r1->allocate(2, alignof(char));
+ VERIFY( bytes_allocated == 2 );
+ VERIFY( aligned<max_align_t>(p) );
+ r1->deallocate(p, 2);
+ VERIFY( bytes_allocated == 0 );
+
+ p = r1->allocate(3, alignof(short));
+ VERIFY( bytes_allocated == 3 );
VERIFY( aligned<short>(p) );
- r1->deallocate(p, 1, alignof(short));
- p = r1->allocate(1, alignof(long));
+ r1->deallocate(p, 3, alignof(short));
+ VERIFY( bytes_allocated == 0 );
+
+ p = r1->allocate(4, alignof(long));
+ VERIFY( bytes_allocated == 4 );
VERIFY( aligned<long>(p) );
- r1->deallocate(p, 1, alignof(long));
- constexpr size_t big_al = alignof(max_align_t) * 8;
- p = r1->allocate(1, big_al);
- VERIFY( aligned<big_al>(p) );
- r1->deallocate(p, 1, big_al);
-
- // Test extended alignments
- p = r1->allocate(1024, al6);
+ r1->deallocate(p, 4, alignof(long));
+ VERIFY( bytes_allocated == 0 );
+
+ // Test extended alignments:
+ p = r1->allocate(777, al6);
+ VERIFY( bytes_allocated >= 777 );
+ VERIFY( bytes_allocated < (777 + al6 + 8) ); // reasonable upper bound
VERIFY( aligned<al6>(p) );
- r1->deallocate(p, 1024, al6);
- p = r1->allocate(1024, al12);
+ r1->deallocate(p, 777, al6);
+ VERIFY( bytes_allocated == 0 );
+
+ p = r1->allocate(888, al12);
+ VERIFY( bytes_allocated >= 888 );
+ VERIFY( bytes_allocated < (888 + al12 + 8) ); // reasonable upper bound
VERIFY( aligned<al12>(p) );
- r1->deallocate(p, 1024, al12);
- p = r1->allocate(1024, al18);
+ r1->deallocate(p, 888, al12);
+ VERIFY( bytes_allocated == 0 );
+
+ p = r1->allocate(999, al18);
+ VERIFY( bytes_allocated >= 999 );
+ VERIFY( bytes_allocated < (999 + al18 + 8) ); // reasonable upper bound
VERIFY( aligned<al18>(p) );
- r1->deallocate(p, 1024, al18);
+ r1->deallocate(p, 999, al18);
+ VERIFY( bytes_allocated == 0 );
}
int main()