* [PATCH 5/5][_Hashtable] Prealloc nodes on copy
@ 2022-06-20 16:58 François Dumont
From: François Dumont @ 2022-06-20 16:58 UTC (permalink / raw)
  To: libstdc++; +Cc: gcc-patches

[-- Attachment #1: Type: text/plain, Size: 1127 bytes --]

libstdc++: [_Hashtable] Prealloc nodes on _Hashtable copy

Preallocate nodes on copy to reduce memory fragmentation. Add a new
assignment method which copies hashtable instances preserving the order
of nodes within each bucket rather than the order of nodes in the
singly-linked list.
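
To illustrate the intent with a user-level sketch (this is not the
internal _Hashtable code, and copy_in_bucket_order is a hypothetical
helper): walking the source container bucket by bucket through its local
iterators visits elements in the per-bucket order that the new assignment
path preserves, instead of the singly-linked-list order of begin()/end().

// User-level sketch only: iterate the source per bucket, i.e. in the
// order the new assignment path preserves.  copy_in_bucket_order is a
// made-up helper, not part of the patch.
#include <cstddef>
#include <unordered_set>

std::unordered_multiset<int>
copy_in_bucket_order(const std::unordered_multiset<int>& src)
{
  std::unordered_multiset<int> dst;
  dst.reserve(src.size());                    // size the buckets up front
  for (std::size_t b = 0; b != src.bucket_count(); ++b)
    for (auto it = src.begin(b); it != src.end(b); ++it)
      dst.insert(*it);                        // bucket order, not list order
  return dst;
}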

libstdc++-v3/ChangeLog:

     * include/bits/hashtable_policy.h: Include stl_tempbuf.h.
     (_Hashtable_alloc<>::_M_allocate_node_ptr): New.
     (_PreAllocHashtableNodes<>): New.
     * include/bits/hashtable.h
     (_Hashtable<>::__prealloc_hashtable_nodes_gen_t): New.
     (_Hashtable<>::_M_assign_stable): New.
     (_Hashtable<>::operator=(const _Hashtable&)): Use the latter.
     (_Hashtable(const _Hashtable&)): Likewise.
     (_Hashtable(const _Hashtable&, const allocator_type&)): Likewise.
     (_Hashtable(_Hashtable&&, __node_alloc_type&&, false_type)): Likewise.
     * testsuite/util/exception/safety.h (setup_base::compare): Compare
     containers rather than iterator ranges (see the note below).
     * testsuite/23_containers/unordered_multiset/cons/copy.cc (main):
     Likewise.
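
A note on the testsuite changes above, with a small self-contained
example (my illustration, not part of the patch): once the copy preserves
per-bucket order rather than the global list order, std::equal over
begin()/end() can report a mismatch even though both containers hold the
same elements, while operator== on unordered containers compares contents
without regard to iteration order.

// Why the tests now compare containers instead of iterator ranges.
#include <algorithm>
#include <cassert>
#include <unordered_set>

int main()
{
  std::unordered_multiset<int> ref{1, 2, 2, 3, 5, 8};
  std::unordered_multiset<int> copy(ref);

  // operator== compares contents without regard to iteration order.
  assert(copy == ref);

  // Range equality additionally requires identical iteration order,
  // which an implementation need not preserve across a copy, so it is
  // not asserted here.
  bool same_order = std::equal(ref.begin(), ref.end(), copy.begin());
  (void)same_order;
  return 0;
}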

Tested under Linux x86_64.

François

[-- Attachment #2: 5_hashtable_prealloc_nodes.patch --]
[-- Type: text/x-patch, Size: 11253 bytes --]

diff --git a/libstdc++-v3/include/bits/hashtable.h b/libstdc++-v3/include/bits/hashtable.h
index 011a707605f..b497f16d017 100644
--- a/libstdc++-v3/include/bits/hashtable.h
+++ b/libstdc++-v3/include/bits/hashtable.h
@@ -290,6 +290,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
 	__detail::_ReuseOrAllocNode<__node_alloc_type>;
       using __alloc_node_gen_cache_bbegin_t =
 	__detail::_AllocNode<__node_alloc_type, __detail::_CacheBBeginPolicy>;
+      using __prealloc_hashtable_nodes_gen_t =
+	__detail::_PreAllocHashtableNodes<__node_alloc_type>;
+
       using __node_builder_t =
 	__detail::_NodeBuilder<_ExtractKey>;
       using __no_cache_bbegin_policy_t =
@@ -483,6 +486,10 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
 	void
 	_M_assign(_Ht&&, const _NodeGenerator&);
 
+      template<typename _Ht, typename _NodeGenerator>
+	void
+	_M_assign_stable(_Ht&&, const _NodeGenerator&);
+
       void
       _M_move_assign(_Hashtable&&, true_type);
 
@@ -1364,10 +1371,10 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
 	      _M_bucket_count = __ht._M_bucket_count;
 	      _M_element_count = __ht._M_element_count;
 	      _M_rehash_policy = __ht._M_rehash_policy;
-	      __alloc_node_gen_cache_bbegin_t __node_gen(*this);
 	      __try
 		{
-		  _M_assign(__ht, __node_gen);
+		  __prealloc_hashtable_nodes_gen_t __node_gen(__ht, *this);
+		  _M_assign_stable(__ht, __node_gen);
 		}
 	      __catch(...)
 		{
@@ -1487,6 +1494,72 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
 	  }
       }
 
+  template<typename _Key, typename _Value, typename _Alloc,
+	   typename _ExtractKey, typename _Equal,
+	   typename _Hash, typename _RangeHash, typename _Unused,
+	   typename _RehashPolicy, typename _Traits>
+    template<typename _Ht, typename _NodeGenerator>
+      void
+      _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
+		 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
+      _M_assign_stable(_Ht&& __ht, const _NodeGenerator& __node_gen)
+      {
+	__buckets_ptr __buckets = nullptr;
+	if (!_M_buckets)
+	  _M_buckets = __buckets = _M_allocate_buckets(_M_bucket_count);
+
+	__try
+	  {
+	    if (!__ht._M_before_begin._M_nxt)
+	      return;
+
+	    __node_ptr __prev_n = nullptr;
+	    for (std::size_t __bkt = 0; __bkt != _M_bucket_count; ++__bkt)
+	      {
+		if (__ht._M_buckets[__bkt] == nullptr)
+		  continue;
+
+		__node_ptr __ht_n =
+		  static_cast<__node_ptr>(__ht._M_buckets[__bkt]->_M_nxt);
+		__node_ptr __prev_ht_n = __ht_n;
+		__node_base_ptr __nxt_bkt_n = __bkt < _M_bucket_count - 1
+		  ? __ht._M_buckets[__bkt + 1] : nullptr;
+		do
+		  {
+		    __node_ptr __this_n
+		      = __node_gen(__prev_n, __bkt,
+				   __fwd_value_for<_Ht>(__ht_n->_M_v()));
+		    this->_M_copy_code(*__this_n, *__ht_n);
+		    if (__prev_n)
+		      {
+			if (!_M_buckets[__bkt])
+			  _M_buckets[__bkt] = __prev_n;
+			__prev_n->_M_nxt = __this_n;
+		      }
+		    else
+		      {
+			_M_buckets[__bkt] = &_M_before_begin;
+			_M_before_begin._M_nxt = __this_n;
+		      }
+
+		    __prev_n = __this_n;
+		    __prev_ht_n = __ht_n;
+		    __ht_n = __ht_n->_M_next();
+		  }
+		while (__ht_n
+		       && __ht._M_is_nxt_in_bucket(__bkt, __prev_ht_n,
+						   __nxt_bkt_n));
+	      }
+	  }
+	__catch(...)
+	  {
+	    clear();
+	    if (__buckets)
+	      _M_deallocate_buckets();
+	    __throw_exception_again;
+	  }
+      }
+
   template<typename _Key, typename _Value, typename _Alloc,
 	   typename _ExtractKey, typename _Equal,
 	   typename _Hash, typename _RangeHash, typename _Unused,
@@ -1575,8 +1648,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       _M_element_count(__ht._M_element_count),
       _M_rehash_policy(__ht._M_rehash_policy)
     {
-      __alloc_node_gen_cache_bbegin_t __node_gen(*this);
-      _M_assign(__ht, __node_gen);
+      __prealloc_hashtable_nodes_gen_t __node_gen(__ht, *this);
+      _M_assign_stable(__ht, __node_gen);
     }
 
   template<typename _Key, typename _Value, typename _Alloc,
@@ -1629,8 +1702,8 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       _M_element_count(__ht._M_element_count),
       _M_rehash_policy(__ht._M_rehash_policy)
     {
-      __alloc_node_gen_cache_bbegin_t __node_gen(*this);
-      _M_assign(__ht, __node_gen);
+      __prealloc_hashtable_nodes_gen_t __node_gen(__ht, *this);
+      _M_assign_stable(__ht, __node_gen);
     }
 
   template<typename _Key, typename _Value, typename _Alloc,
@@ -1669,11 +1742,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
 	}
       else
 	{
-	  __alloc_node_gen_cache_bbegin_t __node_gen(*this);
+	  __prealloc_hashtable_nodes_gen_t __node_gen(__ht, *this);
 	  using _Fwd_Ht = __conditional_t<
 	    __move_if_noexcept_cond<value_type>::value,
 	    const _Hashtable&, _Hashtable&&>;
-	  _M_assign(std::forward<_Fwd_Ht>(__ht), __node_gen);
+	  _M_assign_stable(std::forward<_Fwd_Ht>(__ht), __node_gen);
 	  __ht.clear();
 	}
     }
diff --git a/libstdc++-v3/include/bits/hashtable_policy.h b/libstdc++-v3/include/bits/hashtable_policy.h
index ff206a6ed20..becafcd3249 100644
--- a/libstdc++-v3/include/bits/hashtable_policy.h
+++ b/libstdc++-v3/include/bits/hashtable_policy.h
@@ -37,6 +37,7 @@
 #include <bits/stl_pair.h>	// for std::pair
 #include <ext/aligned_buffer.h>	// for __gnu_cxx::__aligned_buffer
 #include <ext/alloc_traits.h>	// for std::__alloc_rebind
+#include <bits/stl_tempbuf.h>	// for std::get_temporary_buffer.
 #include <ext/numeric_traits.h>	// for __gnu_cxx::__int_traits
 
 namespace std _GLIBCXX_VISIBILITY(default)
@@ -291,6 +292,113 @@ namespace __detail
       __hashtable_alloc& _M_h;
     };
 
+  template<typename _NodeAlloc>
+    struct _PreAllocHashtableNodes
+    {
+    private:
+      using __node_alloc_type = _NodeAlloc;
+      using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
+      using __node_alloc_traits =
+	typename __hashtable_alloc::__node_alloc_traits;
+      using __node_base = typename __hashtable_alloc::__node_base;
+
+    public:
+      using __node_ptr = typename __hashtable_alloc::__node_ptr;
+
+      template<typename _Ht>
+	_PreAllocHashtableNodes(_Ht& __ht, __hashtable_alloc& __ha)
+	: _M_h(__ha), _M_before_free_nodes()
+	, _M_buckets(std::get_temporary_buffer<__node_base*>(__ht.bucket_count()))
+	{
+	  __builtin_memset(_M_buckets.first, 0,
+			   _M_buckets.second * sizeof(__node_base*));
+	  __try
+	    {
+	      __node_ptr __hint = nullptr;
+	      __node_base* __prev = std::__addressof(_M_before_free_nodes);
+	      for (std::size_t __bkt = 0; __bkt != _M_buckets.second; ++__bkt)
+		for (auto __lit = __ht.begin(__bkt); __lit != __ht.end(__bkt);
+		     ++__lit)
+		  {
+		    __node_ptr __tmp = _M_h._M_allocate_node_ptr(__hint);
+		    if (!_M_buckets.first[__bkt])
+		      _M_buckets.first[__bkt] = __prev;
+
+		    __prev->_M_nxt = __tmp;
+		    __prev = __hint = __tmp;
+		  }
+	    }
+	  __catch(...)
+	  {
+	    _M_deallocate_nodes();
+	    __throw_exception_again;
+	  }
+	}
+
+      _PreAllocHashtableNodes(const _PreAllocHashtableNodes&) = delete;
+
+      ~_PreAllocHashtableNodes()
+      { _M_deallocate_nodes(); }
+
+      template<typename... _Args>
+	__node_ptr
+	operator()(__node_ptr __hint, std::size_t __bkt, _Args&&... __args) const
+	{
+	  if (__bkt >= _M_buckets.second || _M_buckets.first[__bkt] == nullptr)
+	    return _M_h._M_allocate_node(__hint,
+					 std::forward<_Args>(__args)...);
+
+	  if (_M_buckets.first[__bkt] != std::__addressof(_M_before_free_nodes))
+	    {
+	      _M_buckets.first[_M_last_access_bkt] = nullptr;
+	      _M_buckets.first[__bkt] = std::__addressof(_M_before_free_nodes);
+	    }
+
+	  _M_last_access_bkt = __bkt;
+	  auto __prev = _M_buckets.first[__bkt];
+	  __node_ptr __node = static_cast<__node_ptr>(__prev->_M_nxt);
+	  _M_before_free_nodes._M_nxt = __node->_M_nxt;
+	  if (_M_before_free_nodes._M_nxt == nullptr)
+	    _M_buckets.first[__bkt] = nullptr;
+
+	  __node->_M_nxt = nullptr;
+	  auto& __a = _M_h._M_node_allocator();
+	  __try
+	    {
+	      __node_alloc_traits::construct(__a, __node->_M_valptr(),
+					     std::forward<_Args>(__args)...);
+	    }
+	  __catch(...)
+	  {
+	    _M_h._M_deallocate_node_ptr(__node);
+	    __throw_exception_again;
+	  }
+
+	  return __node;
+	}
+
+    private:
+      void
+      _M_deallocate_nodes()
+      {
+	__node_ptr __n
+	  = static_cast<__node_ptr>(_M_before_free_nodes._M_nxt);
+	while (__n)
+	  {
+	    __node_ptr __tmp = __n;
+	    __n = __n->_M_next();
+	    _M_h._M_deallocate_node_ptr(__tmp);
+	  }
+
+	std::return_temporary_buffer(_M_buckets.first);
+      }
+
+      mutable __node_base _M_before_free_nodes;
+      mutable std::pair<__node_base**, std::ptrdiff_t> _M_buckets;
+      mutable std::size_t _M_last_access_bkt = 0;
+      __hashtable_alloc& _M_h;
+    };
+
   // Auxiliary types used for all instantiations of _Hashtable nodes
   // and iterators.
 
@@ -2031,6 +2139,10 @@ namespace __detail
       _M_node_allocator() const
       { return __ebo_node_alloc::_M_cget(); }
 
+      // Allocate a node.
+      __node_ptr
+      _M_allocate_node_ptr(__node_ptr __hint);
+
       // Allocate a node and construct an element within it.
       template<typename... _Args>
 	__node_ptr
@@ -2058,6 +2170,27 @@ namespace __detail
 
   // Definitions of class template _Hashtable_alloc's out-of-line member
   // functions.
+  template<typename _NodeAlloc>
+    auto
+    _Hashtable_alloc<_NodeAlloc>::_M_allocate_node_ptr(__node_ptr __hint)
+    -> __node_ptr
+    {
+      auto& __alloc = _M_node_allocator();
+      typename __node_alloc_traits::pointer __nptr;
+      if (__hint)
+	{
+	  typedef typename __node_alloc_traits::const_pointer _CPtr;
+	  auto __hptr = std::pointer_traits<_CPtr>::pointer_to(*__hint);
+	  __nptr = __node_alloc_traits::allocate(__alloc, 1, __hptr);
+	}
+      else
+	__nptr = __node_alloc_traits::allocate(__alloc, 1);
+
+      __node_ptr __n = std::__to_address(__nptr);
+      ::new ((void*)__n) __node_type;
+      return __n;
+    }
+
   template<typename _NodeAlloc>
     template<typename... _Args>
       auto
diff --git a/libstdc++-v3/testsuite/23_containers/unordered_multiset/cons/copy.cc b/libstdc++-v3/testsuite/23_containers/unordered_multiset/cons/copy.cc
index 22bedb9065f..c4e521ab662 100644
--- a/libstdc++-v3/testsuite/23_containers/unordered_multiset/cons/copy.cc
+++ b/libstdc++-v3/testsuite/23_containers/unordered_multiset/cons/copy.cc
@@ -38,6 +38,6 @@ int main()
 
   std::unordered_multiset<int> copy(ref);
   VERIFY( copy.size() == ref.size() );
-  VERIFY( std::equal(ref.begin(), ref.end(), copy.begin()) );
+  VERIFY( copy == ref );
   return 0;
 }
diff --git a/libstdc++-v3/testsuite/util/exception/safety.h b/libstdc++-v3/testsuite/util/exception/safety.h
index 8ef91648af6..ad55812be1a 100644
--- a/libstdc++-v3/testsuite/util/exception/safety.h
+++ b/libstdc++-v3/testsuite/util/exception/safety.h
@@ -235,12 +235,11 @@ namespace __gnu_test
 		"setup_base::compare containers size not equal");
 
 	// Should test iterator validity before and after exception.
-	bool __equal_it = std::equal(__test.begin(), __test.end(),
-				     __control.begin());
+	bool __equal = __test == __control;
 
-	if (!__equal_it)
+	if (!__equal)
 	  throw std::logic_error(
-		"setup_base::compare containers iterators not equal");
+		"setup_base::compare containers not equal");
 
 	return true;
       }
