Daily bump.
[gcc.git] / libstdc++-v3 / include / bits / hashtable_policy.h
1 // Internal policy header for unordered_set and unordered_map -*- C++ -*-
2
3 // Copyright (C) 2010-2021 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24
25 /** @file bits/hashtable_policy.h
26 * This is an internal header file, included by other library headers.
27 * Do not attempt to use it directly.
28 * @headername{unordered_map,unordered_set}
29 */
30
31 #ifndef _HASHTABLE_POLICY_H
32 #define _HASHTABLE_POLICY_H 1
33
34 #include <tuple> // for std::tuple, std::forward_as_tuple
35 #include <bits/stl_algobase.h> // for std::min, std::is_permutation.
36 #include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
37
38 namespace std _GLIBCXX_VISIBILITY(default)
39 {
40 _GLIBCXX_BEGIN_NAMESPACE_VERSION
41
42 template<typename _Key, typename _Value, typename _Alloc,
43 typename _ExtractKey, typename _Equal,
44 typename _Hash, typename _RangeHash, typename _Unused,
45 typename _RehashPolicy, typename _Traits>
46 class _Hashtable;
47
48 namespace __detail
49 {
50 /**
51 * @defgroup hashtable-detail Base and Implementation Classes
52 * @ingroup unordered_associative_containers
53 * @{
54 */
55 template<typename _Key, typename _Value, typename _ExtractKey,
56 typename _Equal, typename _Hash, typename _RangeHash,
57 typename _Unused, typename _Traits>
58 struct _Hashtable_base;
59
60 // Helper function: return distance(first, last) for forward
61 // iterators, or 0/1 for input iterators.
62 template<class _Iterator>
63 inline typename std::iterator_traits<_Iterator>::difference_type
64 __distance_fw(_Iterator __first, _Iterator __last,
65 std::input_iterator_tag)
66 { return __first != __last ? 1 : 0; }
67
68 template<class _Iterator>
69 inline typename std::iterator_traits<_Iterator>::difference_type
70 __distance_fw(_Iterator __first, _Iterator __last,
71 std::forward_iterator_tag)
72 { return std::distance(__first, __last); }
73
74 template<class _Iterator>
75 inline typename std::iterator_traits<_Iterator>::difference_type
76 __distance_fw(_Iterator __first, _Iterator __last)
77 { return __distance_fw(__first, __last,
78 std::__iterator_category(__first)); }
79
80 struct _Identity
81 {
82 template<typename _Tp>
83 _Tp&&
84 operator()(_Tp&& __x) const noexcept
85 { return std::forward<_Tp>(__x); }
86 };
87
88 struct _Select1st
89 {
90 template<typename _Tp>
91 auto
92 operator()(_Tp&& __x) const noexcept
93 -> decltype(std::get<0>(std::forward<_Tp>(__x)))
94 { return std::get<0>(std::forward<_Tp>(__x)); }
95 };
96
97 template<typename _NodeAlloc>
98 struct _Hashtable_alloc;
99
  // Functor recycling a pool of nodes and using allocation once the pool is
  // empty.
  template<typename _NodeAlloc>
    struct _ReuseOrAllocNode
    {
    private:
      using __node_alloc_type = _NodeAlloc;
      using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
      using __node_alloc_traits =
	typename __hashtable_alloc::__node_alloc_traits;
      using __node_type = typename __hashtable_alloc::__node_type;

    public:
      // Takes a singly-linked list of reusable nodes (__nodes may be null)
      // and the allocator wrapper used for construction and deallocation.
      _ReuseOrAllocNode(__node_type* __nodes, __hashtable_alloc& __h)
      : _M_nodes(__nodes), _M_h(__h) { }
      _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;

      // Any pool nodes not consumed by operator() are returned to the
      // allocator here.
      ~_ReuseOrAllocNode()
      { _M_h._M_deallocate_nodes(_M_nodes); }

      // Produce a node holding a value built from __arg: pop and reuse the
      // head of the pool if the pool is non-empty, otherwise allocate.
      template<typename _Arg>
	__node_type*
	operator()(_Arg&& __arg) const
	{
	  if (_M_nodes)
	    {
	      // Detach the first recycled node and unlink it.
	      __node_type* __node = _M_nodes;
	      _M_nodes = _M_nodes->_M_next();
	      __node->_M_nxt = nullptr;
	      auto& __a = _M_h._M_node_allocator();
	      // Destroy the old value before constructing the new one in
	      // the same storage.
	      __node_alloc_traits::destroy(__a, __node->_M_valptr());
	      __try
		{
		  __node_alloc_traits::construct(__a, __node->_M_valptr(),
						 std::forward<_Arg>(__arg));
		}
	      __catch(...)
		{
		  // Construction failed: the node no longer holds a value,
		  // so give it back to the allocator rather than the pool.
		  _M_h._M_deallocate_node_ptr(__node);
		  __throw_exception_again;
		}
	      return __node;
	    }
	  return _M_h._M_allocate_node(std::forward<_Arg>(__arg));
	}

    private:
      // mutable: operator() is const but consumes the pool.
      mutable __node_type* _M_nodes;
      __hashtable_alloc& _M_h;
    };
150
151 // Functor similar to the previous one but without any pool of nodes to
152 // recycle.
153 template<typename _NodeAlloc>
154 struct _AllocNode
155 {
156 private:
157 using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;
158 using __node_type = typename __hashtable_alloc::__node_type;
159
160 public:
161 _AllocNode(__hashtable_alloc& __h)
162 : _M_h(__h) { }
163
164 template<typename _Arg>
165 __node_type*
166 operator()(_Arg&& __arg) const
167 { return _M_h._M_allocate_node(std::forward<_Arg>(__arg)); }
168
169 private:
170 __hashtable_alloc& _M_h;
171 };
172
173 // Auxiliary types used for all instantiations of _Hashtable nodes
174 // and iterators.
175
176 /**
177 * struct _Hashtable_traits
178 *
179 * Important traits for hash tables.
180 *
181 * @tparam _Cache_hash_code Boolean value. True if the value of
182 * the hash function is stored along with the value. This is a
183 * time-space tradeoff. Storing it may improve lookup speed by
184 * reducing the number of times we need to call the _Hash or _Equal
185 * functors.
186 *
187 * @tparam _Constant_iterators Boolean value. True if iterator and
188 * const_iterator are both constant iterator types. This is true
189 * for unordered_set and unordered_multiset, false for
190 * unordered_map and unordered_multimap.
191 *
192 * @tparam _Unique_keys Boolean value. True if the return value
193 * of _Hashtable::count(k) is always at most one, false if it may
194 * be an arbitrary number. This is true for unordered_set and
195 * unordered_map, false for unordered_multiset and
196 * unordered_multimap.
197 */
198 template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
199 struct _Hashtable_traits
200 {
201 using __hash_cached = __bool_constant<_Cache_hash_code>;
202 using __constant_iterators = __bool_constant<_Constant_iterators>;
203 using __unique_keys = __bool_constant<_Unique_keys>;
204 };
205
206 /**
207 * struct _Hash_node_base
208 *
209 * Nodes, used to wrap elements stored in the hash table. A policy
210 * template parameter of class template _Hashtable controls whether
211 * nodes also store a hash code. In some cases (e.g. strings) this
212 * may be a performance win.
213 */
214 struct _Hash_node_base
215 {
216 _Hash_node_base* _M_nxt;
217
218 _Hash_node_base() noexcept : _M_nxt() { }
219
220 _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
221 };
222
  /**
   *  struct _Hash_node_value_base
   *
   *  Node type with the value to store.
   */
  template<typename _Value>
    struct _Hash_node_value_base
    {
      typedef _Value value_type;

      // Raw, suitably-aligned storage for a _Value; the object inside is
      // constructed and destroyed explicitly by the allocator machinery.
      __gnu_cxx::__aligned_buffer<_Value> _M_storage;

      // Pointer to the stored value.
      _Value*
      _M_valptr() noexcept
      { return _M_storage._M_ptr(); }

      const _Value*
      _M_valptr() const noexcept
      { return _M_storage._M_ptr(); }

      // Reference to the stored value.
      _Value&
      _M_v() noexcept
      { return *_M_valptr(); }

      const _Value&
      _M_v() const noexcept
      { return *_M_valptr(); }
    };
251
  /**
   *  Primary template struct _Hash_node_code_cache.
   *
   *  No hash code is cached: empty base class.
   */
  template<bool _Cache_hash_code>
    struct _Hash_node_code_cache
    { };

  /**
   *  Specialization for node with cache, struct _Hash_node_code_cache.
   *
   *  Stores the element's hash code in the node so it need not be
   *  recomputed from the element.
   */
  template<>
    struct _Hash_node_code_cache<true>
    { std::size_t _M_hash_code; };

  // Value storage combined with the (possibly empty) hash-code cache.
  template<typename _Value, bool _Cache_hash_code>
    struct _Hash_node_value
    : _Hash_node_value_base<_Value>
    , _Hash_node_code_cache<_Cache_hash_code>
    { };
271
  /**
   *  Primary template struct _Hash_node.
   *
   *  A complete hashtable node: the forward link, the value storage and
   *  the optional cached hash code.
   */
  template<typename _Value, bool _Cache_hash_code>
    struct _Hash_node
    : _Hash_node_base
    , _Hash_node_value<_Value, _Cache_hash_code>
    {
      // Successor in the node list, downcast to the full node type.
      _Hash_node*
      _M_next() const noexcept
      { return static_cast<_Hash_node*>(this->_M_nxt); }
    };
284
285 /// Base class for node iterators.
286 template<typename _Value, bool _Cache_hash_code>
287 struct _Node_iterator_base
288 {
289 using __node_type = _Hash_node<_Value, _Cache_hash_code>;
290
291 __node_type* _M_cur;
292
293 _Node_iterator_base() : _M_cur(nullptr) { }
294 _Node_iterator_base(__node_type* __p) noexcept
295 : _M_cur(__p) { }
296
297 void
298 _M_incr() noexcept
299 { _M_cur = _M_cur->_M_next(); }
300
301 friend bool
302 operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
303 noexcept
304 { return __x._M_cur == __y._M_cur; }
305
306 #if __cpp_impl_three_way_comparison < 201907L
307 friend bool
308 operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
309 noexcept
310 { return __x._M_cur != __y._M_cur; }
311 #endif
312 };
313
314 /// Node iterators, used to iterate through all the hashtable.
315 template<typename _Value, bool __constant_iterators, bool __cache>
316 struct _Node_iterator
317 : public _Node_iterator_base<_Value, __cache>
318 {
319 private:
320 using __base_type = _Node_iterator_base<_Value, __cache>;
321 using __node_type = typename __base_type::__node_type;
322
323 public:
324 typedef _Value value_type;
325 typedef std::ptrdiff_t difference_type;
326 typedef std::forward_iterator_tag iterator_category;
327
328 using pointer = typename std::conditional<__constant_iterators,
329 const value_type*, value_type*>::type;
330
331 using reference = typename std::conditional<__constant_iterators,
332 const value_type&, value_type&>::type;
333
334 _Node_iterator() = default;
335
336 explicit
337 _Node_iterator(__node_type* __p) noexcept
338 : __base_type(__p) { }
339
340 reference
341 operator*() const noexcept
342 { return this->_M_cur->_M_v(); }
343
344 pointer
345 operator->() const noexcept
346 { return this->_M_cur->_M_valptr(); }
347
348 _Node_iterator&
349 operator++() noexcept
350 {
351 this->_M_incr();
352 return *this;
353 }
354
355 _Node_iterator
356 operator++(int) noexcept
357 {
358 _Node_iterator __tmp(*this);
359 this->_M_incr();
360 return __tmp;
361 }
362 };
363
364 /// Node const_iterators, used to iterate through all the hashtable.
365 template<typename _Value, bool __constant_iterators, bool __cache>
366 struct _Node_const_iterator
367 : public _Node_iterator_base<_Value, __cache>
368 {
369 private:
370 using __base_type = _Node_iterator_base<_Value, __cache>;
371 using __node_type = typename __base_type::__node_type;
372
373 public:
374 typedef _Value value_type;
375 typedef std::ptrdiff_t difference_type;
376 typedef std::forward_iterator_tag iterator_category;
377
378 typedef const value_type* pointer;
379 typedef const value_type& reference;
380
381 _Node_const_iterator() = default;
382
383 explicit
384 _Node_const_iterator(__node_type* __p) noexcept
385 : __base_type(__p) { }
386
387 _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
388 __cache>& __x) noexcept
389 : __base_type(__x._M_cur) { }
390
391 reference
392 operator*() const noexcept
393 { return this->_M_cur->_M_v(); }
394
395 pointer
396 operator->() const noexcept
397 { return this->_M_cur->_M_valptr(); }
398
399 _Node_const_iterator&
400 operator++() noexcept
401 {
402 this->_M_incr();
403 return *this;
404 }
405
406 _Node_const_iterator
407 operator++(int) noexcept
408 {
409 _Node_const_iterator __tmp(*this);
410 this->_M_incr();
411 return __tmp;
412 }
413 };
414
415 // Many of class template _Hashtable's template parameters are policy
416 // classes. These are defaults for the policies.
417
418 /// Default range hashing function: use division to fold a large number
419 /// into the range [0, N).
420 struct _Mod_range_hashing
421 {
422 typedef std::size_t first_argument_type;
423 typedef std::size_t second_argument_type;
424 typedef std::size_t result_type;
425
426 result_type
427 operator()(first_argument_type __num,
428 second_argument_type __den) const noexcept
429 { return __num % __den; }
430 };
431
  /// Default ranged hash function H. In principle it should be a
  /// function object composed from objects of type H1 and H2 such that
  /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  /// h1 and h2. So instead we'll just use a tag to tell class template
  /// hashtable to do that composition.
  // Stateless tag type: carries no members or behavior of its own.
  struct _Default_ranged_hash { };
438
  /// Default value for rehash policy. Bucket size is (usually) the
  /// smallest prime that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    using __has_load_factor = true_type;

    // __z is the maximum load factor (elements per bucket).
    _Prime_rehash_policy(float __z = 1.0) noexcept
    : _M_max_load_factor(__z), _M_next_resize(0) { }

    float
    max_load_factor() const noexcept
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n.
    // (Declaration only: defined out of line, not in this header.)
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const
    { return __builtin_ceil(__n / (double)_M_max_load_factor); }

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted. Do we need to
    // increase bucket count? If so, return make_pair(true, n), where n
    // is the new bucket count. If not, return make_pair(false, 0).
    // (Declaration only: defined out of line, not in this header.)
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		   std::size_t __n_ins) const;

    // Opaque snapshot of the policy state, used to roll the policy back
    // if an insertion fails after a rehash decision.
    typedef std::size_t _State;

    _State
    _M_state() const
    { return _M_next_resize; }

    void
    _M_reset() noexcept
    { _M_next_resize = 0; }

    void
    _M_reset(_State __state)
    { _M_next_resize = __state; }

    static const std::size_t _S_growth_factor = 2;

    float _M_max_load_factor;
    // Element count that triggers the next rehash; mutable because the
    // const _M_next_bkt (defined out of line) presumably updates this
    // cached threshold — confirm against the out-of-line definition.
    mutable std::size_t _M_next_resize;
  };
488
489 /// Range hashing function assuming that second arg is a power of 2.
490 struct _Mask_range_hashing
491 {
492 typedef std::size_t first_argument_type;
493 typedef std::size_t second_argument_type;
494 typedef std::size_t result_type;
495
496 result_type
497 operator()(first_argument_type __num,
498 second_argument_type __den) const noexcept
499 { return __num & (__den - 1); }
500 };
501
502 /// Compute closest power of 2 not less than __n
503 inline std::size_t
504 __clp2(std::size_t __n) noexcept
505 {
506 using __gnu_cxx::__int_traits;
507 // Equivalent to return __n ? std::bit_ceil(__n) : 0;
508 if (__n < 2)
509 return __n;
510 const unsigned __lz = sizeof(size_t) > sizeof(long)
511 ? __builtin_clzll(__n - 1ull)
512 : __builtin_clzl(__n - 1ul);
513 // Doing two shifts avoids undefined behaviour when __lz == 0.
514 return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
515 }
516
  /// Rehash policy providing power of 2 bucket numbers. Avoids modulo
  /// operations.
  struct _Power2_rehash_policy
  {
    using __has_load_factor = true_type;

    // __z is the maximum load factor (elements per bucket).
    _Power2_rehash_policy(float __z = 1.0) noexcept
    : _M_max_load_factor(__z), _M_next_resize(0) { }

    float
    max_load_factor() const noexcept
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n (as long as n is not above the
    // highest power of 2).
    std::size_t
    _M_next_bkt(std::size_t __n) noexcept
    {
      if (__n == 0)
	// Special case on container 1st initialization with 0 bucket count
	// hint. We keep _M_next_resize to 0 to make sure that next time we
	// want to add an element allocation will take place.
	return 1;

      // Cap the bucket count at the largest power of two representable
      // in (at most) a 64-bit size_t.
      const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
      const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
      std::size_t __res = __clp2(__n);

      if (__res == 0)
	// __clp2 overflowed to 0: clamp to the maximum bucket count.
	__res = __max_bkt;
      else if (__res == 1)
	// If __res is 1 we force it to 2 to make sure there will be an
	// allocation so that nothing need to be stored in the initial
	// single bucket
	__res = 2;

      if (__res == __max_bkt)
	// Set next resize to the max value so that we never try to rehash again
	// as we already reach the biggest possible bucket number.
	// Note that it might result in max_load_factor not being respected.
	_M_next_resize = size_t(-1);
      else
	_M_next_resize
	  = __builtin_floor(__res * (double)_M_max_load_factor);

      return __res;
    }

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const noexcept
    { return __builtin_ceil(__n / (double)_M_max_load_factor); }

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted. Do we need to
    // increase bucket count? If so, return make_pair(true, n), where n
    // is the new bucket count. If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		   std::size_t __n_ins) noexcept
    {
      if (__n_elt + __n_ins > _M_next_resize)
	{
	  // If _M_next_resize is 0 it means that we have nothing allocated so
	  // far and that we start inserting elements. In this case we start
	  // with an initial bucket size of 11.
	  double __min_bkts
	    = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
	      / (double)_M_max_load_factor;
	  if (__min_bkts >= __n_bkt)
	    return { true,
	      _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
						__n_bkt * _S_growth_factor)) };

	  // Enough buckets already: refresh the cached threshold so we do
	  // not recompute it on every insertion.
	  _M_next_resize
	    = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
	  return { false, 0 };
	}
      else
	return { false, 0 };
    }

    // Opaque snapshot of the policy state, used to roll the policy back
    // if an insertion fails after a rehash decision.
    typedef std::size_t _State;

    _State
    _M_state() const noexcept
    { return _M_next_resize; }

    void
    _M_reset() noexcept
    { _M_next_resize = 0; }

    void
    _M_reset(_State __state) noexcept
    { _M_next_resize = __state; }

    static const std::size_t _S_growth_factor = 2;

    float _M_max_load_factor;
    // Element count that triggers the next rehash.
    std::size_t _M_next_resize;
  };
618
619 // Base classes for std::_Hashtable. We define these base classes
620 // because in some cases we want to do different things depending on
621 // the value of a policy class. In some cases the policy class
622 // affects which member functions and nested typedefs are defined;
623 // we handle that by specializing base class templates. Several of
624 // the base class templates need to access other members of class
625 // template _Hashtable, so we use a variant of the "Curiously
626 // Recurring Template Pattern" (CRTP) technique.
627
  /**
   *  Primary class template _Map_base.
   *
   *  If the hashtable has a value type of the form pair<T1, T2> and a
   *  key extraction policy (_ExtractKey) that returns the first part
   *  of the pair, the hashtable gets a mapped_type typedef. If it
   *  satisfies those criteria and also has unique keys, then it also
   *  gets an operator[].
   *
   *  The primary template (any other _ExtractKey) adds nothing.
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Map_base { };
643
  /// Partial specialization, __unique_keys set to false.
  /// Only the mapped_type typedef is provided; operator[] and at() are
  /// not defined here because keys may not be unique.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using mapped_type = typename std::tuple_element<1, _Pair>::type;
    };
653
  /// Partial specialization, __unique_keys set to true.
  /// Declares operator[] and at(); definitions follow out of line.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
    private:
      using __hashtable_base = _Hashtable_base<_Key, _Pair, _Select1st, _Equal,
					       _Hash, _RangeHash, _Unused,
					       _Traits>;

      // The complete hashtable type this base is part of (CRTP).
      using __hashtable = _Hashtable<_Key, _Pair, _Alloc, _Select1st, _Equal,
				     _Hash, _RangeHash,
				     _Unused, _RehashPolicy, _Traits>;

      using __hash_code = typename __hashtable_base::__hash_code;

    public:
      using key_type = typename __hashtable_base::key_type;
      using mapped_type = typename std::tuple_element<1, _Pair>::type;

      // Access (inserting a value-initialized element if absent).
      mapped_type&
      operator[](const key_type& __k);

      mapped_type&
      operator[](key_type&& __k);

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // DR 761. unordered_map needs an at() member function.
      mapped_type&
      at(const key_type& __k);

      const mapped_type&
      at(const key_type& __k) const;
    };
690
691 template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
692 typename _Hash, typename _RangeHash, typename _Unused,
693 typename _RehashPolicy, typename _Traits>
694 auto
695 _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
696 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
697 operator[](const key_type& __k)
698 -> mapped_type&
699 {
700 __hashtable* __h = static_cast<__hashtable*>(this);
701 __hash_code __code = __h->_M_hash_code(__k);
702 std::size_t __bkt = __h->_M_bucket_index(__code);
703 if (auto __node = __h->_M_find_node(__bkt, __k, __code))
704 return __node->_M_v().second;
705
706 typename __hashtable::_Scoped_node __node {
707 __h,
708 std::piecewise_construct,
709 std::tuple<const key_type&>(__k),
710 std::tuple<>()
711 };
712 auto __pos
713 = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
714 __node._M_node = nullptr;
715 return __pos->second;
716 }
717
718 template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
719 typename _Hash, typename _RangeHash, typename _Unused,
720 typename _RehashPolicy, typename _Traits>
721 auto
722 _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
723 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
724 operator[](key_type&& __k)
725 -> mapped_type&
726 {
727 __hashtable* __h = static_cast<__hashtable*>(this);
728 __hash_code __code = __h->_M_hash_code(__k);
729 std::size_t __bkt = __h->_M_bucket_index(__code);
730 if (auto __node = __h->_M_find_node(__bkt, __k, __code))
731 return __node->_M_v().second;
732
733 typename __hashtable::_Scoped_node __node {
734 __h,
735 std::piecewise_construct,
736 std::forward_as_tuple(std::move(__k)),
737 std::tuple<>()
738 };
739 auto __pos
740 = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
741 __node._M_node = nullptr;
742 return __pos->second;
743 }
744
745 template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
746 typename _Hash, typename _RangeHash, typename _Unused,
747 typename _RehashPolicy, typename _Traits>
748 auto
749 _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
750 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
751 at(const key_type& __k)
752 -> mapped_type&
753 {
754 __hashtable* __h = static_cast<__hashtable*>(this);
755 auto __ite = __h->find(__k);
756
757 if (!__ite._M_cur)
758 __throw_out_of_range(__N("_Map_base::at"));
759 return __ite->second;
760 }
761
762 template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
763 typename _Hash, typename _RangeHash, typename _Unused,
764 typename _RehashPolicy, typename _Traits>
765 auto
766 _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
767 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
768 at(const key_type& __k) const
769 -> const mapped_type&
770 {
771 const __hashtable* __h = static_cast<const __hashtable*>(this);
772 auto __ite = __h->find(__k);
773
774 if (!__ite._M_cur)
775 __throw_out_of_range(__N("_Map_base::at"));
776 return __ite->second;
777 }
778
  /**
   *  Primary class template _Insert_base.
   *
   *  Defines @c insert member functions appropriate to all _Hashtables.
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Insert_base
    {
    protected:
      using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
					       _Equal, _Hash, _RangeHash,
					       _Unused, _Traits>;

      // The complete hashtable type this base is part of (CRTP).
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash,
				     _Unused, _RehashPolicy, _Traits>;

      using __hash_cached = typename _Traits::__hash_cached;
      using __constant_iterators = typename _Traits::__constant_iterators;

      // Allocator wrapper rebound to the node type actually stored.
      using __hashtable_alloc = _Hashtable_alloc<
	__alloc_rebind<_Alloc, _Hash_node<_Value,
					  __hash_cached::value>>>;

      using value_type = typename __hashtable_base::value_type;
      using size_type = typename __hashtable_base::size_type;

      using __unique_keys = typename _Traits::__unique_keys;
      using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
      using __node_gen_type = _AllocNode<__node_alloc_type>;

      // CRTP downcast to the complete hashtable type.
      __hashtable&
      _M_conjure_hashtable()
      { return *(static_cast<__hashtable*>(this)); }

      // Range insertion; one overload per uniqueness policy (__uks),
      // defined out of line below.
      template<typename _InputIterator, typename _NodeGetter>
	void
	_M_insert_range(_InputIterator __first, _InputIterator __last,
			const _NodeGetter&, true_type __uks);

      template<typename _InputIterator, typename _NodeGetter>
	void
	_M_insert_range(_InputIterator __first, _InputIterator __last,
			const _NodeGetter&, false_type __uks);

    public:
      using iterator = _Node_iterator<_Value, __constant_iterators::value,
				      __hash_cached::value>;

      using const_iterator = _Node_const_iterator<_Value, __constant_iterators::value,
						  __hash_cached::value>;

      // insert returns pair<iterator, bool> when keys are unique, a
      // plain iterator otherwise.
      using __ireturn_type = typename std::conditional<__unique_keys::value,
						       std::pair<iterator, bool>,
						       iterator>::type;

      // Copy-insert a value.
      __ireturn_type
      insert(const value_type& __v)
      {
	__hashtable& __h = _M_conjure_hashtable();
	__node_gen_type __node_gen(__h);
	return __h._M_insert(__v, __node_gen, __unique_keys{});
      }

      // Copy-insert with a position hint.
      iterator
      insert(const_iterator __hint, const value_type& __v)
      {
	__hashtable& __h = _M_conjure_hashtable();
	__node_gen_type __node_gen(__h);
	return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
      }

      // try_emplace: the mapped value is only constructed when the key
      // is not already present.  The hint iterator is ignored.
      template<typename _KType, typename... _Args>
	std::pair<iterator, bool>
	try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
	{
	  __hashtable& __h = _M_conjure_hashtable();
	  auto __code = __h._M_hash_code(__k);
	  std::size_t __bkt = __h._M_bucket_index(__code);
	  if (auto __node = __h._M_find_node(__bkt, __k, __code))
	    return { iterator(__node), false };

	  // Key absent: build the element from __k and __args.  The
	  // scoped node owns the allocation until insertion succeeds,
	  // so an exception cannot leak it.
	  typename __hashtable::_Scoped_node __node {
	    &__h,
	    std::piecewise_construct,
	    std::forward_as_tuple(std::forward<_KType>(__k)),
	    std::forward_as_tuple(std::forward<_Args>(__args)...)
	  };
	  auto __it
	    = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
	  __node._M_node = nullptr; // ownership transferred to the table
	  return { __it, true };
	}

      // Insert all elements of an initializer list.
      void
      insert(initializer_list<value_type> __l)
      { this->insert(__l.begin(), __l.end()); }

      // Insert all elements of an iterator range.
      template<typename _InputIterator>
	void
	insert(_InputIterator __first, _InputIterator __last)
	{
	  __hashtable& __h = _M_conjure_hashtable();
	  __node_gen_type __node_gen(__h);
	  return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
	}
    };
889
890 template<typename _Key, typename _Value, typename _Alloc,
891 typename _ExtractKey, typename _Equal,
892 typename _Hash, typename _RangeHash, typename _Unused,
893 typename _RehashPolicy, typename _Traits>
894 template<typename _InputIterator, typename _NodeGetter>
895 void
896 _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
897 _Hash, _RangeHash, _Unused,
898 _RehashPolicy, _Traits>::
899 _M_insert_range(_InputIterator __first, _InputIterator __last,
900 const _NodeGetter& __node_gen, true_type __uks)
901 {
902 __hashtable& __h = _M_conjure_hashtable();
903 for (; __first != __last; ++__first)
904 __h._M_insert(*__first, __node_gen, __uks);
905 }
906
  // Range insertion for non-unique-key containers: every element will be
  // inserted, so the needed bucket count can be reserved up front.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    template<typename _InputIterator, typename _NodeGetter>
      void
      _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		   _Hash, _RangeHash, _Unused,
		   _RehashPolicy, _Traits>::
      _M_insert_range(_InputIterator __first, _InputIterator __last,
		      const _NodeGetter& __node_gen, false_type __uks)
      {
	using __rehash_type = typename __hashtable::__rehash_type;
	using __rehash_state = typename __hashtable::__rehash_state;
	using pair_type = std::pair<bool, std::size_t>;

	// For forward iterators this is the exact range length; for
	// single-pass input iterators it is only 0 or 1 (see
	// __distance_fw above).
	size_type __n_elt = __detail::__distance_fw(__first, __last);
	if (__n_elt == 0)
	  return;

	__hashtable& __h = _M_conjure_hashtable();
	__rehash_type& __rehash = __h._M_rehash_policy;
	// Saved so _M_rehash can restore the policy if it throws.
	const __rehash_state& __saved_state = __rehash._M_state();
	// Ask the policy whether inserting __n_elt more elements needs a
	// larger bucket count, and rehash once up front if so.
	pair_type __do_rehash = __rehash._M_need_rehash(__h._M_bucket_count,
							__h._M_element_count,
							__n_elt);

	if (__do_rehash.first)
	  __h._M_rehash(__do_rehash.second, __saved_state);

	for (; __first != __last; ++__first)
	  __h._M_insert(*__first, __node_gen, __uks);
      }
940
  /**
   *  Primary class template _Insert.
   *
   *  Defines @c insert member functions that depend on _Hashtable policies,
   *  via partial specializations.
   *
   *  Declaration only: specialized below on _Constant_iterators.
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   bool _Constant_iterators = _Traits::__constant_iterators::value>
    struct _Insert;
953
  /// Specialization for containers with constant iterators: the rvalue
  /// insert overloads take the container's exact value_type.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		   _Hash, _RangeHash, _Unused,
		   _RehashPolicy, _Traits, true>
    : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
    {
      using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
				       _Equal, _Hash, _RangeHash, _Unused,
				       _RehashPolicy, _Traits>;

      using value_type = typename __base_type::value_type;
      using iterator = typename __base_type::iterator;
      using const_iterator = typename __base_type::const_iterator;
      using __ireturn_type = typename __base_type::__ireturn_type;

      using __unique_keys = typename __base_type::__unique_keys;
      using __hashtable = typename __base_type::__hashtable;
      using __node_gen_type = typename __base_type::__node_gen_type;

      // The remaining insert overloads come from _Insert_base.
      using __base_type::insert;

      // Insert an rvalue, moving it into a newly generated node.
      __ireturn_type
      insert(value_type&& __v)
      {
	__hashtable& __h = this->_M_conjure_hashtable();
	__node_gen_type __node_gen(__h);
	return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
      }

      // Insert an rvalue, using __hint as a suggested location.
      iterator
      insert(const_iterator __hint, value_type&& __v)
      {
	__hashtable& __h = this->_M_conjure_hashtable();
	__node_gen_type __node_gen(__h);
	return __h._M_insert(__hint, std::move(__v), __node_gen,
			     __unique_keys{});
      }
    };
997
  /// Specialization for containers with mutable iterators: insert accepts
  /// any type from which value_type can be constructed.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		   _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
    {
      using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
				       _Equal, _Hash, _RangeHash, _Unused,
				       _RehashPolicy, _Traits>;
      using value_type = typename __base_type::value_type;
      using iterator = typename __base_type::iterator;
      using const_iterator = typename __base_type::const_iterator;

      using __unique_keys = typename __base_type::__unique_keys;
      using __hashtable = typename __base_type::__hashtable;
      using __ireturn_type = typename __base_type::__ireturn_type;

      // The remaining insert overloads come from _Insert_base.
      using __base_type::insert;

      // SFINAE helpers: constrain the templated overloads below to
      // arguments from which value_type can be constructed.
      template<typename _Pair>
	using __is_cons = std::is_constructible<value_type, _Pair&&>;

      template<typename _Pair>
	using _IFcons = std::enable_if<__is_cons<_Pair>::value>;

      template<typename _Pair>
	using _IFconsp = typename _IFcons<_Pair>::type;

      // Insert by emplacing a value_type constructed from __v.
      template<typename _Pair, typename = _IFconsp<_Pair>>
	__ireturn_type
	insert(_Pair&& __v)
	{
	  __hashtable& __h = this->_M_conjure_hashtable();
	  return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
	}

      // As above, using __hint as a suggested location.
      template<typename _Pair, typename = _IFconsp<_Pair>>
	iterator
	insert(const_iterator __hint, _Pair&& __v)
	{
	  __hashtable& __h = this->_M_conjure_hashtable();
	  return __h._M_emplace(__hint, __unique_keys{},
				std::forward<_Pair>(__v));
	}
    };
1047
  // Detection alias: names the nested __has_load_factor typedef that a
  // rehash policy declares to advertise load-factor management.
  template<typename _Policy>
    using __has_load_factor = typename _Policy::__has_load_factor;
1050
1051 /**
1052 * Primary class template _Rehash_base.
1053 *
1054 * Give hashtable the max_load_factor functions and reserve iff the
1055 * rehash policy supports it.
1056 */
1057 template<typename _Key, typename _Value, typename _Alloc,
1058 typename _ExtractKey, typename _Equal,
1059 typename _Hash, typename _RangeHash, typename _Unused,
1060 typename _RehashPolicy, typename _Traits,
1061 typename =
1062 __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
1063 struct _Rehash_base;
1064
  /// Specialization when rehash policy doesn't provide load factor management.
  /// Deliberately empty: no max_load_factor or reserve members are exposed.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
			false_type /* Has load factor */>
    {
    };
1075
  /// Specialization when rehash policy provide load factor management.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
			true_type /* Has load factor */>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
				     _Equal, _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // Return the current maximum load factor, as stored in the
      // hashtable's rehash policy.
      float
      max_load_factor() const noexcept
      {
	const __hashtable* __this = static_cast<const __hashtable*>(this);
	return __this->__rehash_policy().max_load_factor();
      }

      // Set the maximum load factor by installing a policy constructed
      // from __z.
      void
      max_load_factor(float __z)
      {
	__hashtable* __this = static_cast<__hashtable*>(this);
	__this->__rehash_policy(_RehashPolicy(__z));
      }

      // Prepare for at least __n elements: rehash to the bucket count the
      // policy computes for __n.
      void
      reserve(std::size_t __n)
      {
	__hashtable* __this = static_cast<__hashtable*>(this);
	__this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
      }
    };
1110
1111 /**
1112 * Primary class template _Hashtable_ebo_helper.
1113 *
1114 * Helper class using EBO when it is not forbidden (the type is not
1115 * final) and when it is worth it (the type is empty.)
1116 */
1117 template<int _Nm, typename _Tp,
1118 bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
1119 struct _Hashtable_ebo_helper;
1120
1121 /// Specialization using EBO.
1122 template<int _Nm, typename _Tp>
1123 struct _Hashtable_ebo_helper<_Nm, _Tp, true>
1124 : private _Tp
1125 {
1126 _Hashtable_ebo_helper() = default;
1127
1128 template<typename _OtherTp>
1129 _Hashtable_ebo_helper(_OtherTp&& __tp)
1130 : _Tp(std::forward<_OtherTp>(__tp))
1131 { }
1132
1133 const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
1134 _Tp& _M_get() { return static_cast<_Tp&>(*this); }
1135 };
1136
1137 /// Specialization not using EBO.
1138 template<int _Nm, typename _Tp>
1139 struct _Hashtable_ebo_helper<_Nm, _Tp, false>
1140 {
1141 _Hashtable_ebo_helper() = default;
1142
1143 template<typename _OtherTp>
1144 _Hashtable_ebo_helper(_OtherTp&& __tp)
1145 : _M_tp(std::forward<_OtherTp>(__tp))
1146 { }
1147
1148 const _Tp& _M_cget() const { return _M_tp; }
1149 _Tp& _M_get() { return _M_tp; }
1150
1151 private:
1152 _Tp _M_tp;
1153 };
1154
1155 /**
1156 * Primary class template _Local_iterator_base.
1157 *
1158 * Base class for local iterators, used to iterate within a bucket
1159 * but not between buckets.
1160 */
1161 template<typename _Key, typename _Value, typename _ExtractKey,
1162 typename _Hash, typename _RangeHash, typename _Unused,
1163 bool __cache_hash_code>
1164 struct _Local_iterator_base;
1165
1166 /**
1167 * Primary class template _Hash_code_base.
1168 *
1169 * Encapsulates two policy issues that aren't quite orthogonal.
1170 * (1) the difference between using a ranged hash function and using
1171 * the combination of a hash function and a range-hashing function.
1172 * In the former case we don't have such things as hash codes, so
1173 * we have a dummy type as placeholder.
1174 * (2) Whether or not we cache hash codes. Caching hash codes is
1175 * meaningless if we have a ranged hash function.
1176 *
1177 * We also put the key extraction objects here, for convenience.
1178 * Each specialization derives from one or more of the template
1179 * parameters to benefit from Ebo. This is important as this type
1180 * is inherited in some cases by the _Local_iterator_base type used
1181 * to implement local_iterator and const_local_iterator. As with
1182 * any iterator type we prefer to make it as small as possible.
1183 */
1184 template<typename _Key, typename _Value, typename _ExtractKey,
1185 typename _Hash, typename _RangeHash, typename _Unused,
1186 bool __cache_hash_code>
1187 struct _Hash_code_base
1188 : private _Hashtable_ebo_helper<1, _Hash>
1189 {
1190 private:
1191 using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;
1192
1193 // Gives the local iterator implementation access to _M_bucket_index().
1194 friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1195 _Hash, _RangeHash, _Unused, false>;
1196
1197 public:
1198 typedef _Hash hasher;
1199
1200 hasher
1201 hash_function() const
1202 { return _M_hash(); }
1203
1204 protected:
1205 typedef std::size_t __hash_code;
1206
1207 // We need the default constructor for the local iterators and _Hashtable
1208 // default constructor.
1209 _Hash_code_base() = default;
1210 _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }
1211
1212 __hash_code
1213 _M_hash_code(const _Key& __k) const
1214 {
1215 static_assert(__is_invocable<const _Hash&, const _Key&>{},
1216 "hash function must be invocable with an argument of key type");
1217 return _M_hash()(__k);
1218 }
1219
1220 std::size_t
1221 _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
1222 { return _RangeHash{}(__c, __bkt_count); }
1223
1224 std::size_t
1225 _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
1226 std::size_t __bkt_count) const
1227 noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
1228 && noexcept(declval<const _RangeHash&>()((__hash_code)0,
1229 (std::size_t)0)) )
1230 {
1231 return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
1232 __bkt_count);
1233 }
1234
1235 std::size_t
1236 _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
1237 std::size_t __bkt_count) const
1238 noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
1239 (std::size_t)0)) )
1240 { return _RangeHash{}(__n._M_hash_code, __bkt_count); }
1241
1242 void
1243 _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
1244 { }
1245
1246 void
1247 _M_copy_code(_Hash_node_code_cache<false>&,
1248 const _Hash_node_code_cache<false>&) const
1249 { }
1250
1251 void
1252 _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
1253 { __n._M_hash_code = __c; }
1254
1255 void
1256 _M_copy_code(_Hash_node_code_cache<true>& __to,
1257 const _Hash_node_code_cache<true>& __from) const
1258 { __to._M_hash_code = __from._M_hash_code; }
1259
1260 void
1261 _M_swap(_Hash_code_base& __x)
1262 { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }
1263
1264 const _Hash&
1265 _M_hash() const { return __ebo_hash::_M_cget(); }
1266 };
1267
  /// Partial specialization used when nodes contain a cached hash code.
  /// The node's bucket can be recomputed from the cached code, so no copy
  /// of the hash functor is needed in the iterator.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    struct _Local_iterator_base<_Key, _Value, _ExtractKey,
				_Hash, _RangeHash, _Unused, true>
    : public _Node_iterator_base<_Value, true>
    {
    protected:
      using __base_node_iter = _Node_iterator_base<_Value, true>;
      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, true>;

      _Local_iterator_base() = default;

      _Local_iterator_base(const __hash_code_base&,
			   _Hash_node<_Value, true>* __p,
			   std::size_t __bkt, std::size_t __bkt_count)
      : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
      { }

      // Advance to the next node, becoming the past-the-end iterator
      // (null) when the next node belongs to a different bucket.
      void
      _M_incr()
      {
	__base_node_iter::_M_incr();
	if (this->_M_cur)
	  {
	    std::size_t __bkt
	      = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
	    if (__bkt != _M_bucket)
	      this->_M_cur = nullptr;
	  }
      }

      std::size_t _M_bucket;
      std::size_t _M_bucket_count;

    public:
      std::size_t
      _M_get_bucket() const { return _M_bucket; }  // for debug mode
    };
1307
  // Uninitialized storage for a _Hash_code_base.
  // This type is DefaultConstructible and Assignable even if the
  // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
  // can be DefaultConstructible and Assignable.
  template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
    struct _Hash_code_storage
    {
      // Raw, suitably-aligned storage; the owner is responsible for
      // constructing/destroying the _Tp object in it.
      __gnu_cxx::__aligned_buffer<_Tp> _M_storage;

      _Tp*
      _M_h() { return _M_storage._M_ptr(); }

      const _Tp*
      _M_h() const { return _M_storage._M_ptr(); }
    };
1323
  // Empty partial specialization for empty _Hash_code_base types: no
  // buffer is needed at all.
  template<typename _Tp>
    struct _Hash_code_storage<_Tp, true>
    {
      static_assert( std::is_empty<_Tp>::value, "Type must be empty" );

      // As _Tp is an empty type there will be no bytes written/read through
      // the cast pointer, so no strict-aliasing violation.
      _Tp*
      _M_h() { return reinterpret_cast<_Tp*>(this); }

      const _Tp*
      _M_h() const { return reinterpret_cast<const _Tp*>(this); }
    };
1338
  // Storage for the non-caching _Hash_code_base carried by the local
  // iterator specialization below.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    using __hash_code_for_local_iter
      = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
					   _Hash, _RangeHash, _Unused, false>>;
1344
1345 // Partial specialization used when hash codes are not cached
1346 template<typename _Key, typename _Value, typename _ExtractKey,
1347 typename _Hash, typename _RangeHash, typename _Unused>
1348 struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1349 _Hash, _RangeHash, _Unused, false>
1350 : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1351 _Unused>
1352 , _Node_iterator_base<_Value, false>
1353 {
1354 protected:
1355 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1356 _Hash, _RangeHash, _Unused, false>;
1357 using __node_iter_base = _Node_iterator_base<_Value, false>;
1358
1359 _Local_iterator_base() : _M_bucket_count(-1) { }
1360
1361 _Local_iterator_base(const __hash_code_base& __base,
1362 _Hash_node<_Value, false>* __p,
1363 std::size_t __bkt, std::size_t __bkt_count)
1364 : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1365 { _M_init(__base); }
1366
1367 ~_Local_iterator_base()
1368 {
1369 if (_M_bucket_count != size_t(-1))
1370 _M_destroy();
1371 }
1372
1373 _Local_iterator_base(const _Local_iterator_base& __iter)
1374 : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
1375 , _M_bucket_count(__iter._M_bucket_count)
1376 {
1377 if (_M_bucket_count != size_t(-1))
1378 _M_init(*__iter._M_h());
1379 }
1380
1381 _Local_iterator_base&
1382 operator=(const _Local_iterator_base& __iter)
1383 {
1384 if (_M_bucket_count != -1)
1385 _M_destroy();
1386 this->_M_cur = __iter._M_cur;
1387 _M_bucket = __iter._M_bucket;
1388 _M_bucket_count = __iter._M_bucket_count;
1389 if (_M_bucket_count != -1)
1390 _M_init(*__iter._M_h());
1391 return *this;
1392 }
1393
1394 void
1395 _M_incr()
1396 {
1397 __node_iter_base::_M_incr();
1398 if (this->_M_cur)
1399 {
1400 std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
1401 _M_bucket_count);
1402 if (__bkt != _M_bucket)
1403 this->_M_cur = nullptr;
1404 }
1405 }
1406
1407 std::size_t _M_bucket;
1408 std::size_t _M_bucket_count;
1409
1410 void
1411 _M_init(const __hash_code_base& __base)
1412 { ::new(this->_M_h()) __hash_code_base(__base); }
1413
1414 void
1415 _M_destroy() { this->_M_h()->~__hash_code_base(); }
1416
1417 public:
1418 std::size_t
1419 _M_get_bucket() const { return _M_bucket; } // for debug mode
1420 };
1421
  /// local iterators: forward iterators restricted to a single bucket.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __constant_iterators, bool __cache>
    struct _Local_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      typedef _Value value_type;
      // Const pointer/reference when the container has constant iterators.
      typedef typename std::conditional<__constant_iterators,
					const value_type*, value_type*>::type
						       pointer;
      typedef typename std::conditional<__constant_iterators,
					const value_type&, value_type&>::type
						       reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Local_iterator() = default;

      _Local_iterator(const __hash_code_base& __base,
		      _Hash_node<_Value, __cache>* __n,
		      std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      _Local_iterator
      operator++(int)
      {
	_Local_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
1477
  /// local const_iterators: like _Local_iterator but always const access,
  /// and implicitly convertible from the mutable local iterator.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __constant_iterators, bool __cache>
    struct _Local_const_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      typedef _Value value_type;
      typedef const value_type* pointer;
      typedef const value_type& reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Local_const_iterator() = default;

      _Local_const_iterator(const __hash_code_base& __base,
			    _Hash_node<_Value, __cache>* __n,
			    std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      // Converting constructor from the non-const local iterator.
      _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
						  _Hash, _RangeHash, _Unused,
						  __constant_iterators,
						  __cache>& __x)
      : __base_type(__x)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_const_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      _Local_const_iterator
      operator++(int)
      {
	_Local_const_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
1536
1537 /**
1538 * Primary class template _Hashtable_base.
1539 *
1540 * Helper class adding management of _Equal functor to
1541 * _Hash_code_base type.
1542 *
1543 * Base class templates are:
1544 * - __detail::_Hash_code_base
1545 * - __detail::_Hashtable_ebo_helper
1546 */
1547 template<typename _Key, typename _Value, typename _ExtractKey,
1548 typename _Equal, typename _Hash, typename _RangeHash,
1549 typename _Unused, typename _Traits>
1550 struct _Hashtable_base
1551 : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1552 _Unused, _Traits::__hash_cached::value>,
1553 private _Hashtable_ebo_helper<0, _Equal>
1554 {
1555 public:
1556 typedef _Key key_type;
1557 typedef _Value value_type;
1558 typedef _Equal key_equal;
1559 typedef std::size_t size_type;
1560 typedef std::ptrdiff_t difference_type;
1561
1562 using __traits_type = _Traits;
1563 using __hash_cached = typename __traits_type::__hash_cached;
1564
1565 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1566 _Hash, _RangeHash, _Unused,
1567 __hash_cached::value>;
1568
1569 using __hash_code = typename __hash_code_base::__hash_code;
1570
1571 private:
1572 using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;
1573
1574 static bool
1575 _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
1576 { return true; }
1577
1578 static bool
1579 _S_node_equals(const _Hash_node_code_cache<false>&,
1580 const _Hash_node_code_cache<false>&)
1581 { return true; }
1582
1583 static bool
1584 _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
1585 { return __c == __n._M_hash_code; }
1586
1587 static bool
1588 _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
1589 const _Hash_node_code_cache<true>& __rhn)
1590 { return __lhn._M_hash_code == __rhn._M_hash_code; }
1591
1592 protected:
1593 _Hashtable_base() = default;
1594 _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
1595 : __hash_code_base(__hash), _EqualEBO(__eq)
1596 { }
1597
1598 bool
1599 _M_equals(const _Key& __k, __hash_code __c,
1600 const _Hash_node_value<_Value, __hash_cached::value>& __n) const
1601 {
1602 static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
1603 "key equality predicate must be invocable with two arguments of "
1604 "key type");
1605 return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
1606 }
1607
1608 bool
1609 _M_node_equals(
1610 const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
1611 const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
1612 {
1613 return _S_node_equals(__lhn, __rhn)
1614 && _M_eq()(_ExtractKey{}(__lhn._M_v()), _ExtractKey{}(__rhn._M_v()));
1615 }
1616
1617 void
1618 _M_swap(_Hashtable_base& __x)
1619 {
1620 __hash_code_base::_M_swap(__x);
1621 std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
1622 }
1623
1624 const _Equal&
1625 _M_eq() const { return _EqualEBO::_M_cget(); }
1626 };
1627
1628 /**
1629 * Primary class template _Equality.
1630 *
1631 * This is for implementing equality comparison for unordered
1632 * containers, per N3068, by John Lakos and Pablo Halpern.
1633 * Algorithmically, we follow closely the reference implementations
1634 * therein.
1635 */
1636 template<typename _Key, typename _Value, typename _Alloc,
1637 typename _ExtractKey, typename _Equal,
1638 typename _Hash, typename _RangeHash, typename _Unused,
1639 typename _RehashPolicy, typename _Traits,
1640 bool _Unique_keys = _Traits::__unique_keys::value>
1641 struct _Equality;
1642
  /// unordered_map and unordered_set specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // Return true if *this and the argument hold equal elements,
      // regardless of order (defined out-of-line below).
      bool
      _M_equal(const __hashtable&) const;
    };
1658
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      // For each element here, look for an equal element in the bucket
      // of __other that the element's key hashes to.
      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
	{
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __prev_n = __other._M_buckets[__ybkt];
	  if (!__prev_n)
	    return false;	// Bucket is empty in __other.

	  // The bucket entry points to the node *before* the bucket's
	  // first node, so start scanning at its successor.
	  for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
	       __n = __n->_M_next())
	    {
	      if (__n->_M_v() == *__itx)
		break;

	      // Give up once the chain ends or leaves bucket __ybkt.
	      if (!__n->_M_nxt
		  || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
		return false;
	    }
	}

      return true;
    }
1694
  /// unordered_multiset and unordered_multimap specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // Return true if *this and the argument hold permutations of the
      // same multiset of elements (defined out-of-line below).
      bool
      _M_equal(const __hashtable&) const;
    };
1710
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      // Process elements one run of equivalent keys at a time; each run
      // must be a permutation of the matching run in __other.
      for (auto __itx = __this->begin(); __itx != __this->end();)
	{
	  // Measure the run [__itx, __itx_end) of equivalent keys.
	  std::size_t __x_count = 1;
	  auto __itx_end = __itx;
	  for (++__itx_end; __itx_end != __this->end()
		 && __this->key_eq()(_ExtractKey{}(*__itx),
				     _ExtractKey{}(*__itx_end));
	       ++__itx_end)
	    ++__x_count;

	  // Locate the bucket of __other that the run's key hashes to.
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __y_prev_n = __other._M_buckets[__ybkt];
	  if (!__y_prev_n)
	    return false;	// Bucket is empty in __other.

	  // Find the first node in that bucket with an equivalent key.
	  __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
	  for (;;)
	    {
	      if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
				   _ExtractKey{}(*__itx)))
		break;

	      // Skip past the rest of the current candidate's run (nodes
	      // that compare node-equal to it) before trying the next key.
	      auto __y_ref_n = __y_n;
	      for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
		if (!__other._M_node_equals(*__y_ref_n, *__y_n))
		  break;

	      if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
		return false;
	    }

	  // Check that __other has at least __x_count elements from __ity.
	  typename __hashtable::const_iterator __ity(__y_n);
	  for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
	    if (--__x_count == 0)
	      break;

	  if (__x_count != 0)
	    return false;

	  if (!std::is_permutation(__itx, __itx_end, __ity))
	    return false;

	  __itx = __itx_end;
	}
      return true;
    }
1771
1772 /**
1773 * This type deals with all allocation and keeps an allocator instance
1774 * through inheritance to benefit from EBO when possible.
1775 */
1776 template<typename _NodeAlloc>
1777 struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
1778 {
1779 private:
1780 using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;
1781 public:
1782 using __node_type = typename _NodeAlloc::value_type;
1783 using __node_alloc_type = _NodeAlloc;
1784 // Use __gnu_cxx to benefit from _S_always_equal and al.
1785 using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;
1786
1787 using __value_alloc_traits = typename __node_alloc_traits::template
1788 rebind_traits<typename __node_type::value_type>;
1789
1790 using __node_ptr = __node_type*;
1791 using __node_base = _Hash_node_base;
1792 using __node_base_ptr = __node_base*;
1793 using __buckets_alloc_type =
1794 __alloc_rebind<__node_alloc_type, __node_base_ptr>;
1795 using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
1796 using __buckets_ptr = __node_base_ptr*;
1797
1798 _Hashtable_alloc() = default;
1799 _Hashtable_alloc(const _Hashtable_alloc&) = default;
1800 _Hashtable_alloc(_Hashtable_alloc&&) = default;
1801
1802 template<typename _Alloc>
1803 _Hashtable_alloc(_Alloc&& __a)
1804 : __ebo_node_alloc(std::forward<_Alloc>(__a))
1805 { }
1806
1807 __node_alloc_type&
1808 _M_node_allocator()
1809 { return __ebo_node_alloc::_M_get(); }
1810
1811 const __node_alloc_type&
1812 _M_node_allocator() const
1813 { return __ebo_node_alloc::_M_cget(); }
1814
1815 // Allocate a node and construct an element within it.
1816 template<typename... _Args>
1817 __node_ptr
1818 _M_allocate_node(_Args&&... __args);
1819
1820 // Destroy the element within a node and deallocate the node.
1821 void
1822 _M_deallocate_node(__node_ptr __n);
1823
1824 // Deallocate a node.
1825 void
1826 _M_deallocate_node_ptr(__node_ptr __n);
1827
1828 // Deallocate the linked list of nodes pointed to by __n.
1829 // The elements within the nodes are destroyed.
1830 void
1831 _M_deallocate_nodes(__node_ptr __n);
1832
1833 __buckets_ptr
1834 _M_allocate_buckets(std::size_t __bkt_count);
1835
1836 void
1837 _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
1838 };
1839
  // Definitions of class template _Hashtable_alloc's out-of-line member
  // functions.

  // Allocate node storage, then construct in two steps: the node itself
  // via placement-new, and the stored value via the node allocator's
  // construct (so allocator-aware construction is honoured).
  template<typename _NodeAlloc>
    template<typename... _Args>
      auto
      _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
      -> __node_ptr
      {
	auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
	__node_ptr __n = std::__to_address(__nptr);
	__try
	  {
	    ::new ((void*)__n) __node_type;
	    __node_alloc_traits::construct(_M_node_allocator(),
					   __n->_M_valptr(),
					   std::forward<_Args>(__args)...);
	    return __n;
	  }
	__catch(...)
	  {
	    // Value construction failed: give the memory back and rethrow.
	    __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
	    __throw_exception_again;
	  }
      }
1864
  // Destroy the stored value through the allocator, then release the node.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
    {
      __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
      _M_deallocate_node_ptr(__n);
    }
1872
  // Run the node's own destructor (the value has already been destroyed)
  // and return the memory to the node allocator, converting the raw
  // pointer back to the allocator's pointer type first.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
    {
      typedef typename __node_alloc_traits::pointer _Ptr;
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
      __n->~__node_type();
      __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
    }
1882
1883 template<typename _NodeAlloc>
1884 void
1885 _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
1886 {
1887 while (__n)
1888 {
1889 __node_ptr __tmp = __n;
1890 __n = __n->_M_next();
1891 _M_deallocate_node(__tmp);
1892 }
1893 }
1894
  // Allocate the bucket pointer array and zero it, so that every bucket
  // starts out empty (null).
  template<typename _NodeAlloc>
    auto
    _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
    -> __buckets_ptr
    {
      __buckets_alloc_type __alloc(_M_node_allocator());

      auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
      __buckets_ptr __p = std::__to_address(__ptr);
      __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
      return __p;
    }
1907
  // Return the bucket pointer array to the allocator, converting the raw
  // pointer back to the allocator's pointer type first.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::
    _M_deallocate_buckets(__buckets_ptr __bkts,
			  std::size_t __bkt_count)
    {
      typedef typename __buckets_alloc_traits::pointer _Ptr;
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
      __buckets_alloc_type __alloc(_M_node_allocator());
      __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
    }
1919
 ///@} hashtable-detail
1921 } // namespace __detail
1922 _GLIBCXX_END_NAMESPACE_VERSION
1923 } // namespace std
1924
1925 #endif // _HASHTABLE_POLICY_H