libstdc++
hashtable_policy.h
1 // Internal policy header for unordered_set and unordered_map -*- C++ -*-
2 
3 // Copyright (C) 2010, 2011 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file bits/hashtable_policy.h
26  * This is an internal header file, included by other library headers.
27  * Do not attempt to use it directly.
28  * @headername{unordered_map,unordered_set}
29  */
30 
31 #ifndef _HASHTABLE_POLICY_H
32 #define _HASHTABLE_POLICY_H 1
33 
34 namespace std _GLIBCXX_VISIBILITY(default)
35 {
36 namespace __detail
37 {
38 _GLIBCXX_BEGIN_NAMESPACE_VERSION
39 
40  // Helper function: return distance(first, last) for forward
41  // iterators, or 0 for input iterators.
42  template<class _Iterator>
43  inline typename std::iterator_traits<_Iterator>::difference_type
 44  __distance_fw(_Iterator __first, _Iterator __last,
 45  std::input_iterator_tag)
 46  { return 0; }
47 
48  template<class _Iterator>
49  inline typename std::iterator_traits<_Iterator>::difference_type
 50  __distance_fw(_Iterator __first, _Iterator __last,
 51  std::forward_iterator_tag)
 52  { return std::distance(__first, __last); }
53 
54  template<class _Iterator>
55  inline typename std::iterator_traits<_Iterator>::difference_type
56  __distance_fw(_Iterator __first, _Iterator __last)
57  {
58  typedef typename std::iterator_traits<_Iterator>::iterator_category _Tag;
59  return __distance_fw(__first, __last, _Tag());
60  }
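// An illustrative sketch, not part of the header itself: what the tag
// dispatch above does in practice, written as it might appear in a separate
// user translation unit.  __distance_fw is internal to libstdc++ (reached
// here only because <unordered_map> includes this header); the name
// "example_distance_fw_demo" is hypothetical.
#include <unordered_map>   // pulls in bits/hashtable_policy.h
#include <iterator>
#include <sstream>
#include <vector>

inline void example_distance_fw_demo()
{
  std::vector<int> __v(3, 42);
  // forward (here random-access) iterators: the real distance, 3
  std::__detail::__distance_fw(__v.begin(), __v.end());

  std::istringstream __in("1 2 3");
  std::istream_iterator<int> __b(__in), __e;
  // input iterators: 0 -- the single-pass range is deliberately not walked,
  // so it is still available for the caller to consume afterwards
  std::__detail::__distance_fw(__b, __e);
}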
61 
62  // Auxiliary types used for all instantiations of _Hashtable: nodes
63  // and iterators.
64 
65  // Nodes, used to wrap elements stored in the hash table. A policy
66  // template parameter of class template _Hashtable controls whether
67  // nodes also store a hash code. In some cases (e.g. strings) this
68  // may be a performance win.
69  template<typename _Value, bool __cache_hash_code>
70  struct _Hash_node;
71 
72  template<typename _Value>
73  struct _Hash_node<_Value, true>
74  {
75  _Value _M_v;
76  std::size_t _M_hash_code;
77  _Hash_node* _M_next;
78 
79  template<typename... _Args>
80  _Hash_node(_Args&&... __args)
81  : _M_v(std::forward<_Args>(__args)...),
82  _M_hash_code(), _M_next() { }
83  };
84 
85  template<typename _Value>
86  struct _Hash_node<_Value, false>
87  {
88  _Value _M_v;
89  _Hash_node* _M_next;
90 
91  template<typename... _Args>
92  _Hash_node(_Args&&... __args)
93  : _M_v(std::forward<_Args>(__args)...),
94  _M_next() { }
95  };
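// An illustrative sketch, not part of the header itself: why caching the
// hash code in the node can be a win.  With __cache_hash_code == true each
// node carries its value's hash, so rehashing (and rejecting non-matching
// keys during lookup) can reuse it instead of re-hashing a potentially
// expensive key such as a long std::string.  "example_rehash_bucket" is a
// hypothetical helper, not a libstdc++ API.
#include <cstddef>
#include <functional>
#include <string>

inline std::size_t
example_rehash_bucket(const std::string& __key,
                      std::size_t __cached_code,
                      std::size_t __new_bkt_count,
                      bool __have_cached_code)
{
  if (__have_cached_code)
    return __cached_code % __new_bkt_count;                  // no re-hash needed
  return std::hash<std::string>()(__key) % __new_bkt_count;  // hash the whole key again
}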
96 
97  // Local iterators, used to iterate within a bucket but not between
98  // buckets.
99  template<typename _Value, bool __cache>
100  struct _Node_iterator_base
101  {
102  _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
103  : _M_cur(__p) { }
104 
105  void
106  _M_incr()
107  { _M_cur = _M_cur->_M_next; }
108 
109  _Hash_node<_Value, __cache>* _M_cur;
110  };
111 
112  template<typename _Value, bool __cache>
113  inline bool
114  operator==(const _Node_iterator_base<_Value, __cache>& __x,
115  const _Node_iterator_base<_Value, __cache>& __y)
116  { return __x._M_cur == __y._M_cur; }
117 
118  template<typename _Value, bool __cache>
119  inline bool
120  operator!=(const _Node_iterator_base<_Value, __cache>& __x,
121  const _Node_iterator_base<_Value, __cache>& __y)
122  { return __x._M_cur != __y._M_cur; }
123 
124  template<typename _Value, bool __constant_iterators, bool __cache>
125  struct _Node_iterator
126  : public _Node_iterator_base<_Value, __cache>
127  {
128  typedef _Value value_type;
129  typedef typename std::conditional<__constant_iterators,
130  const _Value*, _Value*>::type
131  pointer;
132  typedef typename std::conditional<__constant_iterators,
133  const _Value&, _Value&>::type
134  reference;
135  typedef std::ptrdiff_t difference_type;
136  typedef std::forward_iterator_tag iterator_category;
137 
138  _Node_iterator()
139  : _Node_iterator_base<_Value, __cache>(0) { }
140 
141  explicit
142  _Node_iterator(_Hash_node<_Value, __cache>* __p)
143  : _Node_iterator_base<_Value, __cache>(__p) { }
144 
145  reference
146  operator*() const
147  { return this->_M_cur->_M_v; }
148 
149  pointer
150  operator->() const
151  { return std::__addressof(this->_M_cur->_M_v); }
152 
153  _Node_iterator&
154  operator++()
155  {
156  this->_M_incr();
157  return *this;
158  }
159 
160  _Node_iterator
161  operator++(int)
162  {
163  _Node_iterator __tmp(*this);
164  this->_M_incr();
165  return __tmp;
166  }
167  };
168 
169  template<typename _Value, bool __constant_iterators, bool __cache>
170  struct _Node_const_iterator
171  : public _Node_iterator_base<_Value, __cache>
172  {
173  typedef _Value value_type;
174  typedef const _Value* pointer;
175  typedef const _Value& reference;
176  typedef std::ptrdiff_t difference_type;
177  typedef std::forward_iterator_tag iterator_category;
178 
179  _Node_const_iterator()
180  : _Node_iterator_base<_Value, __cache>(0) { }
181 
182  explicit
183  _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
184  : _Node_iterator_base<_Value, __cache>(__p) { }
185 
186  _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
187  __cache>& __x)
188  : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }
189 
190  reference
191  operator*() const
192  { return this->_M_cur->_M_v; }
193 
194  pointer
195  operator->() const
196  { return std::__addressof(this->_M_cur->_M_v); }
197 
198  _Node_const_iterator&
199  operator++()
200  {
201  this->_M_incr();
202  return *this;
203  }
204 
205  _Node_const_iterator
206  operator++(int)
207  {
208  _Node_const_iterator __tmp(*this);
209  this->_M_incr();
210  return __tmp;
211  }
212  };
213 
214  template<typename _Value, bool __cache>
215  struct _Hashtable_iterator_base
216  {
217  _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
218  _Hash_node<_Value, __cache>** __bucket)
219  : _M_cur_node(__node), _M_cur_bucket(__bucket) { }
220 
221  void
222  _M_incr()
223  {
224  _M_cur_node = _M_cur_node->_M_next;
225  if (!_M_cur_node)
226  _M_incr_bucket();
227  }
228 
229  void
230  _M_incr_bucket();
231 
232  _Hash_node<_Value, __cache>* _M_cur_node;
233  _Hash_node<_Value, __cache>** _M_cur_bucket;
234  };
235 
236  // Global iterators, used for arbitrary iteration within a hash
237  // table. Larger and more expensive than local iterators.
238  template<typename _Value, bool __cache>
239  void
240  _Hashtable_iterator_base<_Value, __cache>::
241  _M_incr_bucket()
242  {
243  ++_M_cur_bucket;
244 
245  // This loop requires the bucket array to have a non-null sentinel.
246  while (!*_M_cur_bucket)
247  ++_M_cur_bucket;
248  _M_cur_node = *_M_cur_bucket;
249  }
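// An illustrative sketch, not part of the header itself: the sentinel the
// loop above relies on.  The bucket array used by the accompanying
// _Hashtable is allocated with one extra slot holding an arbitrary non-null
// pointer, so the scan for the next non-empty bucket always terminates
// without a bounds check.  This is a minimal, hypothetical model of that
// invariant, not the real allocation code.
#include <cstddef>

inline void example_bucket_sentinel()
{
  struct _Node { _Node* _M_next; } __sentinel = { 0 };
  const std::size_t __n = 4;
  _Node* __buckets[__n + 1] = { 0, 0, 0, 0, &__sentinel };  // last slot is never null
  _Node** __cur = __buckets;
  while (!*__cur)    // same loop shape as _M_incr_bucket above
    ++__cur;         // stops at the sentinel even if every bucket is empty
  (void)__cur;
}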
250 
251  template<typename _Value, bool __cache>
252  inline bool
253  operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
254  const _Hashtable_iterator_base<_Value, __cache>& __y)
255  { return __x._M_cur_node == __y._M_cur_node; }
256 
257  template<typename _Value, bool __cache>
258  inline bool
259  operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
260  const _Hashtable_iterator_base<_Value, __cache>& __y)
261  { return __x._M_cur_node != __y._M_cur_node; }
262 
263  template<typename _Value, bool __constant_iterators, bool __cache>
264  struct _Hashtable_iterator
265  : public _Hashtable_iterator_base<_Value, __cache>
266  {
267  typedef _Value value_type;
268  typedef typename std::conditional<__constant_iterators,
269  const _Value*, _Value*>::type
270  pointer;
271  typedef typename std::conditional<__constant_iterators,
272  const _Value&, _Value&>::type
273  reference;
274  typedef std::ptrdiff_t difference_type;
275  typedef std::forward_iterator_tag iterator_category;
276 
277  _Hashtable_iterator()
278  : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }
279 
280  _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
281  _Hash_node<_Value, __cache>** __b)
282  : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }
283 
284  explicit
285  _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
286  : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }
287 
288  reference
289  operator*() const
290  { return this->_M_cur_node->_M_v; }
291 
292  pointer
293  operator->() const
294  { return std::__addressof(this->_M_cur_node->_M_v); }
295 
296  _Hashtable_iterator&
297  operator++()
298  {
299  this->_M_incr();
300  return *this;
301  }
302 
303  _Hashtable_iterator
304  operator++(int)
305  {
306  _Hashtable_iterator __tmp(*this);
307  this->_M_incr();
308  return __tmp;
309  }
310  };
311 
312  template<typename _Value, bool __constant_iterators, bool __cache>
313  struct _Hashtable_const_iterator
314  : public _Hashtable_iterator_base<_Value, __cache>
315  {
316  typedef _Value value_type;
317  typedef const _Value* pointer;
318  typedef const _Value& reference;
319  typedef std::ptrdiff_t difference_type;
320  typedef std::forward_iterator_tag iterator_category;
321 
322  _Hashtable_const_iterator()
323  : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }
324 
325  _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
326  _Hash_node<_Value, __cache>** __b)
327  : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }
328 
329  explicit
330  _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
331  : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }
332 
333  _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
334  __constant_iterators, __cache>& __x)
335  : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
336  __x._M_cur_bucket) { }
337 
338  reference
339  operator*() const
340  { return this->_M_cur_node->_M_v; }
341 
342  pointer
343  operator->() const
344  { return std::__addressof(this->_M_cur_node->_M_v); }
345 
346  _Hashtable_const_iterator&
347  operator++()
348  {
349  this->_M_incr();
350  return *this;
351  }
352 
353  _Hashtable_const_iterator
354  operator++(int)
355  {
356  _Hashtable_const_iterator __tmp(*this);
357  this->_M_incr();
358  return __tmp;
359  }
360  };
361 
362 
363  // Many of class template _Hashtable's template parameters are policy
364  // classes. These are defaults for the policies.
365 
366  // Default range hashing function: use division to fold a large number
367  // into the range [0, N).
368  struct _Mod_range_hashing
369  {
370  typedef std::size_t first_argument_type;
371  typedef std::size_t second_argument_type;
372  typedef std::size_t result_type;
373 
374  result_type
375  operator()(first_argument_type __num, second_argument_type __den) const
376  { return __num % __den; }
377  };
378 
379  // Default ranged hash function H. In principle it should be a
380  // function object composed from objects of type H1 and H2 such that
381  // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
382  // h1 and h2. So instead we'll just use a tag to tell class template
383  // hashtable to do that composition.
384  struct _Default_ranged_hash { };
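// An illustrative sketch, not part of the header itself: the composition the
// tag above stands for.  With the default policies, H1 is std::hash<Key> and
// H2 is _Mod_range_hashing, so the bucket index of a key k in a table of N
// buckets is h(k, N) = h2(h1(k), N) = std::hash<Key>()(k) % N.  The name
// "example_bucket_index" is hypothetical.
#include <cstddef>
#include <functional>
#include <string>

inline std::size_t
example_bucket_index(const std::string& __k, std::size_t __n_buckets)
{
  std::size_t __h1 = std::hash<std::string>()(__k);  // H1: hash the key
  return __h1 % __n_buckets;                         // H2: fold into [0, N)
}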
385 
386  // Default value for rehash policy. Bucket size is (usually) the
387  // smallest prime that keeps the load factor small enough.
388  struct _Prime_rehash_policy
389  {
390  _Prime_rehash_policy(float __z = 1.0)
391  : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }
392 
393  float
394  max_load_factor() const
395  { return _M_max_load_factor; }
396 
397  // Return a bucket size no smaller than n.
398  std::size_t
399  _M_next_bkt(std::size_t __n) const;
400 
401  // Return a bucket count appropriate for n elements
402  std::size_t
403  _M_bkt_for_elements(std::size_t __n) const;
404 
405  // __n_bkt is current bucket count, __n_elt is current element count,
406  // and __n_ins is number of elements to be inserted. Do we need to
407  // increase bucket count? If so, return make_pair(true, n), where n
408  // is the new bucket count. If not, return make_pair(false, 0).
 409  std::pair<bool, std::size_t>
 410  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
411  std::size_t __n_ins) const;
412 
413  enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };
414 
415  float _M_max_load_factor;
416  float _M_growth_factor;
417  mutable std::size_t _M_next_resize;
418  };
419 
420  extern const unsigned long __prime_list[];
421 
422  // XXX This is a hack. There's no good reason for any of
423  // _Prime_rehash_policy's member functions to be inline.
424 
425  // Return a prime no smaller than n.
426  inline std::size_t
427  _Prime_rehash_policy::
428  _M_next_bkt(std::size_t __n) const
429  {
430  const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
431  + _S_n_primes, __n);
432  _M_next_resize =
433  static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
434  return *__p;
435  }
436 
437  // Return the smallest prime p such that alpha p >= n, where alpha
438  // is the load factor.
439  inline std::size_t
440  _Prime_rehash_policy::
441  _M_bkt_for_elements(std::size_t __n) const
442  {
443  const float __min_bkts = __n / _M_max_load_factor;
444  const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
445  + _S_n_primes, __min_bkts);
446  _M_next_resize =
447  static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
448  return *__p;
449  }
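// An illustrative sketch, not part of the header itself: the arithmetic of
// _M_bkt_for_elements on concrete numbers.  With max_load_factor alpha = 1.0
// and __n = 100 expected elements, __min_bkts = 100 and lower_bound selects
// the first prime not smaller than 100.  The short prime table below is
// hypothetical; the real __prime_list is much longer.
#include <algorithm>
#include <cstddef>

inline std::size_t
example_bkt_for_elements(std::size_t __n, float __max_load_factor)
{
  static const unsigned long __example_primes[] =
    { 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61,
      67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127 };
  const std::size_t __n_primes
    = sizeof(__example_primes) / sizeof(__example_primes[0]);
  const float __min_bkts = __n / __max_load_factor;
  const unsigned long* __p
    = std::lower_bound(__example_primes, __example_primes + __n_primes,
                       __min_bkts);
  return *__p;   // e.g. __n == 100, alpha == 1.0  ->  101
}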
450 
451  // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
452  // If p > __n_bkt, return make_pair(true, p); otherwise return
453  // make_pair(false, 0). In principle this isn't very different from
454  // _M_bkt_for_elements.
455 
456  // The only tricky part is that we're caching the element count at
457  // which we need to rehash, so we don't have to do a floating-point
458  // multiply for every insertion.
459 
 460  inline std::pair<bool, std::size_t>
 461  _Prime_rehash_policy::
462  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
463  std::size_t __n_ins) const
464  {
465  if (__n_elt + __n_ins > _M_next_resize)
466  {
467  float __min_bkts = ((float(__n_ins) + float(__n_elt))
468  / _M_max_load_factor);
469  if (__min_bkts > __n_bkt)
470  {
471  __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
472  const unsigned long* __p =
473  std::lower_bound(__prime_list, __prime_list + _S_n_primes,
474  __min_bkts);
475  _M_next_resize = static_cast<std::size_t>
476  (__builtin_ceil(*__p * _M_max_load_factor));
477  return std::make_pair(true, *__p);
478  }
479  else
480  {
481  _M_next_resize = static_cast<std::size_t>
482  (__builtin_ceil(__n_bkt * _M_max_load_factor));
483  return std::make_pair(false, 0);
484  }
485  }
486  else
487  return std::make_pair(false, 0);
488  }
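// An illustrative sketch, not part of the header itself: the effect of the
// _M_next_resize cache used above.  On the fast path an insertion costs only
// an integer comparison; the float division and prime search run only when
// the element count actually crosses the cached threshold.  Concrete numbers
// below; "example_need_rehash_demo" is a hypothetical name.
#include <unordered_map>   // makes _Prime_rehash_policy visible here
#include <cstddef>

inline void example_need_rehash_demo()
{
  std::__detail::_Prime_rehash_policy __pol(1.0f);  // alpha = 1.0
  std::size_t __n_bkt = __pol._M_next_bkt(10);      // some prime >= 10
  // Inserting 1 element into a table holding 3: still below the cached
  // threshold, so the answer is (false, 0) with no floating-point math.
  __pol._M_need_rehash(__n_bkt, 3, 1);
  // Inserting 100 more elements exceeds alpha * bucket_count, so the answer
  // is (true, p) for the next suitable prime p, and the cache is refreshed.
  __pol._M_need_rehash(__n_bkt, 3, 100);
}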
489 
490  // Base classes for std::_Hashtable. We define these base classes
491  // because in some cases we want to do different things depending
492  // on the value of a policy class. In some cases the policy class
493  // affects which member functions and nested typedefs are defined;
494  // we handle that by specializing base class templates. Several of
495  // the base class templates need to access other members of class
496  // template _Hashtable, so we use the "curiously recurring template
497  // pattern" for them.
498 
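// An illustrative sketch, not part of the header itself: the "curiously
// recurring template pattern" mentioned above, reduced to its essentials.
// The base knows the derived type as a template parameter, so it can reach
// the derived object's members through a static_cast of `this` -- the same
// way _Map_base, _Rehash_base and _Equality_base below reach into
// _Hashtable.  All "example_" names are hypothetical.
#include <cstddef>

template<typename _Derived>
  struct example_size_reporter
  {
    std::size_t
    example_twice_the_size() const
    {
      // Downcast is safe by construction: _Derived inherits from us.
      const _Derived* __self = static_cast<const _Derived*>(this);
      return 2 * __self->size();
    }
  };

struct example_container : example_size_reporter<example_container>
{
  std::size_t size() const { return 21; }   // example_twice_the_size() == 42
};
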
499  // class template _Map_base. If the hashtable has a value type of
500  // the form pair<T1, T2> and a key extraction policy that returns the
501  // first part of the pair, the hashtable gets a mapped_type typedef.
502  // If it satisfies those criteria and also has unique keys, then it
503  // also gets an operator[].
504  template<typename _Key, typename _Value, typename _Ex, bool __unique,
505  typename _Hashtable>
506  struct _Map_base { };
507 
508  template<typename _Key, typename _Pair, typename _Hashtable>
509  struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
510  {
511  typedef typename _Pair::second_type mapped_type;
512  };
513 
514  template<typename _Key, typename _Pair, typename _Hashtable>
515  struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
516  {
517  typedef typename _Pair::second_type mapped_type;
518 
519  mapped_type&
520  operator[](const _Key& __k);
521 
522  mapped_type&
523  operator[](_Key&& __k);
524 
525  // _GLIBCXX_RESOLVE_LIB_DEFECTS
526  // DR 761. unordered_map needs an at() member function.
527  mapped_type&
528  at(const _Key& __k);
529 
530  const mapped_type&
531  at(const _Key& __k) const;
532  };
533 
534  template<typename _Key, typename _Pair, typename _Hashtable>
535  typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
536  true, _Hashtable>::mapped_type&
537  _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
538  operator[](const _Key& __k)
539  {
540  _Hashtable* __h = static_cast<_Hashtable*>(this);
541  typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
542  std::size_t __n = __h->_M_bucket_index(__k, __code,
543  __h->_M_bucket_count);
544 
545  typename _Hashtable::_Node* __p =
546  __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
547  if (!__p)
548  return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
549  __n, __code)->second;
550  return (__p->_M_v).second;
551  }
552 
553  template<typename _Key, typename _Pair, typename _Hashtable>
554  typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
555  true, _Hashtable>::mapped_type&
556  _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
557  operator[](_Key&& __k)
558  {
559  _Hashtable* __h = static_cast<_Hashtable*>(this);
560  typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
561  std::size_t __n = __h->_M_bucket_index(__k, __code,
562  __h->_M_bucket_count);
563 
564  typename _Hashtable::_Node* __p =
565  __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
566  if (!__p)
567  return __h->_M_insert_bucket(std::make_pair(std::move(__k),
568  mapped_type()),
569  __n, __code)->second;
570  return (__p->_M_v).second;
571  }
572 
573  template<typename _Key, typename _Pair, typename _Hashtable>
574  typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
575  true, _Hashtable>::mapped_type&
576  _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
577  at(const _Key& __k)
578  {
579  _Hashtable* __h = static_cast<_Hashtable*>(this);
580  typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
581  std::size_t __n = __h->_M_bucket_index(__k, __code,
582  __h->_M_bucket_count);
583 
584  typename _Hashtable::_Node* __p =
585  __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
586  if (!__p)
587  __throw_out_of_range(__N("_Map_base::at"));
588  return (__p->_M_v).second;
589  }
590 
591  template<typename _Key, typename _Pair, typename _Hashtable>
592  const typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
593  true, _Hashtable>::mapped_type&
594  _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
595  at(const _Key& __k) const
596  {
597  const _Hashtable* __h = static_cast<const _Hashtable*>(this);
598  typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
599  std::size_t __n = __h->_M_bucket_index(__k, __code,
600  __h->_M_bucket_count);
601 
602  typename _Hashtable::_Node* __p =
603  __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
604  if (!__p)
605  __throw_out_of_range(__N("_Map_base::at"));
606  return (__p->_M_v).second;
607  }
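// An illustrative sketch, not part of the header itself: the user-visible
// behaviour the _Map_base lookups above implement for std::unordered_map.
// operator[] inserts a value-initialised mapped value for a missing key,
// while at() throws std::out_of_range instead of inserting.
#include <unordered_map>
#include <string>
#include <stdexcept>

inline void example_map_base_demo()
{
  std::unordered_map<std::string, int> __m;
  __m["answer"] = 42;       // not found: inserts, then assigns
  int __x = __m["missing"]; // not found: inserts {"missing", 0}, __x == 0
  try
    { __m.at("absent"); }   // not found: no insertion, throws
  catch (const std::out_of_range&)
    { }
  (void)__x;
}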
608 
609  // class template _Rehash_base. Give hashtable the max_load_factor
610  // functions and reserve iff the rehash policy is _Prime_rehash_policy.
611  template<typename _RehashPolicy, typename _Hashtable>
612  struct _Rehash_base { };
613 
614  template<typename _Hashtable>
615  struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
616  {
617  float
618  max_load_factor() const
619  {
620  const _Hashtable* __this = static_cast<const _Hashtable*>(this);
621  return __this->__rehash_policy().max_load_factor();
622  }
623 
624  void
625  max_load_factor(float __z)
626  {
627  _Hashtable* __this = static_cast<_Hashtable*>(this);
628  __this->__rehash_policy(_Prime_rehash_policy(__z));
629  }
630 
631  void
632  reserve(std::size_t __n)
633  {
634  _Hashtable* __this = static_cast<_Hashtable*>(this);
635  __this->rehash(__builtin_ceil(__n / max_load_factor()));
636  }
637  };
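// An illustrative sketch, not part of the header itself: what reserve()
// above buys.  reserve(n) rehashes to ceil(n / max_load_factor()) buckets,
// so n elements can then be inserted without growing the table on the way.
#include <unordered_set>

inline void example_reserve_demo()
{
  std::unordered_set<int> __s;
  __s.max_load_factor(1.0f);
  __s.reserve(1000);                 // bucket_count() becomes >= 1000
  for (int __i = 0; __i < 1000; ++__i)
    __s.insert(__i);                 // no rehash should be needed in this loop
}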
638 
639  // Class template _Hash_code_base. Encapsulates two policy issues that
640  // aren't quite orthogonal.
641  // (1) the difference between using a ranged hash function and using
642  // the combination of a hash function and a range-hashing function.
643  // In the former case we don't have such things as hash codes, so
644  // we have a dummy type as placeholder.
645  // (2) Whether or not we cache hash codes. Caching hash codes is
646  // meaningless if we have a ranged hash function.
647  // We also put the key extraction and equality comparison function
648  // objects here, for convenience.
649 
650  // Primary template: unused except as a hook for specializations.
651  template<typename _Key, typename _Value,
652  typename _ExtractKey, typename _Equal,
653  typename _H1, typename _H2, typename _Hash,
654  bool __cache_hash_code>
655  struct _Hash_code_base;
656 
657  // Specialization: ranged hash function, no caching hash codes. H1
658  // and H2 are provided but ignored. We define a dummy hash code type.
659  template<typename _Key, typename _Value,
660  typename _ExtractKey, typename _Equal,
661  typename _H1, typename _H2, typename _Hash>
662  struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
663  _Hash, false>
664  {
665  protected:
666  _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
667  const _H1&, const _H2&, const _Hash& __h)
668  : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }
669 
670  typedef void* _Hash_code_type;
671 
672  _Hash_code_type
673  _M_hash_code(const _Key& __key) const
674  { return 0; }
675 
676  std::size_t
677  _M_bucket_index(const _Key& __k, _Hash_code_type,
678  std::size_t __n) const
679  { return _M_ranged_hash(__k, __n); }
680 
681  std::size_t
682  _M_bucket_index(const _Hash_node<_Value, false>* __p,
683  std::size_t __n) const
684  { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }
685 
686  bool
687  _M_compare(const _Key& __k, _Hash_code_type,
688  _Hash_node<_Value, false>* __n) const
689  { return _M_eq(__k, _M_extract(__n->_M_v)); }
690 
691  void
692  _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
693  { }
694 
695  void
696  _M_copy_code(_Hash_node<_Value, false>*,
697  const _Hash_node<_Value, false>*) const
698  { }
699 
700  void
701  _M_swap(_Hash_code_base& __x)
702  {
703  std::swap(_M_extract, __x._M_extract);
704  std::swap(_M_eq, __x._M_eq);
705  std::swap(_M_ranged_hash, __x._M_ranged_hash);
706  }
707 
708  protected:
709  _ExtractKey _M_extract;
710  _Equal _M_eq;
711  _Hash _M_ranged_hash;
712  };
713 
714 
715  // No specialization for ranged hash function while caching hash codes.
716  // That combination is meaningless, and trying to do it is an error.
717 
718 
719  // Specialization: ranged hash function, cache hash codes. This
720  // combination is meaningless, so we provide only a declaration
721  // and no definition.
722  template<typename _Key, typename _Value,
723  typename _ExtractKey, typename _Equal,
724  typename _H1, typename _H2, typename _Hash>
725  struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
726  _Hash, true>;
727 
728  // Specialization: hash function and range-hashing function, no
729  // caching of hash codes. H is provided but ignored. Provides
730  // typedef and accessor required by TR1.
731  template<typename _Key, typename _Value,
732  typename _ExtractKey, typename _Equal,
733  typename _H1, typename _H2>
734  struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
735  _Default_ranged_hash, false>
736  {
737  typedef _H1 hasher;
738 
739  hasher
740  hash_function() const
741  { return _M_h1; }
742 
743  protected:
744  _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
745  const _H1& __h1, const _H2& __h2,
746  const _Default_ranged_hash&)
747  : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }
748 
749  typedef std::size_t _Hash_code_type;
750 
751  _Hash_code_type
752  _M_hash_code(const _Key& __k) const
753  { return _M_h1(__k); }
754 
755  std::size_t
756  _M_bucket_index(const _Key&, _Hash_code_type __c,
757  std::size_t __n) const
758  { return _M_h2(__c, __n); }
759 
760  std::size_t
761  _M_bucket_index(const _Hash_node<_Value, false>* __p,
762  std::size_t __n) const
763  { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }
764 
765  bool
766  _M_compare(const _Key& __k, _Hash_code_type,
767  _Hash_node<_Value, false>* __n) const
768  { return _M_eq(__k, _M_extract(__n->_M_v)); }
769 
770  void
771  _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
772  { }
773 
774  void
775  _M_copy_code(_Hash_node<_Value, false>*,
776  const _Hash_node<_Value, false>*) const
777  { }
778 
779  void
780  _M_swap(_Hash_code_base& __x)
781  {
782  std::swap(_M_extract, __x._M_extract);
783  std::swap(_M_eq, __x._M_eq);
784  std::swap(_M_h1, __x._M_h1);
785  std::swap(_M_h2, __x._M_h2);
786  }
787 
788  protected:
789  _ExtractKey _M_extract;
790  _Equal _M_eq;
791  _H1 _M_h1;
792  _H2 _M_h2;
793  };
794 
795  // Specialization: hash function and range-hashing function,
796  // caching hash codes. H is provided but ignored. Provides
797  // typedef and accessor required by TR1.
798  template<typename _Key, typename _Value,
799  typename _ExtractKey, typename _Equal,
800  typename _H1, typename _H2>
801  struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
802  _Default_ranged_hash, true>
803  {
804  typedef _H1 hasher;
805 
806  hasher
807  hash_function() const
808  { return _M_h1; }
809 
810  protected:
811  _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
812  const _H1& __h1, const _H2& __h2,
813  const _Default_ranged_hash&)
814  : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }
815 
816  typedef std::size_t _Hash_code_type;
817 
818  _Hash_code_type
819  _M_hash_code(const _Key& __k) const
820  { return _M_h1(__k); }
821 
822  std::size_t
823  _M_bucket_index(const _Key&, _Hash_code_type __c,
824  std::size_t __n) const
825  { return _M_h2(__c, __n); }
826 
827  std::size_t
828  _M_bucket_index(const _Hash_node<_Value, true>* __p,
829  std::size_t __n) const
830  { return _M_h2(__p->_M_hash_code, __n); }
831 
832  bool
833  _M_compare(const _Key& __k, _Hash_code_type __c,
834  _Hash_node<_Value, true>* __n) const
835  { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }
836 
837  void
838  _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
839  { __n->_M_hash_code = __c; }
840 
841  void
842  _M_copy_code(_Hash_node<_Value, true>* __to,
843  const _Hash_node<_Value, true>* __from) const
844  { __to->_M_hash_code = __from->_M_hash_code; }
845 
846  void
847  _M_swap(_Hash_code_base& __x)
848  {
849  std::swap(_M_extract, __x._M_extract);
850  std::swap(_M_eq, __x._M_eq);
851  std::swap(_M_h1, __x._M_h1);
852  std::swap(_M_h2, __x._M_h2);
853  }
854 
855  protected:
856  _ExtractKey _M_extract;
857  _Equal _M_eq;
858  _H1 _M_h1;
859  _H2 _M_h2;
860  };
861 
862 
863  // Class template _Equality_base. This is for implementing equality
864  // comparison for unordered containers, per N3068, by John Lakos and
865  // Pablo Halpern. Algorithmically, we follow closely the reference
866  // implementations therein.
867  template<typename _ExtractKey, bool __unique_keys,
868  typename _Hashtable>
869  struct _Equality_base;
870 
871  template<typename _ExtractKey, typename _Hashtable>
872  struct _Equality_base<_ExtractKey, true, _Hashtable>
873  {
874  bool _M_equal(const _Hashtable&) const;
875  };
876 
877  template<typename _ExtractKey, typename _Hashtable>
878  bool
879  _Equality_base<_ExtractKey, true, _Hashtable>::
880  _M_equal(const _Hashtable& __other) const
881  {
882  const _Hashtable* __this = static_cast<const _Hashtable*>(this);
883 
884  if (__this->size() != __other.size())
885  return false;
886 
887  for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
888  {
889  const auto __ity = __other.find(_ExtractKey()(*__itx));
890  if (__ity == __other.end() || !bool(*__ity == *__itx))
891  return false;
892  }
893  return true;
894  }
895 
896  template<typename _ExtractKey, typename _Hashtable>
897  struct _Equality_base<_ExtractKey, false, _Hashtable>
898  {
899  bool _M_equal(const _Hashtable&) const;
900 
901  private:
902  template<typename _Uiterator>
903  static bool
904  _S_is_permutation(_Uiterator, _Uiterator, _Uiterator);
905  };
906 
907  // See std::is_permutation in N3068.
908  template<typename _ExtractKey, typename _Hashtable>
909  template<typename _Uiterator>
910  bool
911  _Equality_base<_ExtractKey, false, _Hashtable>::
912  _S_is_permutation(_Uiterator __first1, _Uiterator __last1,
913  _Uiterator __first2)
914  {
915  for (; __first1 != __last1; ++__first1, ++__first2)
916  if (!(*__first1 == *__first2))
917  break;
918 
919  if (__first1 == __last1)
920  return true;
921 
922  _Uiterator __last2 = __first2;
923  std::advance(__last2, std::distance(__first1, __last1));
924 
925  for (_Uiterator __it1 = __first1; __it1 != __last1; ++__it1)
926  {
927  _Uiterator __tmp = __first1;
928  while (__tmp != __it1 && !bool(*__tmp == *__it1))
929  ++__tmp;
930 
931  // We've seen this one before.
932  if (__tmp != __it1)
933  continue;
934 
935  std::ptrdiff_t __n2 = 0;
936  for (__tmp = __first2; __tmp != __last2; ++__tmp)
937  if (*__tmp == *__it1)
938  ++__n2;
939 
940  if (!__n2)
941  return false;
942 
943  std::ptrdiff_t __n1 = 0;
944  for (__tmp = __it1; __tmp != __last1; ++__tmp)
945  if (*__tmp == *__it1)
946  ++__n1;
947 
948  if (__n1 != __n2)
949  return false;
950  }
951  return true;
952  }
953 
954  template<typename _ExtractKey, typename _Hashtable>
955  bool
956  _Equality_base<_ExtractKey, false, _Hashtable>::
957  _M_equal(const _Hashtable& __other) const
958  {
959  const _Hashtable* __this = static_cast<const _Hashtable*>(this);
960 
961  if (__this->size() != __other.size())
962  return false;
963 
964  for (auto __itx = __this->begin(); __itx != __this->end();)
965  {
966  const auto __xrange = __this->equal_range(_ExtractKey()(*__itx));
967  const auto __yrange = __other.equal_range(_ExtractKey()(*__itx));
968 
969  if (std::distance(__xrange.first, __xrange.second)
970  != std::distance(__yrange.first, __yrange.second))
971  return false;
972 
973  if (!_S_is_permutation(__xrange.first,
974  __xrange.second,
975  __yrange.first))
976  return false;
977 
978  __itx = __xrange.second;
979  }
980  return true;
981  }
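// An illustrative sketch, not part of the header itself: the observable
// semantics the two _M_equal overloads above provide.  Per N3068, two
// unordered containers compare equal when they hold the same elements (with
// the same multiplicities in the non-unique case), regardless of bucket
// count or iteration order.
#include <unordered_set>

inline bool example_unordered_equality_demo()
{
  std::unordered_multiset<int> __a{1, 2, 2, 3};
  std::unordered_multiset<int> __b{3, 2, 1, 2};
  __b.rehash(64);            // different bucket count, different element order
  return __a == __b;         // still true: equal as multisets
}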
982 
983 _GLIBCXX_END_NAMESPACE_VERSION
984 } // namespace __detail
985 } // namespace std
986 
987 #endif // _HASHTABLE_POLICY_H