Intel(R) Threading Building Blocks Doxygen Documentation  version 4.2.3
tbb::interface5::internal::split_ordered_list< T, Allocator > Class Template Reference

#include <_concurrent_unordered_impl.h>

Inheritance diagram for tbb::interface5::internal::split_ordered_list< T, Allocator >:
Collaboration diagram for tbb::interface5::internal::split_ordered_list< T, Allocator >:

Classes

struct  node
 

Public Types

typedef split_ordered_list< T, Allocator > self_type
 
typedef tbb::internal::allocator_rebind< Allocator, T >::type allocator_type
 
typedef node * nodeptr_t
 
typedef tbb::internal::allocator_traits< allocator_type >::value_type value_type
 
typedef tbb::internal::allocator_traits< allocator_type >::size_type size_type
 
typedef tbb::internal::allocator_traits< allocator_type >::difference_type difference_type
 
typedef tbb::internal::allocator_traits< allocator_type >::pointer pointer
 
typedef tbb::internal::allocator_traits< allocator_type >::const_pointer const_pointer
 
typedef value_type & reference
 
typedef const value_type & const_reference
 
typedef solist_iterator< self_type, const value_type > const_iterator
 
typedef solist_iterator< self_type, value_type > iterator
 
typedef flist_iterator< self_type, const value_type > raw_const_iterator
 
typedef flist_iterator< self_type, value_type > raw_iterator
 

Public Member Functions

nodeptr_t create_node (sokey_t order_key)
 
template<typename Arg >
nodeptr_t create_node (sokey_t order_key, __TBB_FORWARDING_REF(Arg) t, tbb::internal::true_type=tbb::internal::true_type())
 
template<typename Arg >
nodeptr_t create_node (sokey_t, __TBB_FORWARDING_REF(Arg), tbb::internal::false_type)
 
template<typename __TBB_PARAMETER_PACK Args>
nodeptr_t create_node_v (__TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args)
 
 split_ordered_list (allocator_type a=allocator_type())
 
 ~split_ordered_list ()
 
allocator_type get_allocator () const
 
void clear ()
 
iterator begin ()
 
const_iterator begin () const
 
iterator end ()
 
const_iterator end () const
 
const_iterator cbegin () const
 
const_iterator cend () const
 
bool empty () const
 
size_type size () const
 
size_type max_size () const
 
void swap (self_type &other)
 
raw_iterator raw_begin ()
 
raw_const_iterator raw_begin () const
 
raw_iterator raw_end ()
 
raw_const_iterator raw_end () const
 
iterator get_iterator (raw_iterator it)
 
const_iterator get_iterator (raw_const_iterator it) const
 
raw_iterator get_iterator (raw_const_iterator it)
 
iterator first_real_iterator (raw_iterator it)
 
const_iterator first_real_iterator (raw_const_iterator it) const
 
void destroy_node (nodeptr_t pnode)
 
std::pair< iterator, bool > try_insert (raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count)
 
raw_iterator insert_dummy (raw_iterator it, sokey_t order_key)
 
nodeptr_t erase_node_impl (raw_iterator previous, raw_const_iterator &where)
 
void erase_node (raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)
 
void erase_node (raw_iterator previous, raw_const_iterator &where, tbb::internal::false_type)
 
void erase_node (raw_iterator previous, raw_const_iterator &where)
 
template<typename AllowDestroy >
iterator erase_node (raw_iterator previous, const_iterator where, AllowDestroy)
 
iterator erase_node (raw_iterator previous, const_iterator &where)
 
void move_all (self_type &source)
 

Static Public Member Functions

static sokey_t get_order_key (const raw_const_iterator &it)
 
static sokey_t get_safe_order_key (const raw_const_iterator &it)
 
static iterator get_iterator (const_iterator it)
 
static nodeptr_t try_insert_atomic (nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)
 

Private Member Functions

void check_range (raw_iterator first, raw_iterator last)
 
void check_range ()
 

Private Attributes

tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
 
size_type my_element_count
 
nodeptr_t my_head
 

Friends

template<typename Traits >
class concurrent_unordered_base
 

Detailed Description

template<typename T, typename Allocator>
class tbb::interface5::internal::split_ordered_list< T, Allocator >

Definition at line 61 of file _concurrent_unordered_impl.h.

Member Typedef Documentation

◆ allocator_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_rebind<Allocator, T>::type tbb::interface5::internal::split_ordered_list< T, Allocator >::allocator_type

Definition at line 197 of file _concurrent_unordered_impl.h.

◆ const_iterator

template<typename T, typename Allocator>
typedef solist_iterator<self_type, const value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::const_iterator

Definition at line 211 of file _concurrent_unordered_impl.h.

◆ const_pointer

Definition at line 206 of file _concurrent_unordered_impl.h.

◆ const_reference

template<typename T, typename Allocator>
typedef const value_type& tbb::interface5::internal::split_ordered_list< T, Allocator >::const_reference

Definition at line 209 of file _concurrent_unordered_impl.h.

◆ difference_type

Definition at line 204 of file _concurrent_unordered_impl.h.

◆ iterator

template<typename T, typename Allocator>
typedef solist_iterator<self_type, value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::iterator

Definition at line 212 of file _concurrent_unordered_impl.h.

◆ nodeptr_t

template<typename T, typename Allocator>
typedef node* tbb::interface5::internal::split_ordered_list< T, Allocator >::nodeptr_t

Definition at line 199 of file _concurrent_unordered_impl.h.

◆ pointer

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::pointer tbb::interface5::internal::split_ordered_list< T, Allocator >::pointer

Definition at line 205 of file _concurrent_unordered_impl.h.

◆ raw_const_iterator

template<typename T, typename Allocator>
typedef flist_iterator<self_type, const value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_const_iterator

Definition at line 213 of file _concurrent_unordered_impl.h.

◆ raw_iterator

template<typename T, typename Allocator>
typedef flist_iterator<self_type, value_type> tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_iterator

Definition at line 214 of file _concurrent_unordered_impl.h.

◆ reference

template<typename T, typename Allocator>
typedef value_type& tbb::interface5::internal::split_ordered_list< T, Allocator >::reference

Definition at line 208 of file _concurrent_unordered_impl.h.

◆ self_type

template<typename T, typename Allocator>
typedef split_ordered_list<T, Allocator> tbb::interface5::internal::split_ordered_list< T, Allocator >::self_type

Definition at line 195 of file _concurrent_unordered_impl.h.

◆ size_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::size_type

Definition at line 203 of file _concurrent_unordered_impl.h.

◆ value_type

template<typename T, typename Allocator>
typedef tbb::internal::allocator_traits<allocator_type>::value_type tbb::interface5::internal::split_ordered_list< T, Allocator >::value_type

Definition at line 202 of file _concurrent_unordered_impl.h.

Constructor & Destructor Documentation

◆ split_ordered_list()

template<typename T, typename Allocator>
tbb::interface5::internal::split_ordered_list< T, Allocator >::split_ordered_list ( allocator_type  a = allocator_type())
inline

Definition at line 322 of file _concurrent_unordered_impl.h.

324  {
325  // Immediately allocate a dummy node with order key of 0. This node
326  // will always be the head of the list.
328  }
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator

◆ ~split_ordered_list()

template<typename T, typename Allocator>
tbb::interface5::internal::split_ordered_list< T, Allocator >::~split_ordered_list ( )
inline

Definition at line 330 of file _concurrent_unordered_impl.h.

331  {
332  // Clear the list
333  clear();
334 
335  // Remove the head element which is not cleared by clear()
336  nodeptr_t pnode = my_head;
337  my_head = NULL;
338 
339  __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL, "Invalid head list node");
340 
341  destroy_node(pnode);
342  }
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

Member Function Documentation

◆ begin() [1/2]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::begin ( )
inline

◆ begin() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::begin ( ) const
inline

◆ cbegin()

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::cbegin ( ) const
inline

Definition at line 387 of file _concurrent_unordered_impl.h.

387  {
388  return (((const self_type *)this)->begin());
389  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::cbegin().

Here is the caller graph for this function:

◆ cend()

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::cend ( ) const
inline

Definition at line 391 of file _concurrent_unordered_impl.h.

391  {
392  return (((const self_type *)this)->end());
393  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::cend().

Here is the caller graph for this function:

◆ check_range() [1/2]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::check_range ( raw_iterator  first,
raw_iterator  last 
)
inlineprivate

Definition at line 663 of file _concurrent_unordered_impl.h.

664  {
665 #if TBB_USE_ASSERT
666  for (raw_iterator it = first; it != last; ++it)
667  {
668  raw_iterator next = it;
669  ++next;
670 
671  __TBB_ASSERT(next == raw_end() || get_order_key(next) >= get_order_key(it), "!!! List order inconsistency !!!");
672  }
673 #else
675 #endif
676  }
auto last(Container &c) -> decltype(begin(c))
auto first(Container &c) -> decltype(begin(c))
void suppress_unused_warning(const T1 &)
Utility template function to prevent "unused" warnings by various compilers.
Definition: tbb_stddef.h:377
flist_iterator< self_type, value_type > raw_iterator
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165
static sokey_t get_order_key(const raw_const_iterator &it)

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::concurrent_unordered_base().

Here is the caller graph for this function:

◆ check_range() [2/2]

◆ clear()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::clear ( )
inline

Definition at line 350 of file _concurrent_unordered_impl.h.

350  {
351  nodeptr_t pnext;
352  nodeptr_t pnode = my_head;
353 
354  __TBB_ASSERT(my_head != NULL, "Invalid head list node");
355  pnext = pnode->my_next;
356  pnode->my_next = NULL;
357  pnode = pnext;
358 
359  while (pnode != NULL)
360  {
361  pnext = pnode->my_next;
362  destroy_node(pnode);
363  pnode = pnext;
364  }
365 
366  my_element_count = 0;
367  }
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::clear(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_copy(), and tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::~split_ordered_list().

Here is the caller graph for this function:

◆ create_node() [1/3]

template<typename T, typename Allocator>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t  order_key)
inline

◆ create_node() [2/3]

template<typename T, typename Allocator>
template<typename Arg >
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t  order_key,
__TBB_FORWARDING_REF(Arg)  t,
tbb::internal::true_type  = tbb::internal::true_type() 
)
inline

Definition at line 282 of file _concurrent_unordered_impl.h.

283  {
284  nodeptr_t pnode = my_node_allocator.allocate(1);
285 
286  //TODO: use RAII scoped guard instead of explicit catch
287  __TBB_TRY {
288  new(static_cast<void*>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
289  pnode->init(order_key);
290  } __TBB_CATCH(...) {
291  my_node_allocator.deallocate(pnode, 1);
292  __TBB_RETHROW();
293  }
294 
295  return (pnode);
296  }
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
#define __TBB_CATCH(e)
Definition: tbb_stddef.h:284
#define __TBB_TRY
Definition: tbb_stddef.h:283
#define __TBB_RETHROW()
Definition: tbb_stddef.h:286

◆ create_node() [3/3]

template<typename T, typename Allocator>
template<typename Arg >
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node ( sokey_t  ,
__TBB_FORWARDING_REF(Arg)  ,
tbb::internal::false_type   
)
inline

Definition at line 300 of file _concurrent_unordered_impl.h.

301  {
302  __TBB_ASSERT(false, "This compile-time helper should never get called");
303  return nodeptr_t();
304  }
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

◆ create_node_v()

template<typename T, typename Allocator>
template<typename __TBB_PARAMETER_PACK Args>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::create_node_v ( __TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK  args)
inline

Definition at line 308 of file _concurrent_unordered_impl.h.

308  {
309  nodeptr_t pnode = my_node_allocator.allocate(1);
310 
311  //TODO: use RAII scoped guard instead of explicit catch
312  __TBB_TRY {
313  new(static_cast<void*>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
314  } __TBB_CATCH(...) {
315  my_node_allocator.deallocate(pnode, 1);
316  __TBB_RETHROW();
317  }
318 
319  return (pnode);
320  }
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator
#define __TBB_CATCH(e)
Definition: tbb_stddef.h:284
#define __TBB_TRY
Definition: tbb_stddef.h:283
#define __TBB_RETHROW()
Definition: tbb_stddef.h:286
#define __TBB_PACK_EXPANSION(A)
Definition: tbb_stddef.h:504

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::emplace().

Here is the caller graph for this function:

◆ destroy_node()

◆ empty()

template<typename T, typename Allocator>
bool tbb::interface5::internal::split_ordered_list< T, Allocator >::empty ( ) const
inline

Definition at line 396 of file _concurrent_unordered_impl.h.

396  {
397  return (my_element_count == 0);
398  }

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::empty().

Here is the caller graph for this function:

◆ end() [1/2]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::end ( )
inline

◆ end() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::end ( ) const
inline

Definition at line 383 of file _concurrent_unordered_impl.h.

383  {
384  return (const_iterator(0, this));
385  }
solist_iterator< self_type, const value_type > const_iterator

◆ erase_node() [1/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
raw_const_iterator where,
tbb::internal::true_type   
)
inline

◆ erase_node() [2/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
raw_const_iterator where,
tbb::internal::false_type   
)
inline

Definition at line 603 of file _concurrent_unordered_impl.h.

605  {
606  erase_node_impl(previous, where);
607  }
nodeptr_t erase_node_impl(raw_iterator previous, raw_const_iterator &where)

◆ erase_node() [3/5]

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
raw_const_iterator where 
)
inline

Definition at line 609 of file _concurrent_unordered_impl.h.

609  {
610  erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
611  }
void erase_node(raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)

◆ erase_node() [4/5]

template<typename T, typename Allocator>
template<typename AllowDestroy >
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
const_iterator  where,
AllowDestroy   
)
inline

Definition at line 615 of file _concurrent_unordered_impl.h.

616  {
617  raw_const_iterator it = where;
618  erase_node(previous, it, AllowDestroy());
620 
621  return get_iterator(first_real_iterator(it));
622  }
flist_iterator< self_type, const value_type > raw_const_iterator
void erase_node(raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)

◆ erase_node() [5/5]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node ( raw_iterator  previous,
const_iterator where 
)
inline

Definition at line 624 of file _concurrent_unordered_impl.h.

624  {
625  return erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
626  }
void erase_node(raw_iterator previous, raw_const_iterator &where, tbb::internal::true_type)

◆ erase_node_impl()

template<typename T, typename Allocator>
nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::erase_node_impl ( raw_iterator  previous,
raw_const_iterator where 
)
inline

Definition at line 587 of file _concurrent_unordered_impl.h.

587  {
588  nodeptr_t pnode = (where++).get_node_ptr();
589  nodeptr_t prevnode = previous.get_node_ptr();
590  __TBB_ASSERT(prevnode->my_next == pnode, "Erase must take consecutive iterators");
591  prevnode->my_next = pnode->my_next;
592  return pnode;
593  }
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node().

Here is the caller graph for this function:

◆ first_real_iterator() [1/2]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::first_real_iterator ( raw_iterator  it)
inline

Definition at line 478 of file _concurrent_unordered_impl.h.

479  {
480  // Skip all dummy, internal only iterators
481  while (it != raw_end() && it.get_node_ptr()->is_dummy())
482  ++it;
483 
484  return iterator(it.get_node_ptr(), this);
485  }
solist_iterator< self_type, value_type > iterator

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::begin(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::set_midpoint(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_begin(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_end().

Here is the caller graph for this function:

◆ first_real_iterator() [2/2]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::first_real_iterator ( raw_const_iterator  it) const
inline

Definition at line 489 of file _concurrent_unordered_impl.h.

490  {
491  // Skip all dummy, internal only iterators
492  while (it != raw_end() && it.get_node_ptr()->is_dummy())
493  ++it;
494 
495  return const_iterator(it.get_node_ptr(), this);
496  }
solist_iterator< self_type, const value_type > const_iterator

◆ get_allocator()

template<typename T, typename Allocator>
allocator_type tbb::interface5::internal::split_ordered_list< T, Allocator >::get_allocator ( ) const
inline

Definition at line 346 of file _concurrent_unordered_impl.h.

346  {
347  return (my_node_allocator);
348  }
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::get_allocator().

Here is the caller graph for this function:

◆ get_iterator() [1/4]

template<typename T, typename Allocator>
iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_iterator  it)
inline

Definition at line 454 of file _concurrent_unordered_impl.h.

454  {
455  __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
456  return iterator(it.get_node_ptr(), this);
457  }
solist_iterator< self_type, value_type > iterator
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

Referenced by tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::begin(), tbb::interface5::internal::concurrent_unordered_base< Traits >::const_range_type::end(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::erase_node(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_equal_range(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_erase(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_extract(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_find(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::move_all(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_erase().

Here is the caller graph for this function:

◆ get_iterator() [2/4]

template<typename T, typename Allocator>
const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_const_iterator  it) const
inline

Definition at line 461 of file _concurrent_unordered_impl.h.

461  {
462  __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
463  return const_iterator(it.get_node_ptr(), this);
464  }
solist_iterator< self_type, const value_type > const_iterator
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165

◆ get_iterator() [3/4]

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( raw_const_iterator  it)
inline

Definition at line 467 of file _concurrent_unordered_impl.h.

467  {
468  return raw_iterator(it.get_node_ptr());
469  }
flist_iterator< self_type, value_type > raw_iterator

◆ get_iterator() [4/4]

template<typename T, typename Allocator>
static iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::get_iterator ( const_iterator  it)
inlinestatic

Definition at line 472 of file _concurrent_unordered_impl.h.

472  {
473  return iterator(it.my_node_ptr, it.my_list_ptr);
474  }
solist_iterator< self_type, value_type > iterator

◆ get_order_key()

template<typename T, typename Allocator>
static sokey_t tbb::interface5::internal::split_ordered_list< T, Allocator >::get_order_key ( const raw_const_iterator it)
inlinestatic

Definition at line 443 of file _concurrent_unordered_impl.h.

443  {
444  return it.get_node_ptr()->get_order_key();
445  }

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::check_range(), and tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::insert_dummy().

Here is the caller graph for this function:

◆ get_safe_order_key()

template<typename T, typename Allocator>
static sokey_t tbb::interface5::internal::split_ordered_list< T, Allocator >::get_safe_order_key ( const raw_const_iterator it)
inlinestatic

Definition at line 447 of file _concurrent_unordered_impl.h.

447  {
448  if( !it.get_node_ptr() ) return ~sokey_t(0);
449  return it.get_node_ptr()->get_order_key();
450  }

◆ insert_dummy()

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::insert_dummy ( raw_iterator  it,
sokey_t  order_key 
)
inline

Definition at line 530 of file _concurrent_unordered_impl.h.

531  {
533  raw_iterator where = it;
534 
535  __TBB_ASSERT(where != last, "Invalid head node");
536 
537  ++where;
538 
539  // Create a dummy element up front, even though it may be discarded (due to concurrent insertion)
540  nodeptr_t dummy_node = create_node(order_key);
541 
542  for (;;)
543  {
544  __TBB_ASSERT(it != last, "Invalid head list node");
545 
546  // If the head iterator is at the end of the list, or past the point where this dummy
547  // node needs to be inserted, then try to insert it.
548  if (where == last || get_order_key(where) > order_key)
549  {
550  __TBB_ASSERT(get_order_key(it) < order_key, "Invalid node order in the list");
551 
552  // Try to insert it in the right place
553  nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), dummy_node, where.get_node_ptr());
554 
555  if (inserted_node == dummy_node)
556  {
557  // Insertion succeeded, check the list for order violations
558  check_range(it, where);
559  return raw_iterator(dummy_node);
560  }
561  else
562  {
563  // Insertion failed: either dummy node was inserted by another thread, or
564  // a real element was inserted at exactly the same place as dummy node.
565  // Proceed with the search from the previous location where order key was
566  // known to be larger (note: this is legal only because there is no safe
567  // concurrent erase operation supported).
568  where = it;
569  ++where;
570  continue;
571  }
572  }
573  else if (get_order_key(where) == order_key)
574  {
575  // Another dummy node with the same value found, discard the new one.
576  destroy_node(dummy_node);
577  return where;
578  }
579 
580  // Move the iterator forward
581  it = where;
582  ++where;
583  }
584 
585  }
auto last(Container &c) -> decltype(begin(c))
flist_iterator< self_type, value_type > raw_iterator
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165
static sokey_t get_order_key(const raw_const_iterator &it)
static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::init_bucket().

Here is the caller graph for this function:

◆ max_size()

template<typename T, typename Allocator>
size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::max_size ( ) const
inline

Definition at line 406 of file _concurrent_unordered_impl.h.

406  {
407  return my_node_allocator.max_size();
408  }
tbb::internal::allocator_rebind< allocator_type, node >::type my_node_allocator

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::max_size().

Here is the caller graph for this function:

◆ move_all()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::move_all ( self_type source)
inline

Definition at line 631 of file _concurrent_unordered_impl.h.

632  {
633  raw_const_iterator first = source.raw_begin();
634  raw_const_iterator last = source.raw_end();
635 
636  if (first == last)
637  return;
638 
639  nodeptr_t previous_node = my_head;
640  raw_const_iterator begin_iterator = first++;
641 
642  // Move all elements one by one, including dummy ones
643  for (raw_const_iterator it = first; it != last;)
644  {
645  nodeptr_t pnode = it.get_node_ptr();
646 
647  nodeptr_t dummy_node = pnode->is_dummy() ? create_node(pnode->get_order_key()) : create_node(pnode->get_order_key(), pnode->my_element);
648  previous_node = try_insert_atomic(previous_node, dummy_node, NULL);
649  __TBB_ASSERT(previous_node != NULL, "Insertion must succeed");
650  raw_const_iterator where = it++;
651  source.erase_node(get_iterator(begin_iterator), where);
652  }
653  check_range();
654  }
auto last(Container &c) -> decltype(begin(c))
flist_iterator< self_type, const value_type > raw_const_iterator
auto first(Container &c) -> decltype(begin(c))
#define __TBB_ASSERT(predicate, comment)
No-op version of __TBB_ASSERT.
Definition: tbb_stddef.h:165
static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)

◆ raw_begin() [1/2]

◆ raw_begin() [2/2]

template<typename T, typename Allocator>
raw_const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_begin ( ) const
inline

Definition at line 431 of file _concurrent_unordered_impl.h.

431  {
432  return raw_const_iterator(my_head);
433  }
flist_iterator< self_type, const value_type > raw_const_iterator

◆ raw_end() [1/2]

template<typename T, typename Allocator>
raw_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_end ( )
inline

Definition at line 435 of file _concurrent_unordered_impl.h.

435  {
436  return raw_iterator(0);
437  }
flist_iterator< self_type, value_type > raw_iterator

Referenced by tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::check_range(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::first_real_iterator(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::insert_dummy(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_equal_range(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_erase(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_extract(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_find(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert(), tbb::interface5::internal::split_ordered_list< value_type, typename Traits::allocator_type >::move_all(), tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_bucket_size(), and tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::unsafe_end().

Here is the caller graph for this function:

◆ raw_end() [2/2]

template<typename T, typename Allocator>
raw_const_iterator tbb::interface5::internal::split_ordered_list< T, Allocator >::raw_end ( ) const
inline

Definition at line 439 of file _concurrent_unordered_impl.h.

439  {
440  return raw_const_iterator(0);
441  }
flist_iterator< self_type, const value_type > raw_const_iterator

◆ size()

template<typename T, typename Allocator>
size_type tbb::interface5::internal::split_ordered_list< T, Allocator >::size ( ) const
inline

Definition at line 401 of file _concurrent_unordered_impl.h.

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::size().

Here is the caller graph for this function:

◆ swap()

template<typename T, typename Allocator>
void tbb::interface5::internal::split_ordered_list< T, Allocator >::swap ( self_type other)
inline

Definition at line 411 of file _concurrent_unordered_impl.h.

412  {
413  if (this == &other)
414  {
415  // Nothing to do
416  return;
417  }
418 
419  std::swap(my_element_count, other.my_element_count);
420  std::swap(my_head, other.my_head);
421  }
void swap(atomic< T > &lhs, atomic< T > &rhs)
Definition: atomic.h:535

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::swap().

Here is the caller graph for this function:

◆ try_insert()

template<typename T, typename Allocator>
std::pair<iterator, bool> tbb::interface5::internal::split_ordered_list< T, Allocator >::try_insert ( raw_iterator  it,
raw_iterator  next,
nodeptr_t  pnode,
size_type *  new_count 
)
inline

Definition at line 512 of file _concurrent_unordered_impl.h.

513  {
514  nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), pnode, next.get_node_ptr());
515 
516  if (inserted_node == pnode)
517  {
518  // If the insert succeeded, check that the order is correct and increment the element count
519  check_range(it, next);
520  *new_count = tbb::internal::as_atomic(my_element_count).fetch_and_increment();
521  return std::pair<iterator, bool>(iterator(pnode, this), true);
522  }
523  else
524  {
525  return std::pair<iterator, bool>(end(), false);
526  }
527  }
solist_iterator< self_type, value_type > iterator
atomic< T > & as_atomic(T &t)
Definition: atomic.h:543
static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node)

Referenced by tbb::interface5::internal::concurrent_unordered_base< concurrent_unordered_map_traits< Key, T, internal::hash_compare< Key, Hasher, Key_equality >, Allocator, false > >::internal_insert().

Here is the caller graph for this function:

◆ try_insert_atomic()

template<typename T, typename Allocator>
static nodeptr_t tbb::interface5::internal::split_ordered_list< T, Allocator >::try_insert_atomic ( nodeptr_t  previous,
nodeptr_t  new_node,
nodeptr_t  current_node 
)
inlinestatic

Friends And Related Function Documentation

◆ concurrent_unordered_base

template<typename T, typename Allocator>
template<typename Traits >
friend class concurrent_unordered_base
friend

Definition at line 660 of file _concurrent_unordered_impl.h.

Member Data Documentation

◆ my_element_count

◆ my_head

◆ my_node_allocator


The documentation for this class was generated from the following file:

Copyright © 2005-2019 Intel Corporation. All Rights Reserved.

Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are registered trademarks or trademarks of Intel Corporation or its subsidiaries in the United States and other countries.

* Other names and brands may be claimed as the property of others.