// lock-free freelist
//
// Copyright (C) 2008-2016 Tim Blechmann
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED

#include <limits>
#include <memory>
#include <stdexcept>                     // std::runtime_error, used below

#include <boost/array.hpp>
#include <boost/config.hpp>
#include <boost/cstdint.hpp>
#include <boost/mpl/if.hpp>              // mpl::if_c, used below
#include <boost/noncopyable.hpp>
#include <boost/static_assert.hpp>
#include <boost/throw_exception.hpp>     // boost::throw_exception, used below

#include <boost/align/align_up.hpp>
#include <boost/align/aligned_allocator_adaptor.hpp>

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/prefix.hpp> // BOOST_LOCKFREE_CACHELINE_BYTES
#include <boost/lockfree/detail/tagged_ptr.hpp>

#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable: 4100) // unreferenced formal parameter
#pragma warning(disable: 4127) // conditional expression is constant
#endif
namespace boost {
namespace lockfree {
namespace detail {
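
// freelist_stack is a lock-free stack of free nodes: memory released via
// deallocate<>() is reinterpreted as a freelist_node and pushed onto an
// intrusive singly-linked list; allocate<>() pops from it. The head is a
// tagged pointer whose tag is bumped on every pop, so a pop/push/pop of the
// same node between a load and a CAS (the classic ABA pattern) cannot make
// a stale compare_exchange succeed.
//
// Usage sketch (illustrative only, in the spirit of how the lockfree
// containers use this class internally):
//
//   freelist_stack<node> pool(std::allocator<node>(), 128);
//   node * n = pool.construct<true, false>(); // thread-safe, unbounded
//   // ... use n ...
//   pool.destruct<true>(n);                   // run dtor, push back to pool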
template <typename T,
          typename Alloc = std::allocator<T>
         >
class freelist_stack:
    Alloc
{
    struct freelist_node
    {
        tagged_ptr<freelist_node> next;
    };

    typedef tagged_ptr<freelist_node> tagged_node_ptr;
public:
    typedef T * index_t;
    typedef tagged_ptr<T> tagged_node_handle;

    template <typename Allocator>
    freelist_stack (Allocator const & alloc, std::size_t n = 0):
        Alloc(alloc),
        pool_(tagged_node_ptr(NULL))
    {
        for (std::size_t i = 0; i != n; ++i) {
            T * node = Alloc::allocate(1);
#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(node);
#else
            deallocate<false>(node);
#endif
        }
    }
    template <bool ThreadSafe>
    void reserve (std::size_t count)
    {
        for (std::size_t i = 0; i != count; ++i) {
            T * node = Alloc::allocate(1);
            deallocate<ThreadSafe>(node);
        }
    }
    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle const & tagged_ptr)
    {
        T * n = tagged_ptr.get_ptr();
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(n);
    }
    ~freelist_stack(void)
    {
        tagged_node_ptr current = pool_.load();

        while (current) {
            freelist_node * current_ptr = current.get_ptr();
            if (current_ptr)
                current = current_ptr->next;
            Alloc::deallocate((T*)current_ptr, 1);
        }
    }
    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    T * get_handle(T * pointer) const
    {
        return pointer;
    }

    T * get_handle(tagged_node_handle const & handle) const
    {
        return get_pointer(handle);
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return tptr.get_ptr();
    }

    T * get_pointer(T * pointer) const
    {
        return pointer;
    }

    T * null_handle(void) const
    {
        return NULL;
    }
protected: // allow use from subclasses
    template <bool ThreadSafe, bool Bounded>
    T * allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl<Bounded>();
        else
            return allocate_impl_unsafe<Bounded>();
    }
private:
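    // Lock-free pop. compare_exchange_weak re-reads the head into old_pool
    // on failure, so the loop simply retries; get_next_tag() increments the
    // tag so that a node popped, reused, and pushed back between our load
    // and our CAS is still detected. On an empty pool, either fall back to
    // the allocator (unbounded) or report exhaustion (bounded).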
    template <bool Bounded>
    T * allocate_impl (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);

        for(;;) {
            if (!old_pool.get_ptr()) {
                if (!Bounded)
                    return Alloc::allocate(1);
                else
                    return 0;
            }

            freelist_node * new_pool_ptr = old_pool->next.get_ptr();
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool)) {
                void * ptr = old_pool.get_ptr();
                return reinterpret_cast<T*>(ptr);
            }
        }
    }

    template <bool Bounded>
    T * allocate_impl_unsafe (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);

        if (!old_pool.get_ptr()) {
            if (!Bounded)
                return Alloc::allocate(1);
            else
                return 0;
        }

        freelist_node * new_pool_ptr = old_pool->next.get_ptr();
        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        void * ptr = old_pool.get_ptr();
        return reinterpret_cast<T*>(ptr);
    }
protected:
    template <bool ThreadSafe>
    void deallocate (T * n)
    {
        if (ThreadSafe)
            deallocate_impl(n);
        else
            deallocate_impl_unsafe(n);
    }
private:
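    // Lock-free push. The freed memory is reinterpreted as a freelist_node,
    // linked in front of the current head, and published with a CAS. The
    // tag is carried over unchanged: only the pop path needs to bump it,
    // because ABA only bites when the observed head is removed and
    // re-inserted while its successor is being read.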
    void deallocate_impl (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        for(;;) {
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
            new_pool->next.set_ptr(old_pool.get_ptr());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
        new_pool->next.set_ptr(old_pool.get_ptr());

        pool_.store(new_pool, memory_order_relaxed);
    }

    atomic<tagged_node_ptr> pool_;
};
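
// tagged_index packs a 16-bit array index and a 16-bit ABA tag into 32
// bits, so atomic<tagged_index> is lock-free on any platform with a 32-bit
// CAS. It is the index-based counterpart of tagged_ptr, used by the
// array-backed fixed_size_freelist below.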
class
BOOST_ALIGNMENT( 4 ) // workaround for bugs in MSVC
tagged_index
{
public:
    typedef boost::uint16_t tag_t;
    typedef boost::uint16_t index_t;

    /** uninitialized constructor */
    tagged_index(void) BOOST_NOEXCEPT //: index(0), tag(0)
    {}

    /** copy constructor */
#ifdef BOOST_NO_CXX11_DEFAULTED_FUNCTIONS
    tagged_index(tagged_index const & rhs):
        index(rhs.index), tag(rhs.tag)
    {}
#else
    tagged_index(tagged_index const & rhs) = default;
#endif

    explicit tagged_index(index_t i, tag_t t = 0):
        index(i), tag(t)
    {}

    /** index access */
    /* @{ */
    index_t get_index() const
    {
        return index;
    }

    void set_index(index_t i)
    {
        index = i;
    }
    /* @} */

    /** tag access */
    /* @{ */
    tag_t get_tag() const
    {
        return tag;
    }

    tag_t get_next_tag() const
    {
        tag_t next = (get_tag() + 1u) & (std::numeric_limits<tag_t>::max)();
        return next;
    }

    void set_tag(tag_t t)
    {
        tag = t;
    }
    /* @} */

    bool operator==(tagged_index const & rhs) const
    {
        return (index == rhs.index) && (tag == rhs.tag);
    }

    bool operator!=(tagged_index const & rhs) const
    {
        return !operator==(rhs);
    }

protected:
    index_t index;
    tag_t tag;
};
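
// Storage policy for fixed_size_freelist with a compile-time capacity: the
// nodes live in an embedded char array, over-allocated by 64 bytes so that
// nodes() can return a cacheline-aligned pointer into it.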
template <typename T,
          std::size_t size>
struct compiletime_sized_freelist_storage
{
    // array-based freelists only support a 16-bit index space.
    BOOST_STATIC_ASSERT(size < 65536);

    boost::array<char, size * sizeof(T) + 64> data;

    // unused ... only for API purposes
    template <typename Allocator>
    compiletime_sized_freelist_storage(Allocator const & /* alloc */, std::size_t /* count */)
    {}

    T * nodes(void) const
    {
        char * data_pointer = const_cast<char*>(data.data());
        return reinterpret_cast<T*>( boost::alignment::align_up( data_pointer, BOOST_LOCKFREE_CACHELINE_BYTES ) );
    }

    std::size_t node_count(void) const
    {
        return size;
    }
};
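
// Run-time sized counterpart: the node array is allocated on construction
// through an alignment-adjusting adaptor around the user's allocator, so
// the array starts on a cacheline boundary.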
template <typename T,
          typename Alloc = std::allocator<T> >
struct runtime_sized_freelist_storage:
    boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES >
{
    typedef boost::alignment::aligned_allocator_adaptor<Alloc, BOOST_LOCKFREE_CACHELINE_BYTES > allocator_type;
    T * nodes_;
    std::size_t node_count_;

    template <typename Allocator>
    runtime_sized_freelist_storage(Allocator const & alloc, std::size_t count):
        allocator_type(alloc), node_count_(count)
    {
        if (count > 65535)
            boost::throw_exception(std::runtime_error("boost.lockfree: freelist size is limited to a maximum of 65535 objects"));
        nodes_ = allocator_type::allocate(count);
    }

    ~runtime_sized_freelist_storage(void)
    {
        allocator_type::deallocate(nodes_, node_count_);
    }

    T * nodes(void) const
    {
        return nodes_;
    }

    std::size_t node_count(void) const
    {
        return node_count_;
    }
};
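
// fixed_size_freelist implements the same stack discipline as
// freelist_stack, but over a pre-allocated array: handles are 16-bit
// indices into the node array rather than pointers, with node_count()
// serving as the null handle. This keeps the atomic head at 32 bits
// (index + tag) and never touches the allocator after construction.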
template <typename T,
          typename NodeStorage = runtime_sized_freelist_storage<T>
         >
class fixed_size_freelist:
    NodeStorage
{
    struct freelist_node
    {
        tagged_index next;
    };

    void initialize(void)
    {
        T * nodes = NodeStorage::nodes();
        for (std::size_t i = 0; i != NodeStorage::node_count(); ++i) {
            tagged_index * next_index = reinterpret_cast<tagged_index*>(nodes + i);
            next_index->set_index(null_handle());

#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(nodes + i);
#else
            deallocate<false>(static_cast<index_t>(i));
#endif
        }
    }
public:
    typedef tagged_index tagged_node_handle;
    typedef tagged_index::index_t index_t;

    template <typename Allocator>
    fixed_size_freelist (Allocator const & alloc, std::size_t count):
        NodeStorage(alloc, count),
        pool_(tagged_index(static_cast<index_t>(count), 0))
    {
        initialize();
    }

    fixed_size_freelist (void):
        pool_(tagged_index(NodeStorage::node_count(), 0))
    {
        initialize();
    }
    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle tagged_index)
    {
        index_t index = tagged_index.get_index();
        T * n = NodeStorage::nodes() + index;
        (void)n; // silence msvc warning
        n->~T();
        deallocate<ThreadSafe>(index);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(static_cast<index_t>(n - NodeStorage::nodes()));
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    index_t null_handle(void) const
    {
        return static_cast<index_t>(NodeStorage::node_count());
    }

    index_t get_handle(T * pointer) const
    {
        if (pointer == NULL)
            return null_handle();
        else
            return static_cast<index_t>(pointer - NodeStorage::nodes());
    }

    index_t get_handle(tagged_node_handle const & handle) const
    {
        return handle.get_index();
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return get_pointer(tptr.get_index());
    }

    T * get_pointer(index_t index) const
    {
        if (index == null_handle())
            return 0;
        else
            return NodeStorage::nodes() + index;
    }

    T * get_pointer(T * ptr) const
    {
        return ptr;
    }
protected: // allow use from subclasses
    template <bool ThreadSafe>
    index_t allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl();
        else
            return allocate_impl_unsafe();
    }
private:
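    // Index-based pop/push, mirroring freelist_stack::allocate_impl and
    // deallocate_impl above: the first bytes of each free slot are
    // reinterpreted as a tagged_index holding the index of the next free
    // slot, and the head's tag is bumped on every successful pop to defeat
    // ABA.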
    index_t allocate_impl (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            index_t index = old_pool.get_index();
            if (index == null_handle())
                return index;

            T * old_node = NodeStorage::nodes() + index;
            tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);

            tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return old_pool.get_index();
        }
    }

    index_t allocate_impl_unsafe (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        index_t index = old_pool.get_index();
        if (index == null_handle())
            return index;

        T * old_node = NodeStorage::nodes() + index;
        tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);

        tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        return old_pool.get_index();
    }

    template <bool ThreadSafe>
    void deallocate (index_t index)
    {
        if (ThreadSafe)
            deallocate_impl(index);
        else
            deallocate_impl_unsafe(index);
    }

    void deallocate_impl (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            tagged_index new_pool (index, old_pool.get_tag());
            new_pool_node->next.set_index(old_pool.get_index());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        tagged_index new_pool (index, old_pool.get_tag());
        new_pool_node->next.set_index(old_pool.get_index());

        pool_.store(new_pool);
    }

    atomic<tagged_index> pool_;
};
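
// Compile-time selection of the freelist and handle types: array-backed
// (fixed_size_freelist) when the capacity is fixed at compile time or at
// construction, node-backed (freelist_stack) otherwise, together with the
// matching tagged handle representation.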
template <typename T,
          typename Alloc,
          bool IsCompileTimeSized,
          bool IsFixedSize,
          std::size_t Capacity
         >
struct select_freelist
{
    typedef typename mpl::if_c<IsCompileTimeSized,
                               compiletime_sized_freelist_storage<T, Capacity>,
                               runtime_sized_freelist_storage<T, Alloc>
                              >::type fixed_sized_storage_type;

    typedef typename mpl::if_c<IsCompileTimeSized || IsFixedSize,
                               fixed_size_freelist<T, fixed_sized_storage_type>,
                               freelist_stack<T, Alloc>
                              >::type type;
};

template <typename T, bool IsNodeBased>
struct select_tagged_handle
{
    typedef typename mpl::if_c<IsNodeBased,
                               tagged_ptr<T>,
                               tagged_index
                              >::type tagged_handle_type;

    typedef typename mpl::if_c<IsNodeBased,
                               T*,
                               typename tagged_index::index_t
                              >::type handle_type;
};

} /* namespace detail */
} /* namespace lockfree */
} /* namespace boost */
#if defined(_MSC_VER)
#pragma warning(pop)
#endif

#endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */