You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

662 lines
16KB

  1. // <stop_token> -*- C++ -*-
  2. // Copyright (C) 2019-2020 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file include/stop_token
  21. * This is a Standard C++ Library header.
  22. */
  23. #ifndef _GLIBCXX_STOP_TOKEN
  24. #define _GLIBCXX_STOP_TOKEN
  25. #if __cplusplus > 201703L
  26. #include <atomic>
  27. #ifdef _GLIBCXX_HAS_GTHREADS
  28. # define __cpp_lib_jthread 201911L
  29. # include <bits/gthr.h>
  30. # if __has_include(<semaphore>)
  31. # include <semaphore>
  32. # endif
  33. #endif
  34. namespace std _GLIBCXX_VISIBILITY(default)
  35. {
  36. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  37. /// Tag type indicating a stop_source should have no shared-stop-state.
  38. struct nostopstate_t { explicit nostopstate_t() = default; };
  39. inline constexpr nostopstate_t nostopstate{};
  40. class stop_source;
/// Allow testing whether a stop request has been made on a `stop_source`.
/// A stop_token is a copyable handle to a reference-counted shared
/// stop-state created by a stop_source.  A default-constructed token has
/// no state: stop_possible() and stop_requested() are both false.
class stop_token
{
public:
  stop_token() noexcept = default;

  stop_token(const stop_token&) noexcept = default;
  stop_token(stop_token&&) noexcept = default;

  ~stop_token() = default;

  stop_token&
  operator=(const stop_token&) noexcept = default;

  stop_token&
  operator=(stop_token&&) noexcept = default;

  // True if a stop request has already been made, or one could still be
  // made (the shared state exists and is owned by some stop_source).
  [[nodiscard]]
  bool
  stop_possible() const noexcept
  {
    return static_cast<bool>(_M_state) && _M_state->_M_stop_possible();
  }

  // True if a stop request has been made on an associated stop_source.
  [[nodiscard]]
  bool
  stop_requested() const noexcept
  {
    return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();
  }

  void
  swap(stop_token& __rhs) noexcept
  { _M_state.swap(__rhs._M_state); }

  // Two tokens compare equal if they refer to the same stop-state
  // (or if neither has a stop-state).
  [[nodiscard]]
  friend bool
  operator==(const stop_token& __a, const stop_token& __b)
  { return __a._M_state == __b._M_state; }

  friend void
  swap(stop_token& __lhs, stop_token& __rhs) noexcept
  { __lhs.swap(__rhs); }

private:
  friend class stop_source;
  template<typename _Callback>
    friend class stop_callback;

  // Pause briefly; used while spinning on the stop-state's lock bit and
  // in the fallback binary_semaphore below.
  static void
  _S_yield() noexcept
  {
#if defined __i386__ || defined __x86_64__
    __builtin_ia32_pause();
#elif defined _GLIBCXX_USE_SCHED_YIELD
    __gthread_yield();
#endif
  }

#ifndef __cpp_lib_semaphore
  // TODO: replace this with a real implementation of std::binary_semaphore
  // Spin-based stand-in used when <semaphore> is unavailable.  It is used
  // so that ~stop_callback can wait for a callback that is currently
  // executing on another thread to finish (see _M_remove_callback).
  struct binary_semaphore
  {
    explicit binary_semaphore(int __d) : _M_counter(__d > 0) { }

    void release() { _M_counter.fetch_add(1, memory_order::release); }

    // Spin until the counter can be brought from 1 back to 0.
    void acquire()
    {
      int __old = 1;
      while (!_M_counter.compare_exchange_weak(__old, 0,
                                               memory_order::acquire,
                                               memory_order::relaxed))
        {
          __old = 1;
          _S_yield();
        }
    }

    atomic<int> _M_counter;
  };
#endif

  // Type-erased node in the intrusive doubly-linked list of callbacks
  // registered with a stop-state.  stop_callback's stored wrapper type
  // (_Cb_impl) derives from this.
  struct _Stop_cb
  {
    using __cb_type = void(_Stop_cb*) noexcept;
    __cb_type* _M_callback;         // runs the concrete callback object
    _Stop_cb* _M_prev = nullptr;
    _Stop_cb* _M_next = nullptr;
    // If non-null while the callback runs, *_M_destroyed is set to true
    // when ~stop_callback destroys this node during its own invocation.
    bool* _M_destroyed = nullptr;
    // Signalled after the callback has finished running, so that a
    // concurrent ~stop_callback can wait before destroying this node.
    binary_semaphore _M_done{0};

    [[__gnu__::__nonnull__]]
    explicit
    _Stop_cb(__cb_type* __cb)
    : _M_callback(__cb)
    { }

    // Invoke the erased callback.
    void _M_run() noexcept { _M_callback(this); }
  };

  // The heap-allocated shared stop-state.  _M_value packs three fields
  // into a single atomic word:
  //   bit 0 (_S_stop_requested_bit): a stop request has been made;
  //   bit 1 (_S_locked_bit): spin-lock protecting the callback list;
  //   bits 2 and up: count of associated stop_source objects
  //                  (incremented in units of _S_ssrc_counter_inc).
  struct _Stop_state_t
  {
    using value_type = uint32_t;
    static constexpr value_type _S_stop_requested_bit = 1;
    static constexpr value_type _S_locked_bit = 2;
    static constexpr value_type _S_ssrc_counter_inc = 4;

    // Reference count of every stop_source/stop_token/stop_callback that
    // keeps this state alive (starts at 1 for the creating stop_source).
    std::atomic<value_type> _M_owners{1};
    // Packed value described above; starts with one stop_source counted.
    std::atomic<value_type> _M_value{_S_ssrc_counter_inc};
    // Head of the callback list; only accessed while holding the lock bit.
    _Stop_cb* _M_head = nullptr;
#if _GLIBCXX_HAS_GTHREADS
    // Identity of the thread that made the stop request; read by
    // _M_remove_callback (see the data-race note there).
    __gthread_t _M_requester;
#endif

    _Stop_state_t() = default;

    bool
    _M_stop_possible() noexcept
    {
      // true if a stop request has already been made or there are still
      // stop_source objects that would allow one to be made.
      return _M_value.load(memory_order::acquire) & ~_S_locked_bit;
    }

    bool
    _M_stop_requested() noexcept
    {
      return _M_value.load(memory_order::acquire) & _S_stop_requested_bit;
    }

    void
    _M_add_owner() noexcept
    {
      _M_owners.fetch_add(1, memory_order::relaxed);
    }

    void
    _M_release_ownership() noexcept
    {
      // NOTE(review): release-only decrement before `delete this` does not
      // by itself make prior accesses by other owners visible to the
      // deleting thread; the conventional refcount pattern uses acq_rel
      // (or an acquire fence before the delete).  Confirm the intended
      // ordering here.
      if (_M_owners.fetch_sub(1, memory_order::release) == 1)
        delete this;
    }

    // One more stop_source shares this state.
    void
    _M_add_ssrc() noexcept
    {
      _M_value.fetch_add(_S_ssrc_counter_inc, memory_order::relaxed);
    }

    // One fewer stop_source shares this state.
    void
    _M_sub_ssrc() noexcept
    {
      _M_value.fetch_sub(_S_ssrc_counter_inc, memory_order::release);
    }

    // Obtain lock.
    void
    _M_lock() noexcept
    {
      // Can use relaxed loads to get the current value.
      // The successful call to _M_try_lock is an acquire operation.
      auto __old = _M_value.load(memory_order::relaxed);
      while (!_M_try_lock(__old, memory_order::relaxed))
        { }
    }

    // Precondition: calling thread holds the lock.
    void
    _M_unlock() noexcept
    {
      _M_value.fetch_sub(_S_locked_bit, memory_order::release);
    }

    // Make a stop request and run every registered callback on the calling
    // thread.  Returns false (doing nothing) if a stop request had already
    // been made.  The lock is dropped while each callback runs so other
    // threads can still unregister callbacks concurrently.
    bool
    _M_request_stop() noexcept
    {
      // obtain lock and set stop_requested bit
      auto __old = _M_value.load(memory_order::acquire);
      do
        {
          if (__old & _S_stop_requested_bit) // stop request already made
            return false;
        }
      while (!_M_try_lock_and_stop(__old));

#if _GLIBCXX_HAS_GTHREADS
      _M_requester = __gthread_self();
#endif

      while (_M_head)
        {
          bool __last_cb;
          _Stop_cb* __cb = _M_head;
          _M_head = _M_head->_M_next;
          if (_M_head)
            {
              _M_head->_M_prev = nullptr;
              __last_cb = false;
            }
          else
            __last_cb = true;

          // Allow other callbacks to be unregistered while __cb runs.
          _M_unlock();

          bool __destroyed = false;
          __cb->_M_destroyed = &__destroyed;

          // run callback
          __cb->_M_run();

          if (!__destroyed)
            {
              __cb->_M_destroyed = nullptr;
#if _GLIBCXX_HAS_GTHREADS
              // synchronize with destructor of stop_callback that owns *__cb
              __cb->_M_done.release();
#endif
            }

          // Avoid relocking if we already know there are no more callbacks.
          if (__last_cb)
            return true;

          _M_lock();
        }

      _M_unlock();
      return true;
    }

    // Add __cb to the callback list.  Returns true if it was registered,
    // false if it ran synchronously (stop already requested) or was not
    // needed (no stop_source can make a request any more).
    [[__gnu__::__nonnull__]]
    bool
    _M_register_callback(_Stop_cb* __cb) noexcept
    {
      auto __old = _M_value.load(memory_order::acquire);
      do
        {
          if (__old & _S_stop_requested_bit) // stop request already made
            {
              __cb->_M_run(); // run synchronously
              return false;
            }

          if (__old < _S_ssrc_counter_inc) // no stop_source owns *this
            // No need to register callback if no stop request can be made.
            // Returning false also means the stop_callback does not share
            // ownership of this state, but that's not observable.
            return false;
        }
      while (!_M_try_lock(__old));

      // Push onto the front of the list (lock held).
      __cb->_M_next = _M_head;
      if (_M_head)
        {
          _M_head->_M_prev = __cb;
        }
      _M_head = __cb;
      _M_unlock();
      return true;
    }

    // Called by ~stop_callback just before destroying *__cb.
    // If __cb is still in the list it is simply unlinked.  Otherwise it was
    // already removed by _M_request_stop, in which case we either wait for
    // the callback to finish on the requesting thread, or (if we ARE the
    // requesting thread, i.e. the callback is destroying itself) tell
    // _M_request_stop not to touch *__cb again via *_M_destroyed.
    [[__gnu__::__nonnull__]]
    void
    _M_remove_callback(_Stop_cb* __cb)
    {
      _M_lock();
      if (__cb == _M_head)
        {
          _M_head = _M_head->_M_next;
          if (_M_head)
            _M_head->_M_prev = nullptr;
          _M_unlock();
          return;
        }
      else if (__cb->_M_prev)
        {
          __cb->_M_prev->_M_next = __cb->_M_next;
          if (__cb->_M_next)
            __cb->_M_next->_M_prev = __cb->_M_prev;
          _M_unlock();
          return;
        }
      _M_unlock();

      // Callback is not in the list, so must have been removed by a call to
      // _M_request_stop.

#if _GLIBCXX_HAS_GTHREADS
      // Despite appearances there is no data race on _M_requester. The only
      // write to it happens before the callback is removed from the list,
      // and removing it from the list happens before this read.
      if (!__gthread_equal(_M_requester, __gthread_self()))
        {
          // Synchronize with completion of callback.
          __cb->_M_done.acquire();
          // Safe for ~stop_callback to destroy *__cb now.
          return;
        }
#endif
      if (__cb->_M_destroyed)
        *__cb->_M_destroyed = true;
    }

    // Try to obtain the lock.
    // Returns true if the lock is acquired (with memory order acquire).
    // Otherwise, sets __curval = _M_value.load(__failure) and returns false.
    // Might fail spuriously, so must be called in a loop.
    bool
    _M_try_lock(value_type& __curval,
                memory_order __failure = memory_order::acquire) noexcept
    {
      return _M_do_try_lock(__curval, 0, memory_order::acquire, __failure);
    }

    // Try to obtain the lock to make a stop request.
    // Returns true if the lock is acquired and the _S_stop_requested_bit is
    // set (with memory order acq_rel so that other threads see the request).
    // Otherwise, sets __curval = _M_value.load(memory_order::acquire) and
    // returns false.
    // Might fail spuriously, so must be called in a loop.
    bool
    _M_try_lock_and_stop(value_type& __curval) noexcept
    {
      return _M_do_try_lock(__curval, _S_stop_requested_bit,
                            memory_order::acq_rel, memory_order::acquire);
    }

    // Common implementation of the two try-lock operations above: attempt a
    // single CAS that sets _S_locked_bit (plus __newbits); yield and reload
    // __curval if the lock is currently held.
    bool
    _M_do_try_lock(value_type& __curval, value_type __newbits,
                   memory_order __success, memory_order __failure) noexcept
    {
      if (__curval & _S_locked_bit)
        {
          _S_yield();
          __curval = _M_value.load(__failure);
          return false;
        }
      __newbits |= _S_locked_bit;
      return _M_value.compare_exchange_weak(__curval, __curval | __newbits,
                                            __success, __failure);
    }
  };

  // Counted reference to a _Stop_state_t: an intrusive shared-pointer-like
  // handle that manages the _M_owners count of the pointed-to state.
  struct _Stop_state_ref
  {
    _Stop_state_ref() = default;

    // Allocate a fresh stop-state (used by the stop_source constructor).
    explicit
    _Stop_state_ref(const stop_source&)
    : _M_ptr(new _Stop_state_t())
    { }

    _Stop_state_ref(const _Stop_state_ref& __other) noexcept
    : _M_ptr(__other._M_ptr)
    {
      if (_M_ptr)
        _M_ptr->_M_add_owner();
    }

    _Stop_state_ref(_Stop_state_ref&& __other) noexcept
    : _M_ptr(__other._M_ptr)
    {
      __other._M_ptr = nullptr;
    }

    _Stop_state_ref&
    operator=(const _Stop_state_ref& __other) noexcept
    {
      if (auto __ptr = __other._M_ptr; __ptr != _M_ptr)
        {
          // Add the new owner before releasing the old state.
          if (__ptr)
            __ptr->_M_add_owner();
          if (_M_ptr)
            _M_ptr->_M_release_ownership();
          _M_ptr = __ptr;
        }
      return *this;
    }

    _Stop_state_ref&
    operator=(_Stop_state_ref&& __other) noexcept
    {
      _Stop_state_ref(std::move(__other)).swap(*this);
      return *this;
    }

    ~_Stop_state_ref()
    {
      if (_M_ptr)
        _M_ptr->_M_release_ownership();
    }

    void
    swap(_Stop_state_ref& __other) noexcept
    { std::swap(_M_ptr, __other._M_ptr); }

    explicit operator bool() const noexcept { return _M_ptr != nullptr; }

    _Stop_state_t* operator->() const noexcept { return _M_ptr; }

#if __cpp_impl_three_way_comparison >= 201907L
    friend bool
    operator==(const _Stop_state_ref&, const _Stop_state_ref&) = default;
#else
    friend bool
    operator==(const _Stop_state_ref& __lhs, const _Stop_state_ref& __rhs)
    noexcept
    { return __lhs._M_ptr == __rhs._M_ptr; }

    friend bool
    operator!=(const _Stop_state_ref& __lhs, const _Stop_state_ref& __rhs)
    noexcept
    { return __lhs._M_ptr != __rhs._M_ptr; }
#endif

  private:
    _Stop_state_t* _M_ptr = nullptr;
  };

  // The (possibly empty) shared stop-state this token refers to.
  _Stop_state_ref _M_state;

  // Used by stop_source::get_token and stop_callback.
  explicit
  stop_token(const _Stop_state_ref& __state) noexcept
  : _M_state{__state}
  { }
};
  407. /// A type that allows a stop request to be made.
  408. class stop_source
  409. {
  410. public:
  411. stop_source() : _M_state(*this)
  412. { }
  413. explicit stop_source(std::nostopstate_t) noexcept
  414. { }
  415. stop_source(const stop_source& __other) noexcept
  416. : _M_state(__other._M_state)
  417. {
  418. if (_M_state)
  419. _M_state->_M_add_ssrc();
  420. }
  421. stop_source(stop_source&&) noexcept = default;
  422. stop_source&
  423. operator=(const stop_source& __other) noexcept
  424. {
  425. if (_M_state != __other._M_state)
  426. {
  427. stop_source __sink(std::move(*this));
  428. _M_state = __other._M_state;
  429. if (_M_state)
  430. _M_state->_M_add_ssrc();
  431. }
  432. return *this;
  433. }
  434. stop_source&
  435. operator=(stop_source&&) noexcept = default;
  436. ~stop_source()
  437. {
  438. if (_M_state)
  439. _M_state->_M_sub_ssrc();
  440. }
  441. [[nodiscard]]
  442. bool
  443. stop_possible() const noexcept
  444. {
  445. return static_cast<bool>(_M_state);
  446. }
  447. [[nodiscard]]
  448. bool
  449. stop_requested() const noexcept
  450. {
  451. return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();
  452. }
  453. bool
  454. request_stop() const noexcept
  455. {
  456. if (stop_possible())
  457. return _M_state->_M_request_stop();
  458. return false;
  459. }
  460. [[nodiscard]]
  461. stop_token
  462. get_token() const noexcept
  463. {
  464. return stop_token{_M_state};
  465. }
  466. void
  467. swap(stop_source& __other) noexcept
  468. {
  469. _M_state.swap(__other._M_state);
  470. }
  471. [[nodiscard]]
  472. friend bool
  473. operator==(const stop_source& __a, const stop_source& __b) noexcept
  474. {
  475. return __a._M_state == __b._M_state;
  476. }
  477. friend void
  478. swap(stop_source& __lhs, stop_source& __rhs) noexcept
  479. {
  480. __lhs.swap(__rhs);
  481. }
  482. private:
  483. stop_token::_Stop_state_ref _M_state;
  484. };
/// A wrapper for callbacks to be run when a stop request is made.
/// The callback is registered with the token's stop-state on construction
/// and unregistered (or waited for) on destruction.
template<typename _Callback>
  class [[nodiscard]] stop_callback
  {
    static_assert(is_nothrow_destructible_v<_Callback>);
    static_assert(is_invocable_v<_Callback>);

  public:
    using callback_type = _Callback;

    // Register with __token's stop-state (if any).  If a stop request has
    // already been made, the callback runs immediately on this thread
    // inside _M_register_callback.  On successful registration the copy of
    // the token's state reference is moved into _M_state, so this object
    // shares ownership of the stop-state.
    template<typename _Cb,
             enable_if_t<is_constructible_v<_Callback, _Cb>, int> = 0>
      explicit
      stop_callback(const stop_token& __token, _Cb&& __cb)
      noexcept(is_nothrow_constructible_v<_Callback, _Cb>)
      : _M_cb(std::forward<_Cb>(__cb))
      {
        // Copying __token._M_state adds an owner to the state.
        if (auto __state = __token._M_state)
          {
            if (__state->_M_register_callback(&_M_cb))
              _M_state.swap(__state);
          }
      }

    // As above, but steals the rvalue token's state reference instead of
    // copying it (the token is left empty on successful registration).
    template<typename _Cb,
             enable_if_t<is_constructible_v<_Callback, _Cb>, int> = 0>
      explicit
      stop_callback(stop_token&& __token, _Cb&& __cb)
      noexcept(is_nothrow_constructible_v<_Callback, _Cb>)
      : _M_cb(std::forward<_Cb>(__cb))
      {
        // Bind by reference so the swap below transfers the token's state.
        if (auto& __state = __token._M_state)
          {
            if (__state->_M_register_callback(&_M_cb))
              _M_state.swap(__state);
          }
      }

    // Unregister the callback.  _M_remove_callback also handles the cases
    // where the callback is currently running on another thread (waits for
    // it) or is destroying itself from within its own invocation.
    ~stop_callback()
    {
      if (_M_state)
        {
          _M_state->_M_remove_callback(&_M_cb);
        }
    }

    stop_callback(const stop_callback&) = delete;
    stop_callback& operator=(const stop_callback&) = delete;
    stop_callback(stop_callback&&) = delete;
    stop_callback& operator=(stop_callback&&) = delete;

  private:
    // Stores the user's callable and adapts it to the type-erased
    // _Stop_cb node interface used by the stop-state's callback list.
    struct _Cb_impl : stop_token::_Stop_cb
    {
      template<typename _Cb>
        explicit
        _Cb_impl(_Cb&& __cb)
        : _Stop_cb(&_S_execute),
          _M_cb(std::forward<_Cb>(__cb))
        { }

      _Callback _M_cb;

      // Trampoline installed in the _Stop_cb node; recovers the concrete
      // wrapper and invokes the stored callable.  std::forward<_Callback>
      // invokes it as an rvalue unless _Callback is an lvalue reference
      // type.
      [[__gnu__::__nonnull__]]
      static void
      _S_execute(_Stop_cb* __that) noexcept
      {
        _Callback& __cb = static_cast<_Cb_impl*>(__that)->_M_cb;
        std::forward<_Callback>(__cb)();
      }
    };

    _Cb_impl _M_cb;                        // node + stored callable
    stop_token::_Stop_state_ref _M_state;  // non-null while registered
  };
// Class template argument deduction: deduce the stored callback type from
// the constructor's callback argument.
template<typename _Callback>
  stop_callback(stop_token, _Callback) -> stop_callback<_Callback>;
  553. _GLIBCXX_END_NAMESPACE_VERSION
  554. } // namespace
  555. #endif // __cplusplus > 201703L
  556. #endif // _GLIBCXX_STOP_TOKEN