  1. // -*- C++ -*- header.
  2. // Copyright (C) 2008-2020 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file include/atomic
  21. * This is a Standard C++ Library header.
  22. */
  23. // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
  24. // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
  25. #ifndef _GLIBCXX_ATOMIC
  26. #define _GLIBCXX_ATOMIC 1
  27. #pragma GCC system_header
  28. #if __cplusplus < 201103L
  29. # include <bits/c++0x_warning.h>
  30. #else
  31. #include <bits/atomic_base.h>
  32. namespace std _GLIBCXX_VISIBILITY(default)
  33. {
  34. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  35. /**
  36. * @addtogroup atomics
  37. * @{
  38. */
  39. #if __cplusplus >= 201703L
  40. # define __cpp_lib_atomic_is_always_lock_free 201603
  41. #endif
  42. template<typename _Tp>
  43. struct atomic;
  44. /// atomic<bool>
  45. // NB: No operators or fetch-operations for this type.
  46. template<>
  47. struct atomic<bool>
  48. {
  49. using value_type = bool;
  50. private:
  51. __atomic_base<bool> _M_base;
  52. public:
  53. atomic() noexcept = default;
  54. ~atomic() noexcept = default;
  55. atomic(const atomic&) = delete;
  56. atomic& operator=(const atomic&) = delete;
  57. atomic& operator=(const atomic&) volatile = delete;
  58. constexpr atomic(bool __i) noexcept : _M_base(__i) { }
  59. bool
  60. operator=(bool __i) noexcept
  61. { return _M_base.operator=(__i); }
  62. bool
  63. operator=(bool __i) volatile noexcept
  64. { return _M_base.operator=(__i); }
  65. operator bool() const noexcept
  66. { return _M_base.load(); }
  67. operator bool() const volatile noexcept
  68. { return _M_base.load(); }
  69. bool
  70. is_lock_free() const noexcept { return _M_base.is_lock_free(); }
  71. bool
  72. is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
  73. #if __cplusplus >= 201703L
  74. static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
  75. #endif
  76. void
  77. store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
  78. { _M_base.store(__i, __m); }
  79. void
  80. store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
  81. { _M_base.store(__i, __m); }
  82. bool
  83. load(memory_order __m = memory_order_seq_cst) const noexcept
  84. { return _M_base.load(__m); }
  85. bool
  86. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  87. { return _M_base.load(__m); }
  88. bool
  89. exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
  90. { return _M_base.exchange(__i, __m); }
  91. bool
  92. exchange(bool __i,
  93. memory_order __m = memory_order_seq_cst) volatile noexcept
  94. { return _M_base.exchange(__i, __m); }
  95. bool
  96. compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
  97. memory_order __m2) noexcept
  98. { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
  99. bool
  100. compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
  101. memory_order __m2) volatile noexcept
  102. { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
  103. bool
  104. compare_exchange_weak(bool& __i1, bool __i2,
  105. memory_order __m = memory_order_seq_cst) noexcept
  106. { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
  107. bool
  108. compare_exchange_weak(bool& __i1, bool __i2,
  109. memory_order __m = memory_order_seq_cst) volatile noexcept
  110. { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
  111. bool
  112. compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
  113. memory_order __m2) noexcept
  114. { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
  115. bool
  116. compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
  117. memory_order __m2) volatile noexcept
  118. { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
  119. bool
  120. compare_exchange_strong(bool& __i1, bool __i2,
  121. memory_order __m = memory_order_seq_cst) noexcept
  122. { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  123. bool
  124. compare_exchange_strong(bool& __i1, bool __i2,
  125. memory_order __m = memory_order_seq_cst) volatile noexcept
  126. { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  127. };
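  /* Illustrative usage sketch (editorial addition, not part of the libstdc++
     header): atomic<bool> as a release/acquire handshake flag between two
     threads.  The names `ready', `producer' and `consumer' are hypothetical.

       #include <atomic>

       std::atomic<bool> ready(false);

       void producer()
       { ready.store(true, std::memory_order_release); }

       void consumer()
       {
         while (!ready.load(std::memory_order_acquire))
           { } // spin until the producer publishes
       }
  */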
  128. #if __cplusplus <= 201703L
  129. # define _GLIBCXX20_INIT(I)
  130. #else
  131. # define _GLIBCXX20_INIT(I) = I
  132. #endif
  133. /**
  134. * @brief Generic atomic type, primary class template.
  135. *
  136. * @tparam _Tp Type to be made atomic, must be trivially copyable.
  137. */
  138. template<typename _Tp>
  139. struct atomic
  140. {
  141. using value_type = _Tp;
  142. private:
  143. // Align 1/2/4/8/16-byte types to at least their size.
  144. static constexpr int _S_min_alignment
  145. = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
  146. ? 0 : sizeof(_Tp);
  147. static constexpr int _S_alignment
  148. = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
  149. alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
  150. static_assert(__is_trivially_copyable(_Tp),
  151. "std::atomic requires a trivially copyable type");
  152. static_assert(sizeof(_Tp) > 0,
  153. "Incomplete or zero-sized types are not supported");
  154. public:
  155. atomic() = default;
  156. ~atomic() noexcept = default;
  157. atomic(const atomic&) = delete;
  158. atomic& operator=(const atomic&) = delete;
  159. atomic& operator=(const atomic&) volatile = delete;
  160. constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
  161. operator _Tp() const noexcept
  162. { return load(); }
  163. operator _Tp() const volatile noexcept
  164. { return load(); }
  165. _Tp
  166. operator=(_Tp __i) noexcept
  167. { store(__i); return __i; }
  168. _Tp
  169. operator=(_Tp __i) volatile noexcept
  170. { store(__i); return __i; }
  171. bool
  172. is_lock_free() const noexcept
  173. {
  174. // Produce a fake, minimally aligned pointer.
  175. return __atomic_is_lock_free(sizeof(_M_i),
  176. reinterpret_cast<void *>(-_S_alignment));
  177. }
  178. bool
  179. is_lock_free() const volatile noexcept
  180. {
  181. // Produce a fake, minimally aligned pointer.
  182. return __atomic_is_lock_free(sizeof(_M_i),
  183. reinterpret_cast<void *>(-_S_alignment));
  184. }
  185. #if __cplusplus >= 201703L
  186. static constexpr bool is_always_lock_free
  187. = __atomic_always_lock_free(sizeof(_M_i), 0);
  188. #endif
  189. void
  190. store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
  191. { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
  192. void
  193. store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
  194. { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
  195. _Tp
  196. load(memory_order __m = memory_order_seq_cst) const noexcept
  197. {
  198. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  199. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  200. __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
  201. return *__ptr;
  202. }
  203. _Tp
  204. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  205. {
  206. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  207. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  208. __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
  209. return *__ptr;
  210. }
  211. _Tp
  212. exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
  213. {
  214. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  215. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  216. __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
  217. __ptr, int(__m));
  218. return *__ptr;
  219. }
  220. _Tp
  221. exchange(_Tp __i,
  222. memory_order __m = memory_order_seq_cst) volatile noexcept
  223. {
  224. alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
  225. _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
  226. __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
  227. __ptr, int(__m));
  228. return *__ptr;
  229. }
  230. bool
  231. compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
  232. memory_order __f) noexcept
  233. {
  234. return __atomic_compare_exchange(std::__addressof(_M_i),
  235. std::__addressof(__e),
  236. std::__addressof(__i),
  237. true, int(__s), int(__f));
  238. }
  239. bool
  240. compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
  241. memory_order __f) volatile noexcept
  242. {
  243. return __atomic_compare_exchange(std::__addressof(_M_i),
  244. std::__addressof(__e),
  245. std::__addressof(__i),
  246. true, int(__s), int(__f));
  247. }
  248. bool
  249. compare_exchange_weak(_Tp& __e, _Tp __i,
  250. memory_order __m = memory_order_seq_cst) noexcept
  251. { return compare_exchange_weak(__e, __i, __m,
  252. __cmpexch_failure_order(__m)); }
  253. bool
  254. compare_exchange_weak(_Tp& __e, _Tp __i,
  255. memory_order __m = memory_order_seq_cst) volatile noexcept
  256. { return compare_exchange_weak(__e, __i, __m,
  257. __cmpexch_failure_order(__m)); }
  258. bool
  259. compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
  260. memory_order __f) noexcept
  261. {
  262. return __atomic_compare_exchange(std::__addressof(_M_i),
  263. std::__addressof(__e),
  264. std::__addressof(__i),
  265. false, int(__s), int(__f));
  266. }
  267. bool
  268. compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
  269. memory_order __f) volatile noexcept
  270. {
  271. return __atomic_compare_exchange(std::__addressof(_M_i),
  272. std::__addressof(__e),
  273. std::__addressof(__i),
  274. false, int(__s), int(__f));
  275. }
  276. bool
  277. compare_exchange_strong(_Tp& __e, _Tp __i,
  278. memory_order __m = memory_order_seq_cst) noexcept
  279. { return compare_exchange_strong(__e, __i, __m,
  280. __cmpexch_failure_order(__m)); }
  281. bool
  282. compare_exchange_strong(_Tp& __e, _Tp __i,
  283. memory_order __m = memory_order_seq_cst) volatile noexcept
  284. { return compare_exchange_strong(__e, __i, __m,
  285. __cmpexch_failure_order(__m)); }
  286. };
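  /* Illustrative usage sketch (editorial addition): the primary template
     accepts any trivially copyable type, and compare_exchange_weak is the
     usual building block for a read-modify-write loop.  The struct `Point'
     and function `bump_x' are hypothetical.

       #include <atomic>

       struct Point { int x, y; };            // trivially copyable

       std::atomic<Point> p{Point{0, 0}};

       void bump_x()
       {
         Point expected = p.load();
         Point desired;
         do
           {
             desired = expected;
             ++desired.x;
           }
         while (!p.compare_exchange_weak(expected, desired));
       }
  */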
  287. #undef _GLIBCXX20_INIT
  288. /// Partial specialization for pointer types.
  289. template<typename _Tp>
  290. struct atomic<_Tp*>
  291. {
  292. using value_type = _Tp*;
  293. using difference_type = ptrdiff_t;
  294. typedef _Tp* __pointer_type;
  295. typedef __atomic_base<_Tp*> __base_type;
  296. __base_type _M_b;
  297. atomic() noexcept = default;
  298. ~atomic() noexcept = default;
  299. atomic(const atomic&) = delete;
  300. atomic& operator=(const atomic&) = delete;
  301. atomic& operator=(const atomic&) volatile = delete;
  302. constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
  303. operator __pointer_type() const noexcept
  304. { return __pointer_type(_M_b); }
  305. operator __pointer_type() const volatile noexcept
  306. { return __pointer_type(_M_b); }
  307. __pointer_type
  308. operator=(__pointer_type __p) noexcept
  309. { return _M_b.operator=(__p); }
  310. __pointer_type
  311. operator=(__pointer_type __p) volatile noexcept
  312. { return _M_b.operator=(__p); }
  313. __pointer_type
  314. operator++(int) noexcept
  315. {
  316. #if __cplusplus >= 201703L
  317. static_assert( is_object<_Tp>::value, "pointer to object type" );
  318. #endif
  319. return _M_b++;
  320. }
  321. __pointer_type
  322. operator++(int) volatile noexcept
  323. {
  324. #if __cplusplus >= 201703L
  325. static_assert( is_object<_Tp>::value, "pointer to object type" );
  326. #endif
  327. return _M_b++;
  328. }
  329. __pointer_type
  330. operator--(int) noexcept
  331. {
  332. #if __cplusplus >= 201703L
  333. static_assert( is_object<_Tp>::value, "pointer to object type" );
  334. #endif
  335. return _M_b--;
  336. }
  337. __pointer_type
  338. operator--(int) volatile noexcept
  339. {
  340. #if __cplusplus >= 201703L
  341. static_assert( is_object<_Tp>::value, "pointer to object type" );
  342. #endif
  343. return _M_b--;
  344. }
  345. __pointer_type
  346. operator++() noexcept
  347. {
  348. #if __cplusplus >= 201703L
  349. static_assert( is_object<_Tp>::value, "pointer to object type" );
  350. #endif
  351. return ++_M_b;
  352. }
  353. __pointer_type
  354. operator++() volatile noexcept
  355. {
  356. #if __cplusplus >= 201703L
  357. static_assert( is_object<_Tp>::value, "pointer to object type" );
  358. #endif
  359. return ++_M_b;
  360. }
  361. __pointer_type
  362. operator--() noexcept
  363. {
  364. #if __cplusplus >= 201703L
  365. static_assert( is_object<_Tp>::value, "pointer to object type" );
  366. #endif
  367. return --_M_b;
  368. }
  369. __pointer_type
  370. operator--() volatile noexcept
  371. {
  372. #if __cplusplus >= 201703L
  373. static_assert( is_object<_Tp>::value, "pointer to object type" );
  374. #endif
  375. return --_M_b;
  376. }
  377. __pointer_type
  378. operator+=(ptrdiff_t __d) noexcept
  379. {
  380. #if __cplusplus >= 201703L
  381. static_assert( is_object<_Tp>::value, "pointer to object type" );
  382. #endif
  383. return _M_b.operator+=(__d);
  384. }
  385. __pointer_type
  386. operator+=(ptrdiff_t __d) volatile noexcept
  387. {
  388. #if __cplusplus >= 201703L
  389. static_assert( is_object<_Tp>::value, "pointer to object type" );
  390. #endif
  391. return _M_b.operator+=(__d);
  392. }
  393. __pointer_type
  394. operator-=(ptrdiff_t __d) noexcept
  395. {
  396. #if __cplusplus >= 201703L
  397. static_assert( is_object<_Tp>::value, "pointer to object type" );
  398. #endif
  399. return _M_b.operator-=(__d);
  400. }
  401. __pointer_type
  402. operator-=(ptrdiff_t __d) volatile noexcept
  403. {
  404. #if __cplusplus >= 201703L
  405. static_assert( is_object<_Tp>::value, "pointer to object type" );
  406. #endif
  407. return _M_b.operator-=(__d);
  408. }
  409. bool
  410. is_lock_free() const noexcept
  411. { return _M_b.is_lock_free(); }
  412. bool
  413. is_lock_free() const volatile noexcept
  414. { return _M_b.is_lock_free(); }
  415. #if __cplusplus >= 201703L
  416. static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
  417. #endif
  418. void
  419. store(__pointer_type __p,
  420. memory_order __m = memory_order_seq_cst) noexcept
  421. { return _M_b.store(__p, __m); }
  422. void
  423. store(__pointer_type __p,
  424. memory_order __m = memory_order_seq_cst) volatile noexcept
  425. { return _M_b.store(__p, __m); }
  426. __pointer_type
  427. load(memory_order __m = memory_order_seq_cst) const noexcept
  428. { return _M_b.load(__m); }
  429. __pointer_type
  430. load(memory_order __m = memory_order_seq_cst) const volatile noexcept
  431. { return _M_b.load(__m); }
  432. __pointer_type
  433. exchange(__pointer_type __p,
  434. memory_order __m = memory_order_seq_cst) noexcept
  435. { return _M_b.exchange(__p, __m); }
  436. __pointer_type
  437. exchange(__pointer_type __p,
  438. memory_order __m = memory_order_seq_cst) volatile noexcept
  439. { return _M_b.exchange(__p, __m); }
  440. bool
  441. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  442. memory_order __m1, memory_order __m2) noexcept
  443. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  444. bool
  445. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  446. memory_order __m1,
  447. memory_order __m2) volatile noexcept
  448. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  449. bool
  450. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  451. memory_order __m = memory_order_seq_cst) noexcept
  452. {
  453. return compare_exchange_weak(__p1, __p2, __m,
  454. __cmpexch_failure_order(__m));
  455. }
  456. bool
  457. compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
  458. memory_order __m = memory_order_seq_cst) volatile noexcept
  459. {
  460. return compare_exchange_weak(__p1, __p2, __m,
  461. __cmpexch_failure_order(__m));
  462. }
  463. bool
  464. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  465. memory_order __m1, memory_order __m2) noexcept
  466. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  467. bool
  468. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  469. memory_order __m1,
  470. memory_order __m2) volatile noexcept
  471. { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
  472. bool
  473. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  474. memory_order __m = memory_order_seq_cst) noexcept
  475. {
  476. return _M_b.compare_exchange_strong(__p1, __p2, __m,
  477. __cmpexch_failure_order(__m));
  478. }
  479. bool
  480. compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
  481. memory_order __m = memory_order_seq_cst) volatile noexcept
  482. {
  483. return _M_b.compare_exchange_strong(__p1, __p2, __m,
  484. __cmpexch_failure_order(__m));
  485. }
  486. __pointer_type
  487. fetch_add(ptrdiff_t __d,
  488. memory_order __m = memory_order_seq_cst) noexcept
  489. {
  490. #if __cplusplus >= 201703L
  491. static_assert( is_object<_Tp>::value, "pointer to object type" );
  492. #endif
  493. return _M_b.fetch_add(__d, __m);
  494. }
  495. __pointer_type
  496. fetch_add(ptrdiff_t __d,
  497. memory_order __m = memory_order_seq_cst) volatile noexcept
  498. {
  499. #if __cplusplus >= 201703L
  500. static_assert( is_object<_Tp>::value, "pointer to object type" );
  501. #endif
  502. return _M_b.fetch_add(__d, __m);
  503. }
  504. __pointer_type
  505. fetch_sub(ptrdiff_t __d,
  506. memory_order __m = memory_order_seq_cst) noexcept
  507. {
  508. #if __cplusplus >= 201703L
  509. static_assert( is_object<_Tp>::value, "pointer to object type" );
  510. #endif
  511. return _M_b.fetch_sub(__d, __m);
  512. }
  513. __pointer_type
  514. fetch_sub(ptrdiff_t __d,
  515. memory_order __m = memory_order_seq_cst) volatile noexcept
  516. {
  517. #if __cplusplus >= 201703L
  518. static_assert( is_object<_Tp>::value, "pointer to object type" );
  519. #endif
  520. return _M_b.fetch_sub(__d, __m);
  521. }
  522. };
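  /* Illustrative usage sketch (editorial addition): the pointer partial
     specialization does fetch_add/fetch_sub in units of the pointee type,
     so advancing by 1 moves the stored pointer by sizeof(int) bytes here.
     The array `buf', the variable `cursor' and `claim_slot' are hypothetical.

       #include <atomic>

       int buf[16];
       std::atomic<int*> cursor(buf);

       int* claim_slot()
       { return cursor.fetch_add(1, std::memory_order_relaxed); }
  */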
  523. /// Explicit specialization for char.
  524. template<>
  525. struct atomic<char> : __atomic_base<char>
  526. {
  527. typedef char __integral_type;
  528. typedef __atomic_base<char> __base_type;
  529. atomic() noexcept = default;
  530. ~atomic() noexcept = default;
  531. atomic(const atomic&) = delete;
  532. atomic& operator=(const atomic&) = delete;
  533. atomic& operator=(const atomic&) volatile = delete;
  534. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  535. using __base_type::operator __integral_type;
  536. using __base_type::operator=;
  537. #if __cplusplus >= 201703L
  538. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  539. #endif
  540. };
  541. /// Explicit specialization for signed char.
  542. template<>
  543. struct atomic<signed char> : __atomic_base<signed char>
  544. {
  545. typedef signed char __integral_type;
  546. typedef __atomic_base<signed char> __base_type;
  547. atomic() noexcept = default;
  548. ~atomic() noexcept = default;
  549. atomic(const atomic&) = delete;
  550. atomic& operator=(const atomic&) = delete;
  551. atomic& operator=(const atomic&) volatile = delete;
  552. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  553. using __base_type::operator __integral_type;
  554. using __base_type::operator=;
  555. #if __cplusplus >= 201703L
  556. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  557. #endif
  558. };
  559. /// Explicit specialization for unsigned char.
  560. template<>
  561. struct atomic<unsigned char> : __atomic_base<unsigned char>
  562. {
  563. typedef unsigned char __integral_type;
  564. typedef __atomic_base<unsigned char> __base_type;
  565. atomic() noexcept = default;
  566. ~atomic() noexcept = default;
  567. atomic(const atomic&) = delete;
  568. atomic& operator=(const atomic&) = delete;
  569. atomic& operator=(const atomic&) volatile = delete;
  570. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  571. using __base_type::operator __integral_type;
  572. using __base_type::operator=;
  573. #if __cplusplus >= 201703L
  574. static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
  575. #endif
  576. };
  577. /// Explicit specialization for short.
  578. template<>
  579. struct atomic<short> : __atomic_base<short>
  580. {
  581. typedef short __integral_type;
  582. typedef __atomic_base<short> __base_type;
  583. atomic() noexcept = default;
  584. ~atomic() noexcept = default;
  585. atomic(const atomic&) = delete;
  586. atomic& operator=(const atomic&) = delete;
  587. atomic& operator=(const atomic&) volatile = delete;
  588. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  589. using __base_type::operator __integral_type;
  590. using __base_type::operator=;
  591. #if __cplusplus >= 201703L
  592. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  593. #endif
  594. };
  595. /// Explicit specialization for unsigned short.
  596. template<>
  597. struct atomic<unsigned short> : __atomic_base<unsigned short>
  598. {
  599. typedef unsigned short __integral_type;
  600. typedef __atomic_base<unsigned short> __base_type;
  601. atomic() noexcept = default;
  602. ~atomic() noexcept = default;
  603. atomic(const atomic&) = delete;
  604. atomic& operator=(const atomic&) = delete;
  605. atomic& operator=(const atomic&) volatile = delete;
  606. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  607. using __base_type::operator __integral_type;
  608. using __base_type::operator=;
  609. #if __cplusplus >= 201703L
  610. static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
  611. #endif
  612. };
  613. /// Explicit specialization for int.
  614. template<>
  615. struct atomic<int> : __atomic_base<int>
  616. {
  617. typedef int __integral_type;
  618. typedef __atomic_base<int> __base_type;
  619. atomic() noexcept = default;
  620. ~atomic() noexcept = default;
  621. atomic(const atomic&) = delete;
  622. atomic& operator=(const atomic&) = delete;
  623. atomic& operator=(const atomic&) volatile = delete;
  624. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  625. using __base_type::operator __integral_type;
  626. using __base_type::operator=;
  627. #if __cplusplus >= 201703L
  628. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  629. #endif
  630. };
  631. /// Explicit specialization for unsigned int.
  632. template<>
  633. struct atomic<unsigned int> : __atomic_base<unsigned int>
  634. {
  635. typedef unsigned int __integral_type;
  636. typedef __atomic_base<unsigned int> __base_type;
  637. atomic() noexcept = default;
  638. ~atomic() noexcept = default;
  639. atomic(const atomic&) = delete;
  640. atomic& operator=(const atomic&) = delete;
  641. atomic& operator=(const atomic&) volatile = delete;
  642. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  643. using __base_type::operator __integral_type;
  644. using __base_type::operator=;
  645. #if __cplusplus >= 201703L
  646. static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
  647. #endif
  648. };
  649. /// Explicit specialization for long.
  650. template<>
  651. struct atomic<long> : __atomic_base<long>
  652. {
  653. typedef long __integral_type;
  654. typedef __atomic_base<long> __base_type;
  655. atomic() noexcept = default;
  656. ~atomic() noexcept = default;
  657. atomic(const atomic&) = delete;
  658. atomic& operator=(const atomic&) = delete;
  659. atomic& operator=(const atomic&) volatile = delete;
  660. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  661. using __base_type::operator __integral_type;
  662. using __base_type::operator=;
  663. #if __cplusplus >= 201703L
  664. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  665. #endif
  666. };
  667. /// Explicit specialization for unsigned long.
  668. template<>
  669. struct atomic<unsigned long> : __atomic_base<unsigned long>
  670. {
  671. typedef unsigned long __integral_type;
  672. typedef __atomic_base<unsigned long> __base_type;
  673. atomic() noexcept = default;
  674. ~atomic() noexcept = default;
  675. atomic(const atomic&) = delete;
  676. atomic& operator=(const atomic&) = delete;
  677. atomic& operator=(const atomic&) volatile = delete;
  678. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  679. using __base_type::operator __integral_type;
  680. using __base_type::operator=;
  681. #if __cplusplus >= 201703L
  682. static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
  683. #endif
  684. };
  685. /// Explicit specialization for long long.
  686. template<>
  687. struct atomic<long long> : __atomic_base<long long>
  688. {
  689. typedef long long __integral_type;
  690. typedef __atomic_base<long long> __base_type;
  691. atomic() noexcept = default;
  692. ~atomic() noexcept = default;
  693. atomic(const atomic&) = delete;
  694. atomic& operator=(const atomic&) = delete;
  695. atomic& operator=(const atomic&) volatile = delete;
  696. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  697. using __base_type::operator __integral_type;
  698. using __base_type::operator=;
  699. #if __cplusplus >= 201703L
  700. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  701. #endif
  702. };
  703. /// Explicit specialization for unsigned long long.
  704. template<>
  705. struct atomic<unsigned long long> : __atomic_base<unsigned long long>
  706. {
  707. typedef unsigned long long __integral_type;
  708. typedef __atomic_base<unsigned long long> __base_type;
  709. atomic() noexcept = default;
  710. ~atomic() noexcept = default;
  711. atomic(const atomic&) = delete;
  712. atomic& operator=(const atomic&) = delete;
  713. atomic& operator=(const atomic&) volatile = delete;
  714. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  715. using __base_type::operator __integral_type;
  716. using __base_type::operator=;
  717. #if __cplusplus >= 201703L
  718. static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
  719. #endif
  720. };
  721. /// Explicit specialization for wchar_t.
  722. template<>
  723. struct atomic<wchar_t> : __atomic_base<wchar_t>
  724. {
  725. typedef wchar_t __integral_type;
  726. typedef __atomic_base<wchar_t> __base_type;
  727. atomic() noexcept = default;
  728. ~atomic() noexcept = default;
  729. atomic(const atomic&) = delete;
  730. atomic& operator=(const atomic&) = delete;
  731. atomic& operator=(const atomic&) volatile = delete;
  732. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  733. using __base_type::operator __integral_type;
  734. using __base_type::operator=;
  735. #if __cplusplus >= 201703L
  736. static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
  737. #endif
  738. };
  739. #ifdef _GLIBCXX_USE_CHAR8_T
  740. /// Explicit specialization for char8_t.
  741. template<>
  742. struct atomic<char8_t> : __atomic_base<char8_t>
  743. {
  744. typedef char8_t __integral_type;
  745. typedef __atomic_base<char8_t> __base_type;
  746. atomic() noexcept = default;
  747. ~atomic() noexcept = default;
  748. atomic(const atomic&) = delete;
  749. atomic& operator=(const atomic&) = delete;
  750. atomic& operator=(const atomic&) volatile = delete;
  751. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  752. using __base_type::operator __integral_type;
  753. using __base_type::operator=;
  754. #if __cplusplus > 201402L
  755. static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
  756. #endif
  757. };
  758. #endif
  759. /// Explicit specialization for char16_t.
  760. template<>
  761. struct atomic<char16_t> : __atomic_base<char16_t>
  762. {
  763. typedef char16_t __integral_type;
  764. typedef __atomic_base<char16_t> __base_type;
  765. atomic() noexcept = default;
  766. ~atomic() noexcept = default;
  767. atomic(const atomic&) = delete;
  768. atomic& operator=(const atomic&) = delete;
  769. atomic& operator=(const atomic&) volatile = delete;
  770. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  771. using __base_type::operator __integral_type;
  772. using __base_type::operator=;
  773. #if __cplusplus >= 201703L
  774. static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
  775. #endif
  776. };
  777. /// Explicit specialization for char32_t.
  778. template<>
  779. struct atomic<char32_t> : __atomic_base<char32_t>
  780. {
  781. typedef char32_t __integral_type;
  782. typedef __atomic_base<char32_t> __base_type;
  783. atomic() noexcept = default;
  784. ~atomic() noexcept = default;
  785. atomic(const atomic&) = delete;
  786. atomic& operator=(const atomic&) = delete;
  787. atomic& operator=(const atomic&) volatile = delete;
  788. constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
  789. using __base_type::operator __integral_type;
  790. using __base_type::operator=;
  791. #if __cplusplus >= 201703L
  792. static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
  793. #endif
  794. };
  795. /// atomic_bool
  796. typedef atomic<bool> atomic_bool;
  797. /// atomic_char
  798. typedef atomic<char> atomic_char;
  799. /// atomic_schar
  800. typedef atomic<signed char> atomic_schar;
  801. /// atomic_uchar
  802. typedef atomic<unsigned char> atomic_uchar;
  803. /// atomic_short
  804. typedef atomic<short> atomic_short;
  805. /// atomic_ushort
  806. typedef atomic<unsigned short> atomic_ushort;
  807. /// atomic_int
  808. typedef atomic<int> atomic_int;
  809. /// atomic_uint
  810. typedef atomic<unsigned int> atomic_uint;
  811. /// atomic_long
  812. typedef atomic<long> atomic_long;
  813. /// atomic_ulong
  814. typedef atomic<unsigned long> atomic_ulong;
  815. /// atomic_llong
  816. typedef atomic<long long> atomic_llong;
  817. /// atomic_ullong
  818. typedef atomic<unsigned long long> atomic_ullong;
  819. /// atomic_wchar_t
  820. typedef atomic<wchar_t> atomic_wchar_t;
  821. #ifdef _GLIBCXX_USE_CHAR8_T
  822. /// atomic_char8_t
  823. typedef atomic<char8_t> atomic_char8_t;
  824. #endif
  825. /// atomic_char16_t
  826. typedef atomic<char16_t> atomic_char16_t;
  827. /// atomic_char32_t
  828. typedef atomic<char32_t> atomic_char32_t;
  829. #ifdef _GLIBCXX_USE_C99_STDINT_TR1
  830. // _GLIBCXX_RESOLVE_LIB_DEFECTS
  831. // 2441. Exact-width atomic typedefs should be provided
  832. /// atomic_int8_t
  833. typedef atomic<int8_t> atomic_int8_t;
  834. /// atomic_uint8_t
  835. typedef atomic<uint8_t> atomic_uint8_t;
  836. /// atomic_int16_t
  837. typedef atomic<int16_t> atomic_int16_t;
  838. /// atomic_uint16_t
  839. typedef atomic<uint16_t> atomic_uint16_t;
  840. /// atomic_int32_t
  841. typedef atomic<int32_t> atomic_int32_t;
  842. /// atomic_uint32_t
  843. typedef atomic<uint32_t> atomic_uint32_t;
  844. /// atomic_int64_t
  845. typedef atomic<int64_t> atomic_int64_t;
  846. /// atomic_uint64_t
  847. typedef atomic<uint64_t> atomic_uint64_t;
  848. /// atomic_int_least8_t
  849. typedef atomic<int_least8_t> atomic_int_least8_t;
  850. /// atomic_uint_least8_t
  851. typedef atomic<uint_least8_t> atomic_uint_least8_t;
  852. /// atomic_int_least16_t
  853. typedef atomic<int_least16_t> atomic_int_least16_t;
  854. /// atomic_uint_least16_t
  855. typedef atomic<uint_least16_t> atomic_uint_least16_t;
  856. /// atomic_int_least32_t
  857. typedef atomic<int_least32_t> atomic_int_least32_t;
  858. /// atomic_uint_least32_t
  859. typedef atomic<uint_least32_t> atomic_uint_least32_t;
  860. /// atomic_int_least64_t
  861. typedef atomic<int_least64_t> atomic_int_least64_t;
  862. /// atomic_uint_least64_t
  863. typedef atomic<uint_least64_t> atomic_uint_least64_t;
  864. /// atomic_int_fast8_t
  865. typedef atomic<int_fast8_t> atomic_int_fast8_t;
  866. /// atomic_uint_fast8_t
  867. typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
  868. /// atomic_int_fast16_t
  869. typedef atomic<int_fast16_t> atomic_int_fast16_t;
  870. /// atomic_uint_fast16_t
  871. typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
  872. /// atomic_int_fast32_t
  873. typedef atomic<int_fast32_t> atomic_int_fast32_t;
  874. /// atomic_uint_fast32_t
  875. typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
  876. /// atomic_int_fast64_t
  877. typedef atomic<int_fast64_t> atomic_int_fast64_t;
  878. /// atomic_uint_fast64_t
  879. typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
  880. #endif
  881. /// atomic_intptr_t
  882. typedef atomic<intptr_t> atomic_intptr_t;
  883. /// atomic_uintptr_t
  884. typedef atomic<uintptr_t> atomic_uintptr_t;
  885. /// atomic_size_t
  886. typedef atomic<size_t> atomic_size_t;
  887. /// atomic_ptrdiff_t
  888. typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
  889. #ifdef _GLIBCXX_USE_C99_STDINT_TR1
  890. /// atomic_intmax_t
  891. typedef atomic<intmax_t> atomic_intmax_t;
  892. /// atomic_uintmax_t
  893. typedef atomic<uintmax_t> atomic_uintmax_t;
  894. #endif
  895. // Function definitions, atomic_flag operations.
  896. inline bool
  897. atomic_flag_test_and_set_explicit(atomic_flag* __a,
  898. memory_order __m) noexcept
  899. { return __a->test_and_set(__m); }
  900. inline bool
  901. atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
  902. memory_order __m) noexcept
  903. { return __a->test_and_set(__m); }
  904. inline void
  905. atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  906. { __a->clear(__m); }
  907. inline void
  908. atomic_flag_clear_explicit(volatile atomic_flag* __a,
  909. memory_order __m) noexcept
  910. { __a->clear(__m); }
  911. inline bool
  912. atomic_flag_test_and_set(atomic_flag* __a) noexcept
  913. { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
  914. inline bool
  915. atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  916. { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
  917. inline void
  918. atomic_flag_clear(atomic_flag* __a) noexcept
  919. { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
  920. inline void
  921. atomic_flag_clear(volatile atomic_flag* __a) noexcept
  922. { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
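  /* Illustrative usage sketch (editorial addition): a minimal spinlock built
     on the atomic_flag free functions defined above.  The class name
     `spinlock' is hypothetical.

       #include <atomic>

       class spinlock
       {
         std::atomic_flag _flag = ATOMIC_FLAG_INIT;
       public:
         void lock()
         {
           while (std::atomic_flag_test_and_set_explicit(
                    &_flag, std::memory_order_acquire))
             { } // spin until the holder clears the flag
         }

         void unlock()
         { std::atomic_flag_clear_explicit(&_flag, std::memory_order_release); }
       };
  */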
  923. template<typename _Tp>
  924. using __atomic_val_t = typename atomic<_Tp>::value_type;
  925. template<typename _Tp>
  926. using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  927. // [atomics.nonmembers] Non-member functions.
  928. // Function templates generally applicable to atomic types.
  929. template<typename _ITp>
  930. inline bool
  931. atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
  932. { return __a->is_lock_free(); }
  933. template<typename _ITp>
  934. inline bool
  935. atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
  936. { return __a->is_lock_free(); }
  937. template<typename _ITp>
  938. inline void
  939. atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  940. { __a->store(__i, memory_order_relaxed); }
  941. template<typename _ITp>
  942. inline void
  943. atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  944. { __a->store(__i, memory_order_relaxed); }
  945. template<typename _ITp>
  946. inline void
  947. atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  948. memory_order __m) noexcept
  949. { __a->store(__i, __m); }
  950. template<typename _ITp>
  951. inline void
  952. atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  953. memory_order __m) noexcept
  954. { __a->store(__i, __m); }
  955. template<typename _ITp>
  956. inline _ITp
  957. atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
  958. { return __a->load(__m); }
  959. template<typename _ITp>
  960. inline _ITp
  961. atomic_load_explicit(const volatile atomic<_ITp>* __a,
  962. memory_order __m) noexcept
  963. { return __a->load(__m); }
  964. template<typename _ITp>
  965. inline _ITp
  966. atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
  967. memory_order __m) noexcept
  968. { return __a->exchange(__i, __m); }
  969. template<typename _ITp>
  970. inline _ITp
  971. atomic_exchange_explicit(volatile atomic<_ITp>* __a,
  972. __atomic_val_t<_ITp> __i,
  973. memory_order __m) noexcept
  974. { return __a->exchange(__i, __m); }
  975. template<typename _ITp>
  976. inline bool
  977. atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
  978. __atomic_val_t<_ITp>* __i1,
  979. __atomic_val_t<_ITp> __i2,
  980. memory_order __m1,
  981. memory_order __m2) noexcept
  982. { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
  983. template<typename _ITp>
  984. inline bool
  985. atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
  986. __atomic_val_t<_ITp>* __i1,
  987. __atomic_val_t<_ITp> __i2,
  988. memory_order __m1,
  989. memory_order __m2) noexcept
  990. { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
  991. template<typename _ITp>
  992. inline bool
  993. atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
  994. __atomic_val_t<_ITp>* __i1,
  995. __atomic_val_t<_ITp> __i2,
  996. memory_order __m1,
  997. memory_order __m2) noexcept
  998. { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  999. template<typename _ITp>
  1000. inline bool
  1001. atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
  1002. __atomic_val_t<_ITp>* __i1,
  1003. __atomic_val_t<_ITp> __i2,
  1004. memory_order __m1,
  1005. memory_order __m2) noexcept
  1006. { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  1007. template<typename _ITp>
  1008. inline void
  1009. atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1010. { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
  1011. template<typename _ITp>
  1012. inline void
  1013. atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1014. { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
  1015. template<typename _ITp>
  1016. inline _ITp
  1017. atomic_load(const atomic<_ITp>* __a) noexcept
  1018. { return atomic_load_explicit(__a, memory_order_seq_cst); }
  1019. template<typename _ITp>
  1020. inline _ITp
  1021. atomic_load(const volatile atomic<_ITp>* __a) noexcept
  1022. { return atomic_load_explicit(__a, memory_order_seq_cst); }
  1023. template<typename _ITp>
  1024. inline _ITp
  1025. atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
  1026. { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  1027. template<typename _ITp>
  1028. inline _ITp
  1029. atomic_exchange(volatile atomic<_ITp>* __a,
  1030. __atomic_val_t<_ITp> __i) noexcept
  1031. { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  1032. template<typename _ITp>
  1033. inline bool
  1034. atomic_compare_exchange_weak(atomic<_ITp>* __a,
  1035. __atomic_val_t<_ITp>* __i1,
  1036. __atomic_val_t<_ITp> __i2) noexcept
  1037. {
  1038. return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
  1039. memory_order_seq_cst,
  1040. memory_order_seq_cst);
  1041. }
  1042. template<typename _ITp>
  1043. inline bool
  1044. atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
  1045. __atomic_val_t<_ITp>* __i1,
  1046. __atomic_val_t<_ITp> __i2) noexcept
  1047. {
  1048. return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
  1049. memory_order_seq_cst,
  1050. memory_order_seq_cst);
  1051. }
  1052. template<typename _ITp>
  1053. inline bool
  1054. atomic_compare_exchange_strong(atomic<_ITp>* __a,
  1055. __atomic_val_t<_ITp>* __i1,
  1056. __atomic_val_t<_ITp> __i2) noexcept
  1057. {
  1058. return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
  1059. memory_order_seq_cst,
  1060. memory_order_seq_cst);
  1061. }
  1062. template<typename _ITp>
  1063. inline bool
  1064. atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
  1065. __atomic_val_t<_ITp>* __i1,
  1066. __atomic_val_t<_ITp> __i2) noexcept
  1067. {
  1068. return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
  1069. memory_order_seq_cst,
  1070. memory_order_seq_cst);
  1071. }
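  /* Illustrative usage sketch (editorial addition): the non-member functions
     mirror the member interface and default to memory_order_seq_cst.  The
     variable `counter' and function `reset_if_ten' are hypothetical.

       #include <atomic>

       std::atomic<int> counter(0);

       void reset_if_ten()
       {
         int expected = 10;
         std::atomic_compare_exchange_strong(&counter, &expected, 0);
       }
  */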
  1072. // Function templates for atomic_integral and atomic_pointer operations only.
  1073. // Some operations (and, or, xor) are only available for atomic integrals,
  1074. // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
  1075. template<typename _ITp>
  1076. inline _ITp
  1077. atomic_fetch_add_explicit(atomic<_ITp>* __a,
  1078. __atomic_diff_t<_ITp> __i,
  1079. memory_order __m) noexcept
  1080. { return __a->fetch_add(__i, __m); }
  1081. template<typename _ITp>
  1082. inline _ITp
  1083. atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
  1084. __atomic_diff_t<_ITp> __i,
  1085. memory_order __m) noexcept
  1086. { return __a->fetch_add(__i, __m); }
  1087. template<typename _ITp>
  1088. inline _ITp
  1089. atomic_fetch_sub_explicit(atomic<_ITp>* __a,
  1090. __atomic_diff_t<_ITp> __i,
  1091. memory_order __m) noexcept
  1092. { return __a->fetch_sub(__i, __m); }
  1093. template<typename _ITp>
  1094. inline _ITp
  1095. atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
  1096. __atomic_diff_t<_ITp> __i,
  1097. memory_order __m) noexcept
  1098. { return __a->fetch_sub(__i, __m); }
  1099. template<typename _ITp>
  1100. inline _ITp
  1101. atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
  1102. __atomic_val_t<_ITp> __i,
  1103. memory_order __m) noexcept
  1104. { return __a->fetch_and(__i, __m); }
  1105. template<typename _ITp>
  1106. inline _ITp
  1107. atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
  1108. __atomic_val_t<_ITp> __i,
  1109. memory_order __m) noexcept
  1110. { return __a->fetch_and(__i, __m); }
  1111. template<typename _ITp>
  1112. inline _ITp
  1113. atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
  1114. __atomic_val_t<_ITp> __i,
  1115. memory_order __m) noexcept
  1116. { return __a->fetch_or(__i, __m); }
  1117. template<typename _ITp>
  1118. inline _ITp
  1119. atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
  1120. __atomic_val_t<_ITp> __i,
  1121. memory_order __m) noexcept
  1122. { return __a->fetch_or(__i, __m); }
  1123. template<typename _ITp>
  1124. inline _ITp
  1125. atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
  1126. __atomic_val_t<_ITp> __i,
  1127. memory_order __m) noexcept
  1128. { return __a->fetch_xor(__i, __m); }
  1129. template<typename _ITp>
  1130. inline _ITp
  1131. atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
  1132. __atomic_val_t<_ITp> __i,
  1133. memory_order __m) noexcept
  1134. { return __a->fetch_xor(__i, __m); }
  1135. template<typename _ITp>
  1136. inline _ITp
  1137. atomic_fetch_add(atomic<_ITp>* __a,
  1138. __atomic_diff_t<_ITp> __i) noexcept
  1139. { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
  1140. template<typename _ITp>
  1141. inline _ITp
  1142. atomic_fetch_add(volatile atomic<_ITp>* __a,
  1143. __atomic_diff_t<_ITp> __i) noexcept
  1144. { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
  1145. template<typename _ITp>
  1146. inline _ITp
  1147. atomic_fetch_sub(atomic<_ITp>* __a,
  1148. __atomic_diff_t<_ITp> __i) noexcept
  1149. { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
  1150. template<typename _ITp>
  1151. inline _ITp
  1152. atomic_fetch_sub(volatile atomic<_ITp>* __a,
  1153. __atomic_diff_t<_ITp> __i) noexcept
  1154. { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
  1155. template<typename _ITp>
  1156. inline _ITp
  1157. atomic_fetch_and(__atomic_base<_ITp>* __a,
  1158. __atomic_val_t<_ITp> __i) noexcept
  1159. { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
  1160. template<typename _ITp>
  1161. inline _ITp
  1162. atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
  1163. __atomic_val_t<_ITp> __i) noexcept
  1164. { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
  1165. template<typename _ITp>
  1166. inline _ITp
  1167. atomic_fetch_or(__atomic_base<_ITp>* __a,
  1168. __atomic_val_t<_ITp> __i) noexcept
  1169. { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
  1170. template<typename _ITp>
  1171. inline _ITp
  1172. atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
  1173. __atomic_val_t<_ITp> __i) noexcept
  1174. { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
  1175. template<typename _ITp>
  1176. inline _ITp
  1177. atomic_fetch_xor(__atomic_base<_ITp>* __a,
  1178. __atomic_val_t<_ITp> __i) noexcept
  1179. { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
  1180. template<typename _ITp>
  1181. inline _ITp
  1182. atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
  1183. __atomic_val_t<_ITp> __i) noexcept
  1184. { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
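  /* Illustrative usage sketch (editorial addition): atomic_fetch_add/sub are
     declared for atomic<_ITp>, while the and/or/xor forms take
     __atomic_base<_ITp>* and so apply only to integral atomics, as noted in
     the comment above.  The variable `flags' and the two helpers are
     hypothetical.

       #include <atomic>

       std::atomic<unsigned> flags(0);

       unsigned set_bits(unsigned mask)
       { return std::atomic_fetch_or(&flags, mask); }

       unsigned clear_bits(unsigned mask)
       { return std::atomic_fetch_and(&flags, ~mask); }
  */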
  1185. #if __cplusplus > 201703L
  1186. #define __cpp_lib_atomic_float 201711L
  1187. template<>
  1188. struct atomic<float> : __atomic_float<float>
  1189. {
  1190. atomic() noexcept = default;
  1191. constexpr
  1192. atomic(float __fp) noexcept : __atomic_float<float>(__fp)
  1193. { }
  1194. atomic& operator=(const atomic&) volatile = delete;
  1195. atomic& operator=(const atomic&) = delete;
  1196. using __atomic_float<float>::operator=;
  1197. };
  1198. template<>
  1199. struct atomic<double> : __atomic_float<double>
  1200. {
  1201. atomic() noexcept = default;
  1202. constexpr
  1203. atomic(double __fp) noexcept : __atomic_float<double>(__fp)
  1204. { }
  1205. atomic& operator=(const atomic&) volatile = delete;
  1206. atomic& operator=(const atomic&) = delete;
  1207. using __atomic_float<double>::operator=;
  1208. };
  1209. template<>
  1210. struct atomic<long double> : __atomic_float<long double>
  1211. {
  1212. atomic() noexcept = default;
  1213. constexpr
  1214. atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
  1215. { }
  1216. atomic& operator=(const atomic&) volatile = delete;
  1217. atomic& operator=(const atomic&) = delete;
  1218. using __atomic_float<long double>::operator=;
  1219. };
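  /* Illustrative usage sketch (editorial addition): with __cpp_lib_atomic_float
     (C++20) the floating-point specializations gain fetch_add/fetch_sub.  The
     variable `total' and function `accumulate' are hypothetical.

       #include <atomic>

       std::atomic<double> total(0.0);

       void accumulate(double x)
       { total.fetch_add(x, std::memory_order_relaxed); }
  */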
  1220. #define __cpp_lib_atomic_ref 201806L
  1221. /// Class template to provide atomic operations on a non-atomic variable.
  1222. template<typename _Tp>
  1223. struct atomic_ref : __atomic_ref<_Tp>
  1224. {
  1225. explicit
  1226. atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
  1227. { }
  1228. atomic_ref& operator=(const atomic_ref&) = delete;
  1229. atomic_ref(const atomic_ref&) = default;
  1230. using __atomic_ref<_Tp>::operator=;
  1231. };
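  /* Illustrative usage sketch (editorial addition): atomic_ref applies atomic
     operations to an ordinary object; while any atomic_ref to it exists, all
     concurrent accesses must go through atomic_ref objects.  The function
     `tally' is hypothetical.

       #include <atomic>

       void tally(long& shared_count)   // plain long owned elsewhere
       {
         std::atomic_ref<long> ref(shared_count);
         ref.fetch_add(1, std::memory_order_relaxed);
       }
  */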
  1232. #endif // C++2a
  1233. // @} group atomics
  1234. _GLIBCXX_END_NAMESPACE_VERSION
  1235. } // namespace
  1236. #endif // C++11
  1237. #endif // _GLIBCXX_ATOMIC