/*-
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _STDATOMIC_H_
#define	_STDATOMIC_H_

#include <sys/cdefs.h>
#include <sys/_types.h>

#if __has_extension(c_atomic) || __has_extension(cxx_atomic)
#define	__CLANG_ATOMICS
#elif __GNUC_PREREQ__(4, 7)
#define	__GNUC_ATOMICS
#elif defined(__GNUC__)
#define	__SYNC_ATOMICS
#else
#error "stdatomic.h does not support your compiler"
#endif
/*
 * 7.17.1 Atomic lock-free macros.
 */

#ifdef __GCC_ATOMIC_BOOL_LOCK_FREE
#define	ATOMIC_BOOL_LOCK_FREE		__GCC_ATOMIC_BOOL_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR_LOCK_FREE
#define	ATOMIC_CHAR_LOCK_FREE		__GCC_ATOMIC_CHAR_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define	ATOMIC_CHAR16_T_LOCK_FREE	__GCC_ATOMIC_CHAR16_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define	ATOMIC_CHAR32_T_LOCK_FREE	__GCC_ATOMIC_CHAR32_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define	ATOMIC_WCHAR_T_LOCK_FREE	__GCC_ATOMIC_WCHAR_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_SHORT_LOCK_FREE
#define	ATOMIC_SHORT_LOCK_FREE		__GCC_ATOMIC_SHORT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_INT_LOCK_FREE
#define	ATOMIC_INT_LOCK_FREE		__GCC_ATOMIC_INT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LONG_LOCK_FREE
#define	ATOMIC_LONG_LOCK_FREE		__GCC_ATOMIC_LONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LLONG_LOCK_FREE
#define	ATOMIC_LLONG_LOCK_FREE		__GCC_ATOMIC_LLONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_POINTER_LOCK_FREE
#define	ATOMIC_POINTER_LOCK_FREE	__GCC_ATOMIC_POINTER_LOCK_FREE
#endif
/*
 * 7.17.2 Initialization.
 */

#if defined(__CLANG_ATOMICS)
#define	ATOMIC_VAR_INIT(value)		(value)
#define	atomic_init(obj, value)		__c11_atomic_init(obj, value)
#else
#define	ATOMIC_VAR_INIT(value)		{ .__val = (value) }
#define	atomic_init(obj, value)		((void)((obj)->__val = (value)))
#endif
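/*
 * Usage sketch (illustrative only, not part of the original header): an
 * atomic object can be given its initial value statically with
 * ATOMIC_VAR_INIT or at run time with atomic_init().  atomic_init() itself
 * is not an atomic operation and must run before concurrent use.  The names
 * below are hypothetical.
 *
 *	static atomic_uint example_hits = ATOMIC_VAR_INIT(0);
 *
 *	void
 *	example_setup(atomic_uint *p)
 *	{
 *		atomic_init(p, 0);
 *	}
 */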
/*
 * Clang and recent GCC both provide predefined macros for the memory
 * orderings.  If we are using a compiler that doesn't define them, use the
 * clang values - these will be ignored in the fallback path.
 */

#ifndef __ATOMIC_RELAXED
#define	__ATOMIC_RELAXED		0
#endif
#ifndef __ATOMIC_CONSUME
#define	__ATOMIC_CONSUME		1
#endif
#ifndef __ATOMIC_ACQUIRE
#define	__ATOMIC_ACQUIRE		2
#endif
#ifndef __ATOMIC_RELEASE
#define	__ATOMIC_RELEASE		3
#endif
#ifndef __ATOMIC_ACQ_REL
#define	__ATOMIC_ACQ_REL		4
#endif
#ifndef __ATOMIC_SEQ_CST
#define	__ATOMIC_SEQ_CST		5
#endif
/*
 * 7.17.3 Order and consistency.
 *
 * The memory_order_* constants that denote the barrier behaviour of the
 * atomic operations.
 */

typedef enum {
	memory_order_relaxed = __ATOMIC_RELAXED,
	memory_order_consume = __ATOMIC_CONSUME,
	memory_order_acquire = __ATOMIC_ACQUIRE,
	memory_order_release = __ATOMIC_RELEASE,
	memory_order_acq_rel = __ATOMIC_ACQ_REL,
	memory_order_seq_cst = __ATOMIC_SEQ_CST
} memory_order;
/*
 * 7.17.4 Fences.
 */

static __inline void
atomic_thread_fence(memory_order __order __unused)
{
#ifdef __CLANG_ATOMICS
	__c11_atomic_thread_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_thread_fence(__order);
#else
	__sync_synchronize();
#endif
}

static __inline void
atomic_signal_fence(memory_order __order __unused)
{
#ifdef __CLANG_ATOMICS
	__c11_atomic_signal_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_signal_fence(__order);
#else
	__asm volatile ("" ::: "memory");
#endif
}
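/*
 * Usage sketch (illustrative only): the standard fence-based publication
 * pattern.  A release fence before the relaxed store of the flag pairs with
 * an acquire fence after the flag is observed set, so the plain payload
 * access is ordered correctly.  All identifiers are hypothetical.
 *
 *	static atomic_bool example_ready = ATOMIC_VAR_INIT(0);
 *	static int example_payload;
 *
 *	void
 *	example_publish(int v)
 *	{
 *		example_payload = v;
 *		atomic_thread_fence(memory_order_release);
 *		atomic_store_explicit(&example_ready, 1, memory_order_relaxed);
 *	}
 *
 *	int
 *	example_consume(void)
 *	{
 *		while (!atomic_load_explicit(&example_ready,
 *		    memory_order_relaxed))
 *			;
 *		atomic_thread_fence(memory_order_acquire);
 *		return (example_payload);
 *	}
 */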
/*
 * 7.17.5 Lock-free property.
 */

#if defined(_KERNEL)
/* Atomics in kernelspace are always lock-free. */
#define	atomic_is_lock_free(obj) \
	((void)(obj), (_Bool)1)
#elif defined(__CLANG_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof(*(obj)), obj)
#elif defined(__GNUC_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof((obj)->__val), &(obj)->__val)
#else
#define	atomic_is_lock_free(obj) \
	((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
#endif
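/*
 * Usage sketch (illustrative only): atomic_is_lock_free() takes a pointer to
 * an atomic object and reports whether operations on it avoid locking, so a
 * caller can choose a different strategy when they do not.  The function
 * name is hypothetical.
 *
 *	int
 *	example_counter_is_cheap(atomic_ullong *c)
 *	{
 *		return (atomic_is_lock_free(c) ? 1 : 0);
 *	}
 */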
/*
 * 7.17.6 Atomic integer types.
 */

typedef _Atomic(_Bool)			atomic_bool;
typedef _Atomic(char)			atomic_char;
typedef _Atomic(signed char)		atomic_schar;
typedef _Atomic(unsigned char)		atomic_uchar;
typedef _Atomic(short)			atomic_short;
typedef _Atomic(unsigned short)		atomic_ushort;
typedef _Atomic(int)			atomic_int;
typedef _Atomic(unsigned int)		atomic_uint;
typedef _Atomic(long)			atomic_long;
typedef _Atomic(unsigned long)		atomic_ulong;
typedef _Atomic(long long)		atomic_llong;
typedef _Atomic(unsigned long long)	atomic_ullong;
#if 0
typedef _Atomic(__char16_t)		atomic_char16_t;
typedef _Atomic(__char32_t)		atomic_char32_t;
#endif
typedef _Atomic(wchar_t)		atomic_wchar_t;
typedef _Atomic(int_least8_t)		atomic_int_least8_t;
typedef _Atomic(uint_least8_t)		atomic_uint_least8_t;
typedef _Atomic(int_least16_t)		atomic_int_least16_t;
typedef _Atomic(uint_least16_t)		atomic_uint_least16_t;
typedef _Atomic(int_least32_t)		atomic_int_least32_t;
typedef _Atomic(uint_least32_t)		atomic_uint_least32_t;
typedef _Atomic(int_least64_t)		atomic_int_least64_t;
typedef _Atomic(uint_least64_t)		atomic_uint_least64_t;
typedef _Atomic(int_fast8_t)		atomic_int_fast8_t;
typedef _Atomic(uint_fast8_t)		atomic_uint_fast8_t;
typedef _Atomic(int_fast16_t)		atomic_int_fast16_t;
typedef _Atomic(uint_fast16_t)		atomic_uint_fast16_t;
typedef _Atomic(int_fast32_t)		atomic_int_fast32_t;
typedef _Atomic(uint_fast32_t)		atomic_uint_fast32_t;
typedef _Atomic(int_fast64_t)		atomic_int_fast64_t;
typedef _Atomic(uint_fast64_t)		atomic_uint_fast64_t;
typedef _Atomic(intptr_t)		atomic_intptr_t;
typedef _Atomic(uintptr_t)		atomic_uintptr_t;
typedef _Atomic(size_t)			atomic_size_t;
typedef _Atomic(ptrdiff_t)		atomic_ptrdiff_t;
typedef _Atomic(intmax_t)		atomic_intmax_t;
typedef _Atomic(uintmax_t)		atomic_uintmax_t;
/*
 * 7.17.7 Operations on atomic types.
 */

/*
 * Compiler-specific operations.
 */

#if defined(__CLANG_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) \
	__c11_atomic_compare_exchange_strong(object, expected, desired, \
	    success, failure)
#define	atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
	__c11_atomic_compare_exchange_weak(object, expected, desired, \
	    success, failure)
#define	atomic_exchange_explicit(object, desired, order) \
	__c11_atomic_exchange(object, desired, order)
#define	atomic_fetch_add_explicit(object, operand, order) \
	__c11_atomic_fetch_add(object, operand, order)
#define	atomic_fetch_and_explicit(object, operand, order) \
	__c11_atomic_fetch_and(object, operand, order)
#define	atomic_fetch_or_explicit(object, operand, order) \
	__c11_atomic_fetch_or(object, operand, order)
#define	atomic_fetch_sub_explicit(object, operand, order) \
	__c11_atomic_fetch_sub(object, operand, order)
#define	atomic_fetch_xor_explicit(object, operand, order) \
	__c11_atomic_fetch_xor(object, operand, order)
#define	atomic_load_explicit(object, order) \
	__c11_atomic_load(object, order)
#define	atomic_store_explicit(object, desired, order) \
	__c11_atomic_store(object, desired, order)
#elif defined(__GNUC_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) \
	__atomic_compare_exchange_n(&(object)->__val, expected, \
	    desired, 0, success, failure)
#define	atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
	__atomic_compare_exchange_n(&(object)->__val, expected, \
	    desired, 1, success, failure)
#define	atomic_exchange_explicit(object, desired, order) \
	__atomic_exchange_n(&(object)->__val, desired, order)
#define	atomic_fetch_add_explicit(object, operand, order) \
	__atomic_fetch_add(&(object)->__val, operand, order)
#define	atomic_fetch_and_explicit(object, operand, order) \
	__atomic_fetch_and(&(object)->__val, operand, order)
#define	atomic_fetch_or_explicit(object, operand, order) \
	__atomic_fetch_or(&(object)->__val, operand, order)
#define	atomic_fetch_sub_explicit(object, operand, order) \
	__atomic_fetch_sub(&(object)->__val, operand, order)
#define	atomic_fetch_xor_explicit(object, operand, order) \
	__atomic_fetch_xor(&(object)->__val, operand, order)
#define	atomic_load_explicit(object, order) \
	__atomic_load_n(&(object)->__val, order)
#define	atomic_store_explicit(object, desired, order) \
	__atomic_store_n(&(object)->__val, desired, order)
#else
#define	__atomic_apply_stride(object, operand) \
	(((__typeof__((object)->__val))0) + (operand))
#define	atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) __extension__ ({ \
	__typeof__(expected) __ep = (expected); \
	__typeof__(*__ep) __e = *__ep; \
	(void)(success); (void)(failure); \
	(_Bool)((*__ep = __sync_val_compare_and_swap(&(object)->__val, \
	    __e, desired)) == __e); \
})
#define	atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
	atomic_compare_exchange_strong_explicit(object, expected, \
	    desired, success, failure)
#if __has_builtin(__sync_swap)
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define	atomic_exchange_explicit(object, desired, order) \
	((void)(order), __sync_swap(&(object)->__val, desired))
#else
/*
 * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
 * practice it is usually a full barrier) so we need an explicit barrier before
 * it.
 */
#define	atomic_exchange_explicit(object, desired, order) \
__extension__ ({ \
	__typeof__(object) __o = (object); \
	__typeof__(desired) __d = (desired); \
	(void)(order); \
	__sync_synchronize(); \
	__sync_lock_test_and_set(&(__o)->__val, __d); \
})
#endif
#define	atomic_fetch_add_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_add(&(object)->__val, \
	    __atomic_apply_stride(object, operand)))
#define	atomic_fetch_and_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_and(&(object)->__val, operand))
#define	atomic_fetch_or_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_or(&(object)->__val, operand))
#define	atomic_fetch_sub_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_sub(&(object)->__val, \
	    __atomic_apply_stride(object, operand)))
#define	atomic_fetch_xor_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_xor(&(object)->__val, operand))
#define	atomic_load_explicit(object, order) \
	((void)(order), __sync_fetch_and_add(&(object)->__val, 0))
#define	atomic_store_explicit(object, desired, order) \
	((void)atomic_exchange_explicit(object, desired, order))
#endif
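/*
 * Usage sketch (illustrative only): the _explicit operations above compose
 * into the usual compare-and-swap retry loop.  On failure the current value
 * is written back into `old`, so the loop simply recomputes and retries.
 * The function and variable names are hypothetical.
 *
 *	unsigned int
 *	example_saturating_inc(atomic_uint *p, unsigned int max)
 *	{
 *		unsigned int old, new;
 *
 *		old = atomic_load_explicit(p, memory_order_relaxed);
 *		do {
 *			new = (old < max) ? old + 1 : old;
 *		} while (!atomic_compare_exchange_weak_explicit(p, &old,
 *		    new, memory_order_acq_rel, memory_order_relaxed));
 *		return (new);
 *	}
 */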
/*
 * Convenience functions.
 *
 * Don't provide these in kernel space. In kernel space, we should be
 * disciplined enough to always provide explicit barriers.
 */

#ifndef _KERNEL
#define	atomic_compare_exchange_strong(object, expected, desired) \
	atomic_compare_exchange_strong_explicit(object, expected, \
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_compare_exchange_weak(object, expected, desired) \
	atomic_compare_exchange_weak_explicit(object, expected, \
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_exchange(object, desired) \
	atomic_exchange_explicit(object, desired, memory_order_seq_cst)
#define	atomic_fetch_add(object, operand) \
	atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_and(object, operand) \
	atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_or(object, operand) \
	atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_sub(object, operand) \
	atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_xor(object, operand) \
	atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
#define	atomic_load(object) \
	atomic_load_explicit(object, memory_order_seq_cst)
#define	atomic_store(object, desired) \
	atomic_store_explicit(object, desired, memory_order_seq_cst)
#endif /* !_KERNEL */
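/*
 * Usage sketch (illustrative only): outside the kernel the non-_explicit
 * wrappers above default every operation to memory_order_seq_cst.  The names
 * below are hypothetical.
 *
 *	static atomic_ulong example_total = ATOMIC_VAR_INIT(0);
 *
 *	void
 *	example_account(unsigned long n)
 *	{
 *		atomic_fetch_add(&example_total, n);
 *	}
 *
 *	unsigned long
 *	example_read_total(void)
 *	{
 *		return (atomic_load(&example_total));
 *	}
 */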
/*
 * 7.17.8 Atomic flag type and operations.
 *
 * XXX: Assume atomic_bool can be used as an atomic_flag. Is there some
 * kind of compiler built-in type we could use?
 */

typedef struct {
	atomic_bool	__flag;
} atomic_flag;

#define	ATOMIC_FLAG_INIT		{ ATOMIC_VAR_INIT(0) }

static __inline _Bool
atomic_flag_test_and_set_explicit(volatile atomic_flag *__object,
    memory_order __order)
{
	return (atomic_exchange_explicit(&__object->__flag, 1, __order));
}

static __inline void
atomic_flag_clear_explicit(volatile atomic_flag *__object, memory_order __order)
{
	atomic_store_explicit(&__object->__flag, 0, __order);
}

#ifndef _KERNEL
static __inline _Bool
atomic_flag_test_and_set(volatile atomic_flag *__object)
{
	return (atomic_flag_test_and_set_explicit(__object,
	    memory_order_seq_cst));
}

static __inline void
atomic_flag_clear(volatile atomic_flag *__object)
{
	atomic_flag_clear_explicit(__object, memory_order_seq_cst);
}
#endif /* !_KERNEL */
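/*
 * Usage sketch (illustrative only): atomic_flag is the classic building block
 * for a test-and-set spinlock; ATOMIC_FLAG_INIT leaves the flag clear.  The
 * identifiers are hypothetical.
 *
 *	static atomic_flag example_lock = ATOMIC_FLAG_INIT;
 *
 *	void
 *	example_lock_acquire(void)
 *	{
 *		while (atomic_flag_test_and_set_explicit(&example_lock,
 *		    memory_order_acquire))
 *			;
 *	}
 *
 *	void
 *	example_lock_release(void)
 *	{
 *		atomic_flag_clear_explicit(&example_lock,
 *		    memory_order_release);
 *	}
 */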
#endif /* !_STDATOMIC_H_ */