/* gatomic.h */
  1. /*
  2. * Copyright © 2011 Ryan Lortie
  3. *
  4. * SPDX-License-Identifier: LGPL-2.1-or-later
  5. *
  6. * This library is free software; you can redistribute it and/or
  7. * modify it under the terms of the GNU Lesser General Public
  8. * License as published by the Free Software Foundation; either
  9. * version 2.1 of the License, or (at your option) any later version.
  10. *
  11. * This library is distributed in the hope that it will be useful, but
  12. * WITHOUT ANY WARRANTY; without even the implied warranty of
  13. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  14. * Lesser General Public License for more details.
  15. *
  16. * You should have received a copy of the GNU Lesser General Public
  17. * License along with this library; if not, see <http://www.gnu.org/licenses/>.
  18. *
  19. * Author: Ryan Lortie <desrt@desrt.ca>
  20. */
  21. #ifndef __G_ATOMIC_H__
  22. #define __G_ATOMIC_H__
  23. #if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
  24. #error "Only <glib.h> can be included directly."
  25. #endif
  26. #include <glib/gtypes.h>
  27. #include <glib/glib-typeof.h>
  28. G_BEGIN_DECLS
  29. GLIB_AVAILABLE_IN_ALL
  30. gint g_atomic_int_get (const volatile gint *atomic);
  31. GLIB_AVAILABLE_IN_ALL
  32. void g_atomic_int_set (volatile gint *atomic,
  33. gint newval);
  34. GLIB_AVAILABLE_IN_ALL
  35. void g_atomic_int_inc (volatile gint *atomic);
  36. GLIB_AVAILABLE_IN_ALL
  37. gboolean g_atomic_int_dec_and_test (volatile gint *atomic);
  38. GLIB_AVAILABLE_IN_ALL
  39. gboolean g_atomic_int_compare_and_exchange (volatile gint *atomic,
  40. gint oldval,
  41. gint newval);
  42. GLIB_AVAILABLE_IN_2_74
  43. gboolean g_atomic_int_compare_and_exchange_full (gint *atomic,
  44. gint oldval,
  45. gint newval,
  46. gint *preval);
  47. GLIB_AVAILABLE_IN_2_74
  48. gint g_atomic_int_exchange (gint *atomic,
  49. gint newval);
  50. GLIB_AVAILABLE_IN_ALL
  51. gint g_atomic_int_add (volatile gint *atomic,
  52. gint val);
  53. GLIB_AVAILABLE_IN_2_30
  54. guint g_atomic_int_and (volatile guint *atomic,
  55. guint val);
  56. GLIB_AVAILABLE_IN_2_30
  57. guint g_atomic_int_or (volatile guint *atomic,
  58. guint val);
  59. GLIB_AVAILABLE_IN_ALL
  60. guint g_atomic_int_xor (volatile guint *atomic,
  61. guint val);
  62. GLIB_AVAILABLE_IN_ALL
  63. gpointer g_atomic_pointer_get (const volatile void *atomic);
  64. GLIB_AVAILABLE_IN_ALL
  65. void g_atomic_pointer_set (volatile void *atomic,
  66. gpointer newval);
  67. GLIB_AVAILABLE_IN_ALL
  68. gboolean g_atomic_pointer_compare_and_exchange (volatile void *atomic,
  69. gpointer oldval,
  70. gpointer newval);
  71. GLIB_AVAILABLE_IN_2_74
  72. gboolean g_atomic_pointer_compare_and_exchange_full (void *atomic,
  73. gpointer oldval,
  74. gpointer newval,
  75. void *preval);
  76. GLIB_AVAILABLE_IN_2_74
  77. gpointer g_atomic_pointer_exchange (void *atomic,
  78. gpointer newval);
  79. GLIB_AVAILABLE_IN_ALL
  80. gintptr g_atomic_pointer_add (volatile void *atomic,
  81. gssize val);
  82. GLIB_AVAILABLE_IN_2_30
  83. guintptr g_atomic_pointer_and (volatile void *atomic,
  84. gsize val);
  85. GLIB_AVAILABLE_IN_2_30
  86. guintptr g_atomic_pointer_or (volatile void *atomic,
  87. gsize val);
  88. GLIB_AVAILABLE_IN_ALL
  89. guintptr g_atomic_pointer_xor (volatile void *atomic,
  90. gsize val);
  91. GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
  92. gint g_atomic_int_exchange_and_add (volatile gint *atomic,
  93. gint val);
  94. G_END_DECLS
#if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)

/* We prefer the new C11-style atomic extension of GCC if available */
#if defined(__ATOMIC_SEQ_CST)

/* Sequentially-consistent load of a gint-sized value.
 * The `0 ? *(atomic) ^ *(atomic) : 1` expression is never evaluated; it only
 * forces a compile-time diagnostic if *atomic is not an integral type. */
#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    gint gaig_temp;                                                          \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST);          \
    (gint) gaig_temp;                                                        \
  }))

/* Sequentially-consistent store of a gint-sized value.  newval is copied to
 * a local first so that __atomic_store can take its address. */
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    gint gais_temp = (gint) (newval);                                        \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST);         \
  }))
#if defined(glib_typeof)

/* When glib_typeof is available, the load result keeps the pointee's real
 * type, so assignments from g_atomic_pointer_get() are type-checked. */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    glib_typeof (*(atomic)) gapg_temp_newval;                                \
    glib_typeof ((atomic)) gapg_temp_atomic = (atomic);                      \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST);   \
    gapg_temp_newval;                                                        \
  }))

/* Typed sequentially-consistent pointer store.  The never-evaluated
 * `(gpointer) * (atomic)` expression diagnoses non-pointer targets. */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    glib_typeof ((atomic)) gaps_temp_atomic = (atomic);                      \
    glib_typeof (*(atomic)) gaps_temp_newval = (newval);                     \
    (void) (0 ? (gpointer) * (atomic) : NULL);                               \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST);  \
  }))

#else /* if !(defined(glib_typeof) */

/* Without glib_typeof, fall back to untyped gpointer load/store; the static
 * assert still guarantees the target is pointer-sized. */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    gpointer gapg_temp_newval;                                               \
    gpointer *gapg_temp_atomic = (gpointer *)(atomic);                       \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST);   \
    gapg_temp_newval;                                                        \
  }))

#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    gpointer *gaps_temp_atomic = (gpointer *)(atomic);                       \
    gpointer gaps_temp_newval = (gpointer)(newval);                          \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST);  \
  }))

#endif /* if defined(glib_typeof) */
/* Atomic increment; the fetched value is deliberately discarded. */
#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST);               \
  }))

/* Atomic decrement; evaluates to TRUE iff the value reached zero (i.e. the
 * pre-decrement value was 1). */
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1;                 \
  }))
#if defined(glib_typeof) && defined(G_CXX_STD_VERSION)
/* See comments below about equivalent g_atomic_pointer_compare_and_exchange()
 * shenanigans for type-safety when compiling in C++ mode. */
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    glib_typeof (*(atomic)) gaicae_oldval = (oldval);                        \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1);                        \
    __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#else /* if !(defined(glib_typeof) && defined(G_CXX_STD_VERSION)) */
/* C mode: oldval is copied into a plain gint; the (void *) cast on its
 * address avoids pointer-type warnings against a possibly-volatile atomic. */
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    gint gaicae_oldval = (oldval);                                           \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1);                        \
    __atomic_compare_exchange_n ((atomic), (void *) (&(gaicae_oldval)), (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#endif /* defined(glib_typeof) */

/* Like the above, but also stores the previously-held value in *preval.
 * __atomic_compare_exchange_n() itself writes the old value into *preval on
 * failure; seeding *preval with oldval covers the success case too. */
#define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    G_STATIC_ASSERT (sizeof *(preval) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) ^ *(preval) : 1);            \
    *(preval) = (oldval);                                                    \
    __atomic_compare_exchange_n ((atomic), (preval), (newval), FALSE,        \
                                 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)         \
                                 ? TRUE : FALSE;                             \
  }))
/* Atomically replace *atomic with newval; evaluates to the old value. */
#define g_atomic_int_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    (gint) __atomic_exchange_n ((atomic), (newval), __ATOMIC_SEQ_CST);       \
  }))

/* Fetch-and-<op> macros below all evaluate to the value *before* the
 * operation, matching the out-of-line function semantics. */
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST);           \
  }))

#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST);          \
  }))

#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST);           \
  }))

#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST);          \
  }))
#if defined(glib_typeof) && defined(G_CXX_STD_VERSION)
/* This is typesafe because we check we can assign oldval to the type of
 * (*atomic). Unfortunately it can only be done in C++ because gcc/clang warn
 * when atomic is volatile and not oldval, or when atomic is gsize* and oldval
 * is NULL. Note that clang++ force us to be typesafe because it is an error if the 2nd
 * argument of __atomic_compare_exchange_n() has a different type than the
 * first.
 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1919
 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1715#note_1024120. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof (static_cast<glib_typeof (*(atomic))>((oldval))) \
                     == sizeof (gpointer));                                  \
    glib_typeof (*(atomic)) gapcae_oldval = (oldval);                        \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#else /* if !(defined(glib_typeof) && defined(G_CXX_STD_VERSION) */
/* C mode: only pointer-size compatibility can be checked, not the type. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer));                  \
    gpointer gapcae_oldval = (gpointer)(oldval);                             \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __atomic_compare_exchange_n ((atomic), (void *) (&(gapcae_oldval)), (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#endif /* defined(glib_typeof) */

/* As above but reports the previously-held value through *preval; *preval is
 * seeded with oldval so it is correct in the success case as well. */
#define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    G_STATIC_ASSERT (sizeof *(preval) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (gpointer) *(preval) : NULL);                                \
    *(preval) = (oldval);                                                    \
    __atomic_compare_exchange_n ((atomic), (preval), (newval), FALSE,        \
                                 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ?       \
                                 TRUE : FALSE;                               \
  }))
/* Atomically replace the stored pointer; evaluates to the old pointer. */
#define g_atomic_pointer_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (gpointer) __atomic_exchange_n ((atomic), (newval), __ATOMIC_SEQ_CST);   \
  }))

/* Pointer add/and/or/xor: evaluate to the value before the operation.  The
 * never-evaluated `(val) ^ (val)` expression diagnoses non-integral val. */
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gintptr) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST);        \
  }))

/* The bitwise ops go through a guintptr* alias so the builtin operates on an
 * integer type; a static assert checks guintptr really is pointer-sized. */
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    guintptr *gapa_atomic = (guintptr *) (atomic);                           \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (guintptr) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST);    \
  }))

#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    guintptr *gapo_atomic = (guintptr *) (atomic);                           \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (guintptr) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST);     \
  }))

#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    guintptr *gapx_atomic = (guintptr *) (atomic);                           \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (guintptr) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST);    \
  }))
#else /* defined(__ATOMIC_SEQ_CST) */

/* We want to achieve __ATOMIC_SEQ_CST semantics here. See
 * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load
 * operations, that means performing an *acquire*:
 * > A load operation with this memory order performs the acquire operation on
 * > the affected memory location: no reads or writes in the current thread can
 * > be reordered before this load. All writes in other threads that release
 * > the same atomic variable are visible in the current thread.
 *
 * “no reads or writes in the current thread can be reordered before this load”
 * is implemented using a compiler barrier (a no-op `__asm__` section) to
 * prevent instruction reordering. Writes in other threads are synchronised
 * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether
 * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of
 * one.
 *
 * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*:
 * > A store operation with this memory order performs the release operation:
 * > no reads or writes in the current thread can be reordered after this store.
 * > All writes in the current thread are visible in other threads that acquire
 * > the same atomic variable (see Release-Acquire ordering below) and writes
 * > that carry a dependency into the atomic variable become visible in other
 * > threads that consume the same atomic (see Release-Consume ordering below).
 *
 * “no reads or writes in the current thread can be reordered after this store”
 * is implemented using a compiler barrier to prevent instruction reordering.
 * “All writes in the current thread are visible in other threads” is implemented
 * using `__sync_synchronize()`; similarly for “writes that carry a dependency”.
 */

/* Plain read followed by a full barrier + compiler barrier (see above). */
#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    gint gaig_result;                                                        \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    gaig_result = (gint) *(atomic);                                          \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    gaig_result;                                                             \
  }))

/* Barriers *before* the write give release semantics (see above). */
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    *(atomic) = (newval);                                                    \
  }))
/* Pointer analogue of g_atomic_int_get() above: plain read plus barriers. */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    gpointer gapg_result;                                                    \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    gapg_result = (gpointer) *(atomic);                                      \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    gapg_result;                                                             \
  }))

#if defined(glib_typeof)
/* With glib_typeof the stored value is cast back to the pointee type, going
 * via guintptr to permit integer-valued (e.g. gsize) targets. */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    *(atomic) = (glib_typeof (*(atomic))) (guintptr) (newval);               \
  }))
#else /* if !(defined(glib_typeof) */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    *(atomic) = (gpointer) (guintptr) (newval);                              \
  }))
#endif /* if defined(glib_typeof) */
/* __sync builtins are full barriers already, so no extra fences needed. */
#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    (void) __sync_fetch_and_add ((atomic), 1);                               \
  }))

/* TRUE iff the pre-decrement value was 1 (counter hit zero). */
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    __sync_fetch_and_sub ((atomic), 1) == 1;                                 \
  }))

#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1);                        \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))

/* The _val_ variant returns the previous value, which both fills *preval and
 * determines success (previous value equal to oldval). */
#define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    G_STATIC_ASSERT (sizeof *(preval) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) ^ *(preval) : 1);            \
    *(preval) = __sync_val_compare_and_swap ((atomic), (oldval), (newval));  \
    (*(preval) == (oldval)) ? TRUE : FALSE;                                  \
  }))
#if defined(_GLIB_GCC_HAVE_SYNC_SWAP)
/* Direct swap when the compiler provides __sync_swap(). */
#define g_atomic_int_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    (gint) __sync_swap ((atomic), (newval));                                 \
  }))
#else /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
/* Otherwise emulate exchange with a CAS retry loop: re-read and retry until
 * the compare-and-swap succeeds, then yield the captured old value. */
#define g_atomic_int_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    gint oldval;                                                             \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    do                                                                       \
      {                                                                      \
        oldval = *atomic;                                                    \
      } while (!__sync_bool_compare_and_swap (atomic, oldval, newval));      \
    oldval;                                                                  \
  }))
#endif /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
/* Fetch-and-<op> via __sync builtins; each yields the pre-operation value. */
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (gint) __sync_fetch_and_add ((atomic), (val));                           \
  }))

#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_and ((atomic), (val));                          \
  }))

#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_or ((atomic), (val));                           \
  }))

#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_xor ((atomic), (val));                          \
  }))
/* Pointer compare-and-swap on the __sync path. */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))

/* _val_ CAS returns the previous value: store it in *preval and compare with
 * oldval to decide success. */
#define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    G_STATIC_ASSERT (sizeof *(preval) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (gpointer) *(preval) : NULL);                                \
    *(preval) = __sync_val_compare_and_swap ((atomic), (oldval), (newval));  \
    (*(preval) == (oldval)) ? TRUE : FALSE;                                  \
  }))

#if defined(_GLIB_GCC_HAVE_SYNC_SWAP)
#define g_atomic_pointer_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (gpointer) __sync_swap ((atomic), (newval));                             \
  }))
#else
/* No __sync_swap(): emulate exchange with a CAS retry loop. */
#define g_atomic_pointer_exchange(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    gpointer oldval;                                                         \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    do                                                                       \
      {                                                                      \
        oldval = (gpointer) *atomic;                                         \
      } while (!__sync_bool_compare_and_swap (atomic, oldval, newval));      \
    oldval;                                                                  \
  }))
#endif /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
/* Pointer fetch-and-<op> via __sync builtins; each yields the pre-operation
 * value.  The never-evaluated `(val) ^ (val)` diagnoses non-integral val. */
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gintptr) __sync_fetch_and_add ((atomic), (val));                        \
  }))

#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (guintptr) __sync_fetch_and_and ((atomic), (val));                       \
  }))

#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (guintptr) __sync_fetch_and_or ((atomic), (val));                        \
  }))

#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (guintptr) __sync_fetch_and_xor ((atomic), (val));                       \
  }))

#endif /* !defined(__ATOMIC_SEQ_CST) */
#else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

/* No compiler atomics available: each macro simply redirects to the
 * out-of-line function declared above (the extra parentheses around the
 * function name prevent the macro from expanding recursively), casting the
 * arguments to the declared parameter types. */
#define g_atomic_int_get(atomic) \
  (g_atomic_int_get ((gint *) (atomic)))
#define g_atomic_int_set(atomic, newval) \
  (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
#define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) \
  (g_atomic_int_compare_and_exchange_full ((gint *) (atomic), (oldval), (newval), (gint *) (preval)))
#define g_atomic_int_exchange(atomic, newval) \
  (g_atomic_int_exchange ((gint *) (atomic), (newval)))
#define g_atomic_int_add(atomic, val) \
  (g_atomic_int_add ((gint *) (atomic), (val)))
#define g_atomic_int_and(atomic, val) \
  (g_atomic_int_and ((guint *) (atomic), (val)))
#define g_atomic_int_or(atomic, val) \
  (g_atomic_int_or ((guint *) (atomic), (val)))
#define g_atomic_int_xor(atomic, val) \
  (g_atomic_int_xor ((guint *) (atomic), (val)))
#define g_atomic_int_inc(atomic) \
  (g_atomic_int_inc ((gint *) (atomic)))
#define g_atomic_int_dec_and_test(atomic) \
  (g_atomic_int_dec_and_test ((gint *) (atomic)))

#if defined(glib_typeof)
/* The (void *) cast in the middle *looks* redundant, because
 * g_atomic_pointer_get returns void * already, but it's to silence
 * -Werror=bad-function-cast when we're doing something like:
 * guintptr a, b; ...; a = g_atomic_pointer_get (&b);
 * which would otherwise be assigning the void * result of
 * g_atomic_pointer_get directly to the pointer-sized but
 * non-pointer-typed result. */
#define g_atomic_pointer_get(atomic) \
  (glib_typeof (*(atomic))) (void *) ((g_atomic_pointer_get) ((void *) atomic))
#else /* !(defined(glib_typeof) */
#define g_atomic_pointer_get(atomic) \
  (g_atomic_pointer_get (atomic))
#endif

#define g_atomic_pointer_set(atomic, newval) \
  (g_atomic_pointer_set ((atomic), (gpointer) (newval)))
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
#define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, prevval) \
  (g_atomic_pointer_compare_and_exchange_full ((atomic), (gpointer) (oldval), (gpointer) (newval), (prevval)))
#define g_atomic_pointer_exchange(atomic, newval) \
  (g_atomic_pointer_exchange ((atomic), (gpointer) (newval)))
#define g_atomic_pointer_add(atomic, val) \
  (g_atomic_pointer_add ((atomic), (gssize) (val)))
#define g_atomic_pointer_and(atomic, val) \
  (g_atomic_pointer_and ((atomic), (gsize) (val)))
#define g_atomic_pointer_or(atomic, val) \
  (g_atomic_pointer_or ((atomic), (gsize) (val)))
#define g_atomic_pointer_xor(atomic, val) \
  (g_atomic_pointer_xor ((atomic), (gsize) (val)))

#endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
  561. #endif /* __G_ATOMIC_H__ */