23 #ifndef INSIDE_ATOMICS_H
24 #error "should be included via atomics.h"
30 #define pg_compiler_barrier_impl() __asm__ __volatile__("" ::: "memory")
/*
 * If no arch-specific full memory barrier was provided, pick one: prefer
 * the modern __atomic intrinsic, else fall back to the legacy
 * __sync_synchronize(), available since gcc 4.1.
 */
#if !defined(pg_memory_barrier_impl)
#	if defined(HAVE_GCC__ATOMIC_INT32_CAS)
#		define pg_memory_barrier_impl()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#	elif (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1))
#		define pg_memory_barrier_impl()		__sync_synchronize()
#	endif
#endif /* !defined(pg_memory_barrier_impl) */
#if !defined(pg_read_barrier_impl) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
/* acquire semantics include read barrier semantics */
#	define pg_read_barrier_impl()		__atomic_thread_fence(__ATOMIC_ACQUIRE)
#endif
#if !defined(pg_write_barrier_impl) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
/* release semantics include write barrier semantics */
#	define pg_write_barrier_impl()		__atomic_thread_fence(__ATOMIC_RELEASE)
#endif
/*
 * pg_atomic_flag: available whenever an int- or char-wide
 * __sync_lock_test_and_set() exists.
 */
#if !defined(PG_HAVE_ATOMIC_FLAG_SUPPORT) \
	&& (defined(HAVE_GCC__SYNC_INT32_TAS) || defined(HAVE_GCC__SYNC_CHAR_TAS))

#define PG_HAVE_ATOMIC_FLAG_SUPPORT
typedef struct pg_atomic_flag
{
	/*
	 * If we have a choice, use int-width TAS: it is more efficient and/or
	 * more reliably implemented on most non-Intel platforms.
	 */
#ifdef HAVE_GCC__SYNC_INT32_TAS
	volatile int value;
#else
	volatile char value;
#endif
} pg_atomic_flag;
#endif /* !ATOMIC_FLAG_SUPPORT && SYNC_INT32_TAS */
/*
 * pg_atomic_uint32: available with either 32-bit __atomic or __sync CAS.
 */
#if !defined(PG_HAVE_ATOMIC_U32_SUPPORT) \
	&& (defined(HAVE_GCC__ATOMIC_INT32_CAS) || defined(HAVE_GCC__SYNC_INT32_CAS))

#define PG_HAVE_ATOMIC_U32_SUPPORT
typedef struct pg_atomic_uint32
{
	volatile uint32 value;
} pg_atomic_uint32;
#endif /* defined(HAVE_GCC__ATOMIC_INT32_CAS) || defined(HAVE_GCC__SYNC_INT32_CAS) */
/*
 * pg_atomic_uint64: available with either 64-bit __atomic or __sync CAS,
 * unless 64-bit atomics were explicitly disabled.
 */
#if !defined(PG_HAVE_ATOMIC_U64_SUPPORT) \
	&& !defined(PG_DISABLE_64_BIT_ATOMICS) \
	&& (defined(HAVE_GCC__ATOMIC_INT64_CAS) || defined(HAVE_GCC__SYNC_INT64_CAS))

#define PG_HAVE_ATOMIC_U64_SUPPORT
typedef struct pg_atomic_uint64
{
	/* alignment is required for atomicity of 64-bit accesses on some archs */
	volatile uint64 value pg_attribute_aligned(8);
} pg_atomic_uint64;
#endif /* defined(HAVE_GCC__ATOMIC_INT64_CAS) || defined(HAVE_GCC__SYNC_INT64_CAS) */
103 #ifdef PG_HAVE_ATOMIC_FLAG_SUPPORT
105 #if defined(HAVE_GCC__SYNC_CHAR_TAS) || defined(HAVE_GCC__SYNC_INT32_TAS)
107 #ifndef PG_HAVE_ATOMIC_TEST_SET_FLAG
108 #define PG_HAVE_ATOMIC_TEST_SET_FLAG
110 pg_atomic_test_set_flag_impl(
volatile pg_atomic_flag *ptr)
114 return __sync_lock_test_and_set(&ptr->value, 1) == 0;
120 #ifndef PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
121 #define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
123 pg_atomic_unlocked_test_flag_impl(
volatile pg_atomic_flag *ptr)
125 return ptr->
value == 0;
129 #ifndef PG_HAVE_ATOMIC_CLEAR_FLAG
130 #define PG_HAVE_ATOMIC_CLEAR_FLAG
132 pg_atomic_clear_flag_impl(
volatile pg_atomic_flag *ptr)
134 __sync_lock_release(&ptr->value);
138 #ifndef PG_HAVE_ATOMIC_INIT_FLAG
139 #define PG_HAVE_ATOMIC_INIT_FLAG
141 pg_atomic_init_flag_impl(
volatile pg_atomic_flag *ptr)
143 pg_atomic_clear_flag_impl(ptr);
#if !defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
/*
 * 32-bit CAS via gcc __atomic intrinsics.  On failure *expected is
 * updated to the value actually found; returns true iff the swap
 * was performed.
 */
static inline bool
pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
									uint32 *expected, uint32 newval)
{
	/* FIXME: we can probably use a lower consistency model */
	return __atomic_compare_exchange_n(&ptr->value, expected, newval, false,
									   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
#endif
#if !defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
/*
 * 32-bit CAS via the legacy __sync intrinsic, which returns the old value
 * rather than a success flag; emulate the *expected update by hand.
 */
static inline bool
pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
									uint32 *expected, uint32 newval)
{
	bool		ret;
	uint32		current;

	current = __sync_val_compare_and_swap(&ptr->value, *expected, newval);
	ret = current == *expected;
	*expected = current;
	return ret;
}
#endif
#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U32) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
#define PG_HAVE_ATOMIC_EXCHANGE_U32
/* Atomically store newval and return the previous value. */
static inline uint32
pg_atomic_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 newval)
{
	return __atomic_exchange_n(&ptr->value, newval, __ATOMIC_SEQ_CST);
}
#endif
#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
#define PG_HAVE_ATOMIC_FETCH_ADD_U32
/* Atomically add add_ and return the value previously stored. */
static inline uint32
pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
	return __sync_fetch_and_add(&ptr->value, add_);
}
#endif
#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
#define PG_HAVE_ATOMIC_FETCH_SUB_U32
/* Atomically subtract sub_ and return the value previously stored. */
static inline uint32
pg_atomic_fetch_sub_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
	return __sync_fetch_and_sub(&ptr->value, sub_);
}
#endif
#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
#define PG_HAVE_ATOMIC_FETCH_AND_U32
/* Atomically AND in and_ and return the value previously stored. */
static inline uint32
pg_atomic_fetch_and_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 and_)
{
	return __sync_fetch_and_and(&ptr->value, and_);
}
#endif
#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U32) && defined(HAVE_GCC__SYNC_INT32_CAS)
#define PG_HAVE_ATOMIC_FETCH_OR_U32
/* Atomically OR in or_ and return the value previously stored. */
static inline uint32
pg_atomic_fetch_or_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 or_)
{
	return __sync_fetch_and_or(&ptr->value, or_);
}
#endif
233 #if !defined(PG_DISABLE_64_BIT_ATOMICS)
235 #if !defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64) && defined(HAVE_GCC__ATOMIC_INT64_CAS)
236 #define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64
239 uint64 *expected, uint64
newval)
242 return __atomic_compare_exchange_n(&ptr->
value, expected,
newval,
false,
243 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
#if !defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64
/*
 * 64-bit CAS via the legacy __sync intrinsic, which returns the old value
 * rather than a success flag; emulate the *expected update by hand.
 */
static inline bool
pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
									uint64 *expected, uint64 newval)
{
	bool		ret;
	uint64		current;

	current = __sync_val_compare_and_swap(&ptr->value, *expected, newval);
	ret = current == *expected;
	*expected = current;
	return ret;
}
#endif
#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U64) && defined(HAVE_GCC__ATOMIC_INT64_CAS)
#define PG_HAVE_ATOMIC_EXCHANGE_U64
/* Atomically store newval and return the previous value. */
static inline uint64
pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 newval)
{
	return __atomic_exchange_n(&ptr->value, newval, __ATOMIC_SEQ_CST);
}
#endif
#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
#define PG_HAVE_ATOMIC_FETCH_ADD_U64
/* Atomically add add_ and return the value previously stored. */
static inline uint64
pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
	return __sync_fetch_and_add(&ptr->value, add_);
}
#endif
#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
#define PG_HAVE_ATOMIC_FETCH_SUB_U64
/* Atomically subtract sub_ and return the value previously stored. */
static inline uint64
pg_atomic_fetch_sub_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
	return __sync_fetch_and_sub(&ptr->value, sub_);
}
#endif
#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
#define PG_HAVE_ATOMIC_FETCH_AND_U64
/* Atomically AND in and_ and return the value previously stored. */
static inline uint64
pg_atomic_fetch_and_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 and_)
{
	return __sync_fetch_and_and(&ptr->value, and_);
}
#endif
310 #if !defined(PG_HAVE_ATOMIC_FETCH_OR_U64) && defined(HAVE_GCC__SYNC_INT64_CAS)
311 #define PG_HAVE_ATOMIC_FETCH_OR_U64
315 return __sync_fetch_and_or(&ptr->
value, or_);
static bool pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 *expected, uint32 newval)
static uint32 pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
struct pg_atomic_uint32 pg_atomic_uint32
uint64 pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
bool pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 *expected, uint64 newval)
#define AssertPointerAlignment(ptr, bndr)
struct pg_atomic_uint64 pg_atomic_uint64
struct pg_attribute_aligned(8) pg_atomic_uint64
static uint32 pg_atomic_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 newval)