#if defined(__GNUC__) || defined(__INTEL_COMPILER)
#if defined(__i386__) || defined(__i386)
#define pg_memory_barrier_impl() \
	__asm__ __volatile__ ("lock; addl $0,0(%%esp)" : : : "memory", "cc")
#elif defined(__x86_64__)
#define pg_memory_barrier_impl() \
	__asm__ __volatile__ ("lock; addl $0,0(%%rsp)" : : : "memory", "cc")
#endif
#endif							/* defined(__GNUC__) || defined(__INTEL_COMPILER) */

/*
 * x86 does not reorder loads with other loads, or stores with other stores,
 * so the read and write barriers only need to stop compiler reordering.
 */
#define pg_read_barrier_impl()		pg_compiler_barrier_impl()
#define pg_write_barrier_impl()		pg_compiler_barrier_impl()
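/*
 * Editor's sketch, not part of the original header: a standalone view of the
 * "locked no-op" trick used above (x86-64 form). Adding 0 to the top of the
 * stack changes no data, but the lock prefix forces a full memory fence; on
 * many CPUs this is also cheaper than mfence. The guard macro and function
 * name below are illustrative.
 */
#ifdef PG_BARRIER_EXAMPLE
static inline void
demo_full_fence(void)
{
	/* no-op add with lock prefix: a full fence, nothing else */
	__asm__ __volatile__("lock; addl $0,0(%%rsp)" : : : "memory", "cc");
}
#endif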
#if defined(__GNUC__) || defined(__INTEL_COMPILER)

#define PG_HAVE_ATOMIC_FLAG_SUPPORT
typedef struct pg_atomic_flag { volatile char value; } pg_atomic_flag;

#define PG_HAVE_ATOMIC_U32_SUPPORT
typedef struct pg_atomic_uint32 { volatile uint32 value; } pg_atomic_uint32;

/* 64 bit atomics are only implemented on x86_64; 8-byte alignment is
 * guaranteed there by virtue of being a 64 bit platform. */
#ifdef __x86_64__
#define PG_HAVE_ATOMIC_U64_SUPPORT
typedef struct pg_atomic_uint64 { volatile uint64 value; } pg_atomic_uint64;
#endif

#endif							/* defined(__GNUC__) || defined(__INTEL_COMPILER) */
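/*
 * Editor's sketch, not part of the original header: callers never use the
 * _impl layer directly; they go through the generic wrappers declared in
 * port/atomics.h, which dispatch to the definitions in this file on x86.
 * The guard macro and function name below are illustrative.
 */
#ifdef PG_ATOMICS_USAGE_EXAMPLE
static uint32
bump_counter(pg_atomic_uint32 *counter)
{
	/* a locked xadd on this platform; returns the pre-add value */
	return pg_atomic_fetch_add_u32(counter, 1);
}
#endif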
#if !defined(PG_HAVE_SPIN_DELAY)
/*
 * "rep; nop" decodes as the PAUSE instruction (old IA32 processors ignore
 * the rep prefix when the next instruction is not a string operation).
 * PAUSE in the inner loop of a spinlock is necessary for good performance;
 * see the Intel SDM, Vol. 3, Section 7.7.2.
 */
#if defined(__GNUC__) || defined(__INTEL_COMPILER)
#define PG_HAVE_SPIN_DELAY
static __inline__ void
pg_spin_delay_impl(void) { __asm__ __volatile__(" rep; nop\n"); }
#elif defined(_MSC_VER) && defined(__x86_64__)
#define PG_HAVE_SPIN_DELAY
static __forceinline void
pg_spin_delay_impl(void) { _mm_pause(); }
#elif defined(_MSC_VER)
#define PG_HAVE_SPIN_DELAY
static __forceinline void
pg_spin_delay_impl(void) { __asm rep nop; }	/* same PAUSE, MASM syntax */
#endif
#endif							/* !defined(PG_HAVE_SPIN_DELAY) */
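/*
 * Editor's sketch, not part of the original header: how the delay pairs with
 * the flag test-and-set from the section below. PostgreSQL's real spin loop
 * lives in s_lock.c and additionally backs off to sleeps; the guard macro
 * and function name here are illustrative.
 */
#ifdef PG_SPIN_USAGE_EXAMPLE
static void
example_spin_acquire(volatile pg_atomic_flag *lock)
{
	/* test_set returns true once the flag was previously clear */
	while (!pg_atomic_test_set_flag_impl(lock))
		pg_spin_delay_impl();	/* PAUSE between attempts */
}
#endif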
#if defined(__GNUC__) || defined(__INTEL_COMPILER)

/* test-and-set: a locked xchgb; succeeds if the flag was previously 0 */
#define PG_HAVE_ATOMIC_TEST_SET_FLAG

/* clear: a compiler barrier plus a plain store is release on TSO x86 */
#define PG_HAVE_ATOMIC_CLEAR_FLAG
#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
static inline bool
pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
									uint32 *expected, uint32 newval)
{
	char		ret;

	/* cmpxchg sets ZF on success; setz captures it as the return value */
	__asm__ __volatile__(
		"	lock				\n"
		"	cmpxchgl	%4,%5	\n"
		"	setz		%2		\n"
:		"=a"(*expected), "=m"(ptr->value), "=q"(ret)
:		"a"(*expected), "r"(newval), "m"(ptr->value)
:		"memory", "cc");
	return (bool) ret;
}

#define PG_HAVE_ATOMIC_FETCH_ADD_U32
static inline uint32
pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
	uint32		res;

	/* xadd exchanges the addend with the old value, so res = old value */
	__asm__ __volatile__(
		"	lock				\n"
		"	xaddl	%0,%1		\n"
:		"=q"(res), "=m"(ptr->value)
:		"0"(add_), "m"(ptr->value)
:		"memory", "cc");
	return res;
}
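/*
 * Editor's sketch, not part of the original header: the standard
 * compare-exchange retry pattern. On failure, cmpxchg has already written
 * the current memory value back into *expected (the "=a" output above), so
 * the loop retries with a fresh value. The guard macro and function name
 * are illustrative.
 */
#ifdef PG_CAS_USAGE_EXAMPLE
static void
example_atomic_max_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
{
	uint32		cur = ptr->value;

	while (cur < val &&
		   !pg_atomic_compare_exchange_u32_impl(ptr, &cur, val))
		;						/* cur was refreshed; recheck and retry */
}
#endif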
#ifdef __x86_64__

#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64
static inline bool
pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,
									uint64 *expected, uint64 newval)
{
	char		ret;

	AssertPointerAlignment(expected, 8);	/* from c.h */

	/* same pattern as the 32 bit variant, with q-suffixed opcodes */
	__asm__ __volatile__(
		"	lock				\n"
		"	cmpxchgq	%4,%5	\n"
		"	setz		%2		\n"
:		"=a"(*expected), "=m"(ptr->value), "=q"(ret)
:		"a"(*expected), "r"(newval), "m"(ptr->value)
:		"memory", "cc");
	return (bool) ret;
}

#define PG_HAVE_ATOMIC_FETCH_ADD_U64
static inline uint64
pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
	uint64		res;

	AssertPointerAlignment(ptr, 8);

	__asm__ __volatile__(
		"	lock				\n"
		"	xaddq	%0,%1		\n"
:		"=q"(res), "=m"(ptr->value)
:		"0"(add_), "m"(ptr->value)
:		"memory", "cc");
	return res;
}

#endif							/* __x86_64__ */
#endif							/* defined(__GNUC__) || defined(__INTEL_COMPILER) */
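/*
 * Editor's sketch, not part of the original header: xadd returns the pre-add
 * value, which makes fetch-add a natural ticket / sequence-number generator.
 * The guard macro and function name are illustrative.
 */
#ifdef PG_XADD_USAGE_EXAMPLE
static uint64
example_next_seqno(volatile pg_atomic_uint64 *seq)
{
	return pg_atomic_fetch_add_u64_impl(seq, 1);	/* old value is my ticket */
}
#endif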
/*
 * 8 byte reads / writes have single-copy atomicity on 32 bit x86 platforms
 * since at least the 586, and on all x86-64 CPUs.
 */
#if defined(__i586__) || defined(__i686__) || /* gcc i586+ */ \
	(defined(_M_IX86) && _M_IX86 >= 500) || /* msvc i586+ */ \
	defined(__x86_64__) || defined(__x86_64) || defined(_M_X64)
#define PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY
#endif
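/*
 * Editor's sketch, not part of the original header: what single-copy
 * atomicity buys. On CPUs where the macro above is defined, an aligned
 * 8-byte load can never observe a torn value, so lock-free readers may use
 * a plain load instead of a locked read-modify-write. The guard macro and
 * function name are illustrative.
 */
#ifdef PG_SCA_USAGE_EXAMPLE
static uint64
example_read_u64_lockfree(volatile pg_atomic_uint64 *ptr)
{
	/* safe only because PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY holds here */
	return ptr->value;			/* one aligned 8-byte load, never torn */
}
#endif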