/*
 * PowerPC memory-barrier implementations.
 *
 * "sync" (heavyweight sync) is a full barrier ordering all prior loads and
 * stores against all subsequent ones; "lwsync" (lightweight sync) is cheaper
 * and suffices for the weaker read-only / write-only ordering guarantees
 * (Power ISA Book II).  The "memory" clobber additionally prevents the
 * compiler itself from reordering memory accesses across the barrier.
 */
23#define pg_memory_barrier_impl() __asm__ __volatile__ ("sync" : : : "memory")
24#define pg_read_barrier_impl() __asm__ __volatile__ ("lwsync" : : : "memory")
25#define pg_write_barrier_impl() __asm__ __volatile__ ("lwsync" : : : "memory")
/*
 * Capability flags: this architecture provides native 32-bit and 64-bit
 * atomic types and a native 32-bit compare-exchange.
 * NOTE(review): presumably consumed by a generic atomics header that
 * supplies fallbacks when these macros are absent -- confirm against the
 * rest of the tree.
 */
28#define PG_HAVE_ATOMIC_U32_SUPPORT
36#define PG_HAVE_ATOMIC_U64_SUPPORT
78#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
/*
 * NOTE(review): fragmentary view -- the lines below are scattered pieces of
 * the 32-bit compare-exchange and fetch-add inline-asm implementations; the
 * lines in between are not visible in this chunk.  "lwarx" is the PowerPC
 * 32-bit load-and-reserve instruction; the trailing ",1" operand sets the EH
 * (exclusive-access) hint bit.  The HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
 * branches presumably select a variant using an "i" (immediate) operand
 * constraint when the comparand/addend is a compile-time constant -- confirm
 * against the complete file.
 */
87#ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
/* compare-exchange u32: load-and-reserve of the target word */
 107 " lwarx %0,0,%5,1 \n"
129#define PG_HAVE_ATOMIC_FETCH_ADD_U32
136#ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
/* fetch-add u32, constant-addend variant: load-and-reserve */
 141 " lwarx %1,0,%4,1 \n"
/*
 * Output-operand list, split across lines by extraction.  "b" is the
 * PowerPC-specific constraint "any GPR except r0" (r0 reads as literal 0
 * in some addressing contexts) -- see GCC machine-constraint docs.
 */
146:
"=&r"(
_t),
"=&b"(res),
"+m"(ptr->
value)
/* fetch-add u32, non-constant variant: same loop, plain "r" constraints */
 153 " lwarx %1,0,%4,1 \n"
158:
"=&r"(
_t),
"=&r"(res),
"+m"(ptr->
value)
/*
 * NOTE(review): fragmentary view -- 64-bit counterparts of the 32-bit
 * operations above; intervening lines are not visible in this chunk.
 * "ldarx" is the 64-bit load-and-reserve instruction (doubleword analogue
 * of lwarx); the trailing ",1" again sets the EH hint bit.
 */
165#ifdef PG_HAVE_ATOMIC_U64_SUPPORT
167#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64
179#ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
/* compare-exchange u64, constant-comparand variant */
 185 " ldarx %0,0,%5,1 \n"
/* compare-exchange u64, non-constant fallback */
 199 " ldarx %0,0,%5,1 \n"
216#define PG_HAVE_ATOMIC_FETCH_ADD_U64
224#ifdef HAVE_I_CONSTRAINT__BUILTIN_CONSTANT_P
/* fetch-add u64, constant-addend variant */
 229 " ldarx %1,0,%4,1 \n"
/* output operands: "b" = any GPR except r0 (PowerPC constraint) */
234:
"=&r"(
_t),
"=&b"(res),
"+m"(ptr->
value)
/* fetch-add u64, non-constant variant: plain "r" constraints */
 241 " ldarx %1,0,%4,1 \n"
246:
"=&r"(
_t),
"=&r"(res),
"+m"(ptr->
value)
/* Aligned 8-byte loads and stores are single-copy atomic on this platform,
 * so plain reads/writes of aligned 64-bit values need no lock. */
256#define PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY
/*
 * NOTE(review): the declarations below appear without their bodies or
 * terminating semicolons -- extraction artifacts of this chunk; confirm
 * each against the complete file before relying on these comments.
 */
/* presumably CAS semantics: compare *ptr with *expected, install newval on
 * match; on mismatch write the observed value back through *expected and
 * return false -- TODO confirm */
static bool pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 *expected, uint32 newval)
/* atomically adds add_ to ptr->value; presumably returns the pre-add
 * ("fetch") value -- TODO confirm */
static uint32 pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
/* looks like a mangled typedef of the 32-bit atomic wrapper struct
 * (originally `typedef struct pg_atomic_uint32 {...} pg_atomic_uint32;`?) */
struct pg_atomic_uint32 pg_atomic_uint32
/* 64-bit counterpart of the fetch-add above */
uint64 pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
/* 64-bit counterpart of the compare-exchange above */
bool pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 *expected, uint64 newval)
/* expands to nothing: pointer-alignment assertions are compiled out in
 * this configuration */
#define AssertPointerAlignment(ptr, bndr)