/*-------------------------------------------------------------------------
 *
 * generic.h
 *    Implement higher level operations based on some lower level atomic
 *    operations.
 *
 * Portions Copyright (c) 1996-2024, PostgreSQL Global Development Group
 * Portions Copyright (c) 1994, Regents of the University of California
 *
 * src/include/port/atomics/generic.h
 *
 *-------------------------------------------------------------------------
 */

/* intentionally no include guards, should only be included by atomics.h */
#ifndef INSIDE_ATOMICS_H
# error "should be included via atomics.h"
#endif

/*
 * If read or write barriers are undefined, we upgrade them to full memory
 * barriers.
 */
#if !defined(pg_read_barrier_impl)
# define pg_read_barrier_impl pg_memory_barrier_impl
#endif
#if !defined(pg_write_barrier_impl)
# define pg_write_barrier_impl pg_memory_barrier_impl
#endif
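
/*
 * Illustrative sketch, not part of the original header: the two barriers
 * are used as a pair.  A writer orders its data store before the store
 * that publishes it; a reader orders the load of the published word before
 * the loads that consume the data.  pg_write_barrier() and pg_read_barrier()
 * are the public wrappers from atomics.h; "payload", "published",
 * compute() and consume() are hypothetical.
 */
#if 0
/* writer */
payload = compute();        /* plain store */
pg_write_barrier();         /* payload must be visible before published */
published = 1;

/* reader */
if (published)
{
    pg_read_barrier();      /* published must be read before payload */
    consume(payload);
}
#endif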

#ifndef PG_HAVE_SPIN_DELAY
#define PG_HAVE_SPIN_DELAY
#define pg_spin_delay_impl() ((void)0)
#endif
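
/*
 * Illustrative sketch, not part of the original header: pg_spin_delay_impl()
 * is what a busy-wait loop calls between attempts (e.g. a PAUSE instruction
 * on x86).  The no-op default above is correct, merely less friendly to the
 * CPU.  "lock" is a hypothetical pg_atomic_flag.
 */
#if 0
while (!pg_atomic_test_set_flag_impl(&lock))
    pg_spin_delay_impl();
#endif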


/* provide fallback */
#if !defined(PG_HAVE_ATOMIC_FLAG_SUPPORT) && defined(PG_HAVE_ATOMIC_U32_SUPPORT)
#define PG_HAVE_ATOMIC_FLAG_SUPPORT
typedef pg_atomic_uint32 pg_atomic_flag;
#endif

#ifndef PG_HAVE_ATOMIC_READ_U32
#define PG_HAVE_ATOMIC_READ_U32
static inline uint32
pg_atomic_read_u32_impl(volatile pg_atomic_uint32 *ptr)
{
    return ptr->value;
}
#endif

#ifndef PG_HAVE_ATOMIC_WRITE_U32
#define PG_HAVE_ATOMIC_WRITE_U32
static inline void
pg_atomic_write_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
    ptr->value = val;
}
#endif

#ifndef PG_HAVE_ATOMIC_UNLOCKED_WRITE_U32
#define PG_HAVE_ATOMIC_UNLOCKED_WRITE_U32
static inline void
pg_atomic_unlocked_write_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
    ptr->value = val;
}
#endif

/*
 * provide fallback for test_and_set using atomic_exchange if available
 */
#if !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) && defined(PG_HAVE_ATOMIC_EXCHANGE_U32)

#define PG_HAVE_ATOMIC_INIT_FLAG
static inline void
pg_atomic_init_flag_impl(volatile pg_atomic_flag *ptr)
{
    pg_atomic_write_u32_impl(ptr, 0);
}

#define PG_HAVE_ATOMIC_TEST_SET_FLAG
static inline bool
pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
{
    return pg_atomic_exchange_u32_impl(ptr, 1) == 0;
}

#define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
static inline bool
pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
{
    return pg_atomic_read_u32_impl(ptr) == 0;
}


#define PG_HAVE_ATOMIC_CLEAR_FLAG
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
    /* XXX: release semantics suffice? */
    pg_memory_barrier_impl();
    pg_atomic_write_u32_impl(ptr, 0);
}

/*
 * provide fallback for test_and_set using atomic_compare_exchange if
 * available.
 */
#elif !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)

#define PG_HAVE_ATOMIC_INIT_FLAG
static inline void
pg_atomic_init_flag_impl(volatile pg_atomic_flag *ptr)
{
    pg_atomic_write_u32_impl(ptr, 0);
}

#define PG_HAVE_ATOMIC_TEST_SET_FLAG
static inline bool
pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
{
    uint32 value = 0;
    return pg_atomic_compare_exchange_u32_impl(ptr, &value, 1);
}

#define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
static inline bool
pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
{
    return pg_atomic_read_u32_impl(ptr) == 0;
}

#define PG_HAVE_ATOMIC_CLEAR_FLAG
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
    /* XXX: release semantics suffice? */
    pg_memory_barrier_impl();
    pg_atomic_write_u32_impl(ptr, 0);
}

#elif !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG)
# error "No pg_atomic_test_and_set provided"
#endif /* !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) */

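/*
 * Illustrative sketch, not part of the original header: whichever branch
 * above was taken, the resulting flag API behaves like a test-and-set lock
 * bit.  test_set returns true only to the caller that found the flag clear.
 * "lock" is a hypothetical variable.
 */
#if 0
pg_atomic_flag lock;

pg_atomic_init_flag_impl(&lock);            /* starts clear */
if (pg_atomic_test_set_flag_impl(&lock))    /* true: we own it now */
    pg_atomic_clear_flag_impl(&lock);       /* release */
#endif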

#ifndef PG_HAVE_ATOMIC_INIT_U32
#define PG_HAVE_ATOMIC_INIT_U32
static inline void
pg_atomic_init_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val_)
{
    ptr->value = val_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_EXCHANGE_U32
static inline uint32
pg_atomic_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 xchg_)
{
    uint32 old;
    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u32_impl(ptr, &old, xchg_))
        /* skip */;
    return old;
}
#endif
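
/*
 * Explanatory note, not in the original header: the CAS loops in these
 * fallbacks never re-read ptr->value explicitly because
 * pg_atomic_compare_exchange_u32_impl(), on failure, writes the value it
 * actually found back into *expected ("old").  The initial unlocked read
 * is just a starting guess; a stale or torn value only costs an extra
 * iteration.
 */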

#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_ADD_U32
static inline uint32
pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
    uint32 old;
    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u32_impl(ptr, &old, old + add_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_SUB_U32
static inline uint32
pg_atomic_fetch_sub_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
    return pg_atomic_fetch_add_u32_impl(ptr, -sub_);
}
#endif
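
/*
 * Explanatory note, not in the original header: fetch_sub can reuse
 * fetch_add because unsigned arithmetic wraps modulo 2^32, so adding the
 * two's-complement negation of sub_ is exactly a subtraction of sub_.
 */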

#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_AND_U32
static inline uint32
pg_atomic_fetch_and_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 and_)
{
    uint32 old;
    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u32_impl(ptr, &old, old & and_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_OR_U32
static inline uint32
pg_atomic_fetch_or_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 or_)
{
    uint32 old;
    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u32_impl(ptr, &old, old | or_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_ADD_FETCH_U32) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U32)
#define PG_HAVE_ATOMIC_ADD_FETCH_U32
static inline uint32
pg_atomic_add_fetch_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
    return pg_atomic_fetch_add_u32_impl(ptr, add_) + add_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_SUB_FETCH_U32) && defined(PG_HAVE_ATOMIC_FETCH_SUB_U32)
#define PG_HAVE_ATOMIC_SUB_FETCH_U32
static inline uint32
pg_atomic_sub_fetch_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
    return pg_atomic_fetch_sub_u32_impl(ptr, sub_) - sub_;
}
#endif
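
/*
 * Illustrative sketch, not part of the original header: add_fetch derives
 * the post-update value by re-applying the delta to the old value that
 * fetch_add returned, avoiding a second atomic access.  "refcount" is a
 * hypothetical pg_atomic_uint32.
 */
#if 0
uint32 live = pg_atomic_add_fetch_u32_impl(&refcount, 1);   /* new count */
#endif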

#if !defined(PG_HAVE_ATOMIC_READ_MEMBARRIER_U32) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U32)
#define PG_HAVE_ATOMIC_READ_MEMBARRIER_U32
static inline uint32
pg_atomic_read_membarrier_u32_impl(volatile pg_atomic_uint32 *ptr)
{
    return pg_atomic_fetch_add_u32_impl(ptr, 0);
}
#endif

#if !defined(PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U32) && defined(PG_HAVE_ATOMIC_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U32
static inline void
pg_atomic_write_membarrier_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
    (void) pg_atomic_exchange_u32_impl(ptr, val);
}
#endif
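
/*
 * Explanatory note, not in the original header: both fallbacks above lean
 * on the full-barrier semantics of atomic read-modify-write operations.  A
 * fetch_add of zero is a read with full-barrier behavior; an exchange whose
 * result is discarded is a write with full-barrier behavior.
 */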

#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_EXCHANGE_U64
static inline uint64
pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 xchg_)
{
    uint64 old;
    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u64_impl(ptr, &old, xchg_))
        /* skip */;
    return old;
}
#endif

#ifndef PG_HAVE_ATOMIC_WRITE_U64
#define PG_HAVE_ATOMIC_WRITE_U64

#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
    !defined(PG_HAVE_ATOMIC_U64_SIMULATION)

static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    /*
     * On this platform aligned 64-bit writes are guaranteed to be atomic,
     * except when using the fallback implementation, where we can't
     * guarantee the required alignment.
     */
    AssertPointerAlignment(ptr, 8);
    ptr->value = val;
}

#else

static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    /*
     * 64-bit writes aren't safe on all platforms. In the generic
     * implementation, implement them as an atomic exchange.
     */
    pg_atomic_exchange_u64_impl(ptr, val);
}

#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
#endif /* PG_HAVE_ATOMIC_WRITE_U64 */

#ifndef PG_HAVE_ATOMIC_READ_U64
#define PG_HAVE_ATOMIC_READ_U64

#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
    !defined(PG_HAVE_ATOMIC_U64_SIMULATION)

static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
    /*
     * On this platform aligned 64-bit reads are guaranteed to be atomic.
     */
    AssertPointerAlignment(ptr, 8);
    return ptr->value;
}

#else

static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
    uint64 old = 0;

    /*
     * 64-bit reads aren't atomic on all platforms. In the generic
     * implementation, implement them as a compare/exchange with 0. That'll
     * fail or succeed, but always return the old value. Possibly might store
     * a 0, but only if the previous value also was a 0 - i.e. harmless.
     */
    pg_atomic_compare_exchange_u64_impl(ptr, &old, 0);

    return old;
}
#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
#endif /* PG_HAVE_ATOMIC_READ_U64 */

#ifndef PG_HAVE_ATOMIC_INIT_U64
#define PG_HAVE_ATOMIC_INIT_U64
static inline void
pg_atomic_init_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val_)
{
    ptr->value = val_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_ADD_U64
static inline uint64
pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
    uint64 old;
    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u64_impl(ptr, &old, old + add_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_SUB_U64
static inline uint64
pg_atomic_fetch_sub_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
    return pg_atomic_fetch_add_u64_impl(ptr, -sub_);
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_AND_U64
static inline uint64
pg_atomic_fetch_and_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 and_)
{
    uint64 old;
    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u64_impl(ptr, &old, old & and_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_OR_U64
static inline uint64
pg_atomic_fetch_or_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 or_)
{
    uint64 old;
    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u64_impl(ptr, &old, old | or_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_ADD_FETCH_U64) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U64)
#define PG_HAVE_ATOMIC_ADD_FETCH_U64
static inline uint64
pg_atomic_add_fetch_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
    return pg_atomic_fetch_add_u64_impl(ptr, add_) + add_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_SUB_FETCH_U64) && defined(PG_HAVE_ATOMIC_FETCH_SUB_U64)
#define PG_HAVE_ATOMIC_SUB_FETCH_U64
static inline uint64
pg_atomic_sub_fetch_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
    return pg_atomic_fetch_sub_u64_impl(ptr, sub_) - sub_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_READ_MEMBARRIER_U64) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U64)
#define PG_HAVE_ATOMIC_READ_MEMBARRIER_U64
static inline uint64
pg_atomic_read_membarrier_u64_impl(volatile pg_atomic_uint64 *ptr)
{
    return pg_atomic_fetch_add_u64_impl(ptr, 0);
}
#endif

#if !defined(PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U64) && defined(PG_HAVE_ATOMIC_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U64
static inline void
pg_atomic_write_membarrier_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    (void) pg_atomic_exchange_u64_impl(ptr, val);
}
#endif