/*-------------------------------------------------------------------------
 *
 * generic.h
 *    Implement higher level operations based on some lower level atomic
 *    operations.
 *
 * Portions Copyright (c) 1996-2026, PostgreSQL Global Development Group
 * Portions Copyright (c) 1994, Regents of the University of California
 *
 * src/include/port/atomics/generic.h
 *
 *-------------------------------------------------------------------------
 */

/* intentionally no include guards, should only be included by atomics.h */
#ifndef INSIDE_ATOMICS_H
# error "should be included via atomics.h"
#endif

/*
 * If read or write barriers are undefined, we upgrade them to full memory
 * barriers.
 */
#if !defined(pg_read_barrier_impl)
# define pg_read_barrier_impl pg_memory_barrier_impl
#endif
#if !defined(pg_write_barrier_impl)
# define pg_write_barrier_impl pg_memory_barrier_impl
#endif

#ifndef PG_HAVE_SPIN_DELAY
#define PG_HAVE_SPIN_DELAY
#define pg_spin_delay_impl() ((void)0)
#endif


/* provide fallback */
#if !defined(PG_HAVE_ATOMIC_FLAG_SUPPORT) && defined(PG_HAVE_ATOMIC_U32_SUPPORT)
#define PG_HAVE_ATOMIC_FLAG_SUPPORT
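/* The flag is emulated with a uint32: 0 means "clear", 1 means "set". */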
typedef pg_atomic_uint32 pg_atomic_flag;
#endif

#ifndef PG_HAVE_ATOMIC_READ_U32
#define PG_HAVE_ATOMIC_READ_U32
static inline uint32
pg_atomic_read_u32_impl(volatile pg_atomic_uint32 *ptr)
{
    return ptr->value;
}
#endif

#ifndef PG_HAVE_ATOMIC_WRITE_U32
#define PG_HAVE_ATOMIC_WRITE_U32
static inline void
pg_atomic_write_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
    ptr->value = val;
}
#endif

#ifndef PG_HAVE_ATOMIC_UNLOCKED_WRITE_U32
#define PG_HAVE_ATOMIC_UNLOCKED_WRITE_U32
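/*
 * Plain store, no barrier semantics; not intended for variables that are
 * concurrently updated with read-modify-write operations.
 */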
static inline void
pg_atomic_unlocked_write_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
    ptr->value = val;
}
#endif

/*
 * provide fallback for test_and_set using atomic_exchange if available
 */
#if !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) && defined(PG_HAVE_ATOMIC_EXCHANGE_U32)

#define PG_HAVE_ATOMIC_INIT_FLAG
static inline void
pg_atomic_init_flag_impl(volatile pg_atomic_flag *ptr)
{
    pg_atomic_write_u32_impl(ptr, 0);
}

#define PG_HAVE_ATOMIC_TEST_SET_FLAG
static inline bool
pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
{
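    /*
     * The exchange returns the previous value; seeing 0 means the flag was
     * clear and this caller is the one that set it.
     */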
    return pg_atomic_exchange_u32_impl(ptr, 1) == 0;
}

#define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
static inline bool
pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
{
    return pg_atomic_read_u32_impl(ptr) == 0;
}


#define PG_HAVE_ATOMIC_CLEAR_FLAG
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
    /* XXX: release semantics suffice? */
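    /*
     * A full barrier keeps stores done while the flag was set from being
     * reordered past the store that clears it.
     */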
    pg_memory_barrier_impl();
    pg_atomic_write_u32_impl(ptr, 0);
}

/*
 * provide fallback for test_and_set using atomic_compare_exchange if
 * available.
 */
#elif !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)

#define PG_HAVE_ATOMIC_INIT_FLAG
static inline void
pg_atomic_init_flag_impl(volatile pg_atomic_flag *ptr)
{
    pg_atomic_write_u32_impl(ptr, 0);
}

#define PG_HAVE_ATOMIC_TEST_SET_FLAG
static inline bool
pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
{
    uint32      value = 0;
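
    /* The CAS from 0 to 1 succeeds only if the flag was still clear. */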
    return pg_atomic_compare_exchange_u32_impl(ptr, &value, 1);
}

#define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
static inline bool
pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
{
    return pg_atomic_read_u32_impl(ptr) == 0;
}

#define PG_HAVE_ATOMIC_CLEAR_FLAG
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
    /* XXX: release semantics suffice? */
    pg_memory_barrier_impl();
    pg_atomic_write_u32_impl(ptr, 0);
}

#elif !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG)
# error "No pg_atomic_test_and_set provided"
#endif                          /* !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) */


#ifndef PG_HAVE_ATOMIC_INIT_U32
#define PG_HAVE_ATOMIC_INIT_U32
static inline void
pg_atomic_init_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val_)
{
    ptr->value = val_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_EXCHANGE_U32
static inline uint32
pg_atomic_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 xchg_)
{
    uint32      old;

    old = ptr->value;           /* ok if read is not atomic */
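    /*
     * On failure, compare_exchange stores the value it found into "old", so
     * each retry works with a fresh copy until no concurrent change gets in
     * the way.
     */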
    while (!pg_atomic_compare_exchange_u32_impl(ptr, &old, xchg_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_ADD_U32
static inline uint32
pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
    uint32      old;

    old = ptr->value;           /* ok if read is not atomic */
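    /* Recompute old + add_ from the refreshed value until the CAS succeeds. */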
    while (!pg_atomic_compare_exchange_u32_impl(ptr, &old, old + add_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_SUB_U32
static inline uint32
pg_atomic_fetch_sub_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
    return pg_atomic_fetch_add_u32_impl(ptr, -sub_);
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_AND_U32
static inline uint32
pg_atomic_fetch_and_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 and_)
{
    uint32      old;

    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u32_impl(ptr, &old, old & and_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_OR_U32
static inline uint32
pg_atomic_fetch_or_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 or_)
{
    uint32      old;

    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u32_impl(ptr, &old, old | or_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_ADD_FETCH_U32) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U32)
#define PG_HAVE_ATOMIC_ADD_FETCH_U32
static inline uint32
pg_atomic_add_fetch_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
    return pg_atomic_fetch_add_u32_impl(ptr, add_) + add_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_SUB_FETCH_U32) && defined(PG_HAVE_ATOMIC_FETCH_SUB_U32)
#define PG_HAVE_ATOMIC_SUB_FETCH_U32
static inline uint32
pg_atomic_sub_fetch_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
    return pg_atomic_fetch_sub_u32_impl(ptr, sub_) - sub_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_READ_MEMBARRIER_U32) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U32)
#define PG_HAVE_ATOMIC_READ_MEMBARRIER_U32
static inline uint32
pg_atomic_read_membarrier_u32_impl(volatile pg_atomic_uint32 *ptr)
{
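    /* A fetch_add of 0 leaves the value unchanged but acts as a full barrier. */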
    return pg_atomic_fetch_add_u32_impl(ptr, 0);
}
#endif

#if !defined(PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U32) && defined(PG_HAVE_ATOMIC_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U32
static inline void
pg_atomic_write_membarrier_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
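    /* The exchange provides full barrier semantics; the old value is discarded. */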
    (void) pg_atomic_exchange_u32_impl(ptr, val);
}
#endif

#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_EXCHANGE_U64
static inline uint64
pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 xchg_)
{
    uint64      old;

    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u64_impl(ptr, &old, xchg_))
        /* skip */;
    return old;
}
#endif

#ifndef PG_HAVE_ATOMIC_WRITE_U64
#define PG_HAVE_ATOMIC_WRITE_U64

#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
    !defined(PG_HAVE_ATOMIC_U64_SIMULATION)

static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    /*
     * On this platform aligned 64-bit writes are guaranteed to be atomic,
     * except when using the fallback implementation, where we can't
     * guarantee the required alignment.
     */
    AssertPointerAlignment(ptr, 8);
    ptr->value = val;
}

#else

static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    /*
     * 64-bit writes aren't safe on all platforms. In the generic
     * implementation, perform them as an atomic exchange.
     */
    pg_atomic_exchange_u64_impl(ptr, val);
}

#endif                          /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
#endif                          /* PG_HAVE_ATOMIC_WRITE_U64 */

#ifndef PG_HAVE_ATOMIC_UNLOCKED_WRITE_U64
#define PG_HAVE_ATOMIC_UNLOCKED_WRITE_U64
static inline void
pg_atomic_unlocked_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    ptr->value = val;
}
#endif

#ifndef PG_HAVE_ATOMIC_READ_U64
#define PG_HAVE_ATOMIC_READ_U64

#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
    !defined(PG_HAVE_ATOMIC_U64_SIMULATION)

static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
    /*
     * On this platform aligned 64-bit reads are guaranteed to be atomic.
     */
    AssertPointerAlignment(ptr, 8);
    return ptr->value;
}

#else

static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
    uint64      old = 0;

    /*
     * 64-bit reads aren't atomic on all platforms. In the generic
     * implementation, perform them as a compare/exchange with 0. That will
     * fail or succeed, but always return the old value. It may store a 0,
     * but only if the previous value was also 0, i.e. harmless.
     */
    pg_atomic_compare_exchange_u64_impl(ptr, &old, 0);

    return old;
}
#endif                          /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
#endif                          /* PG_HAVE_ATOMIC_READ_U64 */

#ifndef PG_HAVE_ATOMIC_INIT_U64
#define PG_HAVE_ATOMIC_INIT_U64
static inline void
pg_atomic_init_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val_)
{
    ptr->value = val_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_ADD_U64
static inline uint64
pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
    uint64      old;

    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u64_impl(ptr, &old, old + add_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_SUB_U64
static inline uint64
pg_atomic_fetch_sub_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
    return pg_atomic_fetch_add_u64_impl(ptr, -sub_);
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_AND_U64
static inline uint64
pg_atomic_fetch_and_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 and_)
{
    uint64      old;

    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u64_impl(ptr, &old, old & and_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_OR_U64
static inline uint64
pg_atomic_fetch_or_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 or_)
{
    uint64      old;

    old = ptr->value;           /* ok if read is not atomic */
    while (!pg_atomic_compare_exchange_u64_impl(ptr, &old, old | or_))
        /* skip */;
    return old;
}
#endif

#if !defined(PG_HAVE_ATOMIC_ADD_FETCH_U64) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U64)
#define PG_HAVE_ATOMIC_ADD_FETCH_U64
static inline uint64
pg_atomic_add_fetch_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
    return pg_atomic_fetch_add_u64_impl(ptr, add_) + add_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_SUB_FETCH_U64) && defined(PG_HAVE_ATOMIC_FETCH_SUB_U64)
#define PG_HAVE_ATOMIC_SUB_FETCH_U64
static inline uint64
pg_atomic_sub_fetch_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
    return pg_atomic_fetch_sub_u64_impl(ptr, sub_) - sub_;
}
#endif

#if !defined(PG_HAVE_ATOMIC_READ_MEMBARRIER_U64) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U64)
#define PG_HAVE_ATOMIC_READ_MEMBARRIER_U64
static inline uint64
pg_atomic_read_membarrier_u64_impl(volatile pg_atomic_uint64 *ptr)
{
    return pg_atomic_fetch_add_u64_impl(ptr, 0);
}
#endif

#if !defined(PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U64) && defined(PG_HAVE_ATOMIC_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U64
static inline void
pg_atomic_write_membarrier_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    (void) pg_atomic_exchange_u64_impl(ptr, val);
}
#endif