/*-------------------------------------------------------------------------
 *
 * generic.h
 *	  Implement higher level operations based on some lower level atomic
 *	  operations.
 *
 * Portions Copyright (c) 1996-2024, PostgreSQL Global Development Group
 * Portions Copyright (c) 1994, Regents of the University of California
 *
 * src/include/port/atomics/generic.h
 *
 *-------------------------------------------------------------------------
 */
14 :
/*
 * Intentionally no include guards; this file should only ever be included
 * by atomics.h, which defines INSIDE_ATOMICS_H before including us.
 */
#ifndef INSIDE_ATOMICS_H
#	error "should be included via atomics.h"
#endif
19 :
/*
 * If read or write barriers are undefined, we upgrade them to full memory
 * barriers.  A full barrier is strictly stronger than either, so this is
 * always correct, merely potentially slower.
 */
#if !defined(pg_read_barrier_impl)
#define pg_read_barrier_impl pg_memory_barrier_impl
#endif
#if !defined(pg_write_barrier_impl)
#define pg_write_barrier_impl pg_memory_barrier_impl
#endif

/*
 * Fallback spin delay: do nothing.  Platform-specific headers may provide
 * their own pg_spin_delay_impl() (e.g. a CPU busy-wait hint) instead.
 */
#ifndef PG_HAVE_SPIN_DELAY
#define PG_HAVE_SPIN_DELAY
#define pg_spin_delay_impl() ((void)0)
#endif
35 :
36 :
/*
 * Provide fallback: if the platform has no dedicated flag type but does have
 * 32-bit atomics, represent pg_atomic_flag as a pg_atomic_uint32.  The flag
 * operations built on top of it are defined further down in this file.
 */
#if !defined(PG_HAVE_ATOMIC_FLAG_SUPPORT) && defined(PG_HAVE_ATOMIC_U32_SUPPORT)
#define PG_HAVE_ATOMIC_FLAG_SUPPORT
typedef pg_atomic_uint32 pg_atomic_flag;
#endif
42 :
#ifndef PG_HAVE_ATOMIC_READ_U32
#define PG_HAVE_ATOMIC_READ_U32
/*
 * Plain read of the current value.  The volatile qualifier forces an actual
 * memory access; no memory-barrier semantics are implied, so callers needing
 * ordering must add barriers themselves.
 */
static inline uint32
pg_atomic_read_u32_impl(volatile pg_atomic_uint32 *ptr)
{
	return ptr->value;
}
#endif
51 :
#ifndef PG_HAVE_ATOMIC_WRITE_U32
#define PG_HAVE_ATOMIC_WRITE_U32
/*
 * Plain store of val.  The volatile qualifier forces an actual memory
 * access; no memory-barrier semantics are implied.
 */
static inline void
pg_atomic_write_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
	ptr->value = val;
}
#endif
60 :
#ifndef PG_HAVE_ATOMIC_UNLOCKED_WRITE_U32
#define PG_HAVE_ATOMIC_UNLOCKED_WRITE_U32
/*
 * Non-locking store of val.  In this generic fallback the body is identical
 * to pg_atomic_write_u32_impl; the separate entry point exists so platforms
 * whose regular atomic write takes a lock can still provide a cheap plain
 * store here (NOTE(review): inferred from the macro structure — the
 * distinction is defined by atomics.h, not visible in this file).
 */
static inline void
pg_atomic_unlocked_write_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
	ptr->value = val;
}
#endif
69 :
/*
 * provide fallback for test_and_set using atomic_exchange if available
 */
#if !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) && defined(PG_HAVE_ATOMIC_EXCHANGE_U32)

#define PG_HAVE_ATOMIC_INIT_FLAG
/* Initialize the flag to the "clear" (0) state. */
static inline void
pg_atomic_init_flag_impl(volatile pg_atomic_flag *ptr)
{
	pg_atomic_write_u32_impl(ptr, 0);
}

#define PG_HAVE_ATOMIC_TEST_SET_FLAG
/*
 * Atomically set the flag to 1 and return true iff it was previously clear,
 * i.e. whether this caller "acquired" the flag.
 */
static inline bool
pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
{
	return pg_atomic_exchange_u32_impl(ptr, 1) == 0;
}

#define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
/*
 * Check whether the flag is currently clear (returns true when clear).
 * This is a plain read: no barrier and no atomicity with respect to a
 * concurrent test-and-set.
 */
static inline bool
pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
{
	return pg_atomic_read_u32_impl(ptr) == 0;
}


#define PG_HAVE_ATOMIC_CLEAR_FLAG
/* Clear the flag; the preceding full barrier orders prior stores first. */
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
	/* XXX: release semantics suffice? */
	pg_memory_barrier_impl();
	pg_atomic_write_u32_impl(ptr, 0);
}

/*
 * provide fallback for test_and_set using atomic_compare_exchange if
 * available.
 */
#elif !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)

#define PG_HAVE_ATOMIC_INIT_FLAG
/* Initialize the flag to the "clear" (0) state. */
static inline void
pg_atomic_init_flag_impl(volatile pg_atomic_flag *ptr)
{
	pg_atomic_write_u32_impl(ptr, 0);
}

#define PG_HAVE_ATOMIC_TEST_SET_FLAG
/*
 * Atomically set the flag via CAS(0 -> 1); returns true iff the CAS
 * succeeded, i.e. the flag was previously clear and we acquired it.
 */
static inline bool
pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
{
	uint32 value = 0;
	return pg_atomic_compare_exchange_u32_impl(ptr, &value, 1);
}

#define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
/*
 * Check whether the flag is currently clear (returns true when clear).
 * Plain read; no barrier semantics.
 */
static inline bool
pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
{
	return pg_atomic_read_u32_impl(ptr) == 0;
}

#define PG_HAVE_ATOMIC_CLEAR_FLAG
/* Clear the flag, providing at least release-store ordering. */
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
	/*
	 * Use a memory barrier + plain write if we have a native memory
	 * barrier. But don't do so if memory barriers use spinlocks - that'd lead
	 * to circularity if flags are used to implement spinlocks.
	 */
#ifndef PG_HAVE_MEMORY_BARRIER_EMULATION
	/* XXX: release semantics suffice? */
	pg_memory_barrier_impl();
	pg_atomic_write_u32_impl(ptr, 0);
#else
	/*
	 * CAS(1 -> 0): a single attempt suffices because only the flag holder
	 * clears it; if the flag was already 0 the failed CAS is harmless.
	 */
	uint32 value = 1;
	pg_atomic_compare_exchange_u32_impl(ptr, &value, 0);
#endif
}

#elif !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG)
#	error "No pg_atomic_test_and_set provided"
#endif /* !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) */
156 :
157 :
#ifndef PG_HAVE_ATOMIC_INIT_U32
#define PG_HAVE_ATOMIC_INIT_U32
/*
 * Initialize the variable to val_.  A plain store suffices: initialization
 * must happen before the variable is visible to any concurrent access.
 */
static inline void
pg_atomic_init_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val_)
{
	ptr->value = val_;
}
#endif
166 :
#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_EXCHANGE_U32
/*
 * Implement exchange with a compare-and-swap loop: keep attempting to swap
 * xchg_ in until the CAS succeeds; at that point "expected" holds the value
 * the variable had immediately before the swap, which we return.
 */
static inline uint32
pg_atomic_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 xchg_)
{
	uint32		expected = ptr->value;	/* ok if this read is not atomic */

	while (!pg_atomic_compare_exchange_u32_impl(ptr, &expected, xchg_))
	{
		/* failed CAS refreshed "expected" with the current value; retry */
	}
	return expected;
}
#endif
179 :
#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_ADD_U32
/*
 * Fetch-and-add built from compare-and-swap: retry CAS(expected ->
 * expected + add_) until it succeeds, then return the pre-add value.
 */
static inline uint32
pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
	uint32		expected = ptr->value;	/* ok if this read is not atomic */

	while (!pg_atomic_compare_exchange_u32_impl(ptr, &expected, expected + add_))
	{
		/* failed CAS refreshed "expected" with the current value; retry */
	}
	return expected;
}
#endif
192 :
#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_SUB_U32
/*
 * Fetch-and-sub as fetch-and-add of the negated amount; returns the value
 * the variable had before the subtraction.
 *
 * NOTE(review): negating sub_ is signed-overflow UB when sub_ == INT32_MIN;
 * presumably callers never pass that — confirm against call sites.
 */
static inline uint32
pg_atomic_fetch_sub_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
	return pg_atomic_fetch_add_u32_impl(ptr, -sub_);
}
#endif
201 :
#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_AND_U32
/*
 * Fetch-and-AND built from compare-and-swap: retry CAS(expected ->
 * expected & and_) until it succeeds, then return the pre-AND value.
 */
static inline uint32
pg_atomic_fetch_and_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 and_)
{
	uint32		expected = ptr->value;	/* ok if this read is not atomic */

	while (!pg_atomic_compare_exchange_u32_impl(ptr, &expected, expected & and_))
	{
		/* failed CAS refreshed "expected" with the current value; retry */
	}
	return expected;
}
#endif
214 :
#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_FETCH_OR_U32
/*
 * Fetch-and-OR built from compare-and-swap: retry CAS(expected ->
 * expected | or_) until it succeeds, then return the pre-OR value.
 */
static inline uint32
pg_atomic_fetch_or_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 or_)
{
	uint32		expected = ptr->value;	/* ok if this read is not atomic */

	while (!pg_atomic_compare_exchange_u32_impl(ptr, &expected, expected | or_))
	{
		/* failed CAS refreshed "expected" with the current value; retry */
	}
	return expected;
}
#endif
227 :
#if !defined(PG_HAVE_ATOMIC_ADD_FETCH_U32) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U32)
#define PG_HAVE_ATOMIC_ADD_FETCH_U32
/*
 * Add-and-fetch on top of fetch-and-add: the atomic op returns the old
 * value, so adding add_ once more yields the post-add result.
 */
static inline uint32
pg_atomic_add_fetch_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
	uint32		before = pg_atomic_fetch_add_u32_impl(ptr, add_);

	return before + add_;
}
#endif
236 :
#if !defined(PG_HAVE_ATOMIC_SUB_FETCH_U32) && defined(PG_HAVE_ATOMIC_FETCH_SUB_U32)
#define PG_HAVE_ATOMIC_SUB_FETCH_U32
/*
 * Sub-and-fetch on top of fetch-and-sub: the atomic op returns the old
 * value, so subtracting sub_ once more yields the post-sub result.
 */
static inline uint32
pg_atomic_sub_fetch_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
{
	uint32		before = pg_atomic_fetch_sub_u32_impl(ptr, sub_);

	return before - sub_;
}
#endif
245 :
#if !defined(PG_HAVE_ATOMIC_READ_MEMBARRIER_U32) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U32)
#define PG_HAVE_ATOMIC_READ_MEMBARRIER_U32
/*
 * Read with full-barrier semantics, implemented as fetch-and-add of zero:
 * the value is unchanged, and the read-modify-write supplies the barrier
 * that a plain read lacks.
 */
static inline uint32
pg_atomic_read_membarrier_u32_impl(volatile pg_atomic_uint32 *ptr)
{
	return pg_atomic_fetch_add_u32_impl(ptr, 0);
}
#endif
254 :
#if !defined(PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U32) && defined(PG_HAVE_ATOMIC_EXCHANGE_U32)
#define PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U32
/*
 * Write with full-barrier semantics, implemented as an atomic exchange
 * whose result (the previous value) is deliberately discarded.
 */
static inline void
pg_atomic_write_membarrier_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
	(void) pg_atomic_exchange_u32_impl(ptr, val);
}
#endif
263 :
#if !defined(PG_HAVE_ATOMIC_EXCHANGE_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_EXCHANGE_U64
/*
 * 64-bit exchange built from compare-and-swap: keep attempting to swap
 * xchg_ in until the CAS succeeds; "expected" then holds the value the
 * variable had immediately before the swap.
 */
static inline uint64
pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 xchg_)
{
	uint64		expected = ptr->value;	/* ok if this read is not atomic */

	while (!pg_atomic_compare_exchange_u64_impl(ptr, &expected, xchg_))
	{
		/* failed CAS refreshed "expected" with the current value; retry */
	}
	return expected;
}
#endif
276 :
#ifndef PG_HAVE_ATOMIC_WRITE_U64
#define PG_HAVE_ATOMIC_WRITE_U64

#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
	!defined(PG_HAVE_ATOMIC_U64_SIMULATION)

/* 64-bit store via a plain write; safe because this platform guarantees
 * single-copy atomicity for aligned 8-byte stores. */
static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
	/*
	 * On this platform aligned 64bit writes are guaranteed to be atomic,
	 * except if using the fallback implementation, where we can't guarantee
	 * the required alignment — hence the assertion.
	 */
	AssertPointerAlignment(ptr, 8);
	ptr->value = val;
}

#else

/* 64-bit store for platforms without single-copy 8-byte atomicity. */
static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
	/*
	 * 64 bit writes aren't safe on all platforms. In the generic
	 * implementation implement them as an atomic exchange, discarding the
	 * previous value.
	 */
	pg_atomic_exchange_u64_impl(ptr, val);
}

#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
#endif /* PG_HAVE_ATOMIC_WRITE_U64 */
309 :
#ifndef PG_HAVE_ATOMIC_READ_U64
#define PG_HAVE_ATOMIC_READ_U64

#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
	!defined(PG_HAVE_ATOMIC_U64_SIMULATION)

/* 64-bit read via a plain load; safe because this platform guarantees
 * single-copy atomicity for aligned 8-byte loads. */
static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
	/*
	 * On this platform aligned 64-bit reads are guaranteed to be atomic.
	 */
	AssertPointerAlignment(ptr, 8);
	return ptr->value;
}

#else

/* 64-bit read for platforms without single-copy 8-byte atomicity. */
static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
	uint64 old = 0;

	/*
	 * 64-bit reads aren't atomic on all platforms. In the generic
	 * implementation implement them as a compare/exchange with 0. That'll
	 * fail or succeed, but always return the old value. Possibly might store
	 * a 0, but only if the previous value also was a 0 - i.e. harmless.
	 */
	pg_atomic_compare_exchange_u64_impl(ptr, &old, 0);

	return old;
}
#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
#endif /* PG_HAVE_ATOMIC_READ_U64 */
345 :
#ifndef PG_HAVE_ATOMIC_INIT_U64
#define PG_HAVE_ATOMIC_INIT_U64
/*
 * Initialize the variable to val_.  A plain store suffices: initialization
 * must happen before the variable is visible to any concurrent access.
 */
static inline void
pg_atomic_init_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val_)
{
	ptr->value = val_;
}
#endif
354 :
#if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_ADD_U64
/*
 * 64-bit fetch-and-add built from compare-and-swap: retry CAS(expected ->
 * expected + add_) until it succeeds, then return the pre-add value.
 */
static inline uint64
pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
	uint64		expected = ptr->value;	/* ok if this read is not atomic */

	while (!pg_atomic_compare_exchange_u64_impl(ptr, &expected, expected + add_))
	{
		/* failed CAS refreshed "expected" with the current value; retry */
	}
	return expected;
}
#endif
367 :
#if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_SUB_U64
/*
 * Fetch-and-sub as fetch-and-add of the negated amount; returns the value
 * the variable had before the subtraction.
 *
 * NOTE(review): negating sub_ is signed-overflow UB when sub_ == INT64_MIN;
 * presumably callers never pass that — confirm against call sites.
 */
static inline uint64
pg_atomic_fetch_sub_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
	return pg_atomic_fetch_add_u64_impl(ptr, -sub_);
}
#endif
376 :
#if !defined(PG_HAVE_ATOMIC_FETCH_AND_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_AND_U64
/*
 * 64-bit fetch-and-AND built from compare-and-swap: retry CAS(expected ->
 * expected & and_) until it succeeds, then return the pre-AND value.
 */
static inline uint64
pg_atomic_fetch_and_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 and_)
{
	uint64		expected = ptr->value;	/* ok if this read is not atomic */

	while (!pg_atomic_compare_exchange_u64_impl(ptr, &expected, expected & and_))
	{
		/* failed CAS refreshed "expected" with the current value; retry */
	}
	return expected;
}
#endif
389 :
#if !defined(PG_HAVE_ATOMIC_FETCH_OR_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_FETCH_OR_U64
/*
 * 64-bit fetch-and-OR built from compare-and-swap: retry CAS(expected ->
 * expected | or_) until it succeeds, then return the pre-OR value.
 */
static inline uint64
pg_atomic_fetch_or_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 or_)
{
	uint64		expected = ptr->value;	/* ok if this read is not atomic */

	while (!pg_atomic_compare_exchange_u64_impl(ptr, &expected, expected | or_))
	{
		/* failed CAS refreshed "expected" with the current value; retry */
	}
	return expected;
}
#endif
402 :
#if !defined(PG_HAVE_ATOMIC_ADD_FETCH_U64) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U64)
#define PG_HAVE_ATOMIC_ADD_FETCH_U64
/*
 * Add-and-fetch on top of fetch-and-add: the atomic op returns the old
 * value, so adding add_ once more yields the post-add result.
 */
static inline uint64
pg_atomic_add_fetch_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
	uint64		before = pg_atomic_fetch_add_u64_impl(ptr, add_);

	return before + add_;
}
#endif
411 :
#if !defined(PG_HAVE_ATOMIC_SUB_FETCH_U64) && defined(PG_HAVE_ATOMIC_FETCH_SUB_U64)
#define PG_HAVE_ATOMIC_SUB_FETCH_U64
/*
 * Sub-and-fetch on top of fetch-and-sub: the atomic op returns the old
 * value, so subtracting sub_ once more yields the post-sub result.
 */
static inline uint64
pg_atomic_sub_fetch_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
	uint64		before = pg_atomic_fetch_sub_u64_impl(ptr, sub_);

	return before - sub_;
}
#endif
420 :
#if !defined(PG_HAVE_ATOMIC_READ_MEMBARRIER_U64) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U64)
#define PG_HAVE_ATOMIC_READ_MEMBARRIER_U64
/*
 * Read with full-barrier semantics, implemented as fetch-and-add of zero:
 * the value is unchanged, and the read-modify-write supplies the barrier
 * that a plain read lacks.
 */
static inline uint64
pg_atomic_read_membarrier_u64_impl(volatile pg_atomic_uint64 *ptr)
{
	return pg_atomic_fetch_add_u64_impl(ptr, 0);
}
#endif
429 :
#if !defined(PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U64) && defined(PG_HAVE_ATOMIC_EXCHANGE_U64)
#define PG_HAVE_ATOMIC_WRITE_MEMBARRIER_U64
/*
 * Write with full-barrier semantics, implemented as an atomic exchange
 * whose result (the previous value) is deliberately discarded.
 */
static inline void
pg_atomic_write_membarrier_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
	(void) pg_atomic_exchange_u64_impl(ptr, val);
}
#endif
|