/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _STDATOMIC_H_
#define	_STDATOMIC_H_

#include <sys/cdefs.h>
#include <sys/_types.h>

#if __has_extension(c_atomic) || __has_extension(cxx_atomic)
#define	__CLANG_ATOMICS
#elif __GNUC_PREREQ__(4, 7)
#define	__GNUC_ATOMICS
#elif defined(__GNUC__)
#define	__SYNC_ATOMICS
#else
#error "stdatomic.h does not support your compiler"
#endif

/*
 * 7.17.1 Atomic lock-free macros.
 */

#ifdef __GCC_ATOMIC_BOOL_LOCK_FREE
#define	ATOMIC_BOOL_LOCK_FREE		__GCC_ATOMIC_BOOL_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR_LOCK_FREE
#define	ATOMIC_CHAR_LOCK_FREE		__GCC_ATOMIC_CHAR_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define	ATOMIC_CHAR16_T_LOCK_FREE	__GCC_ATOMIC_CHAR16_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define	ATOMIC_CHAR32_T_LOCK_FREE	__GCC_ATOMIC_CHAR32_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define	ATOMIC_WCHAR_T_LOCK_FREE	__GCC_ATOMIC_WCHAR_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_SHORT_LOCK_FREE
#define	ATOMIC_SHORT_LOCK_FREE		__GCC_ATOMIC_SHORT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_INT_LOCK_FREE
#define	ATOMIC_INT_LOCK_FREE		__GCC_ATOMIC_INT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LONG_LOCK_FREE
#define	ATOMIC_LONG_LOCK_FREE		__GCC_ATOMIC_LONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LLONG_LOCK_FREE
#define	ATOMIC_LLONG_LOCK_FREE		__GCC_ATOMIC_LLONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_POINTER_LOCK_FREE
#define	ATOMIC_POINTER_LOCK_FREE	__GCC_ATOMIC_POINTER_LOCK_FREE
#endif

/*
 * 7.17.2 Initialization.
 */

#if defined(__CLANG_ATOMICS)
#define	ATOMIC_VAR_INIT(value)		(value)
#define	atomic_init(obj, value)		__c11_atomic_init(obj, value)
#else
#define	ATOMIC_VAR_INIT(value)		{ .__val = (value) }
#define	atomic_init(obj, value)		((void)((obj)->__val = (value)))
#endif
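
/*
 * Usage sketch (illustrative only; the names below are arbitrary): an atomic
 * object may be given its initial value statically with ATOMIC_VAR_INIT() or
 * at run time with atomic_init().  Note that atomic_init() itself is not an
 * atomic operation; it is only intended for objects that no other thread is
 * accessing yet.
 *
 *	static atomic_uint __counter = ATOMIC_VAR_INIT(0);
 *
 *	void
 *	counter_setup(atomic_uint *__c)
 *	{
 *		atomic_init(__c, 0);
 *	}
 */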

/*
 * Clang and recent GCC both provide predefined macros for the memory
 * orderings.  If the compiler in use does not define them, fall back to the
 * Clang values; they are ignored in the __sync-based fallback path anyway.
 */

#ifndef __ATOMIC_RELAXED
#define __ATOMIC_RELAXED		0
#endif
#ifndef __ATOMIC_CONSUME
#define __ATOMIC_CONSUME		1
#endif
#ifndef __ATOMIC_ACQUIRE
#define __ATOMIC_ACQUIRE		2
#endif
#ifndef __ATOMIC_RELEASE
#define __ATOMIC_RELEASE		3
#endif
#ifndef __ATOMIC_ACQ_REL
#define __ATOMIC_ACQ_REL		4
#endif
#ifndef __ATOMIC_SEQ_CST
#define __ATOMIC_SEQ_CST		5
#endif

/*
 * 7.17.3 Order and consistency.
 *
 * The memory_order_* constants that denote the barrier behaviour of the
 * atomic operations.
 */

typedef enum {
	memory_order_relaxed = __ATOMIC_RELAXED,
	memory_order_consume = __ATOMIC_CONSUME,
	memory_order_acquire = __ATOMIC_ACQUIRE,
	memory_order_release = __ATOMIC_RELEASE,
	memory_order_acq_rel = __ATOMIC_ACQ_REL,
	memory_order_seq_cst = __ATOMIC_SEQ_CST
} memory_order;

/*
 * 7.17.4 Fences.
 */

static __inline void
atomic_thread_fence(memory_order __order __unused)
{

#ifdef __CLANG_ATOMICS
	__c11_atomic_thread_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_thread_fence(__order);
#else
	__sync_synchronize();
#endif
}

static __inline void
atomic_signal_fence(memory_order __order __unused)
{

#ifdef __CLANG_ATOMICS
	__c11_atomic_signal_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_signal_fence(__order);
#else
	__asm volatile ("" ::: "memory");
#endif
}
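
/*
 * Usage sketch (illustrative only; "__data", "__ready" and use() are
 * hypothetical): a release fence before a relaxed store pairs with an
 * acquire fence after a relaxed load that observes that store, making the
 * writes performed before the release fence visible to the reader.
 *
 *	Producer ("__data" is a plain int, "__ready" an atomic_int):
 *		__data = 42;
 *		atomic_thread_fence(memory_order_release);
 *		atomic_store_explicit(&__ready, 1, memory_order_relaxed);
 *
 *	Consumer:
 *		while (atomic_load_explicit(&__ready,
 *		    memory_order_relaxed) == 0)
 *			;
 *		atomic_thread_fence(memory_order_acquire);
 *		use(__data);
 */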

/*
 * 7.17.5 Lock-free property.
 */

#if defined(_KERNEL)
/* Atomics in kernelspace are always lock-free. */
#define	atomic_is_lock_free(obj) \
	((void)(obj), (_Bool)1)
#elif defined(__CLANG_ATOMICS) || defined(__GNUC_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof(*(obj)), obj)
#else
#define	atomic_is_lock_free(obj) \
	((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
#endif
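
/*
 * Usage sketch (illustrative only; the function names are hypothetical):
 * atomic_is_lock_free() can be used to choose between a lock-free fast path
 * and a mutex-protected fallback at run time.
 *
 *	atomic_uintptr_t __ptr;
 *
 *	if (atomic_is_lock_free(&__ptr))
 *		update_lockfree(&__ptr);
 *	else
 *		update_locked(&__ptr);
 */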

/*
 * 7.17.6 Atomic integer types.
 */

typedef _Atomic(_Bool)			atomic_bool;
typedef _Atomic(char)			atomic_char;
typedef _Atomic(signed char)		atomic_schar;
typedef _Atomic(unsigned char)		atomic_uchar;
typedef _Atomic(short)			atomic_short;
typedef _Atomic(unsigned short)		atomic_ushort;
typedef _Atomic(int)			atomic_int;
typedef _Atomic(unsigned int)		atomic_uint;
typedef _Atomic(long)			atomic_long;
typedef _Atomic(unsigned long)		atomic_ulong;
typedef _Atomic(long long)		atomic_llong;
typedef _Atomic(unsigned long long)	atomic_ullong;
typedef _Atomic(__char16_t)		atomic_char16_t;
typedef _Atomic(__char32_t)		atomic_char32_t;
typedef _Atomic(___wchar_t)		atomic_wchar_t;
typedef _Atomic(__int_least8_t)		atomic_int_least8_t;
typedef _Atomic(__uint_least8_t)	atomic_uint_least8_t;
typedef _Atomic(__int_least16_t)	atomic_int_least16_t;
typedef _Atomic(__uint_least16_t)	atomic_uint_least16_t;
typedef _Atomic(__int_least32_t)	atomic_int_least32_t;
typedef _Atomic(__uint_least32_t)	atomic_uint_least32_t;
typedef _Atomic(__int_least64_t)	atomic_int_least64_t;
typedef _Atomic(__uint_least64_t)	atomic_uint_least64_t;
typedef _Atomic(__int_fast8_t)		atomic_int_fast8_t;
typedef _Atomic(__uint_fast8_t)		atomic_uint_fast8_t;
typedef _Atomic(__int_fast16_t)		atomic_int_fast16_t;
typedef _Atomic(__uint_fast16_t)	atomic_uint_fast16_t;
typedef _Atomic(__int_fast32_t)		atomic_int_fast32_t;
typedef _Atomic(__uint_fast32_t)	atomic_uint_fast32_t;
typedef _Atomic(__int_fast64_t)		atomic_int_fast64_t;
typedef _Atomic(__uint_fast64_t)	atomic_uint_fast64_t;
typedef _Atomic(__intptr_t)		atomic_intptr_t;
typedef _Atomic(__uintptr_t)		atomic_uintptr_t;
typedef _Atomic(__size_t)		atomic_size_t;
typedef _Atomic(__ptrdiff_t)		atomic_ptrdiff_t;
typedef _Atomic(__intmax_t)		atomic_intmax_t;
typedef _Atomic(__uintmax_t)		atomic_uintmax_t;

/*
 * 7.17.7 Operations on atomic types.
 */

/*
 * Compiler-specific operations.
 */

#if defined(__CLANG_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__c11_atomic_compare_exchange_strong(object, expected, desired,	\
	    success, failure)
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	__c11_atomic_compare_exchange_weak(object, expected, desired,	\
	    success, failure)
#define	atomic_exchange_explicit(object, desired, order)		\
	__c11_atomic_exchange(object, desired, order)
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__c11_atomic_fetch_add(object, operand, order)
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__c11_atomic_fetch_and(object, operand, order)
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__c11_atomic_fetch_or(object, operand, order)
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__c11_atomic_fetch_sub(object, operand, order)
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__c11_atomic_fetch_xor(object, operand, order)
#define	atomic_load_explicit(object, order)				\
	__c11_atomic_load(object, order)
#define	atomic_store_explicit(object, desired, order)			\
	__c11_atomic_store(object, desired, order)
#elif defined(__GNUC_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__atomic_compare_exchange_n(object, expected,			\
	    desired, 0, success, failure)
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	__atomic_compare_exchange_n(object, expected,			\
	    desired, 1, success, failure)
#define	atomic_exchange_explicit(object, desired, order)		\
	__atomic_exchange_n(object, desired, order)
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__atomic_fetch_add(object, operand, order)
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__atomic_fetch_and(object, operand, order)
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__atomic_fetch_or(object, operand, order)
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__atomic_fetch_sub(object, operand, order)
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__atomic_fetch_xor(object, operand, order)
#define	atomic_load_explicit(object, order)				\
	__atomic_load_n(object, order)
#define	atomic_store_explicit(object, desired, order)			\
	__atomic_store_n(object, desired, order)
#else
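/*
 * __atomic_apply_stride() converts the operand to the type stored in the
 * atomic object.  For pointer-typed atomics this scales the addend by the
 * size of the pointed-to type, so atomic_fetch_add()/atomic_fetch_sub()
 * advance the pointer by whole elements rather than by bytes in the __sync
 * fallback path.
 */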
#define	__atomic_apply_stride(object, operand) \
	(((__typeof__((object)->__val))0) + (operand))
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)	__extension__ ({			\
	__typeof__(expected) __ep = (expected);				\
	__typeof__(*__ep) __e = *__ep;					\
	(void)(success); (void)(failure);				\
	(_Bool)((*__ep = __sync_val_compare_and_swap(&(object)->__val,	\
	    __e, desired)) == __e);					\
})
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	atomic_compare_exchange_strong_explicit(object, expected,	\
		desired, success, failure)
#if __has_builtin(__sync_swap)
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define	atomic_exchange_explicit(object, desired, order)		\
	((void)(order), __sync_swap(&(object)->__val, desired))
#else
/*
 * __sync_lock_test_and_set() is only guaranteed to be an acquire barrier
 * (although in practice it is usually a full barrier), so an explicit full
 * barrier is needed before it.
 */
#define	atomic_exchange_explicit(object, desired, order)		\
__extension__ ({							\
	__typeof__(object) __o = (object);				\
	__typeof__(desired) __d = (desired);				\
	(void)(order);							\
	__sync_synchronize();						\
	__sync_lock_test_and_set(&(__o)->__val, __d);			\
})
#endif
#define	atomic_fetch_add_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_add(&(object)->__val,		\
	    __atomic_apply_stride(object, operand)))
#define	atomic_fetch_and_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_and(&(object)->__val, operand))
#define	atomic_fetch_or_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_or(&(object)->__val, operand))
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_sub(&(object)->__val,		\
	    __atomic_apply_stride(object, operand)))
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_xor(&(object)->__val, operand))
#define	atomic_load_explicit(object, order)				\
	((void)(order), __sync_fetch_and_add(&(object)->__val, 0))
#define	atomic_store_explicit(object, desired, order)			\
	((void)atomic_exchange_explicit(object, desired, order))
#endif
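
/*
 * Usage sketch (illustrative only; "__v" and "__old" are arbitrary names):
 * a typical compare-and-swap retry loop.  On failure the current value of
 * the atomic object is written back into "__old", so the loop simply
 * recomputes the desired value and retries.
 *
 *	atomic_uint __v;
 *	unsigned int __old = atomic_load_explicit(&__v, memory_order_relaxed);
 *
 *	while (!atomic_compare_exchange_weak_explicit(&__v, &__old,
 *	    __old + 1, memory_order_acq_rel, memory_order_relaxed))
 *		;
 */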

/*
 * Convenience functions.
 *
 * Don't provide these in kernel space. In kernel space, we should be
 * disciplined enough to always provide explicit barriers.
 */

#ifndef _KERNEL
#define	atomic_compare_exchange_strong(object, expected, desired)	\
	atomic_compare_exchange_strong_explicit(object, expected,	\
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_compare_exchange_weak(object, expected, desired)		\
	atomic_compare_exchange_weak_explicit(object, expected,		\
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_exchange(object, desired)				\
	atomic_exchange_explicit(object, desired, memory_order_seq_cst)
#define	atomic_fetch_add(object, operand)				\
	atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_and(object, operand)				\
	atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_or(object, operand)				\
	atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_sub(object, operand)				\
	atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_xor(object, operand)				\
	atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
#define	atomic_load(object)						\
	atomic_load_explicit(object, memory_order_seq_cst)
#define	atomic_store(object, desired)					\
	atomic_store_explicit(object, desired, memory_order_seq_cst)
#endif /* !_KERNEL */
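
/*
 * Usage sketch (illustrative only; "__nevents" is an arbitrary name): the
 * convenience forms above behave like their _explicit counterparts with
 * memory_order_seq_cst supplied for every ordering argument.
 *
 *	static atomic_ulong __nevents = ATOMIC_VAR_INIT(0);
 *
 *	atomic_fetch_add(&__nevents, 1);
 *	unsigned long __n = atomic_load(&__nevents);
 */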

/*
 * 7.17.8 Atomic flag type and operations.
 *
 * XXX: Assume atomic_bool can be used as an atomic_flag. Is there some
 * kind of compiler built-in type we could use?
 */

typedef struct {
	atomic_bool	__flag;
} atomic_flag;
#define	ATOMIC_FLAG_INIT		{ ATOMIC_VAR_INIT(0) }

static __inline _Bool
atomic_flag_test_and_set_explicit(volatile atomic_flag *__object,
    memory_order __order)
{
	return (atomic_exchange_explicit(&__object->__flag, 1, __order));
}

static __inline void
atomic_flag_clear_explicit(volatile atomic_flag *__object, memory_order __order)
{

	atomic_store_explicit(&__object->__flag, 0, __order);
}

#ifndef _KERNEL
static __inline _Bool
atomic_flag_test_and_set(volatile atomic_flag *__object)
{

	return (atomic_flag_test_and_set_explicit(__object,
	    memory_order_seq_cst));
}

static __inline void
atomic_flag_clear(volatile atomic_flag *__object)
{

	atomic_flag_clear_explicit(__object, memory_order_seq_cst);
}
#endif /* !_KERNEL */
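
/*
 * Usage sketch (illustrative only; "__lock", "spin_lock" and "spin_unlock"
 * are arbitrary names): a minimal test-and-set spinlock built on atomic_flag.
 *
 *	static atomic_flag __lock = ATOMIC_FLAG_INIT;
 *
 *	static void
 *	spin_lock(void)
 *	{
 *		while (atomic_flag_test_and_set_explicit(&__lock,
 *		    memory_order_acquire))
 *			;
 *	}
 *
 *	static void
 *	spin_unlock(void)
 *	{
 *		atomic_flag_clear_explicit(&__lock, memory_order_release);
 *	}
 */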

#endif /* !_STDATOMIC_H_ */