/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_32_H
#define _ASM_X86_CMPXCHG_32_H

/*
 * Note: if you use __cmpxchg64() or its variants, you need to test for
 *       the CMPXCHG8B feature (X86_FEATURE_CX8) in boot_cpu_data.
 */
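
/*
 * A minimal usage sketch (the "counter" variable and the error return are
 * hypothetical, not part of this header): guard the raw helpers with
 * system_has_cmpxchg64(), defined at the bottom of this file, which tests
 * X86_FEATURE_CX8 in boot_cpu_data:
 *
 *	u64 old = counter;
 *
 *	if (!system_has_cmpxchg64())
 *		return -EOPNOTSUPP;
 *
 *	old = __cmpxchg64(&counter, old, old + 1);
 */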

union __u64_halves {
	u64 full;
	struct {
		u32 low, high;
	};
};

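/*
 * CMPXCHG8B compares %edx:%eax (the expected value) with the 64-bit memory
 * operand; on a match it stores %ecx:%ebx there, otherwise it loads the
 * current memory contents into %edx:%eax.  Either way the macro evaluates
 * to the value that was found in memory.  _lock is LOCK_PREFIX for the
 * SMP-safe variants below and empty for the _local ones.
 */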
#define __arch_cmpxchg64(_ptr, _old, _new, _lock)			\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm volatile(_lock "cmpxchg8b %[ptr]"				\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})

static __always_inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new, LOCK_PREFIX);
}

static __always_inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64(ptr, old, new,);
}

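/*
 * Same CMPXCHG8B sequence as above, but the success/failure result is taken
 * from ZF via CC_SET(e)/CC_OUT(e), and on failure the value just observed
 * in memory is written back through @_oldp so the caller can retry with a
 * fresh expected value.
 */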
#define __arch_try_cmpxchg64(_ptr, _oldp, _new, _lock)			\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(_lock "cmpxchg8b %[ptr]"				\
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new, LOCK_PREFIX);
}

static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64(ptr, oldp, new,);
}
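
/*
 * A minimal retry-loop sketch for the try variants (the "counter" variable
 * is hypothetical): because a failed attempt refreshes "old" with the value
 * observed in memory, each retry recomputes from the current state:
 *
 *	u64 old = counter, new;
 *
 *	do {
 *		new = old + 1;
 *	} while (!__try_cmpxchg64(&counter, &old, new));
 */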

#ifdef CONFIG_X86_CMPXCHG64
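/*
 * All CPUs selectable under this configuration are assumed to implement
 * CMPXCHG8B natively, so the arch_ interfaces map straight to the helpers
 * above and no emulation is needed.
 */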

#define arch_cmpxchg64 __cmpxchg64

#define arch_cmpxchg64_local __cmpxchg64_local

#define arch_try_cmpxchg64 __try_cmpxchg64

#define arch_try_cmpxchg64_local __try_cmpxchg64_local

#else

/*
 * When building a kernel capable of running on the 80386 and 80486, it may
 * be necessary to emulate CMPXCHG8B at run time, since those CPUs may lack
 * the CX8 feature.
 */
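
/*
 * The ALTERNATIVE() below is patched at boot: CPUs with X86_FEATURE_CX8 run
 * the inline (optionally LOCK-prefixed) CMPXCHG8B, everything else falls
 * back to calling cmpxchg8b_emu.  The emulation helper takes the target
 * pointer in %esi (hence the additional "S" (_ptr) input) and otherwise
 * follows the CMPXCHG8B register convention described above.
 */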

#define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm volatile(ALTERNATIVE(_lock_loc				\
				 "call cmpxchg8b_emu",			\
				 _lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high), "S" (_ptr)		\
		     : "memory");					\
									\
	o.full;								\
})

static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_cmpxchg64 arch_cmpxchg64

static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
{
	return __arch_cmpxchg64_emu(ptr, old, new, ,);
}
#define arch_cmpxchg64_local arch_cmpxchg64_local

#define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = *(_oldp), },			\
			   n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(ALTERNATIVE(_lock_loc				\
				 "call cmpxchg8b_emu",			\
				 _lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high), "S" (_ptr)		\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock; ");
}
#define arch_try_cmpxchg64 arch_try_cmpxchg64

static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
{
	return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
}
#define arch_try_cmpxchg64_local arch_try_cmpxchg64_local

#endif

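/*
 * Run-time check for CMPXCHG8B support on the boot CPU; see the note at the
 * top of this file about guarding the raw __cmpxchg64() helpers with it.
 */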
#define system_has_cmpxchg64()		boot_cpu_has(X86_FEATURE_CX8)

#endif /* _ASM_X86_CMPXCHG_32_H */