1139804Simp/* SPDX-License-Identifier: GPL-2.0 */
21541Srgrimes#ifndef __ASM_BARRIER_H
31541Srgrimes#define __ASM_BARRIER_H
41541Srgrimes
51541Srgrimes#include <asm/alternative.h>
61541Srgrimes
71541Srgrimes#ifndef __ASSEMBLY__
81541Srgrimes
91541Srgrimes/* The synchronize caches instruction executes as a nop on systems in
101541Srgrimes   which all memory references are performed in order. */
111541Srgrimes#define synchronize_caches() asm volatile("sync" \
121541Srgrimes	ALTERNATIVE(ALT_COND_NO_SMP, INSN_NOP) \
131541Srgrimes	: : : "memory")
141541Srgrimes
#if defined(CONFIG_SMP)
/*
 * SMP: a real hardware barrier is required so that other CPUs observe
 * memory accesses in order.  All barrier flavours map to the same
 * "sync"-based primitive.
 */
#define mb()		do { synchronize_caches(); } while (0)
#define rmb()		mb()
#define wmb()		mb()
#define dma_rmb()	mb()
#define dma_wmb()	mb()
#else
/*
 * UP: a single CPU observes its own accesses in program order, so a
 * compiler-only barrier is sufficient.
 */
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif
281541Srgrimes
/*
 * The SMP-conditional barriers used by asm-generic/barrier.h all map to
 * the full barrier above (read and write barriers are not weakened
 * separately here).
 */
#define __smp_mb()	mb()
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()
32116182Sobrien
/*
 * Store with release semantics: all memory accesses before the store
 * must be visible before the store itself.  The ",ma" completer with a
 * zero displacement is used here; NOTE(review): this appears to rely on
 * it sharing an encoding with the ordered-store completer (",o") on
 * PA 2.0 — confirm against the architecture manual.  The union performs
 * the type pun from *p's type to a raw byte buffer without violating
 * strict aliasing.
 *
 * NOTE(review): for sizeof(*p) == 8 on a 32-bit kernel the switch body
 * is empty, so no store is emitted at all — verify that callers never
 * pass a 64-bit object on CONFIG_64BIT=n.
 */
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("stb,ma %0,0(%1)"				\
				: : "r"(*(__u8 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("sth,ma %0,0(%1)"				\
				: : "r"(*(__u16 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("stw,ma %0,0(%1)"				\
				: : "r"(*(__u32 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("std,ma %0,0(%1)"			\
				: : "r"(*(__u64 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
} while (0)
631541Srgrimes
641541Srgrimes#define __smp_load_acquire(p)						\
651541Srgrimes({									\
661541Srgrimes	union { typeof(*p) __val; char __c[1]; } __u;			\
671541Srgrimes	typeof(p) __p = (p);						\
681541Srgrimes	compiletime_assert_atomic_type(*p);				\
6994343Sjhb	switch (sizeof(*p)) {						\
7092723Salfred	case 1:								\
7192723Salfred		asm volatile("ldb,ma 0(%1),%0"				\
7213016Sbde				: "=r"(*(__u8 *)__u.__c) : "r"(__p)	\
7330739Sphk				: "memory");				\
7430739Sphk		break;							\
7530739Sphk	case 2:								\
7630739Sphk		asm volatile("ldh,ma 0(%1),%0"				\
7730739Sphk				: "=r"(*(__u16 *)__u.__c) : "r"(__p)	\
7830739Sphk				: "memory");				\
7992723Salfred		break;							\
8030739Sphk	case 4:								\
8125583Speter		asm volatile("ldw,ma 0(%1),%0"				\
82102074Sphk				: "=r"(*(__u32 *)__u.__c) : "r"(__p)	\
8325583Speter				: "memory");				\
8445433Snsayer		break;							\
8545438Snsayer	case 8:								\
8633690Sphk		if (IS_ENABLED(CONFIG_64BIT))				\
8725583Speter			asm volatile("ldd,ma 0(%1),%0"			\
8825583Speter				: "=r"(*(__u64 *)__u.__c) : "r"(__p)	\
8925656Speter				: "memory");				\
9033818Sbde		break;							\
9135029Sphk	}								\
9235029Sphk	__u.__val;							\
9325583Speter})
9425583Speter#include <asm-generic/barrier.h>
9533818Sbde
9645433Snsayer#endif /* !__ASSEMBLY__ */
9745433Snsayer#endif /* __ASM_BARRIER_H */
9845433Snsayer