mirror of
https://github.com/AuxXxilium/linux_dsm_epyc7002.git
synced 2024-12-18 08:56:48 +07:00
parisc: Implement __smp_store_release and __smp_load_acquire barriers
This patch implements the __smp_store_release and __smp_load_acquire barriers using ordered stores and loads, which avoids the sync instruction present in the generic implementation.

Cc: <stable@vger.kernel.org> # 4.14+
Signed-off-by: Dave Anglin <dave.anglin@bell.net>
Signed-off-by: Helge Deller <deller@gmx.de>
This commit is contained in:
parent
5b24993c21
commit
e96ebd589d
@@ -26,6 +26,67 @@
|
||||
/* SMP read/write barriers: both fall back to the full mb() barrier. */
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()
|
||||
|
||||
/*
 * __smp_store_release(p, v): store v to *p with release semantics.
 *
 * Implemented with a single ordered store instead of the generic
 * barrier-plus-plain-store, avoiding the expensive sync instruction.
 * NOTE(review): the ",ma" (modify-after) completer with a zero
 * displacement leaves the base register unchanged; presumably this
 * encoding is what makes the access "ordered" on PA-RISC — confirm
 * against the PA-RISC 2.0 architecture manual.
 *
 * The union type-puns v into a byte buffer so the correctly sized
 * __u8/__u16/__u32/__u64 value can be fed to the asm operand without
 * an aliasing-violating pointer cast.
 *
 * NOTE(review): the case 8 branch emits no store when CONFIG_64BIT is
 * unset; presumably compiletime_assert_atomic_type() already rejects
 * types wider than long on 32-bit, making that path unreachable —
 * verify.
 */
#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("stb,ma %0,0(%1)"				\
				: : "r"(*(__u8 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("sth,ma %0,0(%1)"				\
				: : "r"(*(__u16 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("stw,ma %0,0(%1)"				\
				: : "r"(*(__u32 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("std,ma %0,0(%1)"			\
				: : "r"(*(__u64 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
} while (0)
|
||||
|
||||
/*
 * __smp_load_acquire(p): load *p with acquire semantics and yield the
 * loaded value (statement-expression).
 *
 * Mirror of __smp_store_release(): a single ordered load replaces the
 * generic plain-load-plus-barrier sequence, avoiding sync.
 * NOTE(review): as with the store side, ",ma" with displacement 0 is
 * assumed to encode an ordered access without modifying the base
 * register — confirm against the PA-RISC 2.0 architecture manual.
 *
 * The union receives the raw __u8/__u16/__u32/__u64 asm output and
 * re-interprets it as typeof(*p) without an aliasing-violating cast.
 *
 * NOTE(review): for case 8 without CONFIG_64BIT no load is emitted and
 * __u.__val would be returned uninitialized; presumably
 * compiletime_assert_atomic_type() rejects such types on 32-bit before
 * this can happen — verify.
 */
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("ldb,ma 0(%1),%0"				\
				: "=r"(*(__u8 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("ldh,ma 0(%1),%0"				\
				: "=r"(*(__u16 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("ldw,ma 0(%1),%0"				\
				: "=r"(*(__u32 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("ldd,ma 0(%1),%0"			\
				: "=r"(*(__u64 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
|
||||
#include <asm-generic/barrier.h>
|
||||
|
||||
#endif /* !__ASSEMBLY__ */
|
||||
|
Loading…
Reference in New Issue
Block a user