linux_dsm_epyc7002/arch/mips/include/asm/llsc.h
Paul Burton c042be02d7
MIPS: bitops: Use BIT_WORD() & BITS_PER_LONG
Rather than using custom SZLONG_LOG & SZLONG_MASK macros to shift & mask
a bit index to form word & bit offsets respectively, make use of the
standard BIT_WORD() & BITS_PER_LONG macros for the same purpose.
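
As a rough illustration of the replacement pattern (a minimal userspace
sketch, with the two macros redefined locally; in-kernel they come from
<linux/bits.h> and <asm/bitsperlong.h>):

#include <stdio.h>

#define BITS_PER_LONG	(8 * (int)sizeof(unsigned long))
#define BIT_WORD(nr)	((nr) / BITS_PER_LONG)

int main(void)
{
	unsigned long bitmap[4] = { 0 };
	unsigned int nr = 75;	/* arbitrary bit index */

	/*
	 * Word offset via BIT_WORD(), bit offset via nr % BITS_PER_LONG -
	 * previously expressed as nr >> SZLONG_LOG and nr & SZLONG_MASK.
	 */
	unsigned long *w = &bitmap[BIT_WORD(nr)];
	unsigned int bit = nr % BITS_PER_LONG;

	*w |= 1UL << bit;	/* set bit nr (non-atomic illustration) */
	printf("bit %u -> word %u, bit offset %u\n",
	       nr, (unsigned int)BIT_WORD(nr), bit);
	return 0;
}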

volatile is added to the definition of pointers to the long-sized word
we'll operate on, in order to prevent the compiler complaining that we
cast away the volatile qualifier of the addr argument. This should have
no effect on generated code, which in the LL/SC case is inline asm
anyway & in the non-LLSC case access is constrained by compiler barriers
provided by raw_local_irq_{save,restore}().
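
To illustrate the qualifier issue (a hedged sketch reusing the macros from
the sketch above; not the exact kernel diff):

/* addr is volatile-qualified, as in the kernel's bitops prototypes. */
static inline void example_set_bit(unsigned int nr, volatile unsigned long *addr)
{
	/*
	 * unsigned long *m = (unsigned long *)&addr[BIT_WORD(nr)];
	 * would cast away the volatile qualifier of addr (or warn without
	 * the explicit cast); keeping volatile on m avoids both.
	 */
	volatile unsigned long *m = &addr[BIT_WORD(nr)];

	*m |= 1UL << (nr % BITS_PER_LONG);	/* stand-in for the LL/SC body */
}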

Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
2019-10-07 09:42:55 -07:00

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Macros for 32/64-bit neutral inline assembler
 */
#ifndef __ASM_LLSC_H
#define __ASM_LLSC_H

#include <asm/isa-rev.h>

#if _MIPS_SZLONG == 32
#define __LL		"ll	"
#define __SC		"sc	"
#define __INS		"ins	"
#define __EXT		"ext	"
#elif _MIPS_SZLONG == 64
#define __LL		"lld	"
#define __SC		"scd	"
#define __INS		"dins	"
#define __EXT		"dext	"
#endif

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __SC_BEQZ	"beqzl	"
#elif MIPS_ISA_REV >= 6
# define __SC_BEQZ	"beqzc	"
#else
# define __SC_BEQZ	"beqz	"
#endif

#endif /* __ASM_LLSC_H */
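
For context, a sketch of how these macros typically combine at a call site
such as asm/bitops.h: an LL/SC retry loop that atomically ORs a mask into a
long-sized word. This is illustrative only; the real kernel code additionally
wraps the sequence in .set push/pop and ISA-level directives and uses
GCC_OFF_SMALL_ASM() rather than a plain "m" constraint.

static inline void llsc_or_sketch(unsigned long mask, volatile unsigned long *m)
{
	unsigned long temp;

	__asm__ __volatile__(
	"1:	" __LL	"%0, %1		\n"	/* load-linked *m */
	"	or	%0, %2		\n"	/* apply the mask */
	"	" __SC	"%0, %1		\n"	/* store-conditional back */
	"	" __SC_BEQZ "%0, 1b	\n"	/* retry if the sc failed */
	: "=&r" (temp), "+m" (*m)
	: "ir" (mask));
}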