mirror of
https://github.com/AuxXxilium/linux_dsm_epyc7002.git
synced 2024-12-20 01:37:41 +07:00
4f6cdf296c
* for-next/acpi: ACPI/IORT: Fix 'Number of IDs' handling in iort_id_map() * for-next/cpufeatures: (2 commits) arm64: Introduce ID_ISAR6 CPU register ... * for-next/csum: (2 commits) arm64: csum: Fix pathological zero-length calls ... * for-next/e0pd: (7 commits) arm64: kconfig: Fix alignment of E0PD help text ... * for-next/entry: (5 commits) arm64: entry: cleanup sp_el0 manipulation ... * for-next/kbuild: (4 commits) arm64: kbuild: remove compressed images on 'make ARCH=arm64 (dist)clean' ... * for-next/kexec/cleanup: (11 commits) Revert "arm64: kexec: make dtb_mem always enabled" ... * for-next/kexec/file-kdump: (2 commits) arm64: kexec_file: add crash dump support ... * for-next/misc: (12 commits) arm64: entry: Avoid empty alternatives entries ... * for-next/nofpsimd: (7 commits) arm64: nofpsmid: Handle TIF_FOREIGN_FPSTATE flag cleanly ... * for-next/perf: (2 commits) perf/imx_ddr: Fix cpu hotplug state cleanup ... * for-next/scs: (6 commits) arm64: kernel: avoid x18 in __cpu_soft_restart ...
49 lines
1.2 KiB
C
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * arm64 Large System Extensions (LSE) atomics plumbing: provides the
 * machinery for choosing between LL/SC (exclusive load/store) and LSE
 * atomic implementations, at compile time and at runtime.
 */
#ifndef __ASM_LSE_H
#define __ASM_LSE_H

/* The LL/SC implementations are pulled in unconditionally: they are the
 * fallback whenever LSE is unavailable (config or CPU). */
#include <asm/atomic_ll_sc.h>
#ifdef CONFIG_ARM64_LSE_ATOMICS

/* Prepended to inline-asm LSE templates so the assembler accepts LSE
 * instructions even when the base build target is plain armv8-a. */
#define __LSE_PREAMBLE	".arch armv8-a+lse\n"

#include <linux/compiler_types.h>
#include <linux/export.h>
#include <linux/jump_label.h>
#include <linux/stringify.h>

#include <asm/alternative.h>
#include <asm/atomic_lse.h>
#include <asm/cpucaps.h>

/* Defined in the cpufeature code: one static key per CPU capability,
 * plus a key that flips once capability detection has finished. */
extern struct static_key_false cpu_hwcap_keys[ARM64_NCAPS];
extern struct static_key_false arm64_const_caps_ready;
static inline bool system_uses_lse_atomics(void)
|
|
{
|
|
return (static_branch_likely(&arm64_const_caps_ready)) &&
|
|
static_branch_likely(&cpu_hwcap_keys[ARM64_HAS_LSE_ATOMICS]);
|
|
}
|
|
|
|
/*
 * Dispatch atomic operation 'op' to either the __lse_* or the __ll_sc_*
 * implementation, based on the runtime static-key check above.
 */
#define __lse_ll_sc_body(op, ...)					\
({									\
	system_uses_lse_atomics() ?					\
		__lse_##op(__VA_ARGS__) :				\
		__ll_sc_##op(__VA_ARGS__);				\
})

/* In-line patching at runtime */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse)				\
	ALTERNATIVE(llsc, __LSE_PREAMBLE lse, ARM64_HAS_LSE_ATOMICS)
#else	/* CONFIG_ARM64_LSE_ATOMICS */

/* LSE support compiled out: LSE atomics are never available. */
static inline bool system_uses_lse_atomics(void) { return false; }

/* Without LSE, dispatch unconditionally to the LL/SC implementation. */
#define __lse_ll_sc_body(op, ...)	__ll_sc_##op(__VA_ARGS__)

/* No runtime patching either: always emit the LL/SC sequence. */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc

#endif	/* CONFIG_ARM64_LSE_ATOMICS */

#endif	/* __ASM_LSE_H */