mirror of https://github.com/AuxXxilium/linux_dsm_epyc7002.git (synced 2024-12-15 10:06:52 +07:00)
c020395b66
Prefer __always_inline for atomic wrappers.

When building for size (CC_OPTIMIZE_FOR_SIZE), some compilers appear to
be less inclined to inline even relatively small static inline
functions that are assumed to be inlinable, such as atomic ops. This
can cause problems, for example in UACCESS regions.

By using __always_inline, we let the real implementation and not the
wrapper determine the final inlining preference.

For x86 tinyconfig we observe:
- vmlinux baseline:   1316204
- vmlinux with patch: 1315988 (-216 bytes)

This came up when addressing UACCESS warnings with CC_OPTIMIZE_FOR_SIZE
in the KCSAN runtime:
http://lkml.kernel.org/r/58708908-84a0-0a81-a836-ad97e33dbb62@infradead.org

Reported-by: Randy Dunlap <rdunlap@infradead.org>
Signed-off-by: Marco Elver <elver@google.com>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Paul E. McKenney <paulmck@kernel.org>
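As a minimal userspace sketch of the pattern this commit applies (the __my_always_inline macro and the my_* names are illustrative stand-ins, not kernel code), the wrapper is forced inline so that only the real implementation's inlining preference can matter, even under -Os:

#include <stdatomic.h>

/* Stand-in for the kernel's __always_inline attribute. */
#define __my_always_inline inline __attribute__((__always_inline__))

typedef struct { atomic_long counter; } my_atomic_long_t;

/* The real implementation; its own definition drives code generation. */
static __my_always_inline long my_atomic64_read(const my_atomic_long_t *v)
{
        return atomic_load_explicit(&v->counter, memory_order_relaxed);
}

/*
 * The wrapper: with plain "static inline", a -Os build may keep this as
 * an out-of-line copy instead of inlining it; marking it always-inline
 * guarantees the wrapper itself vanishes from the generated code.
 */
static __my_always_inline long my_atomic_long_read(const my_atomic_long_t *v)
{
        return my_atomic64_read(v);
}

Compiled with -Os, the wrapper leaves no out-of-line copy behind; that guarantee is what the kernel change relies on in UACCESS regions.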
1015 lines · 20 KiB · C
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _ASM_GENERIC_ATOMIC_LONG_H
#define _ASM_GENERIC_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
#endif

#ifdef CONFIG_64BIT

static __always_inline long
atomic_long_read(const atomic_long_t *v)
{
        return atomic64_read(v);
}

static __always_inline long
atomic_long_read_acquire(const atomic_long_t *v)
{
        return atomic64_read_acquire(v);
}

static __always_inline void
atomic_long_set(atomic_long_t *v, long i)
{
        atomic64_set(v, i);
}

static __always_inline void
atomic_long_set_release(atomic_long_t *v, long i)
{
        atomic64_set_release(v, i);
}

static __always_inline void
atomic_long_add(long i, atomic_long_t *v)
{
        atomic64_add(i, v);
}

static __always_inline long
atomic_long_add_return(long i, atomic_long_t *v)
{
        return atomic64_add_return(i, v);
}

static __always_inline long
atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
        return atomic64_add_return_acquire(i, v);
}

static __always_inline long
atomic_long_add_return_release(long i, atomic_long_t *v)
{
        return atomic64_add_return_release(i, v);
}

static __always_inline long
atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
        return atomic64_add_return_relaxed(i, v);
}

static __always_inline long
atomic_long_fetch_add(long i, atomic_long_t *v)
{
        return atomic64_fetch_add(i, v);
}

static __always_inline long
atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
        return atomic64_fetch_add_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
        return atomic64_fetch_add_release(i, v);
}

static __always_inline long
atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
        return atomic64_fetch_add_relaxed(i, v);
}

static __always_inline void
atomic_long_sub(long i, atomic_long_t *v)
{
        atomic64_sub(i, v);
}

static __always_inline long
atomic_long_sub_return(long i, atomic_long_t *v)
{
        return atomic64_sub_return(i, v);
}

static __always_inline long
atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
        return atomic64_sub_return_acquire(i, v);
}

static __always_inline long
atomic_long_sub_return_release(long i, atomic_long_t *v)
{
        return atomic64_sub_return_release(i, v);
}

static __always_inline long
atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
        return atomic64_sub_return_relaxed(i, v);
}

static __always_inline long
atomic_long_fetch_sub(long i, atomic_long_t *v)
{
        return atomic64_fetch_sub(i, v);
}

static __always_inline long
atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
        return atomic64_fetch_sub_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
        return atomic64_fetch_sub_release(i, v);
}

static __always_inline long
atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
        return atomic64_fetch_sub_relaxed(i, v);
}

static __always_inline void
atomic_long_inc(atomic_long_t *v)
{
        atomic64_inc(v);
}

static __always_inline long
atomic_long_inc_return(atomic_long_t *v)
{
        return atomic64_inc_return(v);
}

static __always_inline long
atomic_long_inc_return_acquire(atomic_long_t *v)
{
        return atomic64_inc_return_acquire(v);
}

static __always_inline long
atomic_long_inc_return_release(atomic_long_t *v)
{
        return atomic64_inc_return_release(v);
}

static __always_inline long
atomic_long_inc_return_relaxed(atomic_long_t *v)
{
        return atomic64_inc_return_relaxed(v);
}

static __always_inline long
atomic_long_fetch_inc(atomic_long_t *v)
{
        return atomic64_fetch_inc(v);
}

static __always_inline long
atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
        return atomic64_fetch_inc_acquire(v);
}

static __always_inline long
atomic_long_fetch_inc_release(atomic_long_t *v)
{
        return atomic64_fetch_inc_release(v);
}

static __always_inline long
atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
        return atomic64_fetch_inc_relaxed(v);
}

static __always_inline void
atomic_long_dec(atomic_long_t *v)
{
        atomic64_dec(v);
}

static __always_inline long
atomic_long_dec_return(atomic_long_t *v)
{
        return atomic64_dec_return(v);
}

static __always_inline long
atomic_long_dec_return_acquire(atomic_long_t *v)
{
        return atomic64_dec_return_acquire(v);
}

static __always_inline long
atomic_long_dec_return_release(atomic_long_t *v)
{
        return atomic64_dec_return_release(v);
}

static __always_inline long
atomic_long_dec_return_relaxed(atomic_long_t *v)
{
        return atomic64_dec_return_relaxed(v);
}

static __always_inline long
atomic_long_fetch_dec(atomic_long_t *v)
{
        return atomic64_fetch_dec(v);
}

static __always_inline long
atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
        return atomic64_fetch_dec_acquire(v);
}

static __always_inline long
atomic_long_fetch_dec_release(atomic_long_t *v)
{
        return atomic64_fetch_dec_release(v);
}

static __always_inline long
atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
        return atomic64_fetch_dec_relaxed(v);
}

static __always_inline void
atomic_long_and(long i, atomic_long_t *v)
{
        atomic64_and(i, v);
}

static __always_inline long
atomic_long_fetch_and(long i, atomic_long_t *v)
{
        return atomic64_fetch_and(i, v);
}

static __always_inline long
atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
        return atomic64_fetch_and_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
        return atomic64_fetch_and_release(i, v);
}

static __always_inline long
atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
        return atomic64_fetch_and_relaxed(i, v);
}

static __always_inline void
atomic_long_andnot(long i, atomic_long_t *v)
{
        atomic64_andnot(i, v);
}

static __always_inline long
atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
        return atomic64_fetch_andnot(i, v);
}

static __always_inline long
atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
        return atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
        return atomic64_fetch_andnot_release(i, v);
}

static __always_inline long
atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
        return atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
atomic_long_or(long i, atomic_long_t *v)
{
        atomic64_or(i, v);
}

static __always_inline long
atomic_long_fetch_or(long i, atomic_long_t *v)
{
        return atomic64_fetch_or(i, v);
}

static __always_inline long
atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
        return atomic64_fetch_or_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
        return atomic64_fetch_or_release(i, v);
}

static __always_inline long
atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
        return atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic_long_xor(long i, atomic_long_t *v)
{
        atomic64_xor(i, v);
}

static __always_inline long
atomic_long_fetch_xor(long i, atomic_long_t *v)
{
        return atomic64_fetch_xor(i, v);
}

static __always_inline long
atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
        return atomic64_fetch_xor_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
        return atomic64_fetch_xor_release(i, v);
}

static __always_inline long
atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
        return atomic64_fetch_xor_relaxed(i, v);
}

static __always_inline long
atomic_long_xchg(atomic_long_t *v, long i)
{
        return atomic64_xchg(v, i);
}

static __always_inline long
atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
        return atomic64_xchg_acquire(v, i);
}

static __always_inline long
atomic_long_xchg_release(atomic_long_t *v, long i)
{
        return atomic64_xchg_release(v, i);
}

static __always_inline long
atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
        return atomic64_xchg_relaxed(v, i);
}

static __always_inline long
atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
        return atomic64_cmpxchg(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
        return atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
        return atomic64_cmpxchg_release(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
        return atomic64_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
        return atomic64_try_cmpxchg(v, (s64 *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
        return atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
        return atomic64_try_cmpxchg_release(v, (s64 *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
        return atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
}

static __always_inline bool
atomic_long_sub_and_test(long i, atomic_long_t *v)
{
        return atomic64_sub_and_test(i, v);
}

static __always_inline bool
atomic_long_dec_and_test(atomic_long_t *v)
{
        return atomic64_dec_and_test(v);
}

static __always_inline bool
atomic_long_inc_and_test(atomic_long_t *v)
{
        return atomic64_inc_and_test(v);
}

static __always_inline bool
atomic_long_add_negative(long i, atomic_long_t *v)
{
        return atomic64_add_negative(i, v);
}

static __always_inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
        return atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
        return atomic64_add_unless(v, a, u);
}

static __always_inline bool
atomic_long_inc_not_zero(atomic_long_t *v)
{
        return atomic64_inc_not_zero(v);
}

static __always_inline bool
atomic_long_inc_unless_negative(atomic_long_t *v)
{
        return atomic64_inc_unless_negative(v);
}

static __always_inline bool
atomic_long_dec_unless_positive(atomic_long_t *v)
{
        return atomic64_dec_unless_positive(v);
}

static __always_inline long
atomic_long_dec_if_positive(atomic_long_t *v)
{
        return atomic64_dec_if_positive(v);
}

#else /* CONFIG_64BIT */

static __always_inline long
atomic_long_read(const atomic_long_t *v)
{
        return atomic_read(v);
}

static __always_inline long
atomic_long_read_acquire(const atomic_long_t *v)
{
        return atomic_read_acquire(v);
}

static __always_inline void
atomic_long_set(atomic_long_t *v, long i)
{
        atomic_set(v, i);
}

static __always_inline void
atomic_long_set_release(atomic_long_t *v, long i)
{
        atomic_set_release(v, i);
}

static __always_inline void
atomic_long_add(long i, atomic_long_t *v)
{
        atomic_add(i, v);
}

static __always_inline long
atomic_long_add_return(long i, atomic_long_t *v)
{
        return atomic_add_return(i, v);
}

static __always_inline long
atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
        return atomic_add_return_acquire(i, v);
}

static __always_inline long
atomic_long_add_return_release(long i, atomic_long_t *v)
{
        return atomic_add_return_release(i, v);
}

static __always_inline long
atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
        return atomic_add_return_relaxed(i, v);
}

static __always_inline long
atomic_long_fetch_add(long i, atomic_long_t *v)
{
        return atomic_fetch_add(i, v);
}

static __always_inline long
atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
        return atomic_fetch_add_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
        return atomic_fetch_add_release(i, v);
}

static __always_inline long
atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
        return atomic_fetch_add_relaxed(i, v);
}

static __always_inline void
atomic_long_sub(long i, atomic_long_t *v)
{
        atomic_sub(i, v);
}

static __always_inline long
atomic_long_sub_return(long i, atomic_long_t *v)
{
        return atomic_sub_return(i, v);
}

static __always_inline long
atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
        return atomic_sub_return_acquire(i, v);
}

static __always_inline long
atomic_long_sub_return_release(long i, atomic_long_t *v)
{
        return atomic_sub_return_release(i, v);
}

static __always_inline long
atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
        return atomic_sub_return_relaxed(i, v);
}

static __always_inline long
atomic_long_fetch_sub(long i, atomic_long_t *v)
{
        return atomic_fetch_sub(i, v);
}

static __always_inline long
atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
        return atomic_fetch_sub_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
        return atomic_fetch_sub_release(i, v);
}

static __always_inline long
atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
        return atomic_fetch_sub_relaxed(i, v);
}

static __always_inline void
atomic_long_inc(atomic_long_t *v)
{
        atomic_inc(v);
}

static __always_inline long
atomic_long_inc_return(atomic_long_t *v)
{
        return atomic_inc_return(v);
}

static __always_inline long
atomic_long_inc_return_acquire(atomic_long_t *v)
{
        return atomic_inc_return_acquire(v);
}

static __always_inline long
atomic_long_inc_return_release(atomic_long_t *v)
{
        return atomic_inc_return_release(v);
}

static __always_inline long
atomic_long_inc_return_relaxed(atomic_long_t *v)
{
        return atomic_inc_return_relaxed(v);
}

static __always_inline long
atomic_long_fetch_inc(atomic_long_t *v)
{
        return atomic_fetch_inc(v);
}

static __always_inline long
atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
        return atomic_fetch_inc_acquire(v);
}

static __always_inline long
atomic_long_fetch_inc_release(atomic_long_t *v)
{
        return atomic_fetch_inc_release(v);
}

static __always_inline long
atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
        return atomic_fetch_inc_relaxed(v);
}

static __always_inline void
atomic_long_dec(atomic_long_t *v)
{
        atomic_dec(v);
}

static __always_inline long
atomic_long_dec_return(atomic_long_t *v)
{
        return atomic_dec_return(v);
}

static __always_inline long
atomic_long_dec_return_acquire(atomic_long_t *v)
{
        return atomic_dec_return_acquire(v);
}

static __always_inline long
atomic_long_dec_return_release(atomic_long_t *v)
{
        return atomic_dec_return_release(v);
}

static __always_inline long
atomic_long_dec_return_relaxed(atomic_long_t *v)
{
        return atomic_dec_return_relaxed(v);
}

static __always_inline long
atomic_long_fetch_dec(atomic_long_t *v)
{
        return atomic_fetch_dec(v);
}

static __always_inline long
atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
        return atomic_fetch_dec_acquire(v);
}

static __always_inline long
atomic_long_fetch_dec_release(atomic_long_t *v)
{
        return atomic_fetch_dec_release(v);
}

static __always_inline long
atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
        return atomic_fetch_dec_relaxed(v);
}

static __always_inline void
atomic_long_and(long i, atomic_long_t *v)
{
        atomic_and(i, v);
}

static __always_inline long
atomic_long_fetch_and(long i, atomic_long_t *v)
{
        return atomic_fetch_and(i, v);
}

static __always_inline long
atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
        return atomic_fetch_and_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
        return atomic_fetch_and_release(i, v);
}

static __always_inline long
atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
        return atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
atomic_long_andnot(long i, atomic_long_t *v)
{
        atomic_andnot(i, v);
}

static __always_inline long
atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
        return atomic_fetch_andnot(i, v);
}

static __always_inline long
atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
        return atomic_fetch_andnot_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
        return atomic_fetch_andnot_release(i, v);
}

static __always_inline long
atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
        return atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
atomic_long_or(long i, atomic_long_t *v)
{
        atomic_or(i, v);
}

static __always_inline long
atomic_long_fetch_or(long i, atomic_long_t *v)
{
        return atomic_fetch_or(i, v);
}

static __always_inline long
atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
        return atomic_fetch_or_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
        return atomic_fetch_or_release(i, v);
}

static __always_inline long
atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
        return atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic_long_xor(long i, atomic_long_t *v)
{
        atomic_xor(i, v);
}

static __always_inline long
atomic_long_fetch_xor(long i, atomic_long_t *v)
{
        return atomic_fetch_xor(i, v);
}

static __always_inline long
atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
        return atomic_fetch_xor_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
        return atomic_fetch_xor_release(i, v);
}

static __always_inline long
atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
        return atomic_fetch_xor_relaxed(i, v);
}

static __always_inline long
atomic_long_xchg(atomic_long_t *v, long i)
{
        return atomic_xchg(v, i);
}

static __always_inline long
atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
        return atomic_xchg_acquire(v, i);
}

static __always_inline long
atomic_long_xchg_release(atomic_long_t *v, long i)
{
        return atomic_xchg_release(v, i);
}

static __always_inline long
atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
        return atomic_xchg_relaxed(v, i);
}

static __always_inline long
atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
        return atomic_cmpxchg(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
        return atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
        return atomic_cmpxchg_release(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
        return atomic_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
        return atomic_try_cmpxchg(v, (int *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
        return atomic_try_cmpxchg_acquire(v, (int *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
        return atomic_try_cmpxchg_release(v, (int *)old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
        return atomic_try_cmpxchg_relaxed(v, (int *)old, new);
}

static __always_inline bool
atomic_long_sub_and_test(long i, atomic_long_t *v)
{
        return atomic_sub_and_test(i, v);
}

static __always_inline bool
atomic_long_dec_and_test(atomic_long_t *v)
{
        return atomic_dec_and_test(v);
}

static __always_inline bool
atomic_long_inc_and_test(atomic_long_t *v)
{
        return atomic_inc_and_test(v);
}

static __always_inline bool
atomic_long_add_negative(long i, atomic_long_t *v)
{
        return atomic_add_negative(i, v);
}

static __always_inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
        return atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
        return atomic_add_unless(v, a, u);
}

static __always_inline bool
atomic_long_inc_not_zero(atomic_long_t *v)
{
        return atomic_inc_not_zero(v);
}

static __always_inline bool
atomic_long_inc_unless_negative(atomic_long_t *v)
{
        return atomic_inc_unless_negative(v);
}

static __always_inline bool
atomic_long_dec_unless_positive(atomic_long_t *v)
{
        return atomic_dec_unless_positive(v);
}

static __always_inline long
atomic_long_dec_if_positive(atomic_long_t *v)
{
        return atomic_dec_if_positive(v);
}

#endif /* CONFIG_64BIT */
#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
// a624200981f552b2c6be4f32fe44da8289f30d87