/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
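
/*
 * Illustrative (non-normative) example of the two helpers above: a
 * producer publishes data with a release-ordered store and a consumer
 * observes it with an acquire-ordered load. 'data', 'ready' and
 * consume() are hypothetical names, not part of this header:
 *
 *	// producer:
 *	WRITE_ONCE(data, 42);
 *	atomic_set_release(&ready, 1);
 *
 *	// consumer:
 *	if (atomic_read_acquire(&ready))
 *		consume(data);		// guaranteed to see data == 42
 */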

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * If an architecture overrides __atomic_acquire_fence() it will probably
 * want to define smp_mb__after_spinlock().
 */
#ifndef __atomic_acquire_fence
#define __atomic_acquire_fence		smp_mb__after_atomic
#endif

#ifndef __atomic_release_fence
#define __atomic_release_fence		smp_mb__before_atomic
#endif

#ifndef __atomic_pre_full_fence
#define __atomic_pre_full_fence	smp_mb__before_atomic
#endif

#ifndef __atomic_post_full_fence
#define __atomic_post_full_fence	smp_mb__after_atomic
#endif

#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	__atomic_acquire_fence();					\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	__atomic_release_fence();					\
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	__atomic_pre_full_fence();					\
	__ret = op##_relaxed(args);					\
	__atomic_post_full_fence();					\
	__ret;								\
})
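
/*
 * As a sketch of how the wrappers below are built (assuming an
 * architecture that provides only atomic_add_return_relaxed()),
 * atomic_add_return_acquire(i, v) expands via __atomic_op_acquire() to:
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		__atomic_acquire_fence();
 *		__ret;
 *	})
 *
 * i.e. the relaxed operation followed by an acquire barrier.
 */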

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

#ifndef atomic_inc
#define atomic_inc(v)			atomic_add(1, (v))
#endif

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed

#ifndef atomic_inc_return
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic_inc_return_relaxed(v)	atomic_add_return_relaxed(1, (v))
#define atomic_inc_return_acquire(v)	atomic_add_return_acquire(1, (v))
#define atomic_inc_return_release(v)	atomic_add_return_release(1, (v))
#else /* atomic_inc_return */
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return
#endif /* atomic_inc_return */

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

#ifndef atomic_dec
#define atomic_dec(v)			atomic_sub(1, (v))
#endif

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed

#ifndef atomic_dec_return
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_dec_return_relaxed(v)	atomic_sub_return_relaxed(1, (v))
#define atomic_dec_return_acquire(v)	atomic_sub_return_acquire(1, (v))
#define atomic_dec_return_release(v)	atomic_sub_return_release(1, (v))
#else /* atomic_dec_return */
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return
#endif /* atomic_dec_return */

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifndef atomic_andnot
#define atomic_andnot(i, v)		atomic_and(~(int)(i), (v))
#endif

#ifndef atomic_fetch_andnot_relaxed

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(i, v)		atomic_fetch_and(~(int)(i), (v))
#define atomic_fetch_andnot_relaxed(i, v)	atomic_fetch_and_relaxed(~(int)(i), (v))
#define atomic_fetch_andnot_acquire(i, v)	atomic_fetch_and_acquire(~(int)(i), (v))
#define atomic_fetch_andnot_release(i, v)	atomic_fetch_and_release(~(int)(i), (v))
#else /* atomic_fetch_andnot */
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
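
/*
 * atomic_try_cmpxchg() updates the caller's expected-value variable on
 * failure (see *__po above), which makes compare-and-swap loops read
 * naturally. An illustrative lock-free increment, shown only as an
 * example of the idiom (equivalent to atomic_inc()):
 *
 *	int old = atomic_read(v);
 *
 *	do {
 *		// on failure, 'old' has been reloaded for us
 *	} while (!atomic_try_cmpxchg(v, &old, old + 1));
 */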

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
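
/*
 * A typical (illustrative) use of atomic_inc_not_zero() is taking a
 * reference on an object found under RCU, where a refcount of zero
 * means the object is already on its way to being freed. 'obj',
 * lookup() and 'refcnt' are hypothetical names:
 *
 *	rcu_read_lock();
 *	obj = lookup(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;	// raced with the final put
 *	rcu_read_unlock();
 */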

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_inc_and_test
static inline bool atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#endif

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic_dec_and_test
static inline bool atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#endif

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_sub_and_test
static inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#endif

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic_add_negative
static inline bool atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#endif

#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#endif

#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#endif

#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))
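
/*
 * The atomic_cond_read_*() helpers wait until a condition on the
 * atomic's value holds; smp_cond_load_*() binds the name VAL to the
 * freshly loaded value, and lets an architecture wait more efficiently
 * than a pure busy loop. For example (illustrative; 'sem' is a
 * hypothetical name), waiting for a counter to drain with acquire
 * ordering:
 *
 *	atomic_cond_read_acquire(&sem->count, VAL == 0);
 */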

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
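
/*
 * Note: with CONFIG_GENERIC_ATOMIC64, architectures without native
 * 64-bit atomics get an atomic64_t implemented in software
 * (lib/atomic64.c), which serializes operations with a small hashed
 * array of spinlocks. Usage is unchanged, e.g.:
 *
 *	atomic64_t v = ATOMIC64_INIT(0);
 *	atomic64_add(1, &v);	// may take a spinlock internally
 */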

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

#ifndef atomic64_inc
#define atomic64_inc(v)			atomic64_add(1, (v))
#endif

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed

#ifndef atomic64_inc_return
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1, (v))
#define atomic64_inc_return_acquire(v)	atomic64_add_return_acquire(1, (v))
#define atomic64_inc_return_release(v)	atomic64_add_return_release(1, (v))
#else /* atomic64_inc_return */
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return
#endif /* atomic64_inc_return */

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

#ifndef atomic64_dec
#define atomic64_dec(v)			atomic64_sub(1, (v))
#endif

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed

#ifndef atomic64_dec_return
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1, (v))
#define atomic64_dec_return_acquire(v)	atomic64_sub_return_acquire(1, (v))
#define atomic64_dec_return_release(v)	atomic64_sub_return_release(1, (v))
#else /* atomic64_dec_return */
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return
#endif /* atomic64_dec_return */

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifndef atomic64_andnot
#define atomic64_andnot(i, v)		atomic64_and(~(long long)(i), (v))
#endif

#ifndef atomic64_fetch_andnot_relaxed

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(i, v)		atomic64_fetch_and(~(long long)(i), (v))
#define atomic64_fetch_andnot_relaxed(i, v)	atomic64_fetch_and_relaxed(~(long long)(i), (v))
#define atomic64_fetch_andnot_acquire(i, v)	atomic64_fetch_and_acquire(~(long long)(i), (v))
#define atomic64_fetch_andnot_release(i, v)	atomic64_fetch_and_release(~(long long)(i), (v))
#else /* atomic64_fetch_andnot */
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic64_fetch_add_unless
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
						  long long u)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_inc_and_test
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#endif

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic64_dec_and_test
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#endif

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_sub_and_test
static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#endif

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic64_add_negative
static inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#endif

#ifndef atomic64_inc_unless_negative
static inline bool atomic64_inc_unless_negative(atomic64_t *v)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#endif

#ifndef atomic64_dec_unless_positive
static inline bool atomic64_dec_unless_positive(atomic64_t *v)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#endif

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic64 variable, v, was not decremented.
 */
#ifndef atomic64_dec_if_positive
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#endif

#define atomic64_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */