mirror of
https://github.com/AuxXxilium/linux_dsm_epyc7002.git
synced 2024-12-13 16:36:53 +07:00
a5fc1abe43
On a linux-next allyesconfig build: kernel/trace/ring_buffer.c:1726: warning: passing argument 1 of 'atomic_cmpxchg' from incompatible pointer type linux-next/arch/s390/include/asm/atomic.h:112: note: expected 'struct atomic_t *' but argument is of type 'struct atomic64_t *' atomic_long_cmpxchg and atomic_long_xchg are incorrectly defined for 64 bit architectures. They should be mapped to the atomic64_* variants. Acked-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca> Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com> Acked-by: Ingo Molnar <mingo@elte.hu> Signed-off-by: Andrew Morton <akpm@linux-foundation.org> Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
259 lines
5.1 KiB
C
259 lines
5.1 KiB
C
#ifndef _ASM_GENERIC_ATOMIC_H
|
|
#define _ASM_GENERIC_ATOMIC_H
|
|
/*
|
|
* Copyright (C) 2005 Silicon Graphics, Inc.
|
|
* Christoph Lameter
|
|
*
|
|
* Allows to provide arch independent atomic definitions without the need to
|
|
* edit all arch specific atomic.h files.
|
|
*/
|
|
|
|
#include <asm/types.h>
|
|
|
|
/*
|
|
* Support for atomic_long_t
|
|
*
|
|
* Casts for parameters are avoided for existing atomic functions in order to
|
|
* avoid issues with cast-as-lval under gcc 4.x and other limitations that the
|
|
* macros of a platform may have.
|
|
*/
|
|
|
|
#if BITS_PER_LONG == 64
|
|
|
|
typedef atomic64_t atomic_long_t;
|
|
|
|
#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
|
|
|
|
/* Read the counter; the 64-bit value is returned as a plain long. */
static inline long atomic_long_read(atomic_long_t *l)
{
	return (long)atomic64_read((atomic64_t *)l);
}
|
|
|
|
/* Unconditionally set the counter to @i. */
static inline void atomic_long_set(atomic_long_t *l, long i)
{
	atomic64_set((atomic64_t *)l, i);
}
|
|
|
|
/* Atomically increment the counter by one. */
static inline void atomic_long_inc(atomic_long_t *l)
{
	atomic64_inc((atomic64_t *)l);
}
|
|
|
|
/* Atomically decrement the counter by one. */
static inline void atomic_long_dec(atomic_long_t *l)
{
	atomic64_dec((atomic64_t *)l);
}
|
|
|
|
/* Atomically add @i to the counter. */
static inline void atomic_long_add(long i, atomic_long_t *l)
{
	atomic64_add(i, (atomic64_t *)l);
}
|
|
|
|
/* Atomically subtract @i from the counter. */
static inline void atomic_long_sub(long i, atomic_long_t *l)
{
	atomic64_sub(i, (atomic64_t *)l);
}
|
|
|
|
/* Subtract @i from the counter; return true iff the result is zero. */
static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
{
	return atomic64_sub_and_test(i, (atomic64_t *)l);
}
|
|
|
|
/* Decrement the counter; return true iff the result is zero. */
static inline int atomic_long_dec_and_test(atomic_long_t *l)
{
	return atomic64_dec_and_test((atomic64_t *)l);
}
|
|
|
|
/* Increment the counter; return true iff the result is zero. */
static inline int atomic_long_inc_and_test(atomic_long_t *l)
{
	return atomic64_inc_and_test((atomic64_t *)l);
}
|
|
|
|
/* Add @i to the counter; return true iff the result is negative. */
static inline int atomic_long_add_negative(long i, atomic_long_t *l)
{
	return atomic64_add_negative(i, (atomic64_t *)l);
}
|
|
|
|
/* Add @i to the counter and return the new value. */
static inline long atomic_long_add_return(long i, atomic_long_t *l)
{
	return (long)atomic64_add_return(i, (atomic64_t *)l);
}
|
|
|
|
/* Subtract @i from the counter and return the new value. */
static inline long atomic_long_sub_return(long i, atomic_long_t *l)
{
	return (long)atomic64_sub_return(i, (atomic64_t *)l);
}
|
|
|
|
/* Increment the counter and return the new value. */
static inline long atomic_long_inc_return(atomic_long_t *l)
{
	return (long)atomic64_inc_return((atomic64_t *)l);
}
|
|
|
|
/* Decrement the counter and return the new value. */
static inline long atomic_long_dec_return(atomic_long_t *l)
{
	return (long)atomic64_dec_return((atomic64_t *)l);
}
|
|
|
|
/*
 * Add @a to the counter unless its current value is @u.
 * Returns non-zero if the addition was performed.
 */
static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	return (long)atomic64_add_unless((atomic64_t *)l, a, u);
}
|
|
|
|
#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))
|
|
|
|
/*
 * cmpxchg/xchg forward to the native atomic64_t operations on 64-bit.
 *
 * Fix: atomic_long_xchg() declared its argument as 'v' but expanded
 * '(atomic64_t *)(l)', so any use either failed to compile or silently
 * captured an unrelated 'l' from the caller's scope. The expansion now
 * uses the macro parameter consistently.
 */
#define atomic_long_cmpxchg(l, old, new) \
	(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic64_xchg((atomic64_t *)(v), (new)))
|
|
|
|
#else /* BITS_PER_LONG == 64 */
|
|
|
|
typedef atomic_t atomic_long_t;
|
|
|
|
#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
|
|
/* Read the counter; the 32-bit value is returned as a plain long. */
static inline long atomic_long_read(atomic_long_t *l)
{
	return (long)atomic_read((atomic_t *)l);
}
|
|
|
|
/* Unconditionally set the counter to @i. */
static inline void atomic_long_set(atomic_long_t *l, long i)
{
	atomic_set((atomic_t *)l, i);
}
|
|
|
|
/* Atomically increment the counter by one. */
static inline void atomic_long_inc(atomic_long_t *l)
{
	atomic_inc((atomic_t *)l);
}
|
|
|
|
/* Atomically decrement the counter by one. */
static inline void atomic_long_dec(atomic_long_t *l)
{
	atomic_dec((atomic_t *)l);
}
|
|
|
|
/* Atomically add @i to the counter. */
static inline void atomic_long_add(long i, atomic_long_t *l)
{
	atomic_add(i, (atomic_t *)l);
}
|
|
|
|
/* Atomically subtract @i from the counter. */
static inline void atomic_long_sub(long i, atomic_long_t *l)
{
	atomic_sub(i, (atomic_t *)l);
}
|
|
|
|
/* Subtract @i from the counter; return true iff the result is zero. */
static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
{
	return atomic_sub_and_test(i, (atomic_t *)l);
}
|
|
|
|
/* Decrement the counter; return true iff the result is zero. */
static inline int atomic_long_dec_and_test(atomic_long_t *l)
{
	return atomic_dec_and_test((atomic_t *)l);
}
|
|
|
|
/* Increment the counter; return true iff the result is zero. */
static inline int atomic_long_inc_and_test(atomic_long_t *l)
{
	return atomic_inc_and_test((atomic_t *)l);
}
|
|
|
|
/* Add @i to the counter; return true iff the result is negative. */
static inline int atomic_long_add_negative(long i, atomic_long_t *l)
{
	return atomic_add_negative(i, (atomic_t *)l);
}
|
|
|
|
/* Add @i to the counter and return the new value. */
static inline long atomic_long_add_return(long i, atomic_long_t *l)
{
	return (long)atomic_add_return(i, (atomic_t *)l);
}
|
|
|
|
/* Subtract @i from the counter and return the new value. */
static inline long atomic_long_sub_return(long i, atomic_long_t *l)
{
	return (long)atomic_sub_return(i, (atomic_t *)l);
}
|
|
|
|
/* Increment the counter and return the new value. */
static inline long atomic_long_inc_return(atomic_long_t *l)
{
	return (long)atomic_inc_return((atomic_t *)l);
}
|
|
|
|
/* Decrement the counter and return the new value. */
static inline long atomic_long_dec_return(atomic_long_t *l)
{
	return (long)atomic_dec_return((atomic_t *)l);
}
|
|
|
|
/*
 * Add @a to the counter unless its current value is @u.
 * Returns non-zero if the addition was performed.
 */
static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	return (long)atomic_add_unless((atomic_t *)l, a, u);
}
|
|
|
|
#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))
|
|
|
|
/*
 * cmpxchg/xchg forward to the native atomic_t operations on 32-bit.
 * (Macro parameter renamed for internal consistency; expansion is
 * behaviorally identical.)
 */
#define atomic_long_cmpxchg(v, old, new) \
	(atomic_cmpxchg((atomic_t *)(v), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic_xchg((atomic_t *)(v), (new)))
|
|
|
|
#endif /* BITS_PER_LONG == 64 */
|
|
|
|
#endif /* _ASM_GENERIC_ATOMIC_H */
|