Mirror of https://github.com/AuxXxilium/linux_dsm_epyc7002.git (synced 2024-12-14 17:36:44 +07:00)
e87fc0ec07

Implement FETCH-OP atomic primitives. These are very similar to the
existing OP-RETURN primitives we already have, except they return the
value of the atomic variable _before_ modification. This is especially
useful for irreversible operations such as bitops, because without the
prior value it becomes impossible to reconstruct the state before the
modification.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Steven Miao <realmz6@gmail.com>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: adi-buildroot-devel@lists.sourceforge.net
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
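A minimal illustration of the distinction, not part of the patch and
using C11 <stdatomic.h> rather than the kernel's atomic_t API: a
FETCH-OP hands back the pre-modification value, so the caller can tell
what the irreversible operation actually changed, while a void OP
discards it.

/* Sketch, not kernel code: atomic_fetch_or() returns the value
 * _before_ the OR, so the caller knows whether it set the bit. */
#include <stdatomic.h>
#include <stdio.h>

int main(void)
{
	atomic_uint flags = 0;

	/* FETCH-OP: 'old' is the value before modification. */
	unsigned old = atomic_fetch_or(&flags, 0x4);
	if (old & 0x4)
		printf("bit 2 was already set\n");
	else
		printf("we set bit 2 first\n");

	/* A void OR, like this header's atomic_or(), discards the
	 * old value; once the bit is set the prior state is
	 * unrecoverable. */
	atomic_fetch_or(&flags, 0x8);   /* result ignored: plain OR */
	return 0;
}

This is why the patch can wire atomic_fetch_{or,and,xor}() directly to
the existing __raw_atomic_{or,and,xor}_asm() routines, which already
return the old value, while the void atomic_{or,and,xor}() variants
simply cast that return value away.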
48 lines, 1.5 KiB, C
/*
 * Copyright 2004-2011 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __ARCH_BLACKFIN_ATOMIC__
#define __ARCH_BLACKFIN_ATOMIC__

#include <asm/cmpxchg.h>

#ifdef CONFIG_SMP

#include <asm/barrier.h>
#include <linux/linkage.h>
#include <linux/types.h>

asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
asmlinkage int __raw_atomic_add_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_xadd_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_and_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_or_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);

#define atomic_read(v) __raw_uncached_fetch_asm(&(v)->counter)

#define atomic_add_return(i, v) __raw_atomic_add_asm(&(v)->counter, i)
#define atomic_sub_return(i, v) __raw_atomic_add_asm(&(v)->counter, -(i))

#define atomic_fetch_add(i, v) __raw_atomic_xadd_asm(&(v)->counter, i)
#define atomic_fetch_sub(i, v) __raw_atomic_xadd_asm(&(v)->counter, -(i))

#define atomic_or(i, v) (void)__raw_atomic_or_asm(&(v)->counter, i)
#define atomic_and(i, v) (void)__raw_atomic_and_asm(&(v)->counter, i)
#define atomic_xor(i, v) (void)__raw_atomic_xor_asm(&(v)->counter, i)

#define atomic_fetch_or(i, v) __raw_atomic_or_asm(&(v)->counter, i)
#define atomic_fetch_and(i, v) __raw_atomic_and_asm(&(v)->counter, i)
#define atomic_fetch_xor(i, v) __raw_atomic_xor_asm(&(v)->counter, i)

#endif

#include <asm-generic/atomic.h>

#endif
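For context, a usage sketch of the xadd pattern that atomic_fetch_add()
exposes and __raw_atomic_xadd_asm() implements on SMP. This is a
hypothetical caller in C11 atomics, not kernel code: the returned
pre-increment value gives each concurrent caller a distinct ID with no
lock.

/* Sketch with hypothetical names: fetch-add as a lock-free ID
 * allocator; each caller receives the counter value before its own
 * increment, so concurrent callers never collide. */
#include <stdatomic.h>
#include <stdio.h>

static atomic_int next_id;   /* zero-initialized */

static int alloc_id(void)
{
	return atomic_fetch_add(&next_id, 1);  /* pre-increment value */
}

int main(void)
{
	int a = alloc_id();
	int b = alloc_id();
	printf("%d %d\n", a, b);   /* 0 1 */
	return 0;
}

On non-SMP builds the #ifdef CONFIG_SMP block above is skipped entirely
and asm-generic/atomic.h supplies these same operations instead.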