mirror of
https://github.com/AuxXxilium/linux_dsm_epyc7002.git
synced 2024-12-28 11:18:45 +07:00
96d4f267e4
Nobody has actually used the type (VERIFY_READ vs VERIFY_WRITE) argument of the user address range verification function since we got rid of the old racy i386-only code to walk page tables by hand. It existed because the original 80386 would not honor the write protect bit when in kernel mode, so you had to do COW by hand before doing any user access. But we haven't supported that in a long time, and these days the 'type' argument is a purely historical artifact. A discussion about extending 'user_access_begin()' to do the range checking resulted in this patch, because there is no way we're going to move the old VERIFY_xyz interface to that model. And it's best done at the end of the merge window when I've done most of my merges, so let's just get this done once and for all. This patch was mostly done with a sed-script, with manual fix-ups for the cases that weren't of the trivial 'access_ok(VERIFY_xyz' form. There were a couple of notable cases: - csky still had the old "verify_area()" name as an alias. - the iter_iov code had magical hardcoded knowledge of the actual values of VERIFY_{READ,WRITE} (not that they mattered, since nothing really used it) - microblaze used the type argument for a debug printout but other than those oddities this should be a total no-op patch. I tried to fix up all architectures, did fairly extensive grepping for access_ok() uses, and the changes are trivial, but I may have missed something. Any missed conversion should be trivially fixable, though. Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
111 lines
2.5 KiB
C
111 lines
2.5 KiB
C
/* SPDX-License-Identifier: GPL-2.0 */
|
|
#ifndef _ASM_FUTEX_H
|
|
#define _ASM_FUTEX_H
|
|
|
|
#include <linux/futex.h>
|
|
#include <linux/uaccess.h>
|
|
#include <asm/errno.h>
|
|
|
|
/*
 * __futex_atomic_op1 - run a single atomic read-modify-write instruction
 * on the user futex word.
 *
 * @insn:   one ia64 instruction (e.g. "xchg4 %1=[%2],%3") that reads the
 *          word at @uaddr, writes the new value, and leaves the previous
 *          value in operand %1.
 * @ret:    receives r8 afterwards: 0 when the access did not fault;
 *          nonzero (presumably -EFAULT deposited by the exception fixup
 *          path — confirm against the ia64 extable handler) on a fault.
 * @oldval: receives the previous value of the futex word.
 * @uaddr:  user-space address of the 32-bit futex word.
 * @oparg:  operand fed to @insn.
 *
 * r8 is pinned to the hardware register with __asm("r8") and pre-cleared,
 * so a non-faulting run yields ret == 0.  "mf" is a memory fence ordering
 * this access against earlier memory operations.  The .xdata4 __ex_table
 * entry maps a fault at label 1: to the continuation at label 2:, so a
 * bad user address resumes after the access instead of oopsing.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
do { \
	register unsigned long r8 __asm ("r8") = 0; \
	__asm__ __volatile__( \
		" mf;; \n" \
		"[1:] " insn ";; \n" \
		" .xdata4 \"__ex_table\", 1b-., 2f-. \n" \
		"[2:]" \
		: "+r" (r8), "=r" (oldval) \
		: "r" (uaddr), "r" (oparg) \
		: "memory"); \
	ret = r8; \
} while (0)
|
|
|
|
/*
 * __futex_atomic_op2 - atomic read-modify-write built from a load /
 * compute / cmpxchg retry loop, for operations (add, or, and, xor) that
 * ia64 cannot perform in one atomic instruction on user memory.
 *
 * @insn:   the compute step (e.g. "add %3=%3,%5"): derives the new value
 *          in %3 (newval) from the loaded value and @oparg (%5).
 * @ret:    receives r8: 0 on success, nonzero if the user access faulted
 *          (presumably -EFAULT from the exception fixup — TODO confirm).
 * @oldval: receives the value observed before the successful update.
 * @uaddr:  user-space address of the 32-bit futex word.
 * @oparg:  operand fed to @insn.
 *
 * Loop body: ld4 loads the current word into %3 (newval), which is saved
 * in %2 (oldval); @insn then computes the replacement in %3; ar.ccv is
 * loaded with the saved old value and cmpxchg4.acq stores %3 only if the
 * word still equals ar.ccv, returning the actual prior value in %1 (val).
 * If another thread raced (val != oldval) the whole sequence retries;
 * a fault (r8 != 0) aborts immediately.  Both the ld4 at 1: and the
 * cmpxchg at 2: have __ex_table fixups resuming at 3:.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
do { \
	register unsigned long r8 __asm ("r8") = 0; \
	int val, newval; \
	do { \
		__asm__ __volatile__( \
			" mf;; \n" \
			"[1:] ld4 %3=[%4];; \n" \
			" mov %2=%3 \n" \
			insn ";; \n" \
			" mov ar.ccv=%2;; \n" \
			"[2:] cmpxchg4.acq %1=[%4],%3,ar.ccv;; \n" \
			" .xdata4 \"__ex_table\", 1b-., 3f-.\n" \
			" .xdata4 \"__ex_table\", 2b-., 3f-.\n" \
			"[3:]" \
			: "+r" (r8), "=r" (val), "=&r" (oldval), \
			  "=&r" (newval) \
			: "r" (uaddr), "r" (oparg) \
			: "memory"); \
		if (unlikely (r8)) \
			break; \
	} while (unlikely (val != oldval)); \
	ret = r8; \
} while (0)
|
|
|
|
static inline int
|
|
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
|
|
{
|
|
int oldval = 0, ret;
|
|
|
|
pagefault_disable();
|
|
|
|
switch (op) {
|
|
case FUTEX_OP_SET:
|
|
__futex_atomic_op1("xchg4 %1=[%2],%3", ret, oldval, uaddr,
|
|
oparg);
|
|
break;
|
|
case FUTEX_OP_ADD:
|
|
__futex_atomic_op2("add %3=%3,%5", ret, oldval, uaddr, oparg);
|
|
break;
|
|
case FUTEX_OP_OR:
|
|
__futex_atomic_op2("or %3=%3,%5", ret, oldval, uaddr, oparg);
|
|
break;
|
|
case FUTEX_OP_ANDN:
|
|
__futex_atomic_op2("and %3=%3,%5", ret, oldval, uaddr,
|
|
~oparg);
|
|
break;
|
|
case FUTEX_OP_XOR:
|
|
__futex_atomic_op2("xor %3=%3,%5", ret, oldval, uaddr, oparg);
|
|
break;
|
|
default:
|
|
ret = -ENOSYS;
|
|
}
|
|
|
|
pagefault_enable();
|
|
|
|
if (!ret)
|
|
*oval = oldval;
|
|
|
|
return ret;
|
|
}
|
|
|
|
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange the user futex
 * word at @uaddr: store @newval only if the word currently holds @oldval.
 *
 * The value actually observed (whether or not the store happened) is
 * written to *@uval; the caller detects success by comparing it with
 * @oldval.  Returns 0 when the access completed, -EFAULT when the range
 * fails access_ok(), or the nonzero fault code left in r8 if the access
 * itself faulted (r8 is pre-cleared, so non-faulting runs return 0).
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
u32 oldval, u32 newval)
{
	/* Reject ranges outside the user address space up front. */
	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	{
		/* r8 is the register the ia64 fault fixup reports through. */
		register unsigned long r8 __asm ("r8") = 0;
		unsigned long prev;
		__asm__ __volatile__(
			/* mf: fence prior memory ops before the cmpxchg. */
			" mf;; \n"
			/* ar.ccv holds the compare value for cmpxchg4. */
			" mov ar.ccv=%4;; \n"
			"[1:] cmpxchg4.acq %1=[%2],%3,ar.ccv \n"
			/* Fault at 1: resumes at 2: via the extable entry. */
			" .xdata4 \"__ex_table\", 1b-., 2f-. \n"
			"[2:]"
			: "+r" (r8), "=&r" (prev)
			: "r" (uaddr), "r" (newval),
			  "rO" ((long) (unsigned) oldval)
			: "memory");
		*uval = prev;
		return r8;
	}
}
|
|
|
|
#endif /* _ASM_FUTEX_H */
|