mirror of
https://github.com/AuxXxilium/linux_dsm_epyc7002.git
synced 2024-12-27 00:55:28 +07:00
44922150d8
If we have a series of events from userspace, with %fprs=FPRS_FEF, as follows:

ETRAP
	ETRAP
		VIS_ENTRY(fprs=0x4)
		VIS_EXIT
		RTRAP (kernel FPU restore with fpu_saved=0x4)
	RTRAP

we will not restore the user registers that were clobbered by the FPU-using kernel code in the inner-most trap.

Traps allocate FPU save slots in the thread struct, and FPU-using sequences save only the "dirty" FPU registers. This works at the initial trap level because all of the registers get recorded into the top-level FPU save area, and we return to userspace with the FPU disabled, so any FPU use by the user takes an FPU-disabled trap wherein we load the registers back up properly. But this is not how trap returns from kernel to kernel operate.

The simplest fix for this bug is to always save all FPU register state for anything other than the top-most FPU save area. Getting rid of the optimized inner-slot FPU saving code ends up making VISEntryHalf degenerate into plain VISEntry.

Longer term we need to do something smarter to reinstate the partial save optimizations. Perhaps the fundamental error is having trap entry and exit allocate FPU save slots and restore register state. Instead, the VISEntry et al. calls should be doing that work.

This bug is about two decades old.

Reported-by: James Y Knight <jyknight@google.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
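The shape of the fix can be sketched in C. This is an illustrative model only, not the kernel's actual code: struct fpu_slot, fpu_save_slot, FPU_SLOTS, and the two-bit dirty mask are hypothetical stand-ins for the real thread-struct save slots, which are managed in assembly by VISenter and the ETRAP/RTRAP paths.

#include <string.h>

#define FPU_SLOTS	7	/* hypothetical max trap-nesting depth */
#define DIRTY_LOWER	0x1u	/* %f0-%f30 modified (cf. FPRS_DL) */
#define DIRTY_UPPER	0x2u	/* %f32-%f62 modified (cf. FPRS_DU) */
#define DIRTY_ALL	(DIRTY_LOWER | DIRTY_UPPER)

struct fpu_slot {
	unsigned long	fregs[32];	/* image of %f0-%f62 */
	unsigned int	saved;		/* which halves this slot holds */
};

struct fpu_slots {
	struct fpu_slot	slot[FPU_SLOTS];
	int		depth;		/* current FPU save nesting depth */
};

/* Save FPU state into the next slot before a kernel FPU sequence runs.
 * Only the top-most save area (depth 0) may save just the dirty
 * halves: we return to userspace with the FPU disabled, so user FPU
 * use takes an FPU-disabled trap that reloads everything.  A
 * kernel-to-kernel trap return never takes that trap, so every inner
 * slot must record all register state. */
static void fpu_save_slot(struct fpu_slots *st,
			  const unsigned long *live_fregs,
			  unsigned int dirty)
{
	struct fpu_slot *s = &st->slot[st->depth];
	unsigned int mask = (st->depth == 0) ? dirty : DIRTY_ALL;

	s->saved = mask;
	if (mask & DIRTY_LOWER)
		memcpy(&s->fregs[0], &live_fregs[0],
		       16 * sizeof(s->fregs[0]));
	if (mask & DIRTY_UPPER)
		memcpy(&s->fregs[16], &live_fregs[16],
		       16 * sizeof(s->fregs[0]));
	st->depth++;
}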
67 lines
1.5 KiB
C
#ifndef _SPARC64_VISASM_H
#define _SPARC64_VISASM_H

/* visasm.h: FPU saving macros for VIS routines
 *
 * Copyright (C) 1998 Jakub Jelinek (jj@ultra.linux.cz)
 */

#include <asm/pstate.h>
#include <asm/ptrace.h>

/* Clobbers %o5, %g1, %g2, %g3, %g7, %icc, %xcc */

#define VISEntry					\
	rd		%fprs, %o5;			\
	andcc		%o5, (FPRS_FEF|FPRS_DU), %g0;	\
	be,pt		%icc, 297f;			\
	 sethi		%hi(297f), %g7;			\
	sethi		%hi(VISenter), %g1;		\
	jmpl		%g1 + %lo(VISenter), %g0;	\
	 or		%g7, %lo(297f), %g7;		\
297:	wr		%g0, FPRS_FEF, %fprs;

#define VISExit						\
	wr		%g0, 0, %fprs;

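/* Typical use from a .S routine: bracket any kernel VIS/FPU sequence
 * with VISEntry/VISExit so VISenter can save pre-existing FPU state
 * first.  A hedged sketch; example_vis_op is a hypothetical label,
 * not an existing kernel symbol:
 *
 *	example_vis_op:
 *		VISEntry
 *		! ... VIS instructions clobbering %f0-%f62 ...
 *		VISExit
 *		retl
 *		 nop
 */
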
/* Clobbers %o5, %g1, %g2, %g3, %g7, %icc, %xcc.
 * Must preserve %o5 between VISEntryHalf and VISExitHalf */

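/* With the optimized inner-slot save code removed (see the commit
 * message above), the Half variants simply degenerate into the full
 * VISEntry/VISExit. */
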
#define VISEntryHalf					\
	VISEntry

#define VISExitHalf					\
	VISExit

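/* VISEntryHalfFast grabs the FPU only when %fprs shows it is not
 * already in use; otherwise it branches to fail_label so the caller
 * can fall back to a non-FPU code path.  %o5 must be kept live until
 * VISExitHalfFast restores the original %fprs from it.  A hedged
 * usage sketch; example_fast and .Lslow are hypothetical labels:
 *
 *	example_fast:
 *		VISEntryHalfFast(.Lslow)
 *		! ... FPU path, %o5 preserved ...
 *		VISExitHalfFast
 *		retl
 *		 nop
 *	.Lslow:
 *		! ... integer-only fallback ...
 */
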
#define VISEntryHalfFast(fail_label)			\
	rd		%fprs, %o5;			\
	andcc		%o5, FPRS_FEF, %g0;		\
	be,pt		%icc, 297f;			\
	 nop;						\
	ba,a,pt		%xcc, fail_label;		\
297:	wr		%o5, FPRS_FEF, %fprs;

#define VISExitHalfFast					\
	wr		%o5, 0, %fprs;

#ifndef __ASSEMBLY__
/* If the kernel has FPU state live (FPRS_FEF or FPRS_DU set), call
 * VISenter to save it into the current save slot, then disable the
 * FPU by clearing %fprs. */
static inline void save_and_clear_fpu(void) {
	__asm__ __volatile__ (
"		rd %%fprs, %%o5\n"
"		andcc %%o5, %0, %%g0\n"
"		be,pt %%icc, 299f\n"
"		 sethi %%hi(298f), %%g7\n"
"		sethi %%hi(VISenter), %%g1\n"
"		jmpl %%g1 + %%lo(VISenter), %%g0\n"
"		 or %%g7, %%lo(298f), %%g7\n"
"	298:	wr %%g0, 0, %%fprs\n"
"	299:\n"
"		" : : "i" (FPRS_FEF|FPRS_DU) :
		"o5", "g1", "g2", "g3", "g7", "cc");
}

int vis_emul(struct pt_regs *, unsigned int);
#endif

#endif /* _SPARC64_VISASM_H */