mirror of https://github.com/AuxXxilium/linux_dsm_epyc7002.git
synced 2024-12-24 03:22:23 +07:00
393f203f5f
Recently, instrumentation of builtin function calls was removed from GCC 5.0. To check the memory accessed by such functions, userspace asan always uses interceptors for them, so now we should do this as well.

This patch declares memset/memmove/memcpy as weak symbols. In mm/kasan/kasan.c we have our own implementations of these functions, which check memory before accessing it. The default memset/memmove/memcpy now always have aliases with a '__' prefix. For files that are built without kasan instrumentation (e.g. mm/slub.c), the original mem* functions are replaced (via #define) with the prefixed variants, because we don't want to check memory accesses there.

Signed-off-by: Andrey Ryabinin <a.ryabinin@samsung.com>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Konstantin Serebryany <kcc@google.com>
Cc: Dmitry Chernenkov <dmitryc@google.com>
Signed-off-by: Andrey Konovalov <adech.fo@gmail.com>
Cc: Yuri Gribov <tetra2005@gmail.com>
Cc: Konstantin Khlebnikov <koct9i@gmail.com>
Cc: Sasha Levin <sasha.levin@oracle.com>
Cc: Christoph Lameter <cl@linux.com>
Cc: Joonsoo Kim <iamjoonsoo.kim@lge.com>
Cc: Dave Hansen <dave.hansen@intel.com>
Cc: Andi Kleen <andi@firstfloor.org>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: "H. Peter Anvin" <hpa@zytor.com>
Cc: Pekka Enberg <penberg@kernel.org>
Cc: David Rientjes <rientjes@google.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
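For context, the interceptor side of this scheme lives in mm/kasan/kasan.c: each exported mem* symbol validates the range before delegating to the '__'-prefixed real implementation. The sketch below is illustrative rather than a verbatim quote of the patch; check_memory_region() is assumed to be KASAN's shadow-memory check of that era.

#undef memset
void *memset(void *addr, int c, size_t len)
{
        /* report a KASAN error if any byte of [addr, addr+len) is poisoned */
        check_memory_region((unsigned long)addr, len, true);

        return __memset(addr, c, len);
}

Because memset is declared weak in the arch code, this strong definition wins at link time, while __memset keeps the original implementation reachable.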
85 lines
2.3 KiB
C
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__

/* Written 2002 by Andi Kleen */

/* Only used for special circumstances. Stolen from i386/string.h */
static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
{
        unsigned long d0, d1, d2;

        /*
         * Copy n/4 dwords with rep movsl, then test the low two bits of
         * n to see whether a trailing word and/or byte remains.
         */
        asm volatile("rep ; movsl\n\t"
                     "testb $2,%b4\n\t"
                     "je 1f\n\t"
                     "movsw\n"
                     "1:\ttestb $1,%b4\n\t"
                     "je 2f\n\t"
                     "movsb\n"
                     "2:"
                     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
                     : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
                     : "memory");
        return to;
}

/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

#define __HAVE_ARCH_MEMCPY 1
extern void *__memcpy(void *to, const void *from, size_t len);

#ifndef CONFIG_KMEMCHECK
#if (__GNUC__ == 4 && __GNUC_MINOR__ >= 3) || __GNUC__ > 4
extern void *memcpy(void *to, const void *from, size_t len);
#else
#define memcpy(dst, src, len)                                   \
({                                                              \
        size_t __len = (len);                                   \
        void *__ret;                                            \
        if (__builtin_constant_p(len) && __len >= 64)           \
                __ret = __memcpy((dst), (src), __len);          \
        else                                                    \
                __ret = __builtin_memcpy((dst), (src), __len);  \
        __ret;                                                  \
})
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(dst, src, len) __inline_memcpy((dst), (src), (len))
#endif
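The pre-GCC-4.3 macro above dispatches on __builtin_constant_p: large constant-size copies go to the out-of-line __memcpy, everything else to the compiler builtin. A minimal userspace sketch of the same dispatch idea, where big_copy() is a hypothetical stand-in for the kernel's out-of-line __memcpy (GNU C statement expressions required):

#include <stddef.h>
#include <stdio.h>
#include <string.h>

static void *big_copy(void *dst, const void *src, size_t len)
{
        return memcpy(dst, src, len);   /* stand-in for out-of-line __memcpy */
}

#define my_memcpy(dst, src, len)                                \
({                                                              \
        size_t __len = (len);                                   \
        void *__ret;                                            \
        if (__builtin_constant_p(len) && __len >= 64)           \
                __ret = big_copy((dst), (src), __len);          \
        else                                                    \
                __ret = __builtin_memcpy((dst), (src), __len);  \
        __ret;                                                  \
})

int main(void)
{
        char src[128] = "hello", dst[128];

        my_memcpy(dst, src, sizeof(src));      /* constant 128 >= 64: big_copy() */
        my_memcpy(dst, src, strlen(src) + 1);  /* runtime length: the builtin   */
        printf("%s\n", dst);
        return 0;
}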

#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the not-instrumented versions of the mem* functions.
 */

#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#endif

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */
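The interception mechanism itself rests on a plain linker rule: a weak definition is used only when no strong definition exists, so KASAN's strong mem* displaces the arch's weak one while the '__' alias keeps the original reachable. A small userspace demonstration of that rule, as two hypothetical files compiled together:

/* lib.c - plays the role of the arch implementation */
#include <stdio.h>

void __attribute__((weak)) do_op(void)   /* weak: may be overridden */
{
        puts("original do_op");
}

/* main.c - plays the role of mm/kasan/kasan.c */
#include <stdio.h>

void do_op(void)                         /* strong: overrides the weak one */
{
        puts("intercepted do_op");
}

int main(void)
{
        do_op();                         /* prints "intercepted do_op" */
        return 0;
}

Built together (e.g. gcc main.c lib.c), the linker picks the strong do_op(), exactly as KASAN's mem* definitions displace the weak arch ones.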