mirror of https://github.com/AuxXxilium/linux_dsm_epyc7002.git, synced 2024-11-29 20:06:38 +07:00
1da177e4c3
Initial git repository build. I'm not bothering with the full history, even though we have it. We can create a separate "historical" git archive of that later if we want to, and in the meantime it's about 3.2GB when imported into git - space that would just make the early git days unnecessarily complicated, when we don't have a lot of good infrastructure for it. Let it rip!
70 lines · 1.6 KiB · C
/*
 * Generic cache management functions. Everything is arch-specific,
 * but this header exists to make sure the defines/functions can be
 * used in a generic way.
 *
 * 2000-11-13 Arjan van de Ven <arjan@fenrus.demon.nl>
 *
 */

#ifndef _LINUX_PREFETCH_H
#define _LINUX_PREFETCH_H

#include <linux/types.h>
#include <asm/processor.h>
#include <asm/cache.h>

/*
	prefetch(x) attempts to pre-emptively get the memory pointed to
	by address "x" into the CPU L1 cache.
	prefetch(x) should not cause any kind of exception, prefetch(0) is
	specifically ok.

	prefetch() should be defined by the architecture, if not, the
	#define below provides a no-op define.

	There are 3 prefetch() macros:

	prefetch(x)		- prefetches the cacheline at "x" for read
	prefetchw(x)		- prefetches the cacheline at "x" for write
	spin_lock_prefetch(x)	- prefetches the spinlock *x for taking

	there is also PREFETCH_STRIDE which is the architecture-preferred
	"lookahead" size for prefetching streamed operations.

*/

/*
 * These cannot be do{}while(0) macros. See the mental gymnastics in
 * the loop macro.
 */

#ifndef ARCH_HAS_PREFETCH
static inline void prefetch(const void *x) {;}
#endif

#ifndef ARCH_HAS_PREFETCHW
static inline void prefetchw(const void *x) {;}
#endif
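
/*
 * Illustrative sketch, not part of the original header: overlapping a read
 * prefetch of the next list node with work on the current one.  The struct
 * and helper below are hypothetical names used only for this example.
 * Prefetching p->next is safe even on the last node, since prefetch(0)
 * is explicitly allowed.
 */
struct prefetch_example_node {
	struct prefetch_example_node *next;
	unsigned long payload;
};

static inline unsigned long
prefetch_example_sum_list(const struct prefetch_example_node *p)
{
	unsigned long sum = 0;

	while (p) {
		/* Start pulling the next node toward the L1 cache early. */
		prefetch(p->next);
		sum += p->payload;
		p = p->next;
	}
	return sum;
}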

#ifndef ARCH_HAS_SPINLOCK_PREFETCH
#define spin_lock_prefetch(x) prefetchw(x)
#endif

#ifndef PREFETCH_STRIDE
#define PREFETCH_STRIDE (4*L1_CACHE_BYTES)
#endif

static inline void prefetch_range(void *addr, size_t len)
{
#ifdef ARCH_HAS_PREFETCH
	char *cp;
	char *end = addr + len;

	for (cp = addr; cp < end; cp += PREFETCH_STRIDE)
		prefetch(cp);
#endif
}
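
/*
 * Illustrative sketch, not part of the original header: warming a buffer
 * with prefetch_range() before a sequential pass over it.  The helper name
 * is hypothetical and exists only for this example.
 */
static inline unsigned long
prefetch_example_sum_buffer(void *buf, size_t len)
{
	unsigned char *p = buf;
	unsigned long sum = 0;
	size_t i;

	/* One prefetch is issued per PREFETCH_STRIDE bytes of the range. */
	prefetch_range(buf, len);

	for (i = 0; i < len; i++)
		sum += p[i];
	return sum;
}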

#endif