mirror of
https://github.com/AuxXxilium/linux_dsm_epyc7002.git
synced 2024-12-21 02:49:14 +07:00
9952f6918d
Based on 1 normalized pattern(s): this program is free software you can redistribute it and or modify it under the terms and conditions of the gnu general public license version 2 as published by the free software foundation this program is distributed in the hope it will be useful but without any warranty without even the implied warranty of merchantability or fitness for a particular purpose see the gnu general public license for more details you should have received a copy of the gnu general public license along with this program if not see http www gnu org licenses extracted by the scancode license scanner the SPDX license identifier GPL-2.0-only has been chosen to replace the boilerplate/reference in 228 file(s). Signed-off-by: Thomas Gleixner <tglx@linutronix.de> Reviewed-by: Allison Randal <allison@lohutok.net> Reviewed-by: Steve Winslow <swinslow@gmail.com> Reviewed-by: Richard Fontana <rfontana@redhat.com> Reviewed-by: Alexios Zavras <alexios.zavras@intel.com> Cc: linux-spdx@vger.kernel.org Link: https://lkml.kernel.org/r/20190528171438.107155473@linutronix.de Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
39 lines
995 B
C
/* SPDX-License-Identifier: GPL-2.0-only */
|
|
/*
|
|
* Copyright 2011 Calxeda, Inc.
|
|
* Based on PPC version Copyright 2007 MontaVista Software, Inc.
|
|
*/
|
|
#ifndef ASM_EDAC_H
|
|
#define ASM_EDAC_H
|
|
/*
|
|
* ECC atomic, DMA, SMP and interrupt safe scrub function.
|
|
* Implements the per arch edac_atomic_scrub() that EDAC use for software
|
|
* ECC scrubbing. It reads memory and then writes back the original
|
|
* value, allowing the hardware to detect and correct memory errors.
|
|
*/
|
|
|
|
static inline void edac_atomic_scrub(void *va, u32 size)
|
|
{
|
|
#if __LINUX_ARM_ARCH__ >= 6
|
|
unsigned int *virt_addr = va;
|
|
unsigned int temp, temp2;
|
|
unsigned int i;
|
|
|
|
for (i = 0; i < size / sizeof(*virt_addr); i++, virt_addr++) {
|
|
/* Very carefully read and write to memory atomically
|
|
* so we are interrupt, DMA and SMP safe.
|
|
*/
|
|
__asm__ __volatile__("\n"
|
|
"1: ldrex %0, [%2]\n"
|
|
" strex %1, %0, [%2]\n"
|
|
" teq %1, #0\n"
|
|
" bne 1b\n"
|
|
: "=&r"(temp), "=&r"(temp2)
|
|
: "r"(virt_addr)
|
|
: "cc");
|
|
}
|
|
#endif
|
|
}
|
|
|
|
#endif
|