commit 6ecb78ef56
Previously, only IBAT1 and IBAT2 were used to map kernel linear mem.
Since commit 63b2bc6195 ("powerpc/mm/32s: Use BATs for
STRICT_KERNEL_RWX"), we may have all 8 BATs used for mapping
kernel text. But the suspend/restore functions only save/restore
BATs 0 to 3, and clear BATs 4 to 7.

Make the suspend and restore functions respectively save and reload
all 8 BATs on CPUs having the MMU_FTR_USE_HIGH_BATS feature.
Reported-by: Andreas Schwab <schwab@linux-m68k.org>
Cc: stable@vger.kernel.org
Signed-off-by: Christophe Leroy <christophe.leroy@c-s.fr>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
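
The high-BAT accesses rely on the kernel's MMU feature-section mechanism, so they only execute on CPUs that actually implement BATs 4 to 7. A minimal sketch of the pattern, lifted from the save path in the file below (one register pair shown; the real code covers all four high DBAT/IBAT pairs):

BEGIN_MMU_FTR_SECTION
	mfspr	r4,SPRN_DBAT4U
	stw	r4,SL_DBAT4(r11)
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)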
/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/threads.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/mmu.h>
#include <asm/feature-fixups.h>

/*
 * Structure for storing CPU registers in the save area.
 */
#define SL_SP		0
#define SL_PC		4
#define SL_MSR		8
#define SL_SDR1		0xc
#define SL_SPRG0	0x10	/* 4 sprg's */
#define SL_DBAT0	0x20
#define SL_IBAT0	0x28
#define SL_DBAT1	0x30
#define SL_IBAT1	0x38
#define SL_DBAT2	0x40
#define SL_IBAT2	0x48
#define SL_DBAT3	0x50
#define SL_IBAT3	0x58
#define SL_DBAT4	0x60
#define SL_IBAT4	0x68
#define SL_DBAT5	0x70
#define SL_IBAT5	0x78
#define SL_DBAT6	0x80
#define SL_IBAT6	0x88
#define SL_DBAT7	0x90
#define SL_IBAT7	0x98
#define SL_TB		0xa0
#define SL_R2		0xa8
#define SL_CR		0xac
#define SL_LR		0xb0
#define SL_R12		0xb4	/* r12 to r31 */
#define SL_SIZE		(SL_R12 + 80)
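/* r12..r31 = 20 registers * 4 bytes = 80 bytes of GPR save space */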

	.section .data
	.align	5

_GLOBAL(swsusp_save_area)
	.space	SL_SIZE

	.section .text
	.align	5

_GLOBAL(swsusp_arch_suspend)

	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l

	mflr	r0
	stw	r0,SL_LR(r11)
	mfcr	r0
	stw	r0,SL_CR(r11)
	stw	r1,SL_SP(r11)
	stw	r2,SL_R2(r11)
	stmw	r12,SL_R12(r11)

	/* Save MSR & SDR1 */
	mfmsr	r4
	stw	r4,SL_MSR(r11)
	mfsdr1	r4
	stw	r4,SL_SDR1(r11)

	/* Get a stable timebase and save it */
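	/* (read TBU, then TBL, then TBU again: if the upper half changed
	 * in between, the low half wrapped and the pair must be re-read) */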
1:	mftbu	r4
	stw	r4,SL_TB(r11)
	mftb	r5
	stw	r5,SL_TB+4(r11)
	mftbu	r3
	cmpw	r3,r4
	bne	1b

	/* Save SPRGs */
	mfsprg	r4,0
	stw	r4,SL_SPRG0(r11)
	mfsprg	r4,1
	stw	r4,SL_SPRG0+4(r11)
	mfsprg	r4,2
	stw	r4,SL_SPRG0+8(r11)
	mfsprg	r4,3
	stw	r4,SL_SPRG0+12(r11)

	/* Save BATs */
	mfdbatu	r4,0
	stw	r4,SL_DBAT0(r11)
	mfdbatl	r4,0
	stw	r4,SL_DBAT0+4(r11)
	mfdbatu	r4,1
	stw	r4,SL_DBAT1(r11)
	mfdbatl	r4,1
	stw	r4,SL_DBAT1+4(r11)
	mfdbatu	r4,2
	stw	r4,SL_DBAT2(r11)
	mfdbatl	r4,2
	stw	r4,SL_DBAT2+4(r11)
	mfdbatu	r4,3
	stw	r4,SL_DBAT3(r11)
	mfdbatl	r4,3
	stw	r4,SL_DBAT3+4(r11)
	mfibatu	r4,0
	stw	r4,SL_IBAT0(r11)
	mfibatl	r4,0
	stw	r4,SL_IBAT0+4(r11)
	mfibatu	r4,1
	stw	r4,SL_IBAT1(r11)
	mfibatl	r4,1
	stw	r4,SL_IBAT1+4(r11)
	mfibatu	r4,2
	stw	r4,SL_IBAT2(r11)
	mfibatl	r4,2
	stw	r4,SL_IBAT2+4(r11)
	mfibatu	r4,3
	stw	r4,SL_IBAT3(r11)
	mfibatl	r4,3
	stw	r4,SL_IBAT3+4(r11)

BEGIN_MMU_FTR_SECTION
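	/* This feature section is patched out at boot on CPUs that do
	 * not have MMU_FTR_USE_HIGH_BATS (i.e. no BATs 4-7) */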
	mfspr	r4,SPRN_DBAT4U
	stw	r4,SL_DBAT4(r11)
	mfspr	r4,SPRN_DBAT4L
	stw	r4,SL_DBAT4+4(r11)
	mfspr	r4,SPRN_DBAT5U
	stw	r4,SL_DBAT5(r11)
	mfspr	r4,SPRN_DBAT5L
	stw	r4,SL_DBAT5+4(r11)
	mfspr	r4,SPRN_DBAT6U
	stw	r4,SL_DBAT6(r11)
	mfspr	r4,SPRN_DBAT6L
	stw	r4,SL_DBAT6+4(r11)
	mfspr	r4,SPRN_DBAT7U
	stw	r4,SL_DBAT7(r11)
	mfspr	r4,SPRN_DBAT7L
	stw	r4,SL_DBAT7+4(r11)
	mfspr	r4,SPRN_IBAT4U
	stw	r4,SL_IBAT4(r11)
	mfspr	r4,SPRN_IBAT4L
	stw	r4,SL_IBAT4+4(r11)
	mfspr	r4,SPRN_IBAT5U
	stw	r4,SL_IBAT5(r11)
	mfspr	r4,SPRN_IBAT5L
	stw	r4,SL_IBAT5+4(r11)
	mfspr	r4,SPRN_IBAT6U
	stw	r4,SL_IBAT6(r11)
	mfspr	r4,SPRN_IBAT6L
	stw	r4,SL_IBAT6+4(r11)
	mfspr	r4,SPRN_IBAT7U
	stw	r4,SL_IBAT7(r11)
	mfspr	r4,SPRN_IBAT7L
	stw	r4,SL_IBAT7+4(r11)
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)

#if 0
	/* Backup various CPU config stuff */
	bl	__save_cpu_setup
#endif
	/* Call the low level suspend stuff (we should probably have made
	 * a stackframe...)
	 */
	bl	swsusp_save
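	/* swsusp_save() leaves its return code in r3; we preserve it as
	 * our own return value below */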

	/* Restore LR from the save area */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	lwz	r0,SL_LR(r11)
	mtlr	r0

	blr


/* Resume code */
_GLOBAL(swsusp_arch_resume)

#ifdef CONFIG_ALTIVEC
	/* Stop pending AltiVec streams and memory accesses */
BEGIN_FTR_SECTION
	DSSALL
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
#endif
	sync

	/* Disable MSR:DR to make sure we don't take a TLB or
	 * hash miss during the copy, as our hash table will
	 * be unusable for a while. For .text, we assume we are
	 * covered by a BAT. This works only for non-G5 at this
	 * point. G5 will need a better approach, possibly using
	 * a small temporary hash table filled with large mappings;
	 * disabling the MMU completely isn't a good option for
	 * performance reasons.
	 * (Note that 750s may have the same performance issue as
	 * the G5 in this case; we should investigate using moving
	 * BATs for these CPUs)
	 */
	mfmsr	r0
	sync
	rlwinm	r0,r0,0,28,26		/* clear MSR_DR */
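	/* (the mask 28..26 wraps around zero, clearing only bit 27,
	 * i.e. MSR_DR = 0x10) */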
	mtmsr	r0
	sync
	isync

	/* Load the pointer to the list of pages to copy into r10 */
	lis	r11,(restore_pblist - KERNELBASE)@h
	ori	r11,r11,restore_pblist@l
	lwz	r10,0(r11)
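	/* restore_pblist is a linked list of struct pbe entries, each
	 * giving a source page (address) and its final destination
	 * (orig_address) */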

	/* Copy the pages. This is a very basic implementation, to
	 * be replaced by something more cache efficient */
1:
	tophys(r3,r10)
	li	r0,256
	mtctr	r0
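	/* 256 iterations of a 16-byte copy = one 4KB page per pbe */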
	lwz	r11,pbe_address(r3)	/* source */
	tophys(r5,r11)
	lwz	r10,pbe_orig_address(r3)	/* destination */
	tophys(r6,r10)
2:
	lwz	r8,0(r5)
	lwz	r9,4(r5)
	lwz	r10,8(r5)
	lwz	r11,12(r5)
	addi	r5,r5,16
	stw	r8,0(r6)
	stw	r9,4(r6)
	stw	r10,8(r6)
	stw	r11,12(r6)
	addi	r6,r6,16
	bdnz	2b
	lwz	r10,pbe_next(r3)
	cmpwi	0,r10,0
	bne	1b

	/* Do a very simple cache flush/inval of the L1 to ensure
	 * coherency of the icache
	 */
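	/* (0x20000 lines * 32 bytes = 4MB touched at cache-line stride,
	 * comfortably larger than the L1 on these CPUs) */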
	lis	r3,0x0002
	mtctr	r3
	li	r3, 0
1:
	lwz	r0,0(r3)
	addi	r3,r3,0x0020
	bdnz	1b
	isync
	sync

	/* Now flush those cache lines */
	lis	r3,0x0002
	mtctr	r3
	li	r3, 0
1:
	dcbf	0,r3
	addi	r3,r3,0x0020
	bdnz	1b
	sync

	/* Ok, we are now running with the kernel data of the old
	 * kernel fully restored. We can get to the save area
	 * easily now. As for the rest of the code, it assumes the
	 * loader kernel and the booted one are exactly identical
	 */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	tophys(r11,r11)

#if 0
	/* Restore various CPU config stuff */
	bl	__restore_cpu_setup
#endif
	/* Restore the BATs, and SDR1. Then we can turn on the MMU.
	 * This is a bit hairy as we are running out of those BATs,
	 * but first, our code is probably in the icache, and we are
	 * writing the same value to the BAT, so that should be fine,
	 * though a better solution will have to be found long-term
	 */
	lwz	r4,SL_SDR1(r11)
	mtsdr1	r4
	lwz	r4,SL_SPRG0(r11)
	mtsprg	0,r4
	lwz	r4,SL_SPRG0+4(r11)
	mtsprg	1,r4
	lwz	r4,SL_SPRG0+8(r11)
	mtsprg	2,r4
	lwz	r4,SL_SPRG0+12(r11)
	mtsprg	3,r4

#if 0
	lwz	r4,SL_DBAT0(r11)
	mtdbatu	0,r4
	lwz	r4,SL_DBAT0+4(r11)
	mtdbatl	0,r4
	lwz	r4,SL_DBAT1(r11)
	mtdbatu	1,r4
	lwz	r4,SL_DBAT1+4(r11)
	mtdbatl	1,r4
	lwz	r4,SL_DBAT2(r11)
	mtdbatu	2,r4
	lwz	r4,SL_DBAT2+4(r11)
	mtdbatl	2,r4
	lwz	r4,SL_DBAT3(r11)
	mtdbatu	3,r4
	lwz	r4,SL_DBAT3+4(r11)
	mtdbatl	3,r4
	lwz	r4,SL_IBAT0(r11)
	mtibatu	0,r4
	lwz	r4,SL_IBAT0+4(r11)
	mtibatl	0,r4
	lwz	r4,SL_IBAT1(r11)
	mtibatu	1,r4
	lwz	r4,SL_IBAT1+4(r11)
	mtibatl	1,r4
	lwz	r4,SL_IBAT2(r11)
	mtibatu	2,r4
	lwz	r4,SL_IBAT2+4(r11)
	mtibatl	2,r4
	lwz	r4,SL_IBAT3(r11)
	mtibatu	3,r4
	lwz	r4,SL_IBAT3+4(r11)
	mtibatl	3,r4
BEGIN_MMU_FTR_SECTION
	lwz	r4,SL_DBAT4(r11)
	mtspr	SPRN_DBAT4U,r4
	lwz	r4,SL_DBAT4+4(r11)
	mtspr	SPRN_DBAT4L,r4
	lwz	r4,SL_DBAT5(r11)
	mtspr	SPRN_DBAT5U,r4
	lwz	r4,SL_DBAT5+4(r11)
	mtspr	SPRN_DBAT5L,r4
	lwz	r4,SL_DBAT6(r11)
	mtspr	SPRN_DBAT6U,r4
	lwz	r4,SL_DBAT6+4(r11)
	mtspr	SPRN_DBAT6L,r4
	lwz	r4,SL_DBAT7(r11)
	mtspr	SPRN_DBAT7U,r4
	lwz	r4,SL_DBAT7+4(r11)
	mtspr	SPRN_DBAT7L,r4
	lwz	r4,SL_IBAT4(r11)
	mtspr	SPRN_IBAT4U,r4
	lwz	r4,SL_IBAT4+4(r11)
	mtspr	SPRN_IBAT4L,r4
	lwz	r4,SL_IBAT5(r11)
	mtspr	SPRN_IBAT5U,r4
	lwz	r4,SL_IBAT5+4(r11)
	mtspr	SPRN_IBAT5L,r4
	lwz	r4,SL_IBAT6(r11)
	mtspr	SPRN_IBAT6U,r4
	lwz	r4,SL_IBAT6+4(r11)
	mtspr	SPRN_IBAT6L,r4
	lwz	r4,SL_IBAT7(r11)
	mtspr	SPRN_IBAT7U,r4
	lwz	r4,SL_IBAT7+4(r11)
	mtspr	SPRN_IBAT7L,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
#endif

	/* Flush all TLBs */
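	/* (walk r4 from 0x10000000 down to 0 in 4KB steps, issuing tlbie
	 * for each page; this should hit every TLB set on these CPUs) */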
	lis	r4,0x1000
1:	addic.	r4,r4,-0x1000
	tlbie	r4
	bgt	1b
	sync

	/* restore the MSR and turn on the MMU */
	lwz	r3,SL_MSR(r11)
	bl	turn_on_mmu
	tovirt(r11,r11)

	/* Restore TB */
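	/* (zero TBL first so the timebase cannot carry into TBU between
	 * the two upper/lower writes) */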
	li	r3,0
	mttbl	r3
	lwz	r3,SL_TB(r11)
	lwz	r4,SL_TB+4(r11)
	mttbu	r3
	mttbl	r4

	/* Kick decrementer */
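	/* (DEC=1 raises a decrementer exception as soon as interrupts
	 * are re-enabled) */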
	li	r0,1
	mtdec	r0

	/* Restore the callee-saved registers and return */
	lwz	r0,SL_CR(r11)
	mtcr	r0
	lwz	r2,SL_R2(r11)
	lmw	r12,SL_R12(r11)
	lwz	r1,SL_SP(r11)
	lwz	r0,SL_LR(r11)
	mtlr	r0

	// XXX Note: we don't really need to call swsusp_resume

	li	r3,0
	blr

/* FIXME: This construct is actually not useful since we don't shut
 * down the instruction MMU; we could just flip MSR:DR back on.
 */
turn_on_mmu:
	mflr	r4
	mtsrr0	r4
	mtsrr1	r3
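	/* rfi loads the PC from SRR0 (our return address) and the MSR
	 * from SRR1 (the saved MSR in r3), so we come back with
	 * translation enabled */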
	sync
	isync
	rfi