Mirror of https://github.com/AuxXxilium/linux_dsm_epyc7002.git, synced 2024-12-04 17:36:49 +07:00
Commit e44b7b7525
Move wakeup code to .c, so that video mode setting code can be shared between boot and wakeup. Remove nasty assembly code in the 64-bit case by re-using trampoline code. Stack setup was fixed to clear the high 16 bits of %esp, maybe that fixes some machines.

.c code sharing and morse code was done by H. Peter Anvin, Sam Ravnborg reviewed kbuild related stuff, and it seems okay to him. Rafael did some cleanups.

[rjw:
* Made the patch stop breaking compilation on x86-32
* Added arch/x86/kernel/acpi/sleep.h
* Got rid of compiler warnings in arch/x86/kernel/acpi/sleep.c
* Fixed 32-bit compilation on x86-64 systems
* Added include/asm-x86/trampoline.h and fixed the non-SMP compilation on 64-bit x86
* Removed arch/x86/kernel/acpi/sleep_32.c which was not used
* Fixed some breakage caused by the integration of smpboot.c done under us in the meantime]

Signed-off-by: Pavel Machek <pavel@suse.cz>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Reviewed-by: Sam Ravnborg <sam@ravnborg.org>
Signed-off-by: Rafael J. Wysocki <rjw@sisk.pl>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
101 lines
1.7 KiB
x86 Assembly (GAS)
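The commit message's note about stack setup clearing the high 16 bits of %esp refers to wakeup stack setup code outside this file; the listing below does not contain that change. As a minimal, hypothetical sketch of that kind of fix (assumed instruction sequence, not the actual hunk from this commit): when only the 16-bit %sp has been initialized, the upper half of %esp can hold stale bits and must be cleared before 32-bit code uses the stack.

	# Hypothetical sketch: zero-extend a 16-bit stack pointer so that
	# bits 16..31 of %esp cannot carry garbage into 32-bit code.
	movzwl	%sp, %esp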
	.section .text.page_aligned
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/page.h>

# Copyright 2003, 2008 Pavel Machek <pavel@suse.cz>, distribute under GPLv2

	.code32
	ALIGN

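# Protected-mode wakeup entry: reached on resume once the CPU is back in
# protected mode.  Reload the segment registers and descriptor tables,
# restore the saved stack pointer, sanity-check saved_magic and jump back
# to the saved return address.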
ENTRY(wakeup_pmode_return)
wakeup_pmode_return:
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs

	# reload the gdt, as we need the full 32 bit address
	lgdt	saved_gdt
	lidt	saved_idt
	lldt	saved_ldt
	ljmp	$(__KERNEL_CS), $1f
1:
	movl	%cr3, %eax
	movl	%eax, %cr3
	wbinvd

	# and restore the stack ... but you need gdt for this to work
	movl	saved_context_esp, %esp

	movl	%cs:saved_magic, %eax
	cmpl	$0x12345678, %eax
	jne	bogus_magic

	# jump to place where we left off
	movl	saved_eip, %eax
	jmp	*%eax

bogus_magic:
	jmp	bogus_magic

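# save_registers: stash the descriptor-table registers, the task register,
# the callee-saved general-purpose registers, EFLAGS and the caller's stack
# pointer, and record ret_point as the address to resume at after wakeup.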
save_registers:
	sgdt	saved_gdt
	sidt	saved_idt
	sldt	saved_ldt
	str	saved_tss

	leal	4(%esp), %eax
	movl	%eax, saved_context_esp
	movl	%ebx, saved_context_ebx
	movl	%ebp, saved_context_ebp
	movl	%esi, saved_context_esi
	movl	%edi, saved_context_edi
	pushfl
	popl	saved_context_eflags

	movl	$ret_point, saved_eip
	ret

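# restore_registers: inverse of save_registers for the general-purpose
# registers and EFLAGS; the stack pointer and descriptor tables are restored
# earlier, in wakeup_pmode_return.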
restore_registers:
	movl	saved_context_ebp, %ebp
	movl	saved_context_ebx, %ebx
	movl	saved_context_esi, %esi
	movl	saved_context_edi, %edi
	pushl	saved_context_eflags
	popfl
	ret

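# do_suspend_lowlevel: called from the ACPI suspend path.  Save processor and
# register state, then request S3 via acpi_enter_sleep_state(3).  Execution
# resumes at ret_point, either via the wakeup path or directly if entering S3
# fails.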
ENTRY(do_suspend_lowlevel)
	call	save_processor_state
	call	save_registers
	pushl	$3
	call	acpi_enter_sleep_state
	addl	$4, %esp

	# In case of S3 failure, we'll emerge here.  Jump
	# to ret_point to recover
	jmp	ret_point
	.p2align 4,,7
ret_point:
	call	restore_registers
	call	restore_processor_state
	ret

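# Storage shared with the wakeup path.  saved_magic and saved_eip are
# exported with ENTRY so they can be referenced from outside this file;
# the remaining slots are only used locally.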
.data
ALIGN
ENTRY(saved_magic)	.long	0
ENTRY(saved_eip)	.long	0

# saved registers
saved_gdt:	.long	0,0
saved_idt:	.long	0,0
saved_ldt:	.long	0
saved_tss:	.long	0