author     Thomas Gleixner <tglx@linutronix.de>   2023-05-30 13:46:22 +0300
committer  Thomas Gleixner <tglx@linutronix.de>   2023-05-30 15:11:47 +0300
commit     33e20b07bec4991c169e3c6ff28c2126583724fc (patch)
tree       d4995aef3605ac5ee4e899500af620b54f449a4e /arch/x86/realmode
parent     5da80b28bf25c3458c7beb23794ff53622ce7eb4 (diff)
x86/realmode: Make stack lock work in trampoline_compat()
The stack locking and stack assignment macro LOAD_REALMODE_ESP fails to work when invoked from the 64-bit trampoline entry point:

  trampoline_start64
    trampoline_compat
      LOAD_REALMODE_ESP <- lock

Accessing tr_lock is only possible from 16-bit mode. For the compat entry point this needs to be pa_tr_lock so that the required relocation entry is generated. Otherwise it locks the non-relocated address, which, aside from being wrong, is never cleared in secondary_startup_64(), causing all but the first CPU to get stuck on the lock.

Make the macro take an argument lock_pa, which defaults to 0, and rename it to LOCK_AND_LOAD_REALMODE_ESP to make clear what this is about.

Fixes: f6f1ae9128d2 ("x86/smpboot: Implement a bit spinlock to protect the realmode stack")
Reported-by: Kirill A. Shutemov <kirill.shutemov@linux.intel.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Tested-by: Kirill A. Shutemov <kirill.shutemov@linux.intel.com>
Link: https://lore.kernel.org/r/87h6rujdvl.ffs@tglx
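For readers unfamiliar with the bit spinlock involved: the acquire side atomically sets bit 0 of the lock word with a locked btsl and spins (with pause) until it finds the bit previously clear; the release side, done later in secondary_startup_64(), clears the bit again. The fragment below is a minimal, self-contained sketch of that acquire/release pairing, written as ordinary 64-bit code with a made-up symbol my_lock and %rip-relative addressing so it assembles on its own; the real trampoline code differs in addressing and context. The hang described above follows directly from this pairing: the compat path set the bit at the non-relocated tr_lock address, but the release only ever clears the relocated copy, so the stale bit keeps every later CPU spinning.

/* bitlock_sketch.S - illustrative only; assemble with: gcc -c bitlock_sketch.S */
	.data
my_lock:	.long	0			/* bit 0 set = lock held */

	.text
	.globl	acquire_lock
acquire_lock:
1:	lock btsl	$0, my_lock(%rip)	/* atomically test and set bit 0 */
	jnc	2f				/* bit was clear: lock is ours */
	pause					/* spin politely and retry */
	jmp	1b
2:	ret

	.globl	release_lock
release_lock:
	lock btrl	$0, my_lock(%rip)	/* clear bit 0: release the lock */
	ret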
Diffstat (limited to 'arch/x86/realmode')
-rw-r--r--  arch/x86/realmode/rm/trampoline_64.S | 12
1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/arch/x86/realmode/rm/trampoline_64.S b/arch/x86/realmode/rm/trampoline_64.S
index 4822ad2a5e89..c9f76fae902e 100644
--- a/arch/x86/realmode/rm/trampoline_64.S
+++ b/arch/x86/realmode/rm/trampoline_64.S
@@ -37,12 +37,16 @@
.text
.code16
-.macro LOAD_REALMODE_ESP
+.macro LOCK_AND_LOAD_REALMODE_ESP lock_pa=0
/*
* Make sure only one CPU fiddles with the realmode stack
*/
.Llock_rm\@:
+ .if \lock_pa
+ lock btsl $0, pa_tr_lock
+ .else
lock btsl $0, tr_lock
+ .endif
jnc 2f
pause
jmp .Llock_rm\@
@@ -63,7 +67,7 @@ SYM_CODE_START(trampoline_start)
mov %ax, %es
mov %ax, %ss
- LOAD_REALMODE_ESP
+ LOCK_AND_LOAD_REALMODE_ESP
call verify_cpu # Verify the cpu supports long mode
testl %eax, %eax # Check for return code
@@ -106,7 +110,7 @@ SYM_CODE_START(sev_es_trampoline_start)
mov %ax, %es
mov %ax, %ss
- LOAD_REALMODE_ESP
+ LOCK_AND_LOAD_REALMODE_ESP
jmp .Lswitch_to_protected
SYM_CODE_END(sev_es_trampoline_start)
@@ -189,7 +193,7 @@ SYM_CODE_START(pa_trampoline_compat)
* In compatibility mode. Prep ESP and DX for startup_32, then disable
* paging and complete the switch to legacy 32-bit mode.
*/
- LOAD_REALMODE_ESP
+ LOCK_AND_LOAD_REALMODE_ESP lock_pa=1
movw $__KERNEL_DS, %dx
movl $(CR0_STATE & ~X86_CR0_PG), %eax
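The lock_pa argument works purely at assembly time: a .macro parameter with a default value can be tested with .if inside the macro body, so the single invocation LOCK_AND_LOAD_REALMODE_ESP lock_pa=1 in pa_trampoline_compat() selects the pa_-prefixed symbol (the one for which the required relocation entry is generated), while all other callers keep the default. The standalone sketch below shows only that assembler mechanism, using made-up symbols flag and pa_flag; it is not the kernel macro.

/* macro_arg_sketch.S - .macro default-argument mechanism; assemble with: gcc -c macro_arg_sketch.S */
.macro SET_BIT0 use_pa=0
	.if \use_pa
	lock btsl	$0, pa_flag(%rip)	/* expansion when use_pa=1 */
	.else
	lock btsl	$0, flag(%rip)		/* default expansion */
	.endif
.endm

	.data
flag:		.long	0
pa_flag:	.long	0

	.text
	.globl	demo
demo:
	SET_BIT0				/* operates on flag */
	SET_BIT0 use_pa=1			/* operates on pa_flag */
	ret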