author		David Gow <davidgow@google.com>	2024-07-31 10:30:29 +0300
committer	Thomas Gleixner <tglx@linutronix.de>	2024-08-01 22:19:10 +0300
commit		dd35a0933269c636635b6af89dc6fa1782791e56 (patch)
tree		0ce39c696068b85f430febb2ae43c441aeff26af
parent		3db03fb4995ef85fc41e86262ead7b4852f4bcf0 (diff)
download	linux-dd35a0933269c636635b6af89dc6fa1782791e56.tar.xz
x86/uaccess: Zero the 8-byte get_range case on failure on 32-bit
While zeroing the upper 32 bits of an 8-byte getuser on 32-bit x86 was
fixed by commit 8c860ed825cb ("x86/uaccess: Fix missed zeroing of ia32
u64 get_user() range checking") it was broken again in commit
8a2462df1547 ("x86/uaccess: Improve the 8-byte getuser() case").

This is because the register which holds the upper 32 bits (%ecx) is
being cleared _after_ the check_range, so if the range check fails, %ecx
is never cleared.

This can be reproduced with:

  ./tools/testing/kunit/kunit.py run --arch i386 usercopy

Instead, clear %ecx _before_ check_range in the 8-byte case. This
reintroduces a bit of the ugliness we were trying to avoid by adding
another #ifndef CONFIG_X86_64, but at least keeps check_range from
needing a separate bad_get_user_8 jump.

Fixes: 8a2462df1547 ("x86/uaccess: Improve the 8-byte getuser() case")
Signed-off-by: David Gow <davidgow@google.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Acked-by: Linus Torvalds <torvalds@linux-foundation.org>
Link: https://lore.kernel.org/all/20240731073031.4045579-1-davidgow@google.com
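For context, a minimal C sketch (not part of the patch) of the caller-visible
contract at stake: get_user() is documented to zero its destination on
failure, and the KUnit usercopy test above checks exactly that for a 64-bit
read on i386. The function name read_sample and the pointer bad_ptr are
hypothetical names used only for illustration.

	#include <linux/errno.h>
	#include <linux/types.h>
	#include <linux/uaccess.h>

	/*
	 * Hedged sketch, assuming a caller that relies on get_user()
	 * zeroing its destination on -EFAULT. Before this fix, a failing
	 * range check on 32-bit x86 left the upper 32 bits of a u64
	 * destination unzeroed (stale %ecx from __get_user_8).
	 */
	static int read_sample(u64 __user *bad_ptr, u64 *out)
	{
		u64 val = 0xffffffffffffffffULL;	/* poison so a leak is visible */

		if (get_user(val, bad_ptr)) {
			/* Expected: all 64 bits of 'val' are now zero. */
			*out = val;
			return -EFAULT;
		}

		*out = val;
		return 0;
	}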
-rw-r--r--	arch/x86/lib/getuser.S | 4
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/arch/x86/lib/getuser.S b/arch/x86/lib/getuser.S
index a314622aa093..d066aecf8aeb 100644
--- a/arch/x86/lib/getuser.S
+++ b/arch/x86/lib/getuser.S
@@ -88,12 +88,14 @@ SYM_FUNC_END(__get_user_4)
 EXPORT_SYMBOL(__get_user_4)
 
 SYM_FUNC_START(__get_user_8)
+#ifndef CONFIG_X86_64
+	xor %ecx,%ecx
+#endif
 	check_range size=8
 	ASM_STAC
 #ifdef CONFIG_X86_64
 	UACCESS movq (%_ASM_AX),%rdx
 #else
-	xor %ecx,%ecx
 	UACCESS movl (%_ASM_AX),%edx
 	UACCESS movl 4(%_ASM_AX),%ecx
 #endif