| author    | Andy Lutomirski <luto@kernel.org>    | 2016-09-29 12:48:12 -0700 |
|-----------|--------------------------------------|---------------------------|
| committer | Thomas Gleixner <tglx@linutronix.de> | 2016-09-30 12:40:12 +0200 |
| commit    | 1ef55be16ed69538f89e0a6508be5e62fdc9851c (patch) | |
| tree      | 623f3cb97a13b616e813aeadb46a56689d555141 /arch/x86/include/asm/special_insns.h | |
| parent    | d7e25c66c9bf882450060fd9464e784bd229d3ae (diff) | |
x86/asm: Get rid of __read_cr4_safe()
We use __read_cr4() vs __read_cr4_safe() inconsistently. On
CR4-less CPUs, all CR4 bits are effectively clear, so we can make
the code simpler and more robust by making __read_cr4() always fix
up faults on 32-bit kernels.
This may fix some bugs on old 486-like CPUs, but I don't have any
easy way to test that.
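
For reference, here is a minimal sketch (not part of this patch; the function name is illustrative) of the exception-table fixup pattern the message describes, using the kernel's _ASM_EXTABLE macro:

static inline unsigned long read_cr4_or_zero(void)
{
	unsigned long val;

	/*
	 * If the mov at label 1 faults because CR4 does not exist, the
	 * exception table entry redirects execution to label 2, and the
	 * "0" (0) input constraint guarantees that val is still 0.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val) : "0" (0));
	return val;
}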
Signed-off-by: Andy Lutomirski <luto@kernel.org>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Borislav Petkov <bp@alien8.de>
Cc: david@saggiorato.net
Link: http://lkml.kernel.org/r/ea647033d357d9ce2ad2bbde5a631045f5052fb6.1475178370.git.luto@kernel.org
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'arch/x86/include/asm/special_insns.h')
-rw-r--r-- | arch/x86/include/asm/special_insns.h | 22
1 file changed, 7 insertions(+), 15 deletions(-)
diff --git a/arch/x86/include/asm/special_insns.h b/arch/x86/include/asm/special_insns.h
index 587d7914ea4b..19a2224f9e16 100644
--- a/arch/x86/include/asm/special_insns.h
+++ b/arch/x86/include/asm/special_insns.h
@@ -59,22 +59,19 @@ static inline void native_write_cr3(unsigned long val)
 static inline unsigned long native_read_cr4(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
-	return val;
-}
-
-static inline unsigned long native_read_cr4_safe(void)
-{
-	unsigned long val;
-	/* This could fault if %cr4 does not exist. In x86_64, a cr4 always
-	 * exists, so it will never fail. */
 #ifdef CONFIG_X86_32
+	/*
+	 * This could fault if CR4 does not exist.  Non-existent CR4
+	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
+	 * that CR4 == 0 on CPUs that don't have CR4.
+	 */
 	asm volatile("1: mov %%cr4, %0\n"
 		     "2:\n"
 		     _ASM_EXTABLE(1b, 2b)
 		     : "=r" (val), "=m" (__force_order) : "0" (0));
 #else
-	val = native_read_cr4();
+	/* CR4 always exists on x86_64. */
+	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
 #endif
 	return val;
 }
@@ -182,11 +179,6 @@ static inline unsigned long __read_cr4(void)
 	return native_read_cr4();
 }
 
-static inline unsigned long __read_cr4_safe(void)
-{
-	return native_read_cr4_safe();
-}
-
 static inline void __write_cr4(unsigned long x)
 {
 	native_write_cr4(x);
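
With __read_cr4_safe() removed, call sites use __read_cr4() unconditionally; on a CR4-less CPU the read comes back as 0, so feature-bit tests simply evaluate to false. A hypothetical caller (illustrative only, not from the tree):

static bool cr4_pge_enabled(void)
{
	/* False on CPUs without CR4, because __read_cr4() returns 0 there. */
	return !!(__read_cr4() & X86_CR4_PGE);
}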