x86/cpu: Use pinning mask for CR4 bits needing to be 0
The X86_CR4_FSGSBASE bit of CR4 should not change after boot[1]. Older
kernels should enforce this bit to zero, and newer kernels need to
enforce it depending on boot-time configuration (e.g. "nofsgsbase").
To support a pinned bit being either 1 or 0, use an explicit mask in
combination with the expected pinned bit values.

[1] https://lore.kernel.org/lkml/20200527103147.GI325280@hirez.programming.kicks-ass.net

Signed-off-by: Kees Cook <keescook@chromium.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Cc: stable@vger.kernel.org
Link: https://lkml.kernel.org/r/202006082013.71E29A42@keescook
commit a13b9d0b97
parent cc5277fe66
--- a/arch/x86/kernel/cpu/common.c
+++ b/arch/x86/kernel/cpu/common.c
@@ -347,6 +347,9 @@ out:
 	cr4_clear_bits(X86_CR4_UMIP);
 }
 
+/* These bits should not change their value after CPU init is finished. */
+static const unsigned long cr4_pinned_mask =
+	X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP | X86_CR4_FSGSBASE;
 static DEFINE_STATIC_KEY_FALSE_RO(cr_pinning);
 static unsigned long cr4_pinned_bits __ro_after_init;
 
@@ -371,20 +374,20 @@ EXPORT_SYMBOL(native_write_cr0);
 
 void native_write_cr4(unsigned long val)
 {
-	unsigned long bits_missing = 0;
+	unsigned long bits_changed = 0;
 
 set_register:
 	asm volatile("mov %0,%%cr4": "+r" (val), "+m" (cr4_pinned_bits));
 
 	if (static_branch_likely(&cr_pinning)) {
-		if (unlikely((val & cr4_pinned_bits) != cr4_pinned_bits)) {
-			bits_missing = ~val & cr4_pinned_bits;
-			val |= bits_missing;
+		if (unlikely((val & cr4_pinned_mask) != cr4_pinned_bits)) {
+			bits_changed = (val & cr4_pinned_mask) ^ cr4_pinned_bits;
+			val = (val & ~cr4_pinned_mask) | cr4_pinned_bits;
 			goto set_register;
 		}
-		/* Warn after we've set the missing bits. */
-		WARN_ONCE(bits_missing, "CR4 bits went missing: %lx!?\n",
-			  bits_missing);
+		/* Warn after we've corrected the changed bits. */
+		WARN_ONCE(bits_changed, "pinned CR4 bits changed: 0x%lx!?\n",
+			  bits_changed);
 	}
 }
 #if IS_MODULE(CONFIG_LKDTM)
@@ -419,7 +422,7 @@ void cr4_init(void)
 	if (boot_cpu_has(X86_FEATURE_PCID))
 		cr4 |= X86_CR4_PCIDE;
 	if (static_branch_likely(&cr_pinning))
-		cr4 |= cr4_pinned_bits;
+		cr4 = (cr4 & ~cr4_pinned_mask) | cr4_pinned_bits;
 
 	__write_cr4(cr4);
 
@@ -434,10 +437,7 @@ void cr4_init(void)
  */
static void __init setup_cr_pinning(void)
 {
-	unsigned long mask;
-
-	mask = (X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP);
-	cr4_pinned_bits = this_cpu_read(cpu_tlbstate.cr4) & mask;
+	cr4_pinned_bits = this_cpu_read(cpu_tlbstate.cr4) & cr4_pinned_mask;
 	static_key_enable(&cr_pinning.key);
 }
 
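For context, here is a minimal, stand-alone sketch of the mask-plus-value
check that the hunks above add to native_write_cr4(). It is ordinary
user-space C, not kernel code: the CR4 bit positions match the hardware
layout, but pinned_mask, pinned_bits, demo_write_cr4() and main() are
hypothetical names used only to illustrate why comparing the masked value
against the expected values can pin a bit to 0 (e.g. FSGSBASE under
"nofsgsbase") as well as to 1.

/*
 * Minimal user-space sketch of the mask-plus-value pinning check.
 * Not kernel code; names other than the CR4 bit macros are invented
 * for illustration.
 */
#include <stdio.h>

#define X86_CR4_UMIP		(1UL << 11)
#define X86_CR4_FSGSBASE	(1UL << 16)
#define X86_CR4_SMEP		(1UL << 20)
#define X86_CR4_SMAP		(1UL << 21)

/* Bits whose values are pinned after boot. */
static const unsigned long pinned_mask =
	X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP | X86_CR4_FSGSBASE;

/* Boot-time snapshot of the pinned bits' values. */
static unsigned long pinned_bits;

static unsigned long demo_write_cr4(unsigned long val)
{
	/*
	 * The old check, (val & pinned_bits) != pinned_bits, only notices
	 * pinned-1 bits that were cleared. Comparing the masked value
	 * against the expected values also catches a pinned-0 bit
	 * (a disabled FSGSBASE) being turned on.
	 */
	if ((val & pinned_mask) != pinned_bits) {
		unsigned long changed = (val & pinned_mask) ^ pinned_bits;

		printf("pinned CR4 bits changed: 0x%lx\n", changed);
		val = (val & ~pinned_mask) | pinned_bits;	/* repair */
	}
	return val;
}

int main(void)
{
	unsigned long val;

	/* Pretend boot left SMEP/SMAP/UMIP set and FSGSBASE clear. */
	pinned_bits = (X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP) & pinned_mask;

	/* An attempt to flip FSGSBASE on is detected and undone. */
	val = demo_write_cr4(pinned_bits | X86_CR4_FSGSBASE);
	printf("resulting value: 0x%lx\n", val);
	return 0;
}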