On 1/14/25 09:51, Valentin Schneider wrote:
> + cr4 = this_cpu_read(cpu_tlbstate.cr4);
> + asm volatile("mov %0,%%cr4": : "r" (cr4 ^ X86_CR4_PGE) : "memory");
> + asm volatile("mov %0,%%cr4": : "r" (cr4) : "memory");
> + /*
> + * In lieu of not having the pinning crap, hard fail if CR4 doesn't
> + * match the expected value. This ensures that anybody doing dodgy gets
> + * the fallthrough check.
> + */
> + BUG_ON(cr4 != this_cpu_read(cpu_tlbstate.cr4));
Let's say someone managed to write to cpu_tlbstate.cr4, clearing one of
the pinned bits.
Before this patch, CR4 pinning would WARN_ONCE() about it pretty quickly
and also reset the cleared bits.
After this patch, the first native_flush_tlb_global() can clear pinned
bits, at least until native_write_cr4() gets called the next time. That
seems like it'll undermine CR4 pinning at least somewhat.
What keeps native_write_cr4() from being noinstr-compliant now? Is it
just the WARN_ONCE()?
If so, I'd kinda rather have a native_write_cr4_nowarn() that's
noinstr-compliant but retains all the other CR4 pinning behavior. Would
something like the attached patch be _worse_?
diff --git a/arch/x86/kernel/cpu/common.c b/arch/x86/kernel/cpu/common.c
index 3e9037690814..2044d516f06f 100644
--- a/arch/x86/kernel/cpu/common.c
+++ b/arch/x86/kernel/cpu/common.c
@@ -423,24 +423,40 @@ void native_write_cr0(unsigned long val)
}
EXPORT_SYMBOL(native_write_cr0);
-void __no_profile native_write_cr4(unsigned long val)
+void __no_profile __native_write_cr4(unsigned long val, unsigned long *bits_changed)
{
- unsigned long bits_changed = 0;
-
set_register:
asm volatile("mov %0,%%cr4": "+r" (val) : : "memory");
if (static_branch_likely(&cr_pinning)) {
if (unlikely((val & cr4_pinned_mask) != cr4_pinned_bits)) {
- bits_changed = (val & cr4_pinned_mask) ^ cr4_pinned_bits;
+ *bits_changed = (val & cr4_pinned_mask) ^ cr4_pinned_bits;
val = (val & ~cr4_pinned_mask) | cr4_pinned_bits;
goto set_register;
}
- /* Warn after we've corrected the changed bits. */
- WARN_ONCE(bits_changed, "pinned CR4 bits changed: 0x%lx!?\n",
- bits_changed);
}
}
+
+void __no_profile native_write_cr4(unsigned long val)
+{
+ unsigned long bits_changed = 0;
+
+ __native_write_cr4(val, &bits_changed);
+
+ if (!bits_changed)
+ return;
+
+ WARN_ONCE(bits_changed, "pinned CR4 bits changed: 0x%lx!?\n",
+ bits_changed);
+}
+
+void __no_profile native_write_cr4_nowarn(unsigned long val)
+{
+ unsigned long bits_changed = 0;
+
+ __native_write_cr4(val, &bits_changed);
+}
+
#if IS_MODULE(CONFIG_LKDTM)
EXPORT_SYMBOL_GPL(native_write_cr4);
#endif