> And if this is going to be more permanent, we can separate the mask
> (untested):

The FSGSBASE one should not be permanent; it will be replaced
by the full FSGSBASE patches, which set that bit correctly.

I was a bit wary of enforcing it for all bits; there might be other
CR4 bits that have benign uses. But I guess the risk of breaking
something there is low.

-Andi


> 
> 
> diff --git a/arch/x86/kernel/cpu/common.c b/arch/x86/kernel/cpu/common.c
> index bed0cb83fe24..ead64f7420a5 100644
> --- a/arch/x86/kernel/cpu/common.c
> +++ b/arch/x86/kernel/cpu/common.c
> @@ -347,6 +347,8 @@ static __always_inline void setup_umip(struct cpuinfo_x86 *c)
>       cr4_clear_bits(X86_CR4_UMIP);
>  }
>  
> +static const unsigned long cr4_pinned_mask =
> +     X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP | X86_CR4_FSGSBASE;
>  static DEFINE_STATIC_KEY_FALSE_RO(cr_pinning);
>  static unsigned long cr4_pinned_bits __ro_after_init;
>  
> @@ -371,20 +373,20 @@ EXPORT_SYMBOL(native_write_cr0);
>  
>  void native_write_cr4(unsigned long val)
>  {
> -     unsigned long bits_missing = 0;
> +     unsigned long bits_changed = 0;
>  
>  set_register:
>       asm volatile("mov %0,%%cr4": "+r" (val), "+m" (cr4_pinned_bits));
>  
>       if (static_branch_likely(&cr_pinning)) {
> -             if (unlikely((val & cr4_pinned_bits) != cr4_pinned_bits)) {
> -                     bits_missing = ~val & cr4_pinned_bits;
> -                     val |= bits_missing;
> +             if (unlikely((val & cr4_pinned_mask) != cr4_pinned_bits)) {
> +                     bits_changed = (val & cr4_pinned_mask) ^ cr4_pinned_bits;
> +                     val = (val & ~cr4_pinned_mask) | cr4_pinned_bits;
>                       goto set_register;
>               }
> -             /* Warn after we've set the missing bits. */
> -             WARN_ONCE(bits_missing, "CR4 bits went missing: %lx!?\n",
> -                       bits_missing);
> +             /* Warn after we've corrected the changed bits. */
> +             WARN_ONCE(bits_changed, "pinned CR4 bits changed: %lx!?\n",
> +                       bits_changed);
>       }
>  }
>  EXPORT_SYMBOL(native_write_cr4);
> @@ -396,7 +398,7 @@ void cr4_init(void)
>       if (boot_cpu_has(X86_FEATURE_PCID))
>               cr4 |= X86_CR4_PCIDE;
>       if (static_branch_likely(&cr_pinning))
> -             cr4 |= cr4_pinned_bits;
> +             cr4 = (cr4 & ~cr4_pinned_mask) | cr4_pinned_bits;
>  
>       __write_cr4(cr4);
>  
> @@ -411,10 +413,7 @@ void cr4_init(void)
>   */
>  static void __init setup_cr_pinning(void)
>  {
> -     unsigned long mask;
> -
> -     mask = (X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP);
> -     cr4_pinned_bits = this_cpu_read(cpu_tlbstate.cr4) & mask;
> +     cr4_pinned_bits = this_cpu_read(cpu_tlbstate.cr4) & cr4_pinned_mask;
>       static_key_enable(&cr_pinning.key);
>  }
>  
> 
> -- 
> Kees Cook
> 
