x86: use optimal NOPs to fill the SMEP/SMAP placeholders

Alternatives patching code picks the most suitable NOPs for the
running system, so simply use it to replace the pre-populated ones.

Use an arbitrary, always available feature to key off from, but
hide this behind the new X86_FEATURE_ALWAYS.

Signed-off-by: Jan Beulich
---
v3: Re-base.
v2: Introduce and use X86_FEATURE_ALWAYS.

--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -175,12 +175,7 @@ compat_bad_hypercall:
 ENTRY(compat_restore_all_guest)
         ASSERT_INTERRUPTS_DISABLED
 .Lcr4_orig:
-        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
-        ASM_NOP2 /* jpe   .Lcr4_alt_end */
-        ASM_NOP8 /* mov   CPUINFO_cr4...(%rsp), %rax */
-        ASM_NOP6 /* and   $..., %rax */
-        ASM_NOP8 /* mov   %rax, CPUINFO_cr4...(%rsp) */
-        ASM_NOP3 /* mov   %rax, %cr4 */
+        .skip (.Lcr4_alt_end - .Lcr4_alt) - (. - .Lcr4_orig), 0x90
 .Lcr4_orig_end:
         .pushsection .altinstr_replacement, "ax"
 .Lcr4_alt:
@@ -192,6 +187,7 @@ ENTRY(compat_restore_all_guest)
         mov   %rax, %cr4
 .Lcr4_alt_end:
         .section .altinstructions, "a"
+        altinstruction_entry .Lcr4_orig, .Lcr4_orig, X86_FEATURE_ALWAYS, 12, 0
         altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMEP, \
                              (.Lcr4_orig_end - .Lcr4_orig), \
                              (.Lcr4_alt_end - .Lcr4_alt)
--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -204,6 +204,7 @@ void ret_from_intr(void);
         662: __ASM_##op;                                            \
         .popsection;                                                \
         .pushsection .altinstructions, "a";                         \
+        altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0;  \
         altinstruction_entry 661b, 662b, X86_FEATURE_SMAP, 3, 3;    \
         .popsection
@@ -215,6 +216,7 @@ void ret_from_intr(void);
         .pushsection .altinstr_replacement, "ax";                   \
         668: call cr4_pv32_restore;                                 \
         .section .altinstructions, "a";                             \
+        altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0;  \
         altinstruction_entry 667b, 668b, X86_FEATURE_SMEP, 5, 5;    \
         altinstruction_entry 667b, 668b, X86_FEATURE_SMAP, 5, 5;    \
         .popsection
--- a/xen/include/asm-x86/cpufeature.h
+++ b/xen/include/asm-x86/cpufeature.h
@@ -162,6 +162,9 @@
 #define cpufeat_bit(idx)         ((idx) % 32)
 #define cpufeat_mask(idx)        (_AC(1, U) << cpufeat_bit(idx))
 
+/* An alias of a feature we know is always going to be present. */
+#define X86_FEATURE_ALWAYS      X86_FEATURE_LM
+
 #if !defined(__ASSEMBLY__) && !defined(X86_FEATURES_ONLY)
 #include
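
For reference, below is a minimal, self-contained sketch of why a
zero-length replacement keyed on an always-present feature ends up
rewriting the placeholder with better NOPs.  This is NOT Xen's actual
apply_alternatives(); the struct layout and the cpu_has()/add_nops()
helpers are simplified assumptions for illustration only.

/*
 * Sketch of alternatives patching with a zero-length replacement.
 * Offsets are stored relative to their own field, mirroring the
 * altinstruction_entry layout, but everything here is simplified.
 */
#include <stdint.h>
#include <string.h>
#include <stdbool.h>

struct alt_instr {
    int32_t  orig_offset;   /* patch site, relative to this field */
    int32_t  repl_offset;   /* replacement code, relative to this field */
    uint16_t cpuid;         /* feature bit the entry is keyed on */
    uint8_t  orig_len;      /* bytes available at the patch site */
    uint8_t  repl_len;      /* replacement bytes (0 for ALWAYS entries) */
};

/* Hypothetical feature bitmap; real code consults the boot CPU data. */
static uint32_t feature_mask;

static bool cpu_has(unsigned int feature)
{
    return feature_mask & (1u << feature);
}

/*
 * Stand-in for the real NOP filler: the actual code emits the multi-byte
 * NOP forms best suited to the CPU (K8-style 66 90..., P6-style 0F 1F...).
 * Here we simply emit "66 90" pairs, falling back to a single 0x90.
 */
static void add_nops(uint8_t *site, unsigned int len)
{
    while ( len >= 2 )
    {
        site[0] = 0x66;     /* operand-size prefix */
        site[1] = 0x90;     /* nop */
        site += 2;
        len -= 2;
    }
    if ( len )
        *site = 0x90;
}

static void apply_alternatives(struct alt_instr *start, struct alt_instr *end)
{
    for ( struct alt_instr *a = start; a < end; a++ )
    {
        uint8_t *orig = (uint8_t *)&a->orig_offset + a->orig_offset;
        const uint8_t *repl = (const uint8_t *)&a->repl_offset + a->repl_offset;

        if ( !cpu_has(a->cpuid) )
            continue;

        /* Copy the replacement (nothing at all for the ALWAYS entries)... */
        memcpy(orig, repl, a->repl_len);
        /* ...then pad the remainder of the site with optimal NOPs. */
        add_nops(orig + a->repl_len, a->orig_len - a->repl_len);
    }
}

Since X86_FEATURE_ALWAYS aliases X86_FEATURE_LM, which is always set on
64-bit Xen, the new zero-length entries always fire, so the 0x90 padding
emitted by .skip / ASM_NOP* gets rewritten with the best NOP forms for the
running CPU even when SMEP/SMAP are absent; when they are present, the
subsequent entries patch in the real code as before.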