|
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index] [Xen-devel] [PATCH v2] x86/asm: Remove opencoded uses of altinstruction_entry
With future changes, altinstruction_entry is going to become more complicated
to use. Furthermore, there are already ALTERNATIVE* macros which can be used
to avoid opencoding the creation of replacement information.
For ASM_STAC, ASM_CLAC and CR4_PV32_RESTORE, this means the removal of all
hardcoded label numbers. For the cr4_pv32 alternatives, this means hardcoding
the extra space required in the original patch site, but the hardcoding will
be removed by a later patch.
No change to any functionality, but the handling of nops inside the original
patch sites is a bit different.
Signed-off-by: Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
Reviewed-by: Wei Liu <wei.liu2@xxxxxxxxxx>
Reviewed-by: Roger Pau Monné <roger.pau@xxxxxxxxxx>
Reviewed-by: Jan Beulich <jbeulich@xxxxxxxx>
---
xen/arch/x86/x86_64/compat/entry.S | 26 +++++++++-----------------
xen/arch/x86/x86_64/entry.S | 20 +++-----------------
xen/include/asm-x86/asm_defns.h | 32 +++++++++++---------------------
3 files changed, 23 insertions(+), 55 deletions(-)
diff --git a/xen/arch/x86/x86_64/compat/entry.S
b/xen/arch/x86/x86_64/compat/entry.S
index 458d810..8aba269 100644
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -111,13 +111,10 @@ ENTRY(compat_restore_all_guest)
ASSERT_INTERRUPTS_DISABLED
mov $~(X86_EFLAGS_IOPL|X86_EFLAGS_NT|X86_EFLAGS_VM),%r11d
and UREGS_eflags(%rsp),%r11d
-.Lcr4_orig:
- .skip .Lcr4_alt_end - .Lcr4_alt, 0x90
-.Lcr4_orig_end:
- .pushsection .altinstr_replacement, "ax"
-.Lcr4_alt:
+
+.macro alt_cr4_pv32
testb $3,UREGS_cs(%rsp)
- jpe .Lcr4_alt_end
+ jpe 2f
mov CPUINFO_cr4-CPUINFO_guest_cpu_user_regs(%rsp), %rax
and $~XEN_CR4_PV32_BITS, %rax
1:
@@ -135,17 +132,12 @@ ENTRY(compat_restore_all_guest)
*/
cmp %rax, CPUINFO_cr4-CPUINFO_guest_cpu_user_regs(%rsp)
jne 1b
-.Lcr4_alt_end:
- .section .altinstructions, "a"
- altinstruction_entry .Lcr4_orig, .Lcr4_orig, X86_FEATURE_ALWAYS, \
- (.Lcr4_orig_end - .Lcr4_orig), 0
- altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_XEN_SMEP, \
- (.Lcr4_orig_end - .Lcr4_orig), \
- (.Lcr4_alt_end - .Lcr4_alt)
- altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_XEN_SMAP, \
- (.Lcr4_orig_end - .Lcr4_orig), \
- (.Lcr4_alt_end - .Lcr4_alt)
- .popsection
+2:
+.endm
+ ALTERNATIVE_2 ".skip 45, 0x90", \
+ alt_cr4_pv32, X86_FEATURE_XEN_SMEP, \
+ alt_cr4_pv32, X86_FEATURE_XEN_SMAP
+
or $X86_EFLAGS_IF,%r11
mov %r11d,UREGS_eflags(%rsp)
diff --git a/xen/arch/x86/x86_64/entry.S b/xen/arch/x86/x86_64/entry.S
index 941f06f..e939f20 100644
--- a/xen/arch/x86/x86_64/entry.S
+++ b/xen/arch/x86/x86_64/entry.S
@@ -564,23 +564,9 @@ handle_exception_saved:
testb $X86_EFLAGS_IF>>8,UREGS_eflags+1(%rsp)
jz exception_with_ints_disabled
-.Lcr4_pv32_orig:
- jmp .Lcr4_pv32_done
- .skip (.Lcr4_pv32_alt_end - .Lcr4_pv32_alt) - (. - .Lcr4_pv32_orig),
0xcc
- .pushsection .altinstr_replacement, "ax"
-.Lcr4_pv32_alt:
- mov VCPU_domain(%rbx),%rax
-.Lcr4_pv32_alt_end:
- .section .altinstructions, "a"
- altinstruction_entry .Lcr4_pv32_orig, .Lcr4_pv32_alt, \
- X86_FEATURE_XEN_SMEP, \
- (.Lcr4_pv32_alt_end - .Lcr4_pv32_alt), \
- (.Lcr4_pv32_alt_end - .Lcr4_pv32_alt)
- altinstruction_entry .Lcr4_pv32_orig, .Lcr4_pv32_alt, \
- X86_FEATURE_XEN_SMAP, \
- (.Lcr4_pv32_alt_end - .Lcr4_pv32_alt), \
- (.Lcr4_pv32_alt_end - .Lcr4_pv32_alt)
- .popsection
+ ALTERNATIVE_2 "jmp .Lcr4_pv32_done; .skip 2, 0x90", \
+ __stringify(mov VCPU_domain(%rbx), %rax), X86_FEATURE_XEN_SMEP, \
+ __stringify(mov VCPU_domain(%rbx), %rax), X86_FEATURE_XEN_SMAP
testb $3,UREGS_cs(%rsp)
jz .Lcr4_pv32_done
diff --git a/xen/include/asm-x86/asm_defns.h b/xen/include/asm-x86/asm_defns.h
index ebd2c88..a484265 100644
--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -195,18 +195,13 @@ void ret_from_intr(void);
#define __ASM_STAC .byte 0x0f,0x01,0xcb
#ifdef __ASSEMBLY__
-#define ASM_AC(op) \
- 661: ASM_NOP3; \
- .pushsection .altinstr_replacement, "ax"; \
- 662: __ASM_##op; \
- .popsection; \
- .pushsection .altinstructions, "a"; \
- altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0; \
- altinstruction_entry 661b, 662b, X86_FEATURE_XEN_SMAP, 3, 3; \
- .popsection
-
-#define ASM_STAC ASM_AC(STAC)
-#define ASM_CLAC ASM_AC(CLAC)
+#define ASM_STAC \
+ ALTERNATIVE __stringify(ASM_NOP3), \
+ __stringify(__ASM_STAC), X86_FEATURE_XEN_SMAP
+
+#define ASM_CLAC \
+ ALTERNATIVE __stringify(ASM_NOP3), \
+ __stringify(__ASM_CLAC), X86_FEATURE_XEN_SMAP
.macro write_cr3 val:req, tmp1:req, tmp2:req
mov %cr4, %\tmp1
@@ -217,15 +212,10 @@ void ret_from_intr(void);
mov %\tmp2, %cr4
.endm
-#define CR4_PV32_RESTORE \
- 667: ASM_NOP5; \
- .pushsection .altinstr_replacement, "ax"; \
- 668: call cr4_pv32_restore; \
- .section .altinstructions, "a"; \
- altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0; \
- altinstruction_entry 667b, 668b, X86_FEATURE_XEN_SMEP, 5, 5; \
- altinstruction_entry 667b, 668b, X86_FEATURE_XEN_SMAP, 5, 5; \
- .popsection
+#define CR4_PV32_RESTORE \
+ ALTERNATIVE_2 __stringify(ASM_NOP5), \
+ "call cr4_pv32_restore", X86_FEATURE_XEN_SMEP, \
+ "call cr4_pv32_restore", X86_FEATURE_XEN_SMAP
#else
static always_inline void clac(void)
--
2.1.4
_______________________________________________
Xen-devel mailing list
Xen-devel@xxxxxxxxxxxxxxxxxxxx
https://lists.xenproject.org/mailman/listinfo/xen-devel
|
![]() |
Lists.xenproject.org is hosted with RackSpace, monitoring our |