[xen staging] x86/entry: Introduce EFRAME_* constants
commit 37541208f119a9c552c6c6c3246ea61be0d44035
Author:     Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
AuthorDate: Sat Jan 27 17:52:09 2024 +0000
Commit:     Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
CommitDate: Mon Feb 26 18:20:27 2024 +0000

    x86/entry: Introduce EFRAME_* constants

    restore_all_guest() does a lot of manipulation of the stack after popping
    the GPRs, and uses raw %rsp displacements to do so.  Also, almost all
    entrypaths use raw %rsp displacements prior to pushing GPRs.

    Provide better mnemonics, to aid readability and reduce the chance of
    errors when editing.

    No functional change.  The resulting binary is identical.

    Signed-off-by: Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
    Reviewed-by: Jan Beulich <jbeulich@xxxxxxxx>
---
 xen/arch/x86/x86_64/asm-offsets.c  | 17 ++++++++++
 xen/arch/x86/x86_64/compat/entry.S |  2 +-
 xen/arch/x86/x86_64/entry.S        | 68 +++++++++++++++++++-------------------
 3 files changed, 52 insertions(+), 35 deletions(-)

diff --git a/xen/arch/x86/x86_64/asm-offsets.c b/xen/arch/x86/x86_64/asm-offsets.c
index fee0edc61a..02242a9b73 100644
--- a/xen/arch/x86/x86_64/asm-offsets.c
+++ b/xen/arch/x86/x86_64/asm-offsets.c
@@ -51,6 +51,23 @@ void __dummy__(void)
     OFFSET(UREGS_kernel_sizeof, struct cpu_user_regs, es);
     BLANK();
 
+    /*
+     * EFRAME_* is for the entry/exit logic where %rsp is pointing at
+     * UREGS_error_code and GPRs are still/already guest values.
+     */
+#define OFFSET_EF(sym, mem)                                             \
+    DEFINE(sym, offsetof(struct cpu_user_regs, mem) -                   \
+                offsetof(struct cpu_user_regs, error_code))
+
+    OFFSET_EF(EFRAME_entry_vector,    entry_vector);
+    OFFSET_EF(EFRAME_rip,             rip);
+    OFFSET_EF(EFRAME_cs,              cs);
+    OFFSET_EF(EFRAME_eflags,          eflags);
+    OFFSET_EF(EFRAME_rsp,             rsp);
+    BLANK();
+
+#undef OFFSET_EF
+
     OFFSET(VCPU_processor, struct vcpu, processor);
     OFFSET(VCPU_domain, struct vcpu, domain);
     OFFSET(VCPU_vcpu_info, struct vcpu, vcpu_info_area.map);
diff --git a/xen/arch/x86/x86_64/compat/entry.S b/xen/arch/x86/x86_64/compat/entry.S
index 727ab65290..2f8fe5ebfb 100644
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -15,7 +15,7 @@ FUNC(entry_int82)
         ENDBR64
         ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
         pushq $0
-        movb  $HYPERCALL_VECTOR, 4(%rsp)
+        movb  $HYPERCALL_VECTOR, EFRAME_entry_vector(%rsp)
         SAVE_ALL compat=1 /* DPL1 gate, restricted to 32bit PV guests only. */
 
         SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
diff --git a/xen/arch/x86/x86_64/entry.S b/xen/arch/x86/x86_64/entry.S
index f8938b0b42..1b846f3aaf 100644
--- a/xen/arch/x86/x86_64/entry.S
+++ b/xen/arch/x86/x86_64/entry.S
@@ -188,15 +188,15 @@ FUNC_LOCAL(restore_all_guest)
         RESTORE_ALL
 
         BUILD_BUG_ON(TRAP_syscall & 0xff)
-        testb $TRAP_syscall >> 8, 4+1(%rsp)
+        testb $TRAP_syscall >> 8, EFRAME_entry_vector + 1(%rsp)
         jz    iret_exit_to_guest
 
-        movq  24(%rsp),%r11           # RFLAGS
+        mov   EFRAME_eflags(%rsp), %r11
         andq  $~(X86_EFLAGS_IOPL | X86_EFLAGS_VM), %r11
         orq   $X86_EFLAGS_IF,%r11
 
         /* Don't use SYSRET path if the return address is not canonical. */
-        movq  8(%rsp),%rcx
+        mov   EFRAME_rip(%rsp), %rcx
         sarq  $47,%rcx
         incl  %ecx
         cmpl  $1,%ecx
@@ -211,19 +211,19 @@ FUNC_LOCAL(restore_all_guest)
         ALTERNATIVE "", rag_clrssbsy, X86_FEATURE_XEN_SHSTK
 #endif
 
-        movq  8(%rsp), %rcx           # RIP
-        cmpw  $FLAT_USER_CS32,16(%rsp)# CS
-        movq  32(%rsp),%rsp           # RSP
+        mov   EFRAME_rip(%rsp), %rcx
+        cmpw  $FLAT_USER_CS32, EFRAME_cs(%rsp)
+        mov   EFRAME_rsp(%rsp), %rsp
         je    1f
         sysretq
 1:      sysretl
 
 LABEL_LOCAL(.Lrestore_rcx_iret_exit_to_guest)
-        movq  8(%rsp), %rcx           # RIP
+        mov   EFRAME_rip(%rsp), %rcx
 /* No special register assumptions. */
 iret_exit_to_guest:
-        andl  $~(X86_EFLAGS_IOPL | X86_EFLAGS_VM), 24(%rsp)
-        orl   $X86_EFLAGS_IF,24(%rsp)
+        andl  $~(X86_EFLAGS_IOPL | X86_EFLAGS_VM), EFRAME_eflags(%rsp)
+        orl   $X86_EFLAGS_IF, EFRAME_eflags(%rsp)
         addq  $8,%rsp
 .Lft0:  iretq
         _ASM_PRE_EXTABLE(.Lft0, handle_exception)
@@ -256,7 +256,7 @@ FUNC(lstar_enter)
         pushq %rcx
         pushq $0
         BUILD_BUG_ON(TRAP_syscall & 0xff)
-        movb  $TRAP_syscall >> 8, 4+1(%rsp)
+        movb  $TRAP_syscall >> 8, EFRAME_entry_vector + 1(%rsp)
         SAVE_ALL
 
         SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
@@ -295,7 +295,7 @@ FUNC(cstar_enter)
         pushq %rcx
         pushq $0
         BUILD_BUG_ON(TRAP_syscall & 0xff)
-        movb  $TRAP_syscall >> 8, 4+1(%rsp)
+        movb  $TRAP_syscall >> 8, EFRAME_entry_vector + 1(%rsp)
         SAVE_ALL
 
         SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
@@ -338,7 +338,7 @@ LABEL(sysenter_eflags_saved, 0)
         pushq $0 /* null rip */
         pushq $0
         BUILD_BUG_ON(TRAP_syscall & 0xff)
-        movb  $TRAP_syscall >> 8, 4+1(%rsp)
+        movb  $TRAP_syscall >> 8, EFRAME_entry_vector + 1(%rsp)
         SAVE_ALL
 
         SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
@@ -393,7 +393,7 @@ FUNC(entry_int80)
         ENDBR64
         ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
         pushq $0
-        movb  $0x80, 4(%rsp)
+        movb  $0x80, EFRAME_entry_vector(%rsp)
         SAVE_ALL
 
         SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
@@ -658,7 +658,7 @@ END(ret_from_intr)
         .section .init.text, "ax", @progbits
 FUNC(early_page_fault)
         ENDBR64
-        movb  $X86_EXC_PF, 4(%rsp)
+        movb  $X86_EXC_PF, EFRAME_entry_vector(%rsp)
         SAVE_ALL
         movq  %rsp, %rdi
         call  do_early_page_fault
@@ -727,7 +727,7 @@ END(common_interrupt)
 
 FUNC(entry_PF)
         ENDBR64
-        movb  $X86_EXC_PF, 4(%rsp)
+        movb  $X86_EXC_PF, EFRAME_entry_vector(%rsp)
 END(entry_PF)
 /* No special register assumptions. */
 FUNC(handle_exception, 0)
@@ -911,98 +911,98 @@ END(entry_DE)
 FUNC(entry_MF)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_MF, 4(%rsp)
+        movb  $X86_EXC_MF, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_MF)
 
 FUNC(entry_XM)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_XM, 4(%rsp)
+        movb  $X86_EXC_XM, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_XM)
 
 FUNC(entry_NM)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_NM, 4(%rsp)
+        movb  $X86_EXC_NM, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_NM)
 
 FUNC(entry_DB)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_DB, 4(%rsp)
+        movb  $X86_EXC_DB, EFRAME_entry_vector(%rsp)
         jmp   handle_ist_exception
 END(entry_DB)
 
 FUNC(entry_BP)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_BP, 4(%rsp)
+        movb  $X86_EXC_BP, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_BP)
 
 FUNC(entry_OF)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_OF, 4(%rsp)
+        movb  $X86_EXC_OF, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_OF)
 
 FUNC(entry_BR)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_BR, 4(%rsp)
+        movb  $X86_EXC_BR, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_BR)
 
 FUNC(entry_UD)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_UD, 4(%rsp)
+        movb  $X86_EXC_UD, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_UD)
 
 FUNC(entry_TS)
         ENDBR64
-        movb  $X86_EXC_TS, 4(%rsp)
+        movb  $X86_EXC_TS, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_TS)
 
 FUNC(entry_NP)
         ENDBR64
-        movb  $X86_EXC_NP, 4(%rsp)
+        movb  $X86_EXC_NP, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_NP)
 
 FUNC(entry_SS)
         ENDBR64
-        movb  $X86_EXC_SS, 4(%rsp)
+        movb  $X86_EXC_SS, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_SS)
 
 FUNC(entry_GP)
         ENDBR64
-        movb  $X86_EXC_GP, 4(%rsp)
+        movb  $X86_EXC_GP, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_GP)
 
 FUNC(entry_AC)
         ENDBR64
-        movb  $X86_EXC_AC, 4(%rsp)
+        movb  $X86_EXC_AC, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_AC)
 
 FUNC(entry_CP)
         ENDBR64
-        movb  $X86_EXC_CP, 4(%rsp)
+        movb  $X86_EXC_CP, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 END(entry_CP)
 
 FUNC(entry_DF)
         ENDBR64
-        movb  $X86_EXC_DF, 4(%rsp)
+        movb  $X86_EXC_DF, EFRAME_entry_vector(%rsp)
         /* Set AC to reduce chance of further SMAP faults */
         ALTERNATIVE "", stac, X86_FEATURE_XEN_SMAP
         SAVE_ALL
@@ -1028,7 +1028,7 @@ END(entry_DF)
 FUNC(entry_NMI)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_NMI, 4(%rsp)
+        movb  $X86_EXC_NMI, EFRAME_entry_vector(%rsp)
 END(entry_NMI)
 
 FUNC(handle_ist_exception)
@@ -1164,7 +1164,7 @@ END(handle_ist_exception)
 FUNC(entry_MC)
         ENDBR64
         pushq $0
-        movb  $X86_EXC_MC, 4(%rsp)
+        movb  $X86_EXC_MC, EFRAME_entry_vector(%rsp)
         jmp   handle_ist_exception
 END(entry_MC)
 
@@ -1203,7 +1203,7 @@ FUNC_LOCAL(autogen_stubs, 0) /* Automatically generated stubs. */
 1:
         ENDBR64
         pushq $0
-        movb  $vec,4(%rsp)
+        movb  $vec, EFRAME_entry_vector(%rsp)
         jmp   common_interrupt
 
         entrypoint 1b
@@ -1217,7 +1217,7 @@ FUNC_LOCAL(autogen_stubs, 0) /* Automatically generated stubs. */
         test  $8,%spl        /* 64bit exception frames are 16 byte aligned, but the word */
         jz    2f             /* size is 8 bytes.  Check whether the processor gave us an */
         pushq $0             /* error code, and insert an empty one if not. */
-2:      movb  $vec,4(%rsp)
+2:      movb  $vec, EFRAME_entry_vector(%rsp)
         jmp   handle_exception
 
         entrypoint 1b
--
generated by git-patchbot for /home/xen/git/xen.git#staging
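
[Editor's note] The arithmetic behind the new OFFSET_EF() helper is plain
offsetof() taken relative to error_code, so each EFRAME_* constant comes out
equal to the raw %rsp displacement it replaces (4, 8, 16, 24 and 32 in the old
code).  The stand-alone C sketch below reproduces that calculation; it uses a
simplified, hypothetical fake_user_regs layout as a stand-in for Xen's real
struct cpu_user_regs, so the field layout shown is an illustrative assumption,
not Xen code.

/*
 * Minimal sketch of the EFRAME_* arithmetic.  fake_user_regs is a
 * hypothetical stand-in for the exception-frame tail of cpu_user_regs.
 */
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

struct fake_user_regs {
    /* ... GPRs would precede these fields in the real structure ... */
    uint32_t error_code;    /* %rsp points here on entry/exit */
    uint32_t entry_vector;  /* +4  -> EFRAME_entry_vector     */
    uint64_t rip;           /* +8  -> EFRAME_rip              */
    uint16_t cs;            /* +16 -> EFRAME_cs               */
    uint16_t _pad[3];
    uint64_t eflags;        /* +24 -> EFRAME_eflags           */
    uint64_t rsp;           /* +32 -> EFRAME_rsp              */
};

/* Same calculation as OFFSET_EF() in asm-offsets.c: offset minus the
 * offset of error_code, i.e. the displacement from the entry-time %rsp. */
#define EF_OFFSET(mem) \
    (offsetof(struct fake_user_regs, mem) - \
     offsetof(struct fake_user_regs, error_code))

int main(void)
{
    printf("EFRAME_entry_vector = %zu\n", EF_OFFSET(entry_vector)); /* 4  */
    printf("EFRAME_rip          = %zu\n", EF_OFFSET(rip));          /* 8  */
    printf("EFRAME_cs           = %zu\n", EF_OFFSET(cs));           /* 16 */
    printf("EFRAME_eflags       = %zu\n", EF_OFFSET(eflags));       /* 24 */
    printf("EFRAME_rsp          = %zu\n", EF_OFFSET(rsp));          /* 32 */
    return 0;
}

Built with any C99 compiler, this prints the five offsets, matching the raw
displacements that the patch replaces with mnemonic names.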