[Xen-changelog] [xen-unstable] x86: use MOV instead of PUSH/POP when saving/restoring register state
# HG changeset patch
# User Jan Beulich <jbeulich@xxxxxxxx>
# Date 1351611883 -3600
# Node ID 3daa252ed1321668a40749f1d47b1e9cf8c5a479
# Parent  8a7f0f36462dbdf2ea6414cdbcf168b4d631c309
x86: use MOV instead of PUSH/POP when saving/restoring register state

Signed-off-by: Jan Beulich <jbeulich@xxxxxxxx>
Acked-by: Keir Fraser <keir@xxxxxxx>
---

diff -r 8a7f0f36462d -r 3daa252ed132 xen/arch/x86/x86_64/compat/entry.S
--- a/xen/arch/x86/x86_64/compat/entry.S	Tue Oct 30 14:53:47 2012 +0000
+++ b/xen/arch/x86/x86_64/compat/entry.S	Tue Oct 30 16:44:43 2012 +0100
@@ -21,8 +21,7 @@ ENTRY(compat_hypercall)
         UNLIKELY_START(ne, msi_check)
         movl  $HYPERCALL_VECTOR,%edi
         call  check_for_unexpected_msi
-        RESTORE_ALL
-        SAVE_ALL
+        LOAD_C_CLOBBERED
         UNLIKELY_END(msi_check)
 
         GET_CURRENT(%rbx)
@@ -173,8 +172,7 @@ compat_bad_hypercall:
 /* %rbx: struct vcpu, interrupts disabled */
 compat_restore_all_guest:
         ASSERT_INTERRUPTS_DISABLED
-        RESTORE_ALL
-        addq  $8,%rsp
+        RESTORE_ALL adj=8
 .Lft0:  iretq
 
 .section .fixup,"ax"
diff -r 8a7f0f36462d -r 3daa252ed132 xen/arch/x86/x86_64/entry.S
--- a/xen/arch/x86/x86_64/entry.S	Tue Oct 30 14:53:47 2012 +0000
+++ b/xen/arch/x86/x86_64/entry.S	Tue Oct 30 16:44:43 2012 +0100
@@ -47,12 +47,10 @@ restore_all_guest:
         cmpl  $1,%ecx
         ja    .Lforce_iret
 
-        addq  $8,%rsp
-        popq  %rcx                    # RIP
-        popq  %r11                    # CS
-        cmpw  $FLAT_USER_CS32,%r11
-        popq  %r11                    # RFLAGS
-        popq  %rsp                    # RSP
+        cmpw  $FLAT_USER_CS32,16(%rsp)# CS
+        movq  8(%rsp),%rcx            # RIP
+        movq  24(%rsp),%r11           # RFLAGS
+        movq  32(%rsp),%rsp           # RSP
         je    1f
         sysretq
 1:      sysretl
@@ -101,8 +99,7 @@ 1:      call  create_bounce_frame
         ALIGN
 /* No special register assumptions. */
 restore_all_xen:
-        RESTORE_ALL
-        addq  $8,%rsp
+        RESTORE_ALL adj=8
         iretq
 
 /*
@@ -311,8 +308,7 @@ ENTRY(int80_direct_trap)
         UNLIKELY_START(ne, msi_check)
         movl  $0x80,%edi
         call  check_for_unexpected_msi
-        RESTORE_ALL
-        SAVE_ALL
+        LOAD_C_CLOBBERED
         UNLIKELY_END(msi_check)
 
         GET_CURRENT(%rbx)
diff -r 8a7f0f36462d -r 3daa252ed132 xen/include/asm-x86/x86_64/asm_defns.h
--- a/xen/include/asm-x86/x86_64/asm_defns.h	Tue Oct 30 14:53:47 2012 +0000
+++ b/xen/include/asm-x86/x86_64/asm_defns.h	Tue Oct 30 16:44:43 2012 +0100
@@ -5,11 +5,11 @@
 
 #ifdef CONFIG_FRAME_POINTER
 /* Indicate special exception stack frame by inverting the frame pointer. */
-#define SETUP_EXCEPTION_FRAME_POINTER           \
-        movq  %rsp,%rbp;                        \
+#define SETUP_EXCEPTION_FRAME_POINTER(offs)     \
+        leaq  offs(%rsp),%rbp;                  \
         notq  %rbp
 #else
-#define SETUP_EXCEPTION_FRAME_POINTER
+#define SETUP_EXCEPTION_FRAME_POINTER(offs)
 #endif
 
 #ifndef NDEBUG
@@ -27,40 +27,49 @@ 1:      addq  $8,%rsp;
 #define ASSERT_INTERRUPTS_DISABLED ASSERT_INTERRUPT_STATUS(z)
 
 #define SAVE_ALL                                \
+        addq  $-(UREGS_error_code-UREGS_r15), %rsp; \
         cld;                                    \
-        pushq %rdi;                             \
-        pushq %rsi;                             \
-        pushq %rdx;                             \
-        pushq %rcx;                             \
-        pushq %rax;                             \
-        pushq %r8;                              \
-        pushq %r9;                              \
-        pushq %r10;                             \
-        pushq %r11;                             \
-        pushq %rbx;                             \
-        pushq %rbp;                             \
-        SETUP_EXCEPTION_FRAME_POINTER;          \
-        pushq %r12;                             \
-        pushq %r13;                             \
-        pushq %r14;                             \
-        pushq %r15;
+        movq  %rdi,UREGS_rdi(%rsp);             \
+        movq  %rsi,UREGS_rsi(%rsp);             \
+        movq  %rdx,UREGS_rdx(%rsp);             \
+        movq  %rcx,UREGS_rcx(%rsp);             \
+        movq  %rax,UREGS_rax(%rsp);             \
+        movq  %r8,UREGS_r8(%rsp);               \
+        movq  %r9,UREGS_r9(%rsp);               \
+        movq  %r10,UREGS_r10(%rsp);             \
+        movq  %r11,UREGS_r11(%rsp);             \
+        movq  %rbx,UREGS_rbx(%rsp);             \
+        movq  %rbp,UREGS_rbp(%rsp);             \
+        SETUP_EXCEPTION_FRAME_POINTER(UREGS_rbp); \
+        movq  %r12,UREGS_r12(%rsp);             \
+        movq  %r13,UREGS_r13(%rsp);             \
+        movq  %r14,UREGS_r14(%rsp);             \
+        movq  %r15,UREGS_r15(%rsp);             \
 
-#define RESTORE_ALL                             \
-        popq  %r15;                             \
-        popq  %r14;                             \
-        popq  %r13;                             \
-        popq  %r12;                             \
-        popq  %rbp;                             \
-        popq  %rbx;                             \
-        popq  %r11;                             \
-        popq  %r10;                             \
-        popq  %r9;                              \
-        popq  %r8;                              \
-        popq  %rax;                             \
-        popq  %rcx;                             \
-        popq  %rdx;                             \
-        popq  %rsi;                             \
-        popq  %rdi;
+#ifdef __ASSEMBLY__
+.macro LOAD_C_CLOBBERED
+        movq  UREGS_r11(%rsp),%r11
+        movq  UREGS_r10(%rsp),%r10
+        movq  UREGS_r9(%rsp),%r9
+        movq  UREGS_r8(%rsp),%r8
+        movq  UREGS_rax(%rsp),%rax
+        movq  UREGS_rcx(%rsp),%rcx
+        movq  UREGS_rdx(%rsp),%rdx
+        movq  UREGS_rsi(%rsp),%rsi
+        movq  UREGS_rdi(%rsp),%rdi
+.endm
+
+.macro RESTORE_ALL adj=0
+        movq  UREGS_r15(%rsp),%r15
+        movq  UREGS_r14(%rsp),%r14
+        movq  UREGS_r13(%rsp),%r13
+        movq  UREGS_r12(%rsp),%r12
+        movq  UREGS_rbp(%rsp),%rbp
+        movq  UREGS_rbx(%rsp),%rbx
+        LOAD_C_CLOBBERED
+        subq  $-(UREGS_error_code-UREGS_r15+\adj), %rsp
+.endm
+#endif
 
 #ifdef PERF_COUNTERS
 #define PERFC_INCR(_name,_idx,_cur)             \
@@ -94,7 +103,7 @@ 1:      addq  $8,%rsp;
 __asm__(                                        \
     "\n" __ALIGN_STR"\n"                        \
     "common_interrupt:\n\t"                     \
-    STR(SAVE_ALL)                               \
+    STR(SAVE_ALL) "\n\t"                        \
     "movq %rsp,%rdi\n\t"                        \
     "callq " STR(do_IRQ) "\n\t"                 \
     "jmp ret_from_intr\n");
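The pattern the patch switches to, shown in isolation as a minimal sketch (a hypothetical two-register frame with made-up macro names and a 16-byte layout, not the Xen UREGS_* offsets): the stack pointer is adjusted once and each register is moved to or from a fixed slot, instead of one implicit %rsp update per PUSH/POP. Fixed slots are also what let the new LOAD_C_CLOBBERED macro reload only the C-clobbered registers after the check_for_unexpected_msi calls, and let RESTORE_ALL's adj= argument fold the old trailing addq $8,%rsp into the single final stack adjustment.

        # Before: one implicit stack-pointer update per register.
        .macro SAVE_TWO_PUSH
        pushq %rdi                      # %rsp -= 8, then store %rdi
        pushq %rsi                      # %rsp -= 8, then store %rsi
        .endm
        .macro RESTORE_TWO_POP
        popq  %rsi                      # load %rsi, then %rsp += 8
        popq  %rdi                      # load %rdi, then %rsp += 8
        .endm

        # After: one explicit adjustment, MOVs to/from fixed offsets.
        .macro SAVE_TWO_MOV
        subq  $16,%rsp                  # reserve the whole frame up front
        movq  %rdi,8(%rsp)              # each slot has a fixed offset,
        movq  %rsi,0(%rsp)              # independent of save order
        .endm

        # 'adj' mirrors the patch's RESTORE_ALL adj=8: extra bytes to drop.
        .macro RESTORE_TWO_MOV adj=0
        movq  0(%rsp),%rsi
        movq  8(%rsp),%rdi
        addq  $(16+\adj),%rsp           # single final %rsp adjustment
        .endm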