
[PATCH 1/2] x86: macroize switches to/from .fixup section


  • To: "xen-devel@xxxxxxxxxxxxxxxxxxxx" <xen-devel@xxxxxxxxxxxxxxxxxxxx>
  • From: Jan Beulich <jbeulich@xxxxxxxx>
  • Date: Thu, 5 Jan 2023 12:11:35 +0100
  • Cc: Andrew Cooper <andrew.cooper3@xxxxxxxxxx>, Wei Liu <wl@xxxxxxx>, Roger Pau Monné <roger.pau@xxxxxxxxxx>
  • Delivery-date: Thu, 05 Jan 2023 11:11:40 +0000
  • List-id: Xen developer discussion <xen-devel.lists.xenproject.org>

This centralizes section name and attribute setting, thus simplifying
future changes to either of these.

Signed-off-by: Jan Beulich <jbeulich@xxxxxxxx>
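
[ Editorial note, not part of the patch: with the asm_defns.h definitions
  added below, a converted C-level use such as the rdmsr_amd_safe() hunk
  preprocesses to the equivalent of

      asm volatile("1: rdmsr\n2:\n"
                   " .pushsection .fixup, \"ax\", @progbits" "\n"
                   "3: movl %6,%2\n"
                   "   jmp 2b\n"
                   " .popsection" "\n"
                   _ASM_EXTABLE(1b, 3b)
                   : "=a" (*lo), "=d" (*hi), "=r" (err)
                   : "c" (msr), "D" (0x9c5a203a), "2" (0), "i" (-EFAULT));

  Sites previously using .section/.previous thereby switch to
  .pushsection/.popsection, which is equivalent for these non-nested
  uses but additionally safe to nest, and all sites gain an explicit
  @progbits attribute. ]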

--- a/xen/arch/x86/cpu/amd.c
+++ b/xen/arch/x86/cpu/amd.c
@@ -57,10 +57,10 @@ static inline int rdmsr_amd_safe(unsigne
        int err;
 
        asm volatile("1: rdmsr\n2:\n"
-                    ".section .fixup,\"ax\"\n"
+                    _ASM_FIXUP "\n"
                     "3: movl %6,%2\n"
                     "   jmp 2b\n"
-                    ".previous\n"
+                    _ASM_FIXUP_END "\n"
                     _ASM_EXTABLE(1b, 3b)
                     : "=a" (*lo), "=d" (*hi), "=r" (err)
                     : "c" (msr), "D" (0x9c5a203a), "2" (0), "i" (-EFAULT));
@@ -74,10 +74,10 @@ static inline int wrmsr_amd_safe(unsigne
        int err;
 
        asm volatile("1: wrmsr\n2:\n"
-                    ".section .fixup,\"ax\"\n"
+                    _ASM_FIXUP "\n"
                     "3: movl %6,%0\n"
                     "   jmp 2b\n"
-                    ".previous\n"
+                    _ASM_FIXUP_END "\n"
                     _ASM_EXTABLE(1b, 3b)
                     : "=r" (err)
                     : "c" (msr), "a" (lo), "d" (hi), "D" (0x9c5a203a),
--- a/xen/arch/x86/domain.c
+++ b/xen/arch/x86/domain.c
@@ -1607,11 +1607,11 @@ static void load_segments(struct vcpu *n
 #define TRY_LOAD_SEG(seg, val)                          \
     asm volatile ( "1: mov %k[_val], %%" #seg "\n\t"    \
                    "2:\n\t"                             \
-                   ".section .fixup, \"ax\"\n\t"        \
+                   _ASM_FIXUP "\n\t"                    \
                    "3: xor %k[ok], %k[ok]\n\t"          \
                    "   mov %k[ok], %%" #seg "\n\t"      \
                    "   jmp 2b\n\t"                      \
-                   ".previous\n\t"                      \
+                   _ASM_FIXUP_END "\n\t"                \
                    _ASM_EXTABLE(1b, 3b)                 \
                    : [ok] "+r" (all_segs_okay)          \
                    : [_val] "rm" (val) )
--- a/xen/arch/x86/extable.c
+++ b/xen/arch/x86/extable.c
@@ -164,11 +164,11 @@ static int __init cf_check stub_selftest
 
         asm volatile ( "INDIRECT_CALL %[stb]\n"
                        ".Lret%=:\n\t"
-                       ".pushsection .fixup,\"ax\"\n"
+                       _ASM_FIXUP "\n"
                        ".Lfix%=:\n\t"
                        "pop %[exn]\n\t"
                        "jmp .Lret%=\n\t"
-                       ".popsection\n\t"
+                       _ASM_FIXUP_END "\n\t"
                        _ASM_EXTABLE(.Lret%=, .Lfix%=)
                        : [exn] "+m" (res) ASM_CALL_CONSTRAINT
                        : [stb] "r" (addr), "a" (tests[i].rax));
--- a/xen/arch/x86/i387.c
+++ b/xen/arch/x86/i387.c
@@ -67,7 +67,7 @@ static inline void fpu_fxrstor(struct vc
         asm volatile (
             /* See below for why the operands/constraints are this way. */
             "1: " REX64_PREFIX "fxrstor (%2)\n"
-            ".section .fixup,\"ax\"   \n"
+            _ASM_FIXUP               "\n"
             "2: push %%"__OP"ax       \n"
             "   push %%"__OP"cx       \n"
             "   push %%"__OP"di       \n"
@@ -79,7 +79,7 @@ static inline void fpu_fxrstor(struct vc
             "   pop  %%"__OP"cx       \n"
             "   pop  %%"__OP"ax       \n"
             "   jmp  1b               \n"
-            ".previous                \n"
+            _ASM_FIXUP_END           "\n"
             _ASM_EXTABLE(1b, 2b)
             :
             : "m" (*fpu_ctxt), "i" (sizeof(*fpu_ctxt) / 4), "R" (fpu_ctxt) );
@@ -87,7 +87,7 @@ static inline void fpu_fxrstor(struct vc
     case 4: case 2:
         asm volatile (
             "1: fxrstor %0         \n"
-            ".section .fixup,\"ax\"\n"
+            _ASM_FIXUP            "\n"
             "2: push %%"__OP"ax    \n"
             "   push %%"__OP"cx    \n"
             "   push %%"__OP"di    \n"
@@ -99,7 +99,7 @@ static inline void fpu_fxrstor(struct vc
             "   pop  %%"__OP"cx    \n"
             "   pop  %%"__OP"ax    \n"
             "   jmp  1b            \n"
-            ".previous             \n"
+            _ASM_FIXUP_END        "\n"
             _ASM_EXTABLE(1b, 2b)
             :
             : "m" (*fpu_ctxt), "i" (sizeof(*fpu_ctxt) / 4) );
--- a/xen/arch/x86/include/asm/asm_defns.h
+++ b/xen/arch/x86/include/asm/asm_defns.h
@@ -79,6 +79,15 @@ register unsigned long current_stack_poi
 #define _ASM_EXTABLE(from, to)     _ASM__EXTABLE(, from, to)
 #define _ASM_PRE_EXTABLE(from, to) _ASM__EXTABLE(.pre, from, to)
 
+/* Exception recovery code section */
+#ifdef __ASSEMBLY__
+# define _ASM_FIXUP     .pushsection .fixup, "ax", @progbits
+# define _ASM_FIXUP_END .popsection
+#else
+# define _ASM_FIXUP     " .pushsection .fixup, \"ax\", @progbits"
+# define _ASM_FIXUP_END " .popsection"
+#endif
+
 #ifdef __ASSEMBLY__
 
 #ifdef HAVE_AS_QUOTED_SYM
--- a/xen/arch/x86/include/asm/hvm/vmx/vmx.h
+++ b/xen/arch/x86/include/asm/hvm/vmx/vmx.h
@@ -579,9 +579,9 @@ static inline int __vmxon(u64 addr)
         "1: " VMXON_OPCODE MODRM_EAX_06 "\n"
         "   setna %b0 ; neg %0\n" /* CF==1 or ZF==1 --> rc = -1 */
         "2:\n"
-        ".section .fixup,\"ax\"\n"
+        _ASM_FIXUP "\n"
         "3: sub $2,%0 ; jmp 2b\n"    /* #UD or #GP --> rc = -2 */
-        ".previous\n"
+        _ASM_FIXUP_END "\n"
         _ASM_EXTABLE(1b, 3b)
         : "=q" (rc)
         : "0" (0), "a" (&addr)
--- a/xen/arch/x86/include/asm/msr.h
+++ b/xen/arch/x86/include/asm/msr.h
@@ -44,10 +44,10 @@ static inline void wrmsrl(unsigned int m
     uint32_t lo_, hi_; \
     __asm__ __volatile__( \
         "1: rdmsr\n2:\n" \
-        ".section .fixup,\"ax\"\n" \
+        _ASM_FIXUP "\n" \
         "3: xorl %0,%0\n; xorl %1,%1\n" \
         "   movl %5,%2\n; jmp 2b\n" \
-        ".previous\n" \
+        _ASM_FIXUP_END "\n" \
         _ASM_EXTABLE(1b, 3b) \
         : "=a" (lo_), "=d" (hi_), "=&r" (rc_) \
         : "c" (msr), "2" (0), "i" (-EFAULT)); \
@@ -64,9 +64,9 @@ static inline int wrmsr_safe(unsigned in
 
     __asm__ __volatile__(
         "1: wrmsr\n2:\n"
-        ".section .fixup,\"ax\"\n"
+        _ASM_FIXUP "\n"
         "3: movl %5,%0\n; jmp 2b\n"
-        ".previous\n"
+        _ASM_FIXUP_END "\n"
         _ASM_EXTABLE(1b, 3b)
         : "=&r" (rc)
         : "c" (msr), "a" (lo), "d" (hi), "0" (0), "i" (-EFAULT));
--- a/xen/arch/x86/include/asm/uaccess.h
+++ b/xen/arch/x86/include/asm/uaccess.h
@@ -160,10 +160,10 @@ struct __large_struct { unsigned long bu
                )                                                       \
                "1:     mov"itype" %"rtype"[val], (%[ptr])\n"           \
                "2:\n"                                                  \
-               ".section .fixup,\"ax\"\n"                              \
+               "       " _ASM_FIXUP "\n"                               \
                "3:     mov %[errno], %[ret]\n"                         \
                "       jmp 2b\n"                                       \
-               ".previous\n"                                           \
+               _ASM_FIXUP_END "\n"                                     \
                _ASM_EXTABLE(1b, 3b)                                    \
                : [ret] "+r" (err), [ptr] "=&r" (dummy_)                \
                  GUARD(, [scr1] "=&r" (dummy_), [scr2] "=&r" (dummy_)) \
@@ -177,11 +177,11 @@ struct __large_struct { unsigned long bu
                )                                                       \
                "1:     mov (%[ptr]), %"rtype"[val]\n"                  \
                "2:\n"                                                  \
-               ".section .fixup,\"ax\"\n"                              \
+               "       " _ASM_FIXUP "\n"                               \
                "3:     mov %[errno], %[ret]\n"                         \
                "       xor %k[val], %k[val]\n"                         \
                "       jmp 2b\n"                                       \
-               ".previous\n"                                           \
+               _ASM_FIXUP_END "\n"                                     \
                _ASM_EXTABLE(1b, 3b)                                    \
                : [ret] "+r" (err), [val] ltype (x),                    \
                  [ptr] "=&r" (dummy_)                                  \
--- a/xen/arch/x86/pv/misc-hypercalls.c
+++ b/xen/arch/x86/pv/misc-hypercalls.c
@@ -251,11 +251,11 @@ long do_set_segment_base(unsigned int wh
          * re-read %gs and compare against the input.
          */
         asm volatile ( "1: mov %[sel], %%gs\n\t"
-                       ".section .fixup, \"ax\", @progbits\n\t"
+                       _ASM_FIXUP "\n\t"
                        "2: mov %k[flat], %%gs\n\t"
                        "   xor %[sel], %[sel]\n\t"
                        "   jmp 1b\n\t"
-                       ".previous\n\t"
+                       _ASM_FIXUP_END "\n\t"
                        _ASM_EXTABLE(1b, 2b)
                        : [sel] "+r" (sel)
                        : [flat] "r" (FLAT_USER_DS32) );
--- a/xen/arch/x86/traps.c
+++ b/xen/arch/x86/traps.c
@@ -550,9 +550,9 @@ static void show_trace(const struct cpu_
 
     /* Guarded read of the stack top. */
     asm ( "1: mov %[data], %[tos]; 2:\n"
-          ".pushsection .fixup,\"ax\"\n"
+          _ASM_FIXUP "\n"
           "3: movb $1, %[fault]; jmp 2b\n"
-          ".popsection\n"
+          _ASM_FIXUP_END "\n"
           _ASM_EXTABLE(1b, 3b)
           : [tos] "+r" (tos), [fault] "+qm" (fault) : [data] "m" (*sp) );
 
--- a/xen/arch/x86/usercopy.c
+++ b/xen/arch/x86/usercopy.c
@@ -38,12 +38,12 @@ unsigned int copy_to_guest_ll(void __use
         "    mov  %[aux],%[cnt]\n"
         "1:  rep movsb\n" /* ...remainder copied as bytes */
         "2:\n"
-        ".section .fixup,\"ax\"\n"
+        "    " _ASM_FIXUP "\n"
         "5:  add %[aux], %[cnt]\n"
         "    jmp 2b\n"
         "3:  lea (%q[aux], %q[cnt], "STR(BYTES_PER_LONG)"), %[cnt]\n"
         "    jmp 2b\n"
-        ".previous\n"
+        "    " _ASM_FIXUP_END "\n"
         _ASM_EXTABLE(4b, 5b)
         _ASM_EXTABLE(0b, 3b)
         _ASM_EXTABLE(1b, 2b)
@@ -81,7 +81,7 @@ unsigned int copy_from_guest_ll(void *to
         "    mov  %[aux], %[cnt]\n"
         "1:  rep movsb\n" /* ...remainder copied as bytes */
         "2:\n"
-        ".section .fixup,\"ax\"\n"
+        "    " _ASM_FIXUP "\n"
         "5:  add  %[aux], %[cnt]\n"
         "    jmp 6f\n"
         "3:  lea  (%q[aux], %q[cnt], "STR(BYTES_PER_LONG)"), %[cnt]\n"
@@ -92,7 +92,7 @@ unsigned int copy_from_guest_ll(void *to
         "    xchg %[aux], %%eax\n"
         "    mov  %k[from], %[cnt]\n"
         "    jmp 2b\n"
-        ".previous\n"
+        "    " _ASM_FIXUP_END "\n"
         _ASM_EXTABLE(4b, 5b)
         _ASM_EXTABLE(0b, 3b)
         _ASM_EXTABLE(1b, 6b)
@@ -149,10 +149,10 @@ unsigned int clear_guest_pv(void __user
             "    mov  %[bytes], %[cnt]\n"
             "1:  rep stosb\n"
             "2:\n"
-            ".section .fixup,\"ax\"\n"
+            "    " _ASM_FIXUP "\n"
             "3:  lea  (%q[bytes], %q[longs], "STR(BYTES_PER_LONG)"), %[cnt]\n"
             "    jmp  2b\n"
-            ".previous\n"
+            "    " _ASM_FIXUP_END "\n"
             _ASM_EXTABLE(0b,3b)
             _ASM_EXTABLE(1b,2b)
             : [cnt] "=&c" (n), [to] "+D" (to), [scratch1] "=&r" (dummy),
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -321,11 +321,11 @@ __UNLIKELY_END(compat_bounce_null_select
         mov   %al,  TRAPBOUNCE_flags(%rdx)
         ret
 
-.section .fixup,"ax"
+        _ASM_FIXUP
 .Lfx13:
         xorl  %edi,%edi
         jmp   .Lft13
-.previous
+        _ASM_FIXUP_END
         _ASM_EXTABLE(.Lft1,  dom_crash_sync_extable)
         _ASM_EXTABLE(.Lft2,  compat_crash_page_fault)
         _ASM_EXTABLE(.Lft3,  compat_crash_page_fault_4)
@@ -346,9 +346,9 @@ compat_crash_page_fault:
         movl  %esi,%edi
         call  show_page_walk
         jmp   dom_crash_sync_extable
-.section .fixup,"ax"
+        _ASM_FIXUP
 .Lfx14:
         xorl  %edi,%edi
         jmp   .Lft14
-.previous
+        _ASM_FIXUP_END
         _ASM_EXTABLE(.Lft14, .Lfx14)
--- a/xen/arch/x86/x86_64/entry.S
+++ b/xen/arch/x86/x86_64/entry.S
@@ -580,7 +580,7 @@ __UNLIKELY_END(create_bounce_frame_bad_b
         mov   %al,  TRAPBOUNCE_flags(%rdx)
         ret
 
-        .pushsection .fixup, "ax", @progbits
+        _ASM_FIXUP
         # Numeric tags below represent the intended overall %rsi adjustment.
 domain_crash_page_fault_6x8:
         addq  $8,%rsi
@@ -616,7 +616,7 @@ ENTRY(dom_crash_sync_extable)
 #endif
         xorl  %edi,%edi
         jmp   asm_domain_crash_synchronous /* Does not return */
-        .popsection
+        _ASM_FIXUP_END
 #endif /* CONFIG_PV */
 
 /* --- CODE BELOW THIS LINE (MOSTLY) NOT GUEST RELATED --- */
--- a/xen/arch/x86/x86_emulate/x86_emulate.c
+++ b/xen/arch/x86/x86_emulate/x86_emulate.c
@@ -1260,11 +1260,11 @@ static inline int mkec(uint8_t e, int32_
     block_speculation(); /* SCSB */                                     \
     asm volatile ( pre "\n\tINDIRECT_CALL %[stub]\n\t" post "\n"        \
                    ".Lret%=:\n\t"                                       \
-                   ".pushsection .fixup,\"ax\"\n"                       \
+                   _ASM_FIXUP "\n"                                      \
                    ".Lfix%=:\n\t"                                       \
                    "pop %[exn]\n\t"                                     \
                    "jmp .Lret%=\n\t"                                    \
-                   ".popsection\n\t"                                    \
+                   _ASM_FIXUP_END "\n\t"                                \
                    _ASM_EXTABLE(.Lret%=, .Lfix%=)                       \
                    : [exn] "+g" (stub_exn.info) ASM_CALL_CONSTRAINT,    \
                      constraints,                                       \
--- a/xen/arch/x86/xstate.c
+++ b/xen/arch/x86/xstate.c
@@ -45,10 +45,10 @@ static inline bool xsetbv(u32 index, u64
 
     asm volatile ( "1: .byte 0x0f,0x01,0xd1\n"
                    "3:                     \n"
-                   ".section .fixup,\"ax\" \n"
+                   _ASM_FIXUP             "\n"
                    "2: xor %0,%0           \n"
                    "   jmp 3b              \n"
-                   ".previous              \n"
+                   _ASM_FIXUP_END         "\n"
                    _ASM_EXTABLE(1b, 2b)
                    : "+a" (lo)
                    : "c" (index), "d" (hi));
@@ -403,10 +403,10 @@ void xrstor(struct vcpu *v, uint64_t mas
 #define _xrstor(insn) \
         asm volatile ( "1: .byte " insn "\n" \
                        "3:\n" \
-                       "   .section .fixup,\"ax\"\n" \
+                       "   " _ASM_FIXUP "\n" \
                        "2: incl %[faults]\n" \
                        "   jmp 3b\n" \
-                       "   .previous\n" \
+                       "   " _ASM_FIXUP_END "\n" \
                        _ASM_EXTABLE(1b, 2b) \
                        : [mem] "+m" (*ptr), [faults] "+g" (faults) \
                        : [lmask] "a" (lmask), [hmask] "d" (hmask), \
