
[Xen-changelog] [xen-unstable] x86: Clean up some files of inline asm, and fix __copy_{to,from}_user_ll() for gcc 3.4



# HG changeset patch
# User Keir Fraser <keir.fraser@xxxxxxxxxx>
# Date 1195235974 0
# Node ID 2052364cb456170a70ad5c8bfb876c95f7a9fe4a
# Parent  5a72a99be9115c793a2a24eb98ee6226aaa7c33b
x86: Clean up some files of inline asm, and fix
__copy_{to,from}_user_ll() for gcc 3.4 (asm output constraints all
need to be '=&').
Based on a patch by Jan Beulich.
Signed-off-by: Keir Fraser <keir.fraser@xxxxxxxxxx>
---
 xen/arch/x86/string.c   |   16 +++-
 xen/arch/x86/usercopy.c |  174 +++++++++++++++++++++++++-----------------------
 2 files changed, 107 insertions(+), 83 deletions(-)
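
For background on the constraint fix: in GNU C inline assembly, '&' marks
an output operand as "earlyclobber", meaning it is written before the asm
has finished reading its inputs, so the compiler must not place it in the
same register as any input. Without it, gcc 3.4 may share a register
between a scratch output and an input, silently destroying the input. A
minimal, hypothetical x86 sketch of that failure mode (illustration only,
not part of the patch):

    #include <stdio.h>

    /* Hypothetical helper: returns x, computed via a scratch register.
     * With plain "=r", gcc may give 'scratch' the same register as the
     * input 'x'; the xor then wipes x before the add reads it.  The
     * earlyclobber "=&r" forces a distinct register. */
    static inline int add_via_scratch(int x)
    {
        int scratch, out;

        asm ("xor  %1,%1\n\t"   /* write the scratch early...   */
             "add  %2,%1\n\t"   /* ...then read the input       */
             "mov  %1,%0"
             : "=r" (out), "=&r" (scratch)
             : "r" (x));

        return out;
    }

    int main(void)
    {
        printf("%d\n", add_via_scratch(42));   /* prints 42 */
        return 0;
    }

The __copy_{to,from}_user_ll() asm below writes its outputs before the
inputs are fully consumed, which is why every output constraint in the
patch gains the '&' modifier.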

diff -r 5a72a99be911 -r 2052364cb456 xen/arch/x86/string.c
--- a/xen/arch/x86/string.c     Fri Nov 16 17:09:53 2007 +0000
+++ b/xen/arch/x86/string.c     Fri Nov 16 17:59:34 2007 +0000
@@ -13,7 +13,7 @@ void *memcpy(void *dest, const void *src
 {
     long d0, d1, d2;
 
-    __asm__ __volatile__ (
+    asm volatile (
 #ifdef __i386__
         "   rep movsl        ; "
 #else
@@ -42,7 +42,7 @@ void *memset(void *s, int c, size_t n)
 {
     long d0, d1;
 
-    __asm__ __volatile__ (
+    asm volatile (
         "rep stosb"
         : "=&c" (d0), "=&D" (d1)
         : "a" (c), "1" (s), "0" (n)
@@ -59,7 +59,7 @@ void *memmove(void *dest, const void *sr
     if ( dest < src )
         return memcpy(dest, src, n);
 
-    __asm__ __volatile__ (
+    asm volatile (
         "   std         ; "
         "   rep movsb   ; "
         "   cld           "
@@ -69,3 +69,13 @@ void *memmove(void *dest, const void *sr
 
     return dest;
 }
+
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * tab-width: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
diff -r 5a72a99be911 -r 2052364cb456 xen/arch/x86/usercopy.c
--- a/xen/arch/x86/usercopy.c   Fri Nov 16 17:09:53 2007 +0000
+++ b/xen/arch/x86/usercopy.c   Fri Nov 16 17:59:34 2007 +0000
@@ -12,83 +12,87 @@
 
 unsigned long __copy_to_user_ll(void __user *to, const void *from, unsigned n)
 {
-       unsigned long __d0, __d1, __d2, __n = n;
-       __asm__ __volatile__(
-               "       cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
-               "       jbe  1f\n"
-               "       mov  %1,%0\n"
-               "       neg  %0\n"
-               "       and  $"STR(BYTES_PER_LONG-1)",%0\n"
-               "       sub  %0,%3\n"
-               "4:     rep; movsb\n" /* make 'to' address aligned */
-               "       mov  %3,%0\n"
-               "       shr  $"STR(LONG_BYTEORDER)",%0\n"
-               "       and  $"STR(BYTES_PER_LONG-1)",%3\n"
-               "       .align 2,0x90\n"
-               "0:     rep; movs"__OS"\n" /* as many words as possible... */
-               "       mov  %3,%0\n"
-               "1:     rep; movsb\n" /* ...remainder copied as bytes */
-               "2:\n"
-               ".section .fixup,\"ax\"\n"
-               "5:     add %3,%0\n"
-               "       jmp 2b\n"
-               "3:     lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
-               "       jmp 2b\n"
-               ".previous\n"
-               ".section __ex_table,\"a\"\n"
-               "       "__FIXUP_ALIGN"\n"
-               "       "__FIXUP_WORD" 4b,5b\n"
-               "       "__FIXUP_WORD" 0b,3b\n"
-               "       "__FIXUP_WORD" 1b,2b\n"
-               ".previous"
-               : "=&c"(__n), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)
-               : "3"(__n), "0"(__n), "1"(to), "2"(from)
-               : "memory");
-       return (unsigned)__n;
+    unsigned long __d0, __d1, __d2, __n = n;
+
+    asm volatile (
+        "    cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
+        "    jbe  1f\n"
+        "    mov  %1,%0\n"
+        "    neg  %0\n"
+        "    and  $"STR(BYTES_PER_LONG-1)",%0\n"
+        "    sub  %0,%3\n"
+        "4:  rep movsb\n" /* make 'to' address aligned */
+        "    mov  %3,%0\n"
+        "    shr  $"STR(LONG_BYTEORDER)",%0\n"
+        "    and  $"STR(BYTES_PER_LONG-1)",%3\n"
+        "    .align 2,0x90\n"
+        "0:  rep movs"__OS"\n" /* as many words as possible... */
+        "    mov  %3,%0\n"
+        "1:  rep movsb\n" /* ...remainder copied as bytes */
+        "2:\n"
+        ".section .fixup,\"ax\"\n"
+        "5:  add %3,%0\n"
+        "    jmp 2b\n"
+        "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
+        "    jmp 2b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        "    "__FIXUP_ALIGN"\n"
+        "    "__FIXUP_WORD" 4b,5b\n"
+        "    "__FIXUP_WORD" 0b,3b\n"
+        "    "__FIXUP_WORD" 1b,2b\n"
+        ".previous"
+        : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
+        : "0" (__n), "1" (to), "2" (from), "3" (__n)
+        : "memory" );
+
+    return __n;
 }
 
 unsigned long
 __copy_from_user_ll(void *to, const void __user *from, unsigned n)
 {
-       unsigned long __d0, __d1, __d2, __n = n;
-       __asm__ __volatile__(
-               "       cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
-               "       jbe  1f\n"
-               "       mov  %1,%0\n"
-               "       neg  %0\n"
-               "       and  $"STR(BYTES_PER_LONG-1)",%0\n"
-               "       sub  %0,%3\n"
-               "4:     rep; movsb\n" /* make 'to' address aligned */
-               "       mov  %3,%0\n"
-               "       shr  $"STR(LONG_BYTEORDER)",%0\n"
-               "       and  $"STR(BYTES_PER_LONG-1)",%3\n"
-               "       .align 2,0x90\n"
-               "0:     rep; movs"__OS"\n" /* as many words as possible... */
-               "       mov  %3,%0\n"
-               "1:     rep; movsb\n" /* ...remainder copied as bytes */
-               "2:\n"
-               ".section .fixup,\"ax\"\n"
-               "5:     add %3,%0\n"
-               "       jmp 6f\n"
-               "3:     lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
-               "6:     push %0\n"
-               "       push %%"__OP"ax\n"
-               "       xor  %%eax,%%eax\n"
-               "       rep; stosb\n"
-               "       pop  %%"__OP"ax\n"
-               "       pop  %0\n"
-               "       jmp 2b\n"
-               ".previous\n"
-               ".section __ex_table,\"a\"\n"
-               "       "__FIXUP_ALIGN"\n"
-               "       "__FIXUP_WORD" 4b,5b\n"
-               "       "__FIXUP_WORD" 0b,3b\n"
-               "       "__FIXUP_WORD" 1b,6b\n"
-               ".previous"
-               : "=&c"(__n), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)
-               : "3"(__n), "0"(__n), "1"(to), "2"(from)
-               : "memory");
-       return (unsigned)__n;
+    unsigned long __d0, __d1, __d2, __n = n;
+
+    asm volatile (
+        "    cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
+        "    jbe  1f\n"
+        "    mov  %1,%0\n"
+        "    neg  %0\n"
+        "    and  $"STR(BYTES_PER_LONG-1)",%0\n"
+        "    sub  %0,%3\n"
+        "4:  rep; movsb\n" /* make 'to' address aligned */
+        "    mov  %3,%0\n"
+        "    shr  $"STR(LONG_BYTEORDER)",%0\n"
+        "    and  $"STR(BYTES_PER_LONG-1)",%3\n"
+        "    .align 2,0x90\n"
+        "0:  rep; movs"__OS"\n" /* as many words as possible... */
+        "    mov  %3,%0\n"
+        "1:  rep; movsb\n" /* ...remainder copied as bytes */
+        "2:\n"
+        ".section .fixup,\"ax\"\n"
+        "5:  add %3,%0\n"
+        "    jmp 6f\n"
+        "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
+        "6:  push %0\n"
+        "    push %%"__OP"ax\n"
+        "    xor  %%eax,%%eax\n"
+        "    rep; stosb\n"
+        "    pop  %%"__OP"ax\n"
+        "    pop  %0\n"
+        "    jmp 2b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        "    "__FIXUP_ALIGN"\n"
+        "    "__FIXUP_WORD" 4b,5b\n"
+        "    "__FIXUP_WORD" 0b,3b\n"
+        "    "__FIXUP_WORD" 1b,6b\n"
+        ".previous"
+        : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
+        : "0" (__n), "1" (to), "2" (from), "3" (__n)
+        : "memory" );
+
+    return __n;
 }
 
 /**
@@ -107,9 +111,9 @@ unsigned long
 unsigned long
 copy_to_user(void __user *to, const void *from, unsigned n)
 {
-       if (access_ok(to, n))
-               n = __copy_to_user(to, from, n);
-       return n;
+    if ( access_ok(to, n) )
+        n = __copy_to_user(to, from, n);
+    return n;
 }
 
 /**
@@ -131,9 +135,19 @@ unsigned long
 unsigned long
 copy_from_user(void *to, const void __user *from, unsigned n)
 {
-       if (access_ok(from, n))
-               n = __copy_from_user(to, from, n);
-       else
-               memset(to, 0, n);
-       return n;
+    if ( access_ok(from, n) )
+        n = __copy_from_user(to, from, n);
+    else
+        memset(to, 0, n);
+    return n;
 }
+
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * tab-width: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
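
The usercopy.c asm relies on helper macros defined elsewhere in the Xen
tree and not shown in this diff. As an orientation aid only, here are
assumed expansions (per Xen's x86 headers of this era; treat them as
illustrative, not authoritative):

    /* Assumed expansions -- this changeset does not define them. */
    #define __STR(x) #x
    #define STR(x)   __STR(x)         /* stringify after expansion     */
    #ifdef __i386__
    #define LONG_BYTEORDER 2          /* log2(bytes per long)          */
    #define __OS           "l"        /* operand-size suffix: movl     */
    #define __OP           "e"        /* register prefix: %eax, ...    */
    #define __FIXUP_ALIGN  ".align 4" /* __ex_table entry alignment    */
    #define __FIXUP_WORD   ".long"    /* 32-bit address slots          */
    #else
    #define LONG_BYTEORDER 3
    #define __OS           "q"        /* movq on x86-64                */
    #define __OP           "r"        /* %rax, ...                     */
    #define __FIXUP_ALIGN  ".align 8"
    #define __FIXUP_WORD   ".quad"    /* 64-bit address slots          */
    #endif
    #define BYTES_PER_LONG (1 << LONG_BYTEORDER)   /* 4 or 8 */

Each __FIXUP_WORD pair records a (faulting instruction, fixup label)
entry in the __ex_table section; on a fault, the exception handler
resumes at the fixup code, which computes how many bytes remain uncopied
(and, in the copy-from case, zeroes the destination tail with rep stosb).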
