[Xen-changelog] [xen-3.1-testing] x86: Clean up some files of inline asm, and fix __copy_{to,from}_user_ll() for gcc 3.4
# HG changeset patch
# User Keir Fraser <keir.fraser@xxxxxxxxxx>
# Date 1196958937 0
# Node ID 120de16f76d4690a22c9e45c948c7b59fd3b49c1
# Parent  4c5138be79914bf210052a3ff81ae9a87ab13aa8
x86: Clean up some files of inline asm, and fix __copy_{to,from}_user_ll()
for gcc 3.4 (asm output constraints all need to be '=&').

Based on a patch by Jan Beulich.

Signed-off-by: Keir Fraser <keir.fraser@xxxxxxxxxx>

xen-unstable changeset: 16392:2052364cb456170a70ad5c8bfb876c95f7a9fe4a
xen-unstable date:      Fri Nov 16 17:59:34 2007 +0000
---
 xen/arch/x86/string.c   |   16 +++-
 xen/arch/x86/usercopy.c |  174 +++++++++++++++++++++++++-----------------------
 2 files changed, 107 insertions(+), 83 deletions(-)

diff -r 4c5138be7991 -r 120de16f76d4 xen/arch/x86/string.c
--- a/xen/arch/x86/string.c	Thu Dec 06 16:34:56 2007 +0000
+++ b/xen/arch/x86/string.c	Thu Dec 06 16:35:37 2007 +0000
@@ -13,7 +13,7 @@ void *memcpy(void *dest, const void *src
 {
     long d0, d1, d2;
 
-    __asm__ __volatile__ (
+    asm volatile (
 #ifdef __i386__
         "    rep movsl    ; "
 #else
@@ -42,7 +42,7 @@ void *memset(void *s, int c, size_t n)
 {
     long d0, d1;
 
-    __asm__ __volatile__ (
+    asm volatile (
         "rep stosb"
         : "=&c" (d0), "=&D" (d1)
         : "a" (c), "1" (s), "0" (n)
@@ -59,7 +59,7 @@ void *memmove(void *dest, const void *sr
     if ( dest < src )
         return memcpy(dest, src, n);
 
-    __asm__ __volatile__ (
+    asm volatile (
         "    std         ; "
         "    rep movsb   ; "
         "    cld           "
@@ -69,3 +69,13 @@ void *memmove(void *dest, const void *sr
 
     return dest;
 }
+
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * tab-width: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
diff -r 4c5138be7991 -r 120de16f76d4 xen/arch/x86/usercopy.c
--- a/xen/arch/x86/usercopy.c	Thu Dec 06 16:34:56 2007 +0000
+++ b/xen/arch/x86/usercopy.c	Thu Dec 06 16:35:37 2007 +0000
@@ -12,83 +12,87 @@
 unsigned long
 __copy_to_user_ll(void __user *to, const void *from, unsigned n)
 {
-    unsigned long __d0, __d1, __d2, __n = n;
-    __asm__ __volatile__(
-        "    cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
-        "    jbe  1f\n"
-        "    mov  %1,%0\n"
-        "    neg  %0\n"
-        "    and  $"STR(BYTES_PER_LONG-1)",%0\n"
-        "    sub  %0,%3\n"
-        "4:  rep; movsb\n" /* make 'to' address aligned */
-        "    mov  %3,%0\n"
-        "    shr  $"STR(LONG_BYTEORDER)",%0\n"
-        "    and  $"STR(BYTES_PER_LONG-1)",%3\n"
-        "    .align 2,0x90\n"
-        "0:  rep; movs"__OS"\n" /* as many words as possible... */
-        "    mov  %3,%0\n"
-        "1:  rep; movsb\n" /* ...remainder copied as bytes */
-        "2:\n"
-        ".section .fixup,\"ax\"\n"
-        "5:  add %3,%0\n"
-        "    jmp 2b\n"
-        "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
-        "    jmp 2b\n"
-        ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "    "__FIXUP_ALIGN"\n"
-        "    "__FIXUP_WORD" 4b,5b\n"
-        "    "__FIXUP_WORD" 0b,3b\n"
-        "    "__FIXUP_WORD" 1b,2b\n"
-        ".previous"
-        : "=&c"(__n), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)
-        : "3"(__n), "0"(__n), "1"(to), "2"(from)
-        : "memory");
-    return (unsigned)__n;
+    unsigned long __d0, __d1, __d2, __n = n;
+
+    asm volatile (
+        "    cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
+        "    jbe  1f\n"
+        "    mov  %1,%0\n"
+        "    neg  %0\n"
+        "    and  $"STR(BYTES_PER_LONG-1)",%0\n"
+        "    sub  %0,%3\n"
+        "4:  rep movsb\n" /* make 'to' address aligned */
+        "    mov  %3,%0\n"
+        "    shr  $"STR(LONG_BYTEORDER)",%0\n"
+        "    and  $"STR(BYTES_PER_LONG-1)",%3\n"
+        "    .align 2,0x90\n"
+        "0:  rep movs"__OS"\n" /* as many words as possible... */
+        "    mov  %3,%0\n"
+        "1:  rep movsb\n" /* ...remainder copied as bytes */
+        "2:\n"
+        ".section .fixup,\"ax\"\n"
+        "5:  add %3,%0\n"
+        "    jmp 2b\n"
+        "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
+        "    jmp 2b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        "    "__FIXUP_ALIGN"\n"
+        "    "__FIXUP_WORD" 4b,5b\n"
+        "    "__FIXUP_WORD" 0b,3b\n"
+        "    "__FIXUP_WORD" 1b,2b\n"
+        ".previous"
+        : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
+        : "0" (__n), "1" (to), "2" (from), "3" (__n)
+        : "memory" );
+
+    return __n;
 }
 
 unsigned long
 __copy_from_user_ll(void *to, const void __user *from, unsigned n)
 {
-    unsigned long __d0, __d1, __d2, __n = n;
-    __asm__ __volatile__(
-        "    cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
-        "    jbe  1f\n"
-        "    mov  %1,%0\n"
-        "    neg  %0\n"
-        "    and  $"STR(BYTES_PER_LONG-1)",%0\n"
-        "    sub  %0,%3\n"
-        "4:  rep; movsb\n" /* make 'to' address aligned */
-        "    mov  %3,%0\n"
-        "    shr  $"STR(LONG_BYTEORDER)",%0\n"
-        "    and  $"STR(BYTES_PER_LONG-1)",%3\n"
-        "    .align 2,0x90\n"
-        "0:  rep; movs"__OS"\n" /* as many words as possible... */
-        "    mov  %3,%0\n"
-        "1:  rep; movsb\n" /* ...remainder copied as bytes */
-        "2:\n"
-        ".section .fixup,\"ax\"\n"
-        "5:  add %3,%0\n"
-        "    jmp 6f\n"
-        "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
-        "6:  push %0\n"
-        "    push %%"__OP"ax\n"
-        "    xor  %%eax,%%eax\n"
-        "    rep; stosb\n"
-        "    pop  %%"__OP"ax\n"
-        "    pop  %0\n"
-        "    jmp 2b\n"
-        ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "    "__FIXUP_ALIGN"\n"
-        "    "__FIXUP_WORD" 4b,5b\n"
-        "    "__FIXUP_WORD" 0b,3b\n"
-        "    "__FIXUP_WORD" 1b,6b\n"
-        ".previous"
-        : "=&c"(__n), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)
-        : "3"(__n), "0"(__n), "1"(to), "2"(from)
-        : "memory");
-    return (unsigned)__n;
+    unsigned long __d0, __d1, __d2, __n = n;
+
+    asm volatile (
+        "    cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
+        "    jbe  1f\n"
+        "    mov  %1,%0\n"
+        "    neg  %0\n"
+        "    and  $"STR(BYTES_PER_LONG-1)",%0\n"
+        "    sub  %0,%3\n"
+        "4:  rep; movsb\n" /* make 'to' address aligned */
+        "    mov  %3,%0\n"
+        "    shr  $"STR(LONG_BYTEORDER)",%0\n"
+        "    and  $"STR(BYTES_PER_LONG-1)",%3\n"
+        "    .align 2,0x90\n"
+        "0:  rep; movs"__OS"\n" /* as many words as possible... */
+        "    mov  %3,%0\n"
+        "1:  rep; movsb\n" /* ...remainder copied as bytes */
+        "2:\n"
+        ".section .fixup,\"ax\"\n"
+        "5:  add %3,%0\n"
+        "    jmp 6f\n"
+        "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
+        "6:  push %0\n"
+        "    push %%"__OP"ax\n"
+        "    xor  %%eax,%%eax\n"
+        "    rep; stosb\n"
+        "    pop  %%"__OP"ax\n"
+        "    pop  %0\n"
+        "    jmp 2b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        "    "__FIXUP_ALIGN"\n"
+        "    "__FIXUP_WORD" 4b,5b\n"
+        "    "__FIXUP_WORD" 0b,3b\n"
+        "    "__FIXUP_WORD" 1b,6b\n"
+        ".previous"
+        : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
+        : "0" (__n), "1" (to), "2" (from), "3" (__n)
+        : "memory" );
+
+    return __n;
 }
 
 /**
@@ -107,9 +111,9 @@ unsigned long
 unsigned long
 copy_to_user(void __user *to, const void *from, unsigned n)
 {
-    if (access_ok(to, n))
-        n = __copy_to_user(to, from, n);
-    return n;
+    if ( access_ok(to, n) )
+        n = __copy_to_user(to, from, n);
+    return n;
 }
 
 /**
@@ -131,9 +135,19 @@ unsigned long
 unsigned long
 copy_from_user(void *to, const void __user *from, unsigned n)
 {
-    if (access_ok(from, n))
-        n = __copy_from_user(to, from, n);
-    else
-        memset(to, 0, n);
-    return n;
+    if ( access_ok(from, n) )
+        n = __copy_from_user(to, from, n);
+    else
+        memset(to, 0, n);
+    return n;
}
+
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * tab-width: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
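
For readers following along: the functional part of this change is the
constraint fix from "=r" to "=&r" (with the input operands reordered to
match). The '&' marks an output operand as earlyclobber, i.e. it may be
written before the asm has finished reading its inputs, so gcc must not
assign it the same register as any input. gcc 3.4's register allocator
shares registers between operands more aggressively than older releases,
which is what exposed the bug here. Below is a minimal standalone sketch
of the failure mode; it is not taken from the Xen tree, and the function
name scale_add is made up for illustration.

#include <stdio.h>

static unsigned long scale_add(unsigned long a, unsigned long b)
{
    unsigned long out;

    /* 'out' is written (first mov) before 'b' is read (last add), so it
     * must be "=&r".  With plain "=r", gcc may legally give 'out' and
     * 'b' the same register, and the first mov would destroy 'b'. */
    asm ("mov %1, %0\n\t"
         "add %0, %0\n\t"   /* out = 2*a: out is already clobbered here */
         "add %2, %0"       /* ...but %2 (b) is only read now */
         : "=&r" (out)
         : "r" (a), "r" (b));

    return out;
}

int main(void)
{
    printf("%lu\n", scale_add(3, 4));   /* prints 10 */
    return 0;
}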
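
As a rough guide to what the asm in __copy_{to,from}_user_ll() does, here
is an illustrative plain-C restatement of the copy strategy only. The
helper name copy_sketch is hypothetical, and this deliberately omits the
.fixup/__ex_table machinery that lets the real routines survive faults on
guest addresses and report the number of bytes left uncopied.

#include <stddef.h>
#include <stdint.h>

static void copy_sketch(void *to, const void *from, size_t n)
{
    unsigned char *d = to;
    const unsigned char *s = from;

    if ( n > 2 * sizeof(long) - 1 )
    {
        /* "neg %0; and $(BYTES_PER_LONG-1),%0": bytes needed to bring
         * the destination up to a long boundary. */
        size_t head = (size_t)(-(uintptr_t)d & (sizeof(long) - 1));
        size_t words;

        n -= head;                        /* "sub %0,%3"                  */
        while ( head-- )                  /* "4: rep movsb"               */
            *d++ = *s++;

        words = n / sizeof(long);         /* "shr $(LONG_BYTEORDER),%0"   */
        n &= sizeof(long) - 1;            /* "and $(BYTES_PER_LONG-1),%3" */
        while ( words-- )                 /* "0: rep movs<l|q>"           */
        {
            /* Type punning kept simple for illustration. */
            *(long *)d = *(const long *)s;
            d += sizeof(long);
            s += sizeof(long);
        }
    }

    while ( n-- )                         /* "1: rep movsb"               */
        *d++ = *s++;
}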