
[Xen-changelog] Merge x86/32 and x86/64 string functions.



ChangeSet 1.1683, 2005/06/06 15:48:14+01:00, kaf24@xxxxxxxxxxxxxxxxxxxx

        Merge x86/32 and x86/64 string functions.
        Signed-off-by: Keir Fraser <keir@xxxxxxxxxxxxx>



 b/xen/include/asm-x86/page.h        |   12 
 b/xen/include/asm-x86/string.h      |  446 ++++++++++++++++++++++++++++++++
 b/xen/include/asm-x86/types.h       |   18 -
 xen/include/asm-x86/x86_32/string.h |  489 ------------------------------------
 xen/include/asm-x86/x86_64/string.h |   16 -
 5 files changed, 455 insertions(+), 526 deletions(-)


diff -Nru a/xen/include/asm-x86/page.h b/xen/include/asm-x86/page.h
--- a/xen/include/asm-x86/page.h        2005-06-06 11:02:49 -04:00
+++ b/xen/include/asm-x86/page.h        2005-06-06 11:02:49 -04:00
@@ -185,22 +185,26 @@
 #define pfn_valid(_pfn)     ((_pfn) < max_page)
 
 /* High table entries are reserved by the hypervisor. */
-/* FIXME: this breaks with PAE -- kraxel */
+#if defined(CONFIG_X86_32) && !defined(CONFIG_PAE)
 #define DOMAIN_ENTRIES_PER_L2_PAGETABLE     \
   (HYPERVISOR_VIRT_START >> L2_PAGETABLE_SHIFT)
 #define HYPERVISOR_ENTRIES_PER_L2_PAGETABLE \
   (L2_PAGETABLE_ENTRIES - DOMAIN_ENTRIES_PER_L2_PAGETABLE)
+#else
+#define DOMAIN_ENTRIES_PER_L2_PAGETABLE     0
+#define HYPERVISOR_ENTRIES_PER_L2_PAGETABLE 0
+#endif
 
 #define linear_l1_table                                                 \
     ((l1_pgentry_t *)(LINEAR_PT_VIRT_START))
-#define __linear_l2_table                                                 \
+#define __linear_l2_table                                               \
     ((l2_pgentry_t *)(LINEAR_PT_VIRT_START +                            \
                      (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0))))
-#define __linear_l3_table                                                 \
+#define __linear_l3_table                                               \
     ((l3_pgentry_t *)(LINEAR_PT_VIRT_START +                            \
                      (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0)) +   \
                      (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<1))))
-#define __linear_l4_table                                                 \
+#define __linear_l4_table                                               \
     ((l4_pgentry_t *)(LINEAR_PT_VIRT_START +                            \
                      (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0)) +   \
                      (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<1)) +   \
diff -Nru a/xen/include/asm-x86/string.h b/xen/include/asm-x86/string.h
--- a/xen/include/asm-x86/string.h      2005-06-06 11:02:49 -04:00
+++ b/xen/include/asm-x86/string.h      2005-06-06 11:02:49 -04:00
@@ -1,5 +1,445 @@
-#ifdef __x86_64__
-#include <asm/x86_64/string.h>
+#ifndef __X86_STRING_H__
+#define __X86_STRING_H__
+
+#include <xen/config.h>
+
+#define __HAVE_ARCH_STRCPY
+static inline char *strcpy(char *dest, const char *src)
+{
+    long d0, d1, d2;
+    __asm__ __volatile__ (
+        "1: lodsb          \n"
+        "   stosb          \n"
+        "   test %%al,%%al \n"
+        "   jne  1b        \n"
+        : "=&S" (d0), "=&D" (d1), "=&a" (d2)
+        : "0" (src), "1" (dest) : "memory" );
+    return dest;
+}
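
A hypothetical C equivalent of the asm above, for readability (a sketch, not part of the changeset):

    static inline char *strcpy_c(char *dest, const char *src)
    {
        char *d = dest;
        while ( (*d++ = *src++) != '\0' )   /* the lodsb/stosb loop */
            ;
        return dest;
    }
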
+
+#define __HAVE_ARCH_STRNCPY
+static inline char *strncpy(char *dest, const char *src, size_t count)
+{
+    long d0, d1, d2, d3;
+    __asm__ __volatile__ (
+        "1: dec  %2        \n"
+        "   js   2f        \n"
+        "   lodsb          \n"
+        "   stosb          \n"
+        "   test %%al,%%al \n"
+        "   jne  1b        \n"
+        "   rep ; stosb    \n"
+        "2:                \n"
+        : "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
+        : "0" (src), "1" (dest), "2" (count) : "memory" );
+    return dest;
+}
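
Note the trailing rep;stosb: once the source NUL has been copied, AL is zero and ECX holds the unused count, so the remainder of dest is NUL-padded as ISO C requires; if count runs out first, the js 2f exit skips both padding and terminator. A small usage illustration (buf is hypothetical):

    char buf[8];
    strncpy(buf, "ab", sizeof(buf));   /* buf = "ab\0\0\0\0\0\0" */
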
+
+#define __HAVE_ARCH_STRCAT
+static inline char *strcat(char *dest, const char *src)
+{
+    long d0, d1, d2, d3;
+    __asm__ __volatile__ (
+        "   repne ; scasb  \n"
+        "   dec  %1        \n"
+        "1: lodsb          \n"
+        "   stosb          \n"
+        "   test %%al,%%al \n"
+        "   jne  1b        \n"
+        : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
+        : "0" (src), "1" (dest), "2" (0UL), "3" (0xffffffffUL) : "memory" );
+    return dest;
+}
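
Here the repne;scasb prologue, with EAX=0 and ECX=0xffffffff, scans dest for its terminating NUL (the dec backs EDI up onto it) before the copy loop appends src. A hypothetical C equivalent, as a sketch:

    static inline char *strcat_c(char *dest, const char *src)
    {
        char *d = dest;
        while ( *d != '\0' )                /* repne ; scasb */
            d++;
        while ( (*d++ = *src++) != '\0' )   /* then the strcpy loop */
            ;
        return dest;
    }
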
+
+#define __HAVE_ARCH_STRNCAT
+static inline char *strncat(char *dest, const char *src, size_t count)
+{
+    long d0, d1, d2, d3;
+    __asm__ __volatile__ (
+        "   repne ; scasb   \n"
+        "   dec  %1         \n"
+        "   mov  %8,%3      \n"
+        "1: dec  %3         \n"
+        "   js   2f         \n"
+        "   lodsb           \n"
+        "   stosb           \n"
+        "   test %%al,%%al  \n"
+        "   jne  1b         \n"
+        "2: xor  %%eax,%%eax\n"
+        "   stosb"
+        : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
+        : "0" (src), "1" (dest), "2" (0UL), "3" (0xffffffffUL), "g" (count)
+        : "memory" );
+    return dest;
+}
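
Unlike strncpy, this always terminates the result: the xor/stosb tail writes a NUL even when count source bytes were copied, so dest needs room for strlen(dest) + count + 1 bytes. For example (buf is hypothetical):

    char buf[16] = "foo";
    strncat(buf, "barbaz", 3);   /* buf = "foobar", NUL included */
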
+
+#define __HAVE_ARCH_STRCMP
+static inline int strcmp(const char *cs, const char *ct)
+{
+    long d0, d1;
+    register int __res;
+    __asm__ __volatile__ (
+        "1: lodsb           \n"
+        "   scasb           \n"
+        "   jne  2f         \n"
+        "   test %%al,%%al  \n"
+        "   jne  1b         \n"
+        "   xor  %%eax,%%eax\n"
+        "   jmp  3f         \n"
+        "2: sbb  %%eax,%%eax\n"
+        "   or   $1,%%al    \n"
+        "3:                 \n"
+        : "=a" (__res), "=&S" (d0), "=&D" (d1)
+        : "1" (cs), "2" (ct) );
+    return __res;
+}
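
The sbb/or tail converts the final scasb comparison into a sign: sbb %eax,%eax leaves 0 or -1 depending on the carry, and or $1,%al turns those into +1 or -1, so the return value is always exactly -1, 0 or +1. A hypothetical C equivalent, as a sketch:

    static inline int strcmp_c(const char *cs, const char *ct)
    {
        for ( ; *cs == *ct; cs++, ct++ )
            if ( *cs == '\0' )
                return 0;                      /* xor %eax,%eax */
        return (*(const unsigned char *)cs <
                *(const unsigned char *)ct) ? -1 : 1;   /* sbb ; or $1 */
    }
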
+
+#define __HAVE_ARCH_STRNCMP
+static inline int strncmp(const char *cs, const char *ct, size_t count)
+{
+    long d0, d1, d2;
+    register int __res;
+    __asm__ __volatile__ (
+        "1: dec  %3         \n"
+        "   js   2f         \n"
+        "   lodsb           \n"
+        "   scasb           \n"
+        "   jne  3f         \n"
+        "   test %%al,%%al  \n"
+        "   jne  1b         \n"
+        "2: xor  %%eax,%%eax\n"
+        "   jmp  4f         \n"
+        "3: sbb  %%eax,%%eax\n"
+        "   or   $1,%%al    \n"
+        "4:                 \n"
+        : "=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
+        : "1" (cs), "2" (ct), "3" (count) );
+    return __res;
+}
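
Same comparison loop as strcmp, except ECX is decremented first and running out of count (the js 2f exit) is reported as equality, so only the first count bytes are significant:

    strncmp("abcdef", "abcxyz", 3);   /* 0: bytes beyond the third are ignored */
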
+
+#define __HAVE_ARCH_STRCHR
+static inline char *strchr(const char *s, int c)
+{
+    long d0;
+    register char *__res;
+    __asm__ __volatile__ (
+        "   mov  %%al,%%ah  \n"
+        "1: lodsb           \n"
+        "   cmp  %%ah,%%al  \n"
+        "   je   2f         \n"
+        "   test %%al,%%al  \n"
+        "   jne  1b         \n"
+        "   mov  $1,%1      \n"
+        "2: mov  %1,%0      \n"
+        "   dec  %0         \n"
+        : "=a" (__res), "=&S" (d0) : "1" (s), "0" (c) );
+    return __res;
+}
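
The NULL return is handled by register arithmetic rather than a branch: on a miss ESI is forced to 1, so the final mov/dec pair yields 0; on a hit ESI points one past the match and the dec backs up onto it. A hypothetical C equivalent, as a sketch:

    static inline char *strchr_c(const char *s, int c)
    {
        do {
            if ( *s == (char)c )     /* cmp %ah,%al */
                return (char *)s;
        } while ( *s++ != '\0' );
        return NULL;                 /* mov $1,%1 ... dec %0 */
    }
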
+
+#define __HAVE_ARCH_STRRCHR
+static inline char *strrchr(const char *s, int c)
+{
+    long d0, d1;
+    register char *__res;
+    __asm__ __volatile__ (
+        "   mov  %%al,%%ah  \n"
+        "1: lodsb           \n"
+        "   cmp  %%ah,%%al  \n"
+        "   jne  2f         \n"
+        "   lea  -1(%1),%0  \n"
+        "2: test %%al,%%al  \n"
+        "   jne  1b         \n"
+        : "=g" (__res), "=&S" (d0), "=&a" (d1) : "0" (0), "1" (s), "2" (c) );
+    return __res;
+}
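
This records the most recent match as it goes (lea -1(%1),%0 captures the address of the byte lodsb just consumed), so the last occurrence is found in a single pass. A hypothetical C equivalent, as a sketch:

    static inline char *strrchr_c(const char *s, int c)
    {
        char *res = NULL;
        do {
            if ( *s == (char)c )
                res = (char *)s;     /* lea -1(%1),%0 */
        } while ( *s++ != '\0' );
        return res;
    }
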
+
+#define __HAVE_ARCH_STRLEN
+static inline size_t strlen(const char *s)
+{
+    long d0;
+    register int __res;
+    __asm__ __volatile__ (
+        "   repne ; scasb  \n"
+        "   notl %0        \n"
+        "   decl %0        \n"
+        : "=c" (__res), "=&D" (d0) : "1" (s), "a" (0), "0" (0xffffffffUL) );
+    return __res;
+}
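
ECX starts at 0xffffffff and repne;scasb decrements it once per byte examined, including the NUL; complementing and decrementing ECX then recovers the length. Worked through for s = "xen" (4 bytes scanned):

    /* ecx: 0xffffffff -> 0xfffffffb after scanning 'x','e','n','\0' */
    /* notl: 0x00000004 ; decl: 0x00000003 == strlen("xen")          */
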
+
+static inline void *__variable_memcpy(void *to, const void *from, size_t n)
+{
+    long d0, d1, d2;
+    __asm__ __volatile__ (
+        "   rep ; movs"__OS"\n"
+        "   mov %4,%3       \n"
+        "   rep ; movsb     \n"
+        : "=&c" (d0), "=&D" (d1), "=&S" (d2)
+        : "0" (n/BYTES_PER_LONG), "r" (n%BYTES_PER_LONG), "1" (to), "2" (from)
+        : "memory" );
+    return to;
+}
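
This is the crux of the merge: the __OS operand-size suffix (the "l"/"q" instruction suffix on x86/32 and x86/64 respectively) and BYTES_PER_LONG let one implementation do a machine-word rep;movs followed by a byte tail on both architectures. A hypothetical C equivalent, as a sketch:

    static inline void *variable_memcpy_c(void *to, const void *from, size_t n)
    {
        const char *s = from;
        char *d = to;
        size_t i;
        for ( i = 0; i < n / sizeof(long); i++ )   /* rep ; movs__OS */
        {
            *(long *)d = *(const long *)s;
            d += sizeof(long); s += sizeof(long);
        }
        for ( i = 0; i < n % sizeof(long); i++ )   /* rep ; movsb tail */
            *d++ = *s++;
        return to;
    }
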
+
+/*
+ * This looks horribly ugly, but the compiler can optimize it totally,
+ * as the count is constant.
+ */
+static always_inline void * __constant_memcpy(
+    void * to, const void * from, size_t n)
+{
+    switch ( n )
+    {
+    case 0:
+        return to;
+    case 1:
+        *(u8 *)to = *(const u8 *)from;
+        return to;
+    case 2:
+        *(u16 *)to = *(const u16 *)from;
+        return to;
+    case 3:
+        *(u16 *)to = *(const u16 *)from;
+        *(2+(u8 *)to) = *(2+(const u8 *)from);
+        return to;
+    case 4:
+        *(u32 *)to = *(const u32 *)from;
+        return to;
+    case 5:
+        *(u32 *)to = *(const u32 *)from;
+        *(4+(u8 *)to) = *(4+(const u8 *)from);
+        return to;
+    case 6:

_______________________________________________
Xen-changelog mailing list
Xen-changelog@xxxxxxxxxxxxxxxxxxx
http://lists.xensource.com/xen-changelog


 

