
[Xen-changelog] [xen-unstable] x86: Remove unnecessary LOCK/LOCK_PREFIX macros.



# HG changeset patch
# User Keir Fraser <keir@xxxxxxx>
# Date 1292525215 0
# Node ID 04f15c2de8fa8e22104ce0dfc591b57ce02dadac
# Parent  669c2f83b86c0c06ab20006488c67fb7237fcdef
x86: Remove unnecessary LOCK/LOCK_PREFIX macros.

We don't support !CONFIG_SMP.

Signed-off-by: Keir Fraser <keir@xxxxxxx>
---
 xen/include/asm-ia64/config.h       |    3 --
 xen/include/asm-x86/atomic.h        |   22 +++++++-------------
 xen/include/asm-x86/bitops.h        |   24 +++++-----------------
 xen/include/asm-x86/system.h        |   38 ++++++++++++++++++------------------
 xen/include/asm-x86/x86_32/system.h |    6 ++---
 xen/include/asm-x86/x86_64/system.h |    2 -
 6 files changed, 37 insertions(+), 58 deletions(-)
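
For illustration, the macros being removed (visible in the hunks below) chose the lock prefix at build time. Xen has no uniprocessor build, so the CONFIG_SMP branch was always taken and writing the prefix literally is behaviourally identical; the sketch below restates the two forms (example_inc is a hypothetical name, not Xen code):

    /* Old form (removed below): prefix selected by CONFIG_SMP. */
    #ifdef CONFIG_SMP
    #define LOCK_PREFIX "lock ; "   /* the only branch Xen ever built */
    #else
    #define LOCK_PREFIX ""          /* dead: no !CONFIG_SMP configuration */
    #endif

    /* New form: the prefix is written directly into each asm template. */
    static inline void example_inc(volatile int *counter)
    {
        asm volatile ( "lock; incl %0" : "+m" (*counter) );
    }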

diff -r 669c2f83b86c -r 04f15c2de8fa xen/include/asm-ia64/config.h
--- a/xen/include/asm-ia64/config.h     Thu Dec 16 18:37:30 2010 +0000
+++ b/xen/include/asm-ia64/config.h     Thu Dec 16 18:46:55 2010 +0000
@@ -84,9 +84,6 @@ typedef unsigned long paddr_t;
 // from linux/include/linux/types.h
 #define CLEAR_BITMAP(name,bits) \
        memset(name, 0, BITS_TO_LONGS(bits)*sizeof(unsigned long))
-
-// FIXME?: x86-ism used in xen/mm.h
-#define LOCK_PREFIX
 
 extern unsigned long total_pages;
 extern unsigned long xen_pstart;
diff -r 669c2f83b86c -r 04f15c2de8fa xen/include/asm-x86/atomic.h
--- a/xen/include/asm-x86/atomic.h      Thu Dec 16 18:37:30 2010 +0000
+++ b/xen/include/asm-x86/atomic.h      Thu Dec 16 18:46:55 2010 +0000
@@ -3,12 +3,6 @@
 
 #include <xen/config.h>
 #include <asm/system.h>
-
-#ifdef CONFIG_SMP
-#define LOCK "lock ; "
-#else
-#define LOCK ""
-#endif
 
 /*
  * NB. I've pushed the volatile qualifier into the operations. This allows
@@ -48,7 +42,7 @@ static __inline__ void atomic_add(int i,
 static __inline__ void atomic_add(int i, atomic_t *v)
 {
        asm volatile(
-               LOCK "addl %1,%0"
+               "lock; addl %1,%0"
                :"=m" (*(volatile int *)&v->counter)
                :"ir" (i), "m" (*(volatile int *)&v->counter));
 }
@@ -63,7 +57,7 @@ static __inline__ void atomic_sub(int i,
 static __inline__ void atomic_sub(int i, atomic_t *v)
 {
        asm volatile(
-               LOCK "subl %1,%0"
+               "lock; subl %1,%0"
                :"=m" (*(volatile int *)&v->counter)
                :"ir" (i), "m" (*(volatile int *)&v->counter));
 }
@@ -82,7 +76,7 @@ static __inline__ int atomic_sub_and_tes
        unsigned char c;
 
        asm volatile(
-               LOCK "subl %2,%0; sete %1"
+               "lock; subl %2,%0; sete %1"
                :"=m" (*(volatile int *)&v->counter), "=qm" (c)
                :"ir" (i), "m" (*(volatile int *)&v->counter) : "memory");
        return c;
@@ -97,7 +91,7 @@ static __inline__ void atomic_inc(atomic
 static __inline__ void atomic_inc(atomic_t *v)
 {
        asm volatile(
-               LOCK "incl %0"
+               "lock; incl %0"
                :"=m" (*(volatile int *)&v->counter)
                :"m" (*(volatile int *)&v->counter));
 }
@@ -111,7 +105,7 @@ static __inline__ void atomic_dec(atomic
 static __inline__ void atomic_dec(atomic_t *v)
 {
        asm volatile(
-               LOCK "decl %0"
+               "lock; decl %0"
                :"=m" (*(volatile int *)&v->counter)
                :"m" (*(volatile int *)&v->counter));
 }
@@ -129,7 +123,7 @@ static __inline__ int atomic_dec_and_tes
        unsigned char c;
 
        asm volatile(
-               LOCK "decl %0; sete %1"
+               "lock; decl %0; sete %1"
                :"=m" (*(volatile int *)&v->counter), "=qm" (c)
                :"m" (*(volatile int *)&v->counter) : "memory");
        return c != 0;
@@ -148,7 +142,7 @@ static __inline__ int atomic_inc_and_tes
        unsigned char c;
 
        asm volatile(
-               LOCK "incl %0; sete %1"
+               "lock; incl %0; sete %1"
                :"=m" (*(volatile int *)&v->counter), "=qm" (c)
                :"m" (*(volatile int *)&v->counter) : "memory");
        return c != 0;
@@ -168,7 +162,7 @@ static __inline__ int atomic_add_negativ
        unsigned char c;
 
        asm volatile(
-               LOCK "addl %2,%0; sets %1"
+               "lock; addl %2,%0; sets %1"
                :"=m" (*(volatile int *)&v->counter), "=qm" (c)
                :"ir" (i), "m" (*(volatile int *)&v->counter) : "memory");
        return c;
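
A hypothetical usage sketch for the read-modify-write-and-test operations above (obj, refcnt and release_obj are illustrative names, not Xen code): atomic_dec_and_test() returns true exactly when the lock'd decrement takes the counter to zero, so exactly one CPU observes the 1->0 transition.

    /* Classic refcount teardown: only the final decrementer frees. */
    if ( atomic_dec_and_test(&obj->refcnt) )
        release_obj(obj);
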
diff -r 669c2f83b86c -r 04f15c2de8fa xen/include/asm-x86/bitops.h
--- a/xen/include/asm-x86/bitops.h      Thu Dec 16 18:37:30 2010 +0000
+++ b/xen/include/asm-x86/bitops.h      Thu Dec 16 18:46:55 2010 +0000
@@ -6,12 +6,6 @@
  */
 
 #include <xen/config.h>
-
-#ifdef CONFIG_SMP
-#define LOCK_PREFIX "lock ; "
-#else
-#define LOCK_PREFIX ""
-#endif
 
 /*
  * We specify the memory operand as both input and output because the memory
@@ -41,8 +35,7 @@ static inline void set_bit(int nr, volat
 static inline void set_bit(int nr, volatile void *addr)
 {
     asm volatile (
-        LOCK_PREFIX
-        "btsl %1,%0"
+        "lock; btsl %1,%0"
         : "=m" (ADDR)
         : "Ir" (nr), "m" (ADDR) : "memory");
 }
@@ -85,8 +78,7 @@ static inline void clear_bit(int nr, vol
 static inline void clear_bit(int nr, volatile void *addr)
 {
     asm volatile (
-        LOCK_PREFIX
-        "btrl %1,%0"
+        "lock; btrl %1,%0"
         : "=m" (ADDR)
         : "Ir" (nr), "m" (ADDR) : "memory");
 }
@@ -152,8 +144,7 @@ static inline void change_bit(int nr, vo
 static inline void change_bit(int nr, volatile void *addr)
 {
     asm volatile (
-        LOCK_PREFIX
-        "btcl %1,%0"
+        "lock; btcl %1,%0"
         : "=m" (ADDR)
         : "Ir" (nr), "m" (ADDR) : "memory");
 }
@@ -175,8 +166,7 @@ static inline int test_and_set_bit(int n
     int oldbit;
 
     asm volatile (
-        LOCK_PREFIX
-        "btsl %2,%1\n\tsbbl %0,%0"
+        "lock; btsl %2,%1\n\tsbbl %0,%0"
         : "=r" (oldbit), "=m" (ADDR)
         : "Ir" (nr), "m" (ADDR) : "memory");
     return oldbit;
@@ -223,8 +213,7 @@ static inline int test_and_clear_bit(int
     int oldbit;
 
     asm volatile (
-        LOCK_PREFIX
-        "btrl %2,%1\n\tsbbl %0,%0"
+        "lock; btrl %2,%1\n\tsbbl %0,%0"
         : "=r" (oldbit), "=m" (ADDR)
         : "Ir" (nr), "m" (ADDR) : "memory");
     return oldbit;
@@ -287,8 +276,7 @@ static inline int test_and_change_bit(in
     int oldbit;
 
     asm volatile (
-        LOCK_PREFIX
-        "btcl %2,%1\n\tsbbl %0,%0"
+        "lock; btcl %2,%1\n\tsbbl %0,%0"
         : "=r" (oldbit), "=m" (ADDR)
         : "Ir" (nr), "m" (ADDR) : "memory");
     return oldbit;
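
Because the btsl above is lock'd, the test and the set happen as a single atomic step, so test_and_set_bit() can serve as a try-lock: only the caller that saw the old bit as 0 proceeds. A hypothetical caller (OBJ_LOCKED and obj->flags are illustrative names):

    #define OBJ_LOCKED 0   /* bit index, illustrative */

    if ( !test_and_set_bit(OBJ_LOCKED, &obj->flags) )
    {
        /* ... critical section: this CPU saw the bit go 0 -> 1 ... */
        clear_bit(OBJ_LOCKED, &obj->flags);
    }
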
diff -r 669c2f83b86c -r 04f15c2de8fa xen/include/asm-x86/system.h
--- a/xen/include/asm-x86/system.h      Thu Dec 16 18:37:30 2010 +0000
+++ b/xen/include/asm-x86/system.h      Thu Dec 16 18:46:55 2010 +0000
@@ -91,37 +91,37 @@ static always_inline unsigned long __cmp
     switch ( size )
     {
     case 1:
-        asm volatile ( LOCK_PREFIX "cmpxchgb %b1,%2"
+        asm volatile ( "lock; cmpxchgb %b1,%2"
                        : "=a" (prev)
                        : "q" (new), "m" (*__xg((volatile void *)ptr)),
                        "0" (old)
                        : "memory" );
         return prev;
     case 2:
-        asm volatile ( LOCK_PREFIX "cmpxchgw %w1,%2"
-                       : "=a" (prev)
-                       : "r" (new), "m" (*__xg((volatile void *)ptr)),
-                       "0" (old)
-                       : "memory" );
-        return prev;
-#if defined(__i386__)
-    case 4:
-        asm volatile ( LOCK_PREFIX "cmpxchgl %1,%2"
-                       : "=a" (prev)
-                       : "r" (new), "m" (*__xg((volatile void *)ptr)),
-                       "0" (old)
-                       : "memory" );
-        return prev;
-#elif defined(__x86_64__)
-    case 4:
-        asm volatile ( LOCK_PREFIX "cmpxchgl %k1,%2"
+        asm volatile ( "lock; cmpxchgw %w1,%2"
+                       : "=a" (prev)
+                       : "r" (new), "m" (*__xg((volatile void *)ptr)),
+                       "0" (old)
+                       : "memory" );
+        return prev;
+#if defined(__i386__)
+    case 4:
+        asm volatile ( "lock; cmpxchgl %1,%2"
+                       : "=a" (prev)
+                       : "r" (new), "m" (*__xg((volatile void *)ptr)),
+                       "0" (old)
+                       : "memory" );
+        return prev;
+#elif defined(__x86_64__)
+    case 4:
+        asm volatile ( "lock; cmpxchgl %k1,%2"
                        : "=a" (prev)
                        : "r" (new), "m" (*__xg((volatile void *)ptr)),
                        "0" (old)
                        : "memory" );
         return prev;
     case 8:
-        asm volatile ( LOCK_PREFIX "cmpxchgq %1,%2"
+        asm volatile ( "lock; cmpxchgq %1,%2"
                        : "=a" (prev)
                        : "r" (new), "m" (*__xg((volatile void *)ptr)),
                        "0" (old)
diff -r 669c2f83b86c -r 04f15c2de8fa xen/include/asm-x86/x86_32/system.h
--- a/xen/include/asm-x86/x86_32/system.h       Thu Dec 16 18:37:30 2010 +0000
+++ b/xen/include/asm-x86/x86_32/system.h       Thu Dec 16 18:46:55 2010 +0000
@@ -6,7 +6,7 @@ static always_inline unsigned long long 
 {
     unsigned long long prev;
     asm volatile (
-        LOCK_PREFIX "cmpxchg8b %3"
+        "lock; cmpxchg8b %3"
         : "=A" (prev)
         : "c" ((u32)(new>>32)), "b" ((u32)new),
           "m" (*__xg((volatile void *)ptr)), "0" (old)
@@ -43,7 +43,7 @@ static always_inline unsigned long long 
  */
 #define __cmpxchg_user(_p,_o,_n,_isuff,_oppre,_regtype)                 \
     asm volatile (                                                      \
-        "1: " LOCK_PREFIX "cmpxchg"_isuff" %"_oppre"2,%3\n"             \
+        "1: lock; cmpxchg"_isuff" %"_oppre"2,%3\n"                      \
         "2:\n"                                                          \
         ".section .fixup,\"ax\"\n"                                      \
         "3:     movl $1,%1\n"                                           \
@@ -72,7 +72,7 @@ static always_inline unsigned long long 
         break;                                                          \
     case 8:                                                             \
         asm volatile (                                                  \
-            "1: " LOCK_PREFIX "cmpxchg8b %4\n"                          \
+            "1: lock; cmpxchg8b %4\n"                                   \
             "2:\n"                                                      \
             ".section .fixup,\"ax\"\n"                                  \
             "3:     movl $1,%1\n"                                       \
diff -r 669c2f83b86c -r 04f15c2de8fa xen/include/asm-x86/x86_64/system.h
--- a/xen/include/asm-x86/x86_64/system.h       Thu Dec 16 18:37:30 2010 +0000
+++ b/xen/include/asm-x86/x86_64/system.h       Thu Dec 16 18:46:55 2010 +0000
@@ -13,7 +13,7 @@
  */
 #define __cmpxchg_user(_p,_o,_n,_isuff,_oppre,_regtype)                 \
     asm volatile (                                                      \
-        "1: " LOCK_PREFIX "cmpxchg"_isuff" %"_oppre"2,%3\n"             \
+        "1: lock; cmpxchg"_isuff" %"_oppre"2,%3\n"                      \
         "2:\n"                                                          \
         ".section .fixup,\"ax\"\n"                                      \
         "3:     movl $1,%1\n"                                           \
