[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index] [Xen-changelog] [xen-unstable] [XEN] Fix bitops inline asm to specify that memory is clobbered.
# HG changeset patch # User kfraser@xxxxxxxxxxxxxxxxxxxxxxxxxxx # Node ID 698eb277331ca50f4eb514c693e6fae12afdb395 # Parent 66dd34f2f439e49e5a19c77ac7e7085a8b18a226 [XEN] Fix bitops inline asm to specify that memory is clobbered. Necessary because the modified word may not be the one directly addressed by the memory parameter (since the parameter actually points at an array, not a scalar value). The change to set_bit/__set_bit is reported to fix an issue on x460 hardware, as reported (and this fix suggested) by Ryan Harper <ryanh@xxxxxxxxxx> Signed-off-by: Keir Fraser <keir@xxxxxxxxxxxxx> --- xen/include/asm-x86/bitops.h | 57 +++++++++++++++++++++---------------------- 1 files changed, 28 insertions(+), 29 deletions(-) diff -r 66dd34f2f439 -r 698eb277331c xen/include/asm-x86/bitops.h --- a/xen/include/asm-x86/bitops.h Tue Sep 05 12:20:31 2006 -0700 +++ b/xen/include/asm-x86/bitops.h Tue Sep 05 18:28:27 2006 -0700 @@ -6,14 +6,6 @@ */ #include <xen/config.h> - -/* - * These have to be done with inline assembly: that way the bit-setting - * is guaranteed to be atomic. All bit operations return 0 if the bit - * was cleared before the operation and != 0 if it was not. - * - * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1). - */ #ifdef CONFIG_SMP #define LOCK_PREFIX "lock ; " @@ -21,6 +13,13 @@ #define LOCK_PREFIX "" #endif +/* + * We use the "+m" constraint because the memory operand is both read from + * and written to. Since the operand is in fact a word array, we also + * specify "memory" in the clobbers list to indicate that words other than + * the one directly addressed by the memory operand may be modified. 
+ */ + #define ADDR (*(volatile long *) addr) /** @@ -37,8 +36,8 @@ static __inline__ void set_bit(int nr, v { __asm__ __volatile__( LOCK_PREFIX "btsl %1,%0" - :"=m" (ADDR) - :"dIr" (nr)); + :"+m" (ADDR) + :"dIr" (nr) : "memory"); } /** @@ -54,8 +53,8 @@ static __inline__ void __set_bit(int nr, { __asm__( "btsl %1,%0" - :"=m" (ADDR) - :"dIr" (nr)); + :"+m" (ADDR) + :"dIr" (nr) : "memory"); } /** @@ -72,8 +71,8 @@ static __inline__ void clear_bit(int nr, { __asm__ __volatile__( LOCK_PREFIX "btrl %1,%0" - :"=m" (ADDR) - :"dIr" (nr)); + :"+m" (ADDR) + :"dIr" (nr) : "memory"); } /** @@ -89,8 +88,8 @@ static __inline__ void __clear_bit(int n { __asm__( "btrl %1,%0" - :"=m" (ADDR) - :"dIr" (nr)); + :"+m" (ADDR) + :"dIr" (nr) : "memory"); } #define smp_mb__before_clear_bit() barrier() @@ -109,8 +108,8 @@ static __inline__ void __change_bit(int { __asm__ __volatile__( "btcl %1,%0" - :"=m" (ADDR) - :"dIr" (nr)); + :"+m" (ADDR) + :"dIr" (nr) : "memory"); } /** @@ -126,8 +125,8 @@ static __inline__ void change_bit(int nr { __asm__ __volatile__( LOCK_PREFIX "btcl %1,%0" - :"=m" (ADDR) - :"dIr" (nr)); + :"+m" (ADDR) + :"dIr" (nr) : "memory"); } /** @@ -144,7 +143,7 @@ static __inline__ int test_and_set_bit(i __asm__ __volatile__( LOCK_PREFIX "btsl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) + :"=r" (oldbit),"+m" (ADDR) :"dIr" (nr) : "memory"); return oldbit; } @@ -164,8 +163,8 @@ static __inline__ int __test_and_set_bit __asm__( "btsl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) - :"dIr" (nr)); + :"=r" (oldbit),"+m" (ADDR) + :"dIr" (nr) : "memory"); return oldbit; } @@ -183,7 +182,7 @@ static __inline__ int test_and_clear_bit __asm__ __volatile__( LOCK_PREFIX "btrl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) + :"=r" (oldbit),"+m" (ADDR) :"dIr" (nr) : "memory"); return oldbit; } @@ -203,8 +202,8 @@ static __inline__ int __test_and_clear_b __asm__( "btrl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) - :"dIr" (nr)); + :"=r" (oldbit),"+m" (ADDR) + :"dIr" (nr) : 
"memory"); return oldbit; } @@ -215,7 +214,7 @@ static __inline__ int __test_and_change_ __asm__ __volatile__( "btcl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) + :"=r" (oldbit),"+m" (ADDR) :"dIr" (nr) : "memory"); return oldbit; } @@ -234,7 +233,7 @@ static __inline__ int test_and_change_bi __asm__ __volatile__( LOCK_PREFIX "btcl %2,%1\n\tsbbl %0,%0" - :"=r" (oldbit),"=m" (ADDR) + :"=r" (oldbit),"+m" (ADDR) :"dIr" (nr) : "memory"); return oldbit; } @@ -242,7 +241,7 @@ static __inline__ int test_and_change_bi static __inline__ int constant_test_bit(int nr, const volatile void * addr) { - return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0; + return ((1U << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0; } static __inline__ int variable_test_bit(int nr, volatile void * addr) _______________________________________________ Xen-changelog mailing list Xen-changelog@xxxxxxxxxxxxxxxxxxx http://lists.xensource.com/xen-changelog
|
Lists.xenproject.org is hosted with RackSpace, monitoring our
servers 24x7x365 and backed by RackSpace's Fanatical Support®.