diff options
Diffstat (limited to 'arch/x86/include/asm/atomic.h')
| -rw-r--r-- | arch/x86/include/asm/atomic.h | 36 | 
1 file changed, 6 insertions, 30 deletions
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 722aa3b0462..6dd1c7dd047 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -6,6 +6,8 @@
 #include <asm/processor.h>
 #include <asm/alternative.h>
 #include <asm/cmpxchg.h>
+#include <asm/rmwcc.h>
+#include <asm/barrier.h>
 
 /*
  * Atomic operations that C can't guarantee us.  Useful for
@@ -76,12 +78,7 @@ static inline void atomic_sub(int i, atomic_t *v)
  */
 static inline int atomic_sub_and_test(int i, atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : "ir" (i) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
 }
 
 /**
@@ -118,12 +115,7 @@ static inline void atomic_dec(atomic_t *v)
  */
 static inline int atomic_dec_and_test(atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "decl %0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
 }
 
 /**
@@ -136,12 +128,7 @@ static inline int atomic_dec_and_test(atomic_t *v)
  */
 static inline int atomic_inc_and_test(atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "incl %0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
 }
 
 /**
@@ -155,12 +142,7 @@ static inline int atomic_inc_and_test(atomic_t *v)
  */
 static inline int atomic_add_negative(int i, atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : "ir" (i) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
 }
 
 /**
@@ -262,12 +244,6 @@ static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
 		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
 		     : "memory")
 
-/* Atomic operations are already serializing on x86 */
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
-
 #ifdef CONFIG_X86_32
 # include <asm/atomic64_32.h>
 #else
