Diffstat (limited to 'arch/x86/include/asm/atomic.h')
-rw-r--r--	arch/x86/include/asm/atomic.h	96
1 file changed, 13 insertions, 83 deletions
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 952a826ac4e..6dd1c7dd047 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -6,6 +6,8 @@
 #include <asm/processor.h>
 #include <asm/alternative.h>
 #include <asm/cmpxchg.h>
+#include <asm/rmwcc.h>
+#include <asm/barrier.h>
 
 /*
  * Atomic operations that C can't guarantee us.  Useful for
@@ -76,12 +78,7 @@ static inline void atomic_sub(int i, atomic_t *v)
  */
 static inline int atomic_sub_and_test(int i, atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : "ir" (i) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
 }
 
 /**
@@ -118,12 +115,7 @@ static inline void atomic_dec(atomic_t *v)
  */
 static inline int atomic_dec_and_test(atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "decl %0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
 }
 
 /**
@@ -136,12 +128,7 @@ static inline int atomic_dec_and_test(atomic_t *v)
  */
 static inline int atomic_inc_and_test(atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "incl %0; sete %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
 }
 
 /**
@@ -155,12 +142,7 @@ static inline int atomic_inc_and_test(atomic_t *v)
  */
 static inline int atomic_add_negative(int i, atomic_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
-		     : "+m" (v->counter), "=qm" (c)
-		     : "ir" (i) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
 }
 
 /**
@@ -172,27 +154,7 @@ static inline int atomic_add_negative(int i, atomic_t *v)
  */
 static inline int atomic_add_return(int i, atomic_t *v)
 {
-	int __i;
-#ifdef CONFIG_M386
-	unsigned long flags;
-	if (unlikely(boot_cpu_data.x86 <= 3))
-		goto no_xadd;
-#endif
-	/* Modern 486+ processor */
-	__i = i;
-	asm volatile(LOCK_PREFIX "xaddl %0, %1"
-		     : "+r" (i), "+m" (v->counter)
-		     : : "memory");
-	return i + __i;
-
-#ifdef CONFIG_M386
-no_xadd: /* Legacy 386 processor */
-	raw_local_irq_save(flags);
-	__i = atomic_read(v);
-	atomic_set(v, i + __i);
-	raw_local_irq_restore(flags);
-	return i + __i;
-#endif
+	return i + xadd(&v->counter, i);
 }
 
 /**
@@ -221,15 +183,15 @@ static inline int atomic_xchg(atomic_t *v, int new)
 }
 
 /**
- * atomic_add_unless - add unless the number is already a given value
+ * __atomic_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
@@ -241,32 +203,7 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 			break;
 		c = old;
 	}
-	return c != (u);
-}
-
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-
-/*
- * atomic_dec_if_positive - decrement by 1 if old value positive
- * @v: pointer of type atomic_t
- *
- * The function returns the old value of *v minus 1, even if
- * the atomic variable, v, was not decremented.
- */
-static inline int atomic_dec_if_positive(atomic_t *v)
-{
-	int c, old, dec;
-	c = atomic_read(v);
-	for (;;) {
-		dec = c - 1;
-		if (unlikely(dec < 0))
-			break;
-		old = atomic_cmpxchg((v), c, dec);
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return dec;
+	return c;
 }
 
 /**
@@ -307,17 +244,10 @@ static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
 		     : : "r" ((unsigned)(mask)), "m" (*(addr))	\
 		     : "memory")
 
-/* Atomic operations are already serializing on x86 */
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
-
 #ifdef CONFIG_X86_32
-# include "atomic64_32.h"
+# include <asm/atomic64_32.h>
 #else
-# include "atomic64_64.h"
+# include <asm/atomic64_64.h>
 #endif
 
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_X86_ATOMIC_H */
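For context on how the converted helpers are typically consumed, here is a minimal, illustrative sketch (not part of this patch): the struct, foo_put(), and the kfree() call are assumed names used only to show the usual reference-counting pattern that relies on atomic_dec_and_test() reporting true only when the counter reaches zero.

#include <linux/atomic.h>
#include <linux/slab.h>

struct foo {
	atomic_t refcount;
	/* ... payload ... */
};

/* Drop one reference; free the object on the final put. */
static void foo_put(struct foo *f)
{
	/*
	 * atomic_dec_and_test() returns true only for the transition to
	 * zero, so exactly one caller ends up freeing the object.
	 */
	if (atomic_dec_and_test(&f->refcount))
		kfree(f);
}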
