Diffstat (limited to 'arch/powerpc/include/asm/atomic.h')
-rw-r--r--   arch/powerpc/include/asm/atomic.h | 158
1 file changed, 102 insertions(+), 56 deletions(-)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index f3fc733758f..28992d01292 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -5,13 +5,10 @@
  * PowerPC atomic operations
  */
 
-typedef struct { int counter; } atomic_t;
-
 #ifdef __KERNEL__
-#include <linux/compiler.h>
-#include <asm/synch.h>
-#include <asm/asm-compat.h>
-#include <asm/system.h>
+#include <linux/types.h>
+#include <asm/cmpxchg.h>
+#include <asm/barrier.h>
 
 #define ATOMIC_INIT(i) { (i) }
 
@@ -49,13 +46,13 @@ static __inline__ int atomic_add_return(int a, atomic_t *v)
         int t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: lwarx %0,0,%2 # atomic_add_return\n\
         add %0,%1,%0\n"
         PPC405_ERR77(0,%2)
 " stwcx. %0,0,%2 \n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         : "=&r" (t)
         : "r" (a), "r" (&v->counter)
         : "cc", "memory");
@@ -85,13 +82,13 @@ static __inline__ int atomic_sub_return(int a, atomic_t *v)
         int t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: lwarx %0,0,%2 # atomic_sub_return\n\
         subf %0,%1,%0\n"
         PPC405_ERR77(0,%2)
 " stwcx. %0,0,%2 \n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         : "=&r" (t)
         : "r" (a), "r" (&v->counter)
         : "cc", "memory");
@@ -111,7 +108,7 @@ static __inline__ void atomic_inc(atomic_t *v)
         bne- 1b"
         : "=&r" (t), "+m" (v->counter)
         : "r" (&v->counter)
-        : "cc");
+        : "cc", "xer");
 }
 
 static __inline__ int atomic_inc_return(atomic_t *v)
@@ -119,16 +116,16 @@ static __inline__ int atomic_inc_return(atomic_t *v)
         int t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: lwarx %0,0,%1 # atomic_inc_return\n\
         addic %0,%0,1\n"
         PPC405_ERR77(0,%1)
 " stwcx. %0,0,%1 \n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         : "=&r" (t)
         : "r" (&v->counter)
-        : "cc", "memory");
+        : "cc", "xer", "memory");
 
         return t;
 }
@@ -155,7 +152,7 @@ static __inline__ void atomic_dec(atomic_t *v)
         bne- 1b"
         : "=&r" (t), "+m" (v->counter)
         : "r" (&v->counter)
-        : "cc");
+        : "cc", "xer");
 }
 
 static __inline__ int atomic_dec_return(atomic_t *v)
@@ -163,16 +160,16 @@ static __inline__ int atomic_dec_return(atomic_t *v)
         int t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: lwarx %0,0,%1 # atomic_dec_return\n\
         addic %0,%0,-1\n"
         PPC405_ERR77(0,%1)
 " stwcx. %0,0,%1\n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         : "=&r" (t)
         : "r" (&v->counter)
-        : "cc", "memory");
+        : "cc", "xer", "memory");
 
         return t;
 }
@@ -181,38 +178,67 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
- * atomic_add_unless - add unless the number is a given value
+ * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
-static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 {
         int t;
 
         __asm__ __volatile__ (
-        LWSYNC_ON_SMP
-"1: lwarx %0,0,%1 # atomic_add_unless\n\
+        PPC_ATOMIC_ENTRY_BARRIER
+"1: lwarx %0,0,%1 # __atomic_add_unless\n\
         cmpw 0,%0,%3 \n\
         beq- 2f \n\
         add %0,%2,%0 \n"
         PPC405_ERR77(0,%2)
 " stwcx. %0,0,%1 \n\
         bne- 1b \n"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
 " subf %0,%2,%0 \n\
 2:"
         : "=&r" (t)
         : "r" (&v->counter), "r" (a), "r" (u)
         : "cc", "memory");
 
-        return t != u;
+        return t;
 }
 
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+/**
+ * atomic_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1, so long as @v is non-zero.
+ * Returns non-zero if @v was non-zero, and zero otherwise.
+ */
+static __inline__ int atomic_inc_not_zero(atomic_t *v)
+{
+        int t1, t2;
+
+        __asm__ __volatile__ (
+        PPC_ATOMIC_ENTRY_BARRIER
+"1: lwarx %0,0,%2 # atomic_inc_not_zero\n\
+        cmpwi 0,%0,0\n\
+        beq- 2f\n\
+        addic %1,%0,1\n"
+        PPC405_ERR77(0,%2)
+" stwcx. %1,0,%2\n\
+        bne- 1b\n"
+        PPC_ATOMIC_EXIT_BARRIER
+        "\n\
+2:"
+        : "=&r" (t1), "=&r" (t2)
+        : "r" (&v->counter)
+        : "cc", "xer", "memory");
+
+        return t1;
+}
+#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
 
 #define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
 #define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
@@ -227,7 +253,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
         int t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: lwarx %0,0,%1 # atomic_dec_if_positive\n\
         cmpwi %0,1\n\
         addi %0,%0,-1\n\
@@ -235,7 +261,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
         PPC405_ERR77(0,%1)
 " stwcx. %0,0,%1\n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         "\n\
 2:" : "=&b" (t)
         : "r" (&v->counter)
@@ -243,16 +269,10 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
 
         return t;
 }
-
-#define smp_mb__before_atomic_dec() smp_mb()
-#define smp_mb__after_atomic_dec() smp_mb()
-#define smp_mb__before_atomic_inc() smp_mb()
-#define smp_mb__after_atomic_inc() smp_mb()
+#define atomic_dec_if_positive atomic_dec_if_positive
 
 #ifdef __powerpc64__
 
-typedef struct { long counter; } atomic64_t;
-
 #define ATOMIC64_INIT(i) { (i) }
 
 static __inline__ long atomic64_read(const atomic64_t *v)
@@ -288,12 +308,12 @@ static __inline__ long atomic64_add_return(long a, atomic64_t *v)
         long t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: ldarx %0,0,%2 # atomic64_add_return\n\
         add %0,%1,%0\n\
         stdcx. %0,0,%2 \n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         : "=&r" (t)
         : "r" (a), "r" (&v->counter)
         : "cc", "memory");
@@ -322,12 +342,12 @@ static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
         long t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: ldarx %0,0,%2 # atomic64_sub_return\n\
         subf %0,%1,%0\n\
         stdcx. %0,0,%2 \n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         : "=&r" (t)
         : "r" (a), "r" (&v->counter)
         : "cc", "memory");
@@ -346,7 +366,7 @@ static __inline__ void atomic64_inc(atomic64_t *v)
         bne- 1b"
         : "=&r" (t), "+m" (v->counter)
         : "r" (&v->counter)
-        : "cc");
+        : "cc", "xer");
 }
 
 static __inline__ long atomic64_inc_return(atomic64_t *v)
@@ -354,15 +374,15 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
         long t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: ldarx %0,0,%1 # atomic64_inc_return\n\
         addic %0,%0,1\n\
         stdcx. %0,0,%1 \n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         : "=&r" (t)
         : "r" (&v->counter)
-        : "cc", "memory");
+        : "cc", "xer", "memory");
 
         return t;
 }
@@ -388,7 +408,7 @@ static __inline__ void atomic64_dec(atomic64_t *v)
         bne- 1b"
         : "=&r" (t), "+m" (v->counter)
         : "r" (&v->counter)
-        : "cc");
+        : "cc", "xer");
 }
 
 static __inline__ long atomic64_dec_return(atomic64_t *v)
@@ -396,15 +416,15 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
         long t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: ldarx %0,0,%1 # atomic64_dec_return\n\
         addic %0,%0,-1\n\
         stdcx. %0,0,%1\n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         : "=&r" (t)
         : "r" (&v->counter)
-        : "cc", "memory");
+        : "cc", "xer", "memory");
 
         return t;
 }
@@ -421,17 +441,17 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
         long t;
 
         __asm__ __volatile__(
-        LWSYNC_ON_SMP
+        PPC_ATOMIC_ENTRY_BARRIER
 "1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\
         addic. %0,%0,-1\n\
         blt- 2f\n\
         stdcx. %0,0,%1\n\
         bne- 1b"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
         "\n\
 2:" : "=&r" (t)
         : "r" (&v->counter)
-        : "cc", "memory");
+        : "cc", "xer", "memory");
 
         return t;
 }
@@ -446,21 +466,21 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
 static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 {
         long t;
 
         __asm__ __volatile__ (
-        LWSYNC_ON_SMP
-"1: ldarx %0,0,%1 # atomic_add_unless\n\
+        PPC_ATOMIC_ENTRY_BARRIER
+"1: ldarx %0,0,%1 # __atomic_add_unless\n\
         cmpd 0,%0,%3 \n\
         beq- 2f \n\
         add %0,%2,%0 \n"
 " stdcx. %0,0,%1 \n\
         bne- 1b \n"
-        ISYNC_ON_SMP
+        PPC_ATOMIC_EXIT_BARRIER
 " subf %0,%2,%0 \n\
 2:"
         : "=&r" (t)
@@ -470,10 +490,36 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
         return t != u;
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+/**
+ * atomic_inc64_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1, so long as @v is non-zero.
+ * Returns non-zero if @v was non-zero, and zero otherwise.
+ */
+static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
+{
+        long t1, t2;
+
+        __asm__ __volatile__ (
+        PPC_ATOMIC_ENTRY_BARRIER
+"1: ldarx %0,0,%2 # atomic64_inc_not_zero\n\
+        cmpdi 0,%0,0\n\
+        beq- 2f\n\
+        addic %1,%0,1\n\
+        stdcx. %1,0,%2\n\
+        bne- 1b\n"
+        PPC_ATOMIC_EXIT_BARRIER
+        "\n\
+2:"
+        : "=&r" (t1), "=&r" (t2)
+        : "r" (&v->counter)
+        : "cc", "xer", "memory");
+
+        return t1;
+}
 
 #endif /* __powerpc64__ */
 
-#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_ATOMIC_H_ */
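
For reference, a minimal caller-side sketch of the two interface changes above; it is not part of the patch. __atomic_add_unless() now returns the counter's old value rather than a boolean, so a boolean add-unless is recovered by comparing that value against @u (roughly what the generic wrappers in include/linux/atomic.h do), and atomic_inc_not_zero() is the usual "take a reference only while the object is still live" primitive. The example_atomic_add_unless(), struct my_obj and my_obj_get() names are hypothetical, used only for illustration.

#include <linux/atomic.h>

/* Boolean add-unless rebuilt on top of __atomic_add_unless(), which now
 * returns the old value: the result is non-zero iff the add happened. */
static inline int example_atomic_add_unless(atomic_t *v, int a, int u)
{
        return __atomic_add_unless(v, a, u) != u;
}

/* Hypothetical refcounted object: atomic_inc_not_zero() lets a reader take
 * a reference only if the refcount has not already dropped to zero. */
struct my_obj {
        atomic_t refcount;      /* reaches zero once the object is being freed */
};

static inline struct my_obj *my_obj_get(struct my_obj *obj)
{
        if (obj && !atomic_inc_not_zero(&obj->refcount))
                return NULL;    /* too late, object is on its way out */
        return obj;
}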
