Diffstat (limited to 'arch/powerpc/include/asm/atomic.h')
-rw-r--r--  arch/powerpc/include/asm/atomic.h  139
1 file changed, 92 insertions(+), 47 deletions(-)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index b8f152ece02..28992d01292 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -5,13 +5,10 @@
* PowerPC atomic operations
*/
-#include <linux/types.h>
-
#ifdef __KERNEL__
-#include <linux/compiler.h>
-#include <asm/synch.h>
-#include <asm/asm-compat.h>
-#include <asm/system.h>
+#include <linux/types.h>
+#include <asm/cmpxchg.h>
+#include <asm/barrier.h>
#define ATOMIC_INIT(i) { (i) }
@@ -49,13 +46,13 @@ static __inline__ int atomic_add_return(int a, atomic_t *v)
int t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%2 # atomic_add_return\n\
add %0,%1,%0\n"
PPC405_ERR77(0,%2)
" stwcx. %0,0,%2 \n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
: "=&r" (t)
: "r" (a), "r" (&v->counter)
: "cc", "memory");
@@ -85,13 +82,13 @@ static __inline__ int atomic_sub_return(int a, atomic_t *v)
int t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%2 # atomic_sub_return\n\
subf %0,%1,%0\n"
PPC405_ERR77(0,%2)
" stwcx. %0,0,%2 \n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
: "=&r" (t)
: "r" (a), "r" (&v->counter)
: "cc", "memory");
@@ -119,13 +116,13 @@ static __inline__ int atomic_inc_return(atomic_t *v)
int t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%1 # atomic_inc_return\n\
addic %0,%0,1\n"
PPC405_ERR77(0,%1)
" stwcx. %0,0,%1 \n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
: "=&r" (t)
: "r" (&v->counter)
: "cc", "xer", "memory");
@@ -163,13 +160,13 @@ static __inline__ int atomic_dec_return(atomic_t *v)
int t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%1 # atomic_dec_return\n\
addic %0,%0,-1\n"
PPC405_ERR77(0,%1)
" stwcx. %0,0,%1\n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
: "=&r" (t)
: "r" (&v->counter)
: "cc", "xer", "memory");
@@ -181,38 +178,67 @@ static __inline__ int atomic_dec_return(atomic_t *v)
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
- * atomic_add_unless - add unless the number is a given value
+ * __atomic_add_unless - add unless the number is a given value
* @v: pointer of type atomic_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
*/
-static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
int t;
__asm__ __volatile__ (
- PPC_RELEASE_BARRIER
-"1: lwarx %0,0,%1 # atomic_add_unless\n\
+ PPC_ATOMIC_ENTRY_BARRIER
+"1: lwarx %0,0,%1 # __atomic_add_unless\n\
cmpw 0,%0,%3 \n\
beq- 2f \n\
add %0,%2,%0 \n"
PPC405_ERR77(0,%2)
" stwcx. %0,0,%1 \n\
bne- 1b \n"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
" subf %0,%2,%0 \n\
2:"
: "=&r" (t)
: "r" (&v->counter), "r" (a), "r" (u)
: "cc", "memory");
- return t != u;
+ return t;
}
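
[Editor's note: the rename to __atomic_add_unless() goes with the move of atomic_add_unless() into generic code; the wrapper in include/linux/atomic.h of this era looks roughly like the sketch below (reconstructed for illustration, not part of this diff), which is why the arch helper now has to return the old value rather than a boolean.]

/* Generic wrapper (sketch): returns non-zero iff the add actually happened. */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}
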
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+/**
+ * atomic_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1, so long as @v is non-zero.
+ * Returns non-zero if @v was non-zero, and zero otherwise.
+ */
+static __inline__ int atomic_inc_not_zero(atomic_t *v)
+{
+ int t1, t2;
+
+ __asm__ __volatile__ (
+ PPC_ATOMIC_ENTRY_BARRIER
+"1: lwarx %0,0,%2 # atomic_inc_not_zero\n\
+ cmpwi 0,%0,0\n\
+ beq- 2f\n\
+ addic %1,%0,1\n"
+ PPC405_ERR77(0,%2)
+" stwcx. %1,0,%2\n\
+ bne- 1b\n"
+ PPC_ATOMIC_EXIT_BARRIER
+ "\n\
+2:"
+ : "=&r" (t1), "=&r" (t2)
+ : "r" (&v->counter)
+ : "cc", "xer", "memory");
+
+ return t1;
+}
+#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
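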
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
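
[Editor's note: a typical caller of the new atomic_inc_not_zero() helper is lockless reference acquisition; a minimal sketch follows, assuming a hypothetical struct obj with an atomic_t refcnt field (the names are illustrative, not from this patch).]

/* Take a reference only if the object has not already dropped to zero. */
static struct obj *obj_tryget(struct obj *o)
{
	if (o && atomic_inc_not_zero(&o->refcnt))
		return o;	/* refcnt was non-zero and is now bumped */
	return NULL;		/* object is already on its way to being freed */
}
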
@@ -227,7 +253,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
int t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %0,0,%1 # atomic_dec_if_positive\n\
cmpwi %0,1\n\
addi %0,%0,-1\n\
@@ -235,7 +261,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
PPC405_ERR77(0,%1)
" stwcx. %0,0,%1\n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
"\n\
2:" : "=&b" (t)
: "r" (&v->counter)
@@ -243,11 +269,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
return t;
}
-
-#define smp_mb__before_atomic_dec() smp_mb()
-#define smp_mb__after_atomic_dec() smp_mb()
-#define smp_mb__before_atomic_inc() smp_mb()
-#define smp_mb__after_atomic_inc() smp_mb()
+#define atomic_dec_if_positive atomic_dec_if_positive
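
[Editor's note: with the #define advertising atomic_dec_if_positive() to generic code, a caller can use it for credit-style counting; a hedged sketch, where take_credit() is illustrative only.]

/*
 * Consume one credit if any remain: atomic_dec_if_positive() only stores
 * the decrement when the result stays >= 0, and returns that result.
 */
static bool take_credit(atomic_t *credits)
{
	return atomic_dec_if_positive(credits) >= 0;
}
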
#ifdef __powerpc64__
@@ -286,12 +308,12 @@ static __inline__ long atomic64_add_return(long a, atomic64_t *v)
long t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%2 # atomic64_add_return\n\
add %0,%1,%0\n\
stdcx. %0,0,%2 \n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
: "=&r" (t)
: "r" (a), "r" (&v->counter)
: "cc", "memory");
@@ -320,12 +342,12 @@ static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
long t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%2 # atomic64_sub_return\n\
subf %0,%1,%0\n\
stdcx. %0,0,%2 \n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
: "=&r" (t)
: "r" (a), "r" (&v->counter)
: "cc", "memory");
@@ -352,12 +374,12 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
long t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%1 # atomic64_inc_return\n\
addic %0,%0,1\n\
stdcx. %0,0,%1 \n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
: "=&r" (t)
: "r" (&v->counter)
: "cc", "xer", "memory");
@@ -394,12 +416,12 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
long t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%1 # atomic64_dec_return\n\
addic %0,%0,-1\n\
stdcx. %0,0,%1\n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
: "=&r" (t)
: "r" (&v->counter)
: "cc", "xer", "memory");
@@ -419,13 +441,13 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
long t;
__asm__ __volatile__(
- PPC_RELEASE_BARRIER
+ PPC_ATOMIC_ENTRY_BARRIER
"1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\
addic. %0,%0,-1\n\
blt- 2f\n\
stdcx. %0,0,%1\n\
bne- 1b"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
"\n\
2:" : "=&r" (t)
: "r" (&v->counter)
@@ -444,21 +466,21 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
*/
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
long t;
__asm__ __volatile__ (
- PPC_RELEASE_BARRIER
-"1: ldarx %0,0,%1 # atomic_add_unless\n\
+ PPC_ATOMIC_ENTRY_BARRIER
+"1: ldarx %0,0,%1 # __atomic_add_unless\n\
cmpd 0,%0,%3 \n\
beq- 2f \n\
add %0,%2,%0 \n"
" stdcx. %0,0,%1 \n\
bne- 1b \n"
- PPC_ACQUIRE_BARRIER
+ PPC_ATOMIC_EXIT_BARRIER
" subf %0,%2,%0 \n\
2:"
: "=&r" (t)
@@ -468,13 +490,36 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
return t != u;
}
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+/**
+ * atomic64_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1, so long as @v is non-zero.
+ * Returns non-zero if @v was non-zero, and zero otherwise.
+ */
+static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
+{
+ long t1, t2;
+
+ __asm__ __volatile__ (
+ PPC_ATOMIC_ENTRY_BARRIER
+"1: ldarx %0,0,%2 # atomic64_inc_not_zero\n\
+ cmpdi 0,%0,0\n\
+ beq- 2f\n\
+ addic %1,%0,1\n\
+ stdcx. %1,0,%2\n\
+ bne- 1b\n"
+ PPC_ATOMIC_EXIT_BARRIER
+ "\n\
+2:"
+ : "=&r" (t1), "=&r" (t2)
+ : "r" (&v->counter)
+ : "cc", "xer", "memory");
-#else /* __powerpc64__ */
-#include <asm-generic/atomic64.h>
+ return t1;
+}
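
[Editor's note: the 64-bit variant mirrors the 32-bit version above, with ldarx/stdcx. and cmpdi in place of lwarx/stwcx. and cmpwi; usage is analogous, as in this sketch (grab_ref64() is illustrative only).]

/* 64-bit counterpart of the refcount pattern shown earlier. */
static bool grab_ref64(atomic64_t *ref)
{
	return atomic64_inc_not_zero(ref) != 0;
}
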
#endif /* __powerpc64__ */
-#include <asm-generic/atomic-long.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */