Diffstat (limited to 'arch/arm/include/asm/cmpxchg.h')
-rw-r--r--	arch/arm/include/asm/cmpxchg.h	75
1 file changed, 34 insertions, 41 deletions
diff --git a/arch/arm/include/asm/cmpxchg.h b/arch/arm/include/asm/cmpxchg.h
index d41d7cbf0ad..abb2c3769b0 100644
--- a/arch/arm/include/asm/cmpxchg.h
+++ b/arch/arm/include/asm/cmpxchg.h
@@ -2,6 +2,7 @@
 #define __ASM_ARM_CMPXCHG_H
 
 #include <linux/irqflags.h>
+#include <linux/prefetch.h>
 #include <asm/barrier.h>
 
 #if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
@@ -35,6 +36,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 #endif
 
 	smp_mb();
+	prefetchw((const void *)ptr);
 
 	switch (size) {
 #if __LINUX_ARM_ARCH__ >= 6
@@ -138,6 +140,8 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 {
 	unsigned long oldval, res;
 
+	prefetchw((const void *)ptr);
+
 	switch (size) {
 #ifndef CONFIG_CPU_V6	/* min ARCH >= ARMv6K */
 	case 1:
@@ -223,45 +227,32 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 	return ret;
 }
 
-#define cmpxchg_local(ptr,o,n)						\
-	((__typeof__(*(ptr)))__cmpxchg_local((ptr),			\
-					     (unsigned long)(o),	\
-					     (unsigned long)(n),	\
-					     sizeof(*(ptr))))
-
-#ifndef CONFIG_CPU_V6	/* min ARCH >= ARMv6K */
-
-/*
- * Note : ARMv7-M (currently unsupported by Linux) does not support
- * ldrexd/strexd. If ARMv7-M is ever supported by the Linux kernel, it should
- * not be allowed to use __cmpxchg64.
- */
-static inline unsigned long long __cmpxchg64(volatile void *ptr,
+static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
 					     unsigned long long old,
 					     unsigned long long new)
 {
-	register unsigned long long oldval asm("r0");
-	register unsigned long long __old asm("r2") = old;
-	register unsigned long long __new asm("r4") = new;
+	unsigned long long oldval;
 	unsigned long res;
 
-	do {
-		asm volatile(
-		"	@ __cmpxchg8\n"
-		"	ldrexd	%1, %H1, [%2]\n"
-		"	mov	%0, #0\n"
-		"	teq	%1, %3\n"
-		"	teqeq	%H1, %H3\n"
-		"	strexdeq %0, %4, %H4, [%2]\n"
-		: "=&r" (res), "=&r" (oldval)
-		: "r" (ptr), "Ir" (__old), "r" (__new)
-		: "memory", "cc");
-	} while (res);
+	prefetchw(ptr);
+
+	__asm__ __volatile__(
+"1:	ldrexd		%1, %H1, [%3]\n"
+"	teq		%1, %4\n"
+"	teqeq		%H1, %H4\n"
+"	bne		2f\n"
+"	strexd		%0, %5, %H5, [%3]\n"
+"	teq		%0, #0\n"
+"	bne		1b\n"
+"2:"
+	: "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
+	: "r" (ptr), "r" (old), "r" (new)
+	: "cc");
 
 	return oldval;
 }
 
-static inline unsigned long long __cmpxchg64_mb(volatile void *ptr,
+static inline unsigned long long __cmpxchg64_mb(unsigned long long *ptr,
 						unsigned long long old,
 						unsigned long long new)
 {
@@ -274,21 +265,23 @@ static inline unsigned long long __cmpxchg64_mb(volatile void *ptr,
 	return ret;
 }
 
-#define cmpxchg64(ptr,o,n)						\
+#define cmpxchg_local(ptr,o,n)						\
+	((__typeof__(*(ptr)))__cmpxchg_local((ptr),			\
+					     (unsigned long)(o),	\
+					     (unsigned long)(n),	\
+					     sizeof(*(ptr))))
+
+#define cmpxchg64(ptr, o, n)						\
 	((__typeof__(*(ptr)))__cmpxchg64_mb((ptr),			\
-					(unsigned long long)(o),	\
-					(unsigned long long)(n)))
+					    (unsigned long long)(o),	\
+					    (unsigned long long)(n)))
 
-#define cmpxchg64_local(ptr,o,n)					\
+#define cmpxchg64_relaxed(ptr, o, n)					\
 	((__typeof__(*(ptr)))__cmpxchg64((ptr),				\
-					(unsigned long long)(o),	\
-					(unsigned long long)(n)))
-
-#else /* min ARCH = ARMv6 */
+					 (unsigned long long)(o),	\
+					 (unsigned long long)(n)))
 
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
-
-#endif
+#define cmpxchg64_local(ptr, o, n)	cmpxchg64_relaxed((ptr), (o), (n))
 
 #endif	/* __LINUX_ARM_ARCH__ >= 6 */
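
The rewritten __cmpxchg64() above is the standard load-exclusive/store-exclusive retry loop: ldrexd reads both halves of the 64-bit value, teq/teqeq compares them against the expected value, and strexd publishes the new value only if the exclusive monitor still holds the reservation, branching back to retry on failure. The prefetchw() calls hint the cache line in (ideally in a writable state) before the exclusive sequence begins. cmpxchg64() wraps this with full barriers via __cmpxchg64_mb(), while cmpxchg64_relaxed() maps straight to the barrier-less __cmpxchg64(). As a minimal usage sketch, not part of this patch, the snippet below builds a lock-free 64-bit counter on top of cmpxchg64(); struct stat64_ctr and stat64_add() are hypothetical names invented for illustration, and READ_ONCE() is assumed available from linux/compiler.h.

/*
 * Minimal usage sketch only -- not part of this patch. The type and
 * function names below are hypothetical, invented for illustration.
 */
#include <linux/atomic.h>
#include <linux/compiler.h>
#include <linux/types.h>

struct stat64_ctr {
	u64 count;
};

static void stat64_add(struct stat64_ctr *ctr, u64 delta)
{
	u64 old, new;

	do {
		old = READ_ONCE(ctr->count);	/* snapshot the current value */
		new = old + delta;
		/*
		 * cmpxchg64() returns the value it actually observed at
		 * &ctr->count; the store happens only if that value still
		 * equals 'old' -- the ldrexd/strexd loop shown in the diff.
		 */
	} while (cmpxchg64(&ctr->count, old, new) != old);
}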
