Diffstat (limited to 'arch/mips/include/asm/bitops.h'):
-rw-r--r--  arch/mips/include/asm/bitops.h  440
1 file changed, 178 insertions(+), 262 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 84a383806b2..7c8816f7b7c 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -14,10 +14,8 @@
#endif
#include <linux/compiler.h>
-#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
-#include <asm/bug.h>
#include <asm/byteorder.h> /* sigh ... */
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
@@ -28,22 +26,33 @@
#define SZLONG_MASK 31UL
#define __LL "ll "
#define __SC "sc "
-#define __INS "ins "
-#define __EXT "ext "
+#define __INS "ins "
+#define __EXT "ext "
#elif _MIPS_SZLONG == 64
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL "lld "
#define __SC "scd "
-#define __INS "dins "
-#define __EXT "dext "
+#define __INS "dins "
+#define __EXT "dext "
#endif
/*
- * clear_bit() doesn't provide any barrier for the compiler.
+ * These are the "slower" versions of the functions and are in bitops.c.
+ * These functions call raw_local_irq_{save,restore}().
*/
-#define smp_mb__before_clear_bit() smp_llsc_mb()
-#define smp_mb__after_clear_bit() smp_llsc_mb()
+void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
+void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
+void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
+int __mips_test_and_set_bit(unsigned long nr,
+ volatile unsigned long *addr);
+int __mips_test_and_set_bit_lock(unsigned long nr,
+ volatile unsigned long *addr);
+int __mips_test_and_clear_bit(unsigned long nr,
+ volatile unsigned long *addr);
+int __mips_test_and_change_bit(unsigned long nr,
+ volatile unsigned long *addr);
+
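For reference, the out-of-line fallbacks declared above use the same disable-interrupts sequence that this patch removes from the inline non-LL/SC paths below; a hedged sketch of __mips_set_bit as it would appear in bitops.c (mirroring the removed inline code, not necessarily the exact in-tree source):

void __mips_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr;
	unsigned long mask = 1UL << (nr & SZLONG_MASK);
	unsigned long flags;

	a += nr >> SZLONG_LOG;		/* select the word holding bit nr */
	raw_local_irq_save(flags);	/* atomicity on CPUs without ll/sc */
	*a |= mask;
	raw_local_irq_restore(flags);
}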
/*
* set_bit - Atomically set a bit in memory
@@ -58,12 +67,12 @@
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
- unsigned short bit = nr & SZLONG_MASK;
+ int bit = nr & SZLONG_MASK;
unsigned long temp;
if (kernel_uses_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__(
- " .set mips3 \n"
+ " .set arch=r4000 \n"
"1: " __LL "%0, %1 # set_bit \n"
" or %0, %2 \n"
" " __SC "%0, %1 \n"
@@ -73,41 +82,28 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
: "ir" (1UL << bit), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
- __asm__ __volatile__(
- "1: " __LL "%0, %1 # set_bit \n"
- " " __INS "%0, %4, %2, 1 \n"
- " " __SC "%0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- : "=&r" (temp), "=m" (*m)
- : "ir" (bit), "m" (*m), "r" (~0));
+ do {
+ __asm__ __volatile__(
+ " " __LL "%0, %1 # set_bit \n"
+ " " __INS "%0, %3, %2, 1 \n"
+ " " __SC "%0, %1 \n"
+ : "=&r" (temp), "+m" (*m)
+ : "ir" (bit), "r" (~0));
+ } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 */
} else if (kernel_uses_llsc) {
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: " __LL "%0, %1 # set_bit \n"
- " or %0, %2 \n"
- " " __SC "%0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (*m)
- : "ir" (1UL << bit), "m" (*m));
- } else {
- volatile unsigned long *a = addr;
- unsigned long mask;
- unsigned long flags;
-
- a += nr >> SZLONG_LOG;
- mask = 1UL << bit;
- raw_local_irq_save(flags);
- *a |= mask;
- raw_local_irq_restore(flags);
- }
+ do {
+ __asm__ __volatile__(
+ " .set arch=r4000 \n"
+ " " __LL "%0, %1 # set_bit \n"
+ " or %0, %2 \n"
+ " " __SC "%0, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (temp), "+m" (*m)
+ : "ir" (1UL << bit));
+ } while (unlikely(!temp));
+ } else
+ __mips_set_bit(nr, addr);
}
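The MIPS R2 path above relies on ins: with source ~0, width 1 and position 'bit' it overwrites exactly one bit with 1, so no separate mask register is needed. The plain-C meaning is illustrated below; in the real code atomicity comes from ll/sc and the do/while retry on the sc result left in temp:

	temp = *m;		/* ll: load-linked			*/
	temp |= 1UL << bit;	/* ins %0, %3(~0), %2(bit), 1		*/
	*m = temp;		/* sc: stores only if *m was undisturbed,
				   writing 1/0 back into temp		*/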
/*
@@ -117,62 +113,49 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
*
* clear_bit() is atomic and may not be reordered. However, it does
* not contain a memory barrier, so if it is used for locking purposes,
- * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
+ * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
* in order to ensure changes are visible on other processors.
*/
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
- unsigned short bit = nr & SZLONG_MASK;
+ int bit = nr & SZLONG_MASK;
unsigned long temp;
if (kernel_uses_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__(
- " .set mips3 \n"
+ " .set arch=r4000 \n"
"1: " __LL "%0, %1 # clear_bit \n"
" and %0, %2 \n"
" " __SC "%0, %1 \n"
" beqzl %0, 1b \n"
" .set mips0 \n"
- : "=&r" (temp), "=m" (*m)
- : "ir" (~(1UL << bit)), "m" (*m));
+ : "=&r" (temp), "+m" (*m)
+ : "ir" (~(1UL << bit)));
#ifdef CONFIG_CPU_MIPSR2
} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
- __asm__ __volatile__(
- "1: " __LL "%0, %1 # clear_bit \n"
- " " __INS "%0, $0, %2, 1 \n"
- " " __SC "%0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- : "=&r" (temp), "=m" (*m)
- : "ir" (bit), "m" (*m));
+ do {
+ __asm__ __volatile__(
+ " " __LL "%0, %1 # clear_bit \n"
+ " " __INS "%0, $0, %2, 1 \n"
+ " " __SC "%0, %1 \n"
+ : "=&r" (temp), "+m" (*m)
+ : "ir" (bit));
+ } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 */
} else if (kernel_uses_llsc) {
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: " __LL "%0, %1 # clear_bit \n"
- " and %0, %2 \n"
- " " __SC "%0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (*m)
- : "ir" (~(1UL << bit)), "m" (*m));
- } else {
- volatile unsigned long *a = addr;
- unsigned long mask;
- unsigned long flags;
-
- a += nr >> SZLONG_LOG;
- mask = 1UL << bit;
- raw_local_irq_save(flags);
- *a &= ~mask;
- raw_local_irq_restore(flags);
- }
+ do {
+ __asm__ __volatile__(
+ " .set arch=r4000 \n"
+ " " __LL "%0, %1 # clear_bit \n"
+ " and %0, %2 \n"
+ " " __SC "%0, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (temp), "+m" (*m)
+ : "ir" (~(1UL << bit)));
+ } while (unlikely(!temp));
+ } else
+ __mips_clear_bit(nr, addr);
}
/*
@@ -185,7 +168,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
*/
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
- smp_mb__before_clear_bit();
+ smp_mb__before_atomic();
clear_bit(nr, addr);
}
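Hedged usage sketch with hypothetical names (MY_LOCK_BIT, my_flags): when a plain clear_bit() releases a lock-style flag, the caller supplies the ordering with the renamed barrier, which is exactly what clear_bit_unlock() bundles:

	/* either: explicit barrier plus plain clear_bit() ...		*/
	smp_mb__before_atomic();
	clear_bit(MY_LOCK_BIT, &my_flags);

	/* ... or equivalently, the combined release primitive		*/
	clear_bit_unlock(MY_LOCK_BIT, &my_flags);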
@@ -200,48 +183,37 @@ static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *ad
*/
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
- unsigned short bit = nr & SZLONG_MASK;
+ int bit = nr & SZLONG_MASK;
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
__asm__ __volatile__(
- " .set mips3 \n"
+ " .set arch=r4000 \n"
"1: " __LL "%0, %1 # change_bit \n"
" xor %0, %2 \n"
" " __SC "%0, %1 \n"
" beqzl %0, 1b \n"
" .set mips0 \n"
- : "=&r" (temp), "=m" (*m)
- : "ir" (1UL << bit), "m" (*m));
+ : "=&r" (temp), "+m" (*m)
+ : "ir" (1UL << bit));
} else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: " __LL "%0, %1 # change_bit \n"
- " xor %0, %2 \n"
- " " __SC "%0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (*m)
- : "ir" (1UL << bit), "m" (*m));
- } else {
- volatile unsigned long *a = addr;
- unsigned long mask;
- unsigned long flags;
-
- a += nr >> SZLONG_LOG;
- mask = 1UL << bit;
- raw_local_irq_save(flags);
- *a ^= mask;
- raw_local_irq_restore(flags);
- }
+ do {
+ __asm__ __volatile__(
+ " .set arch=r4000 \n"
+ " " __LL "%0, %1 # change_bit \n"
+ " xor %0, %2 \n"
+ " " __SC "%0, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (temp), "+m" (*m)
+ : "ir" (1UL << bit));
+ } while (unlikely(!temp));
+ } else
+ __mips_change_bit(nr, addr);
}
/*
@@ -255,59 +227,45 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
static inline int test_and_set_bit(unsigned long nr,
volatile unsigned long *addr)
{
- unsigned short bit = nr & SZLONG_MASK;
+ int bit = nr & SZLONG_MASK;
unsigned long res;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
__asm__ __volatile__(
- " .set mips3 \n"
+ " .set arch=r4000 \n"
"1: " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
" .set mips0 \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "r" (1UL << bit), "m" (*m)
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "r" (1UL << bit)
: "memory");
} else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
- __asm__ __volatile__(
- " .set push \n"
- " .set noreorder \n"
- " .set mips3 \n"
- "1: " __LL "%0, %1 # test_and_set_bit \n"
- " or %2, %0, %3 \n"
- " " __SC "%2, %1 \n"
- " beqz %2, 2f \n"
- " and %2, %0, %3 \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " nop \n"
- " .previous \n"
- " .set pop \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "r" (1UL << bit), "m" (*m)
- : "memory");
- } else {
- volatile unsigned long *a = addr;
- unsigned long mask;
- unsigned long flags;
-
- a += nr >> SZLONG_LOG;
- mask = 1UL << bit;
- raw_local_irq_save(flags);
- res = (mask & *a);
- *a |= mask;
- raw_local_irq_restore(flags);
- }
+ do {
+ __asm__ __volatile__(
+ " .set arch=r4000 \n"
+ " " __LL "%0, %1 # test_and_set_bit \n"
+ " or %2, %0, %3 \n"
+ " " __SC "%2, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "r" (1UL << bit)
+ : "memory");
+ } while (unlikely(!res));
+
+ res = temp & (1UL << bit);
+ } else
+ res = __mips_test_and_set_bit(nr, addr);
smp_llsc_mb();
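Note how the rewritten loop above recovers its return value: sc reuses res as its success flag (1 on success, 0 on failure), so the loop spins on !res, and only afterwards is the old bit state recomputed from temp, the value ll loaded on the final, successful iteration. A plain-C illustration (atomicity in the real code comes from ll/sc):

	do {
		temp = *m;			/* ll			*/
		res  = temp | (1UL << bit);	/* or			*/
		/* sc stores res, then overwrites res with 1/0		*/
	} while (unlikely(!res));
	res = temp & (1UL << bit);		/* old value of the bit	*/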
@@ -325,7 +283,7 @@ static inline int test_and_set_bit(unsigned long nr,
static inline int test_and_set_bit_lock(unsigned long nr,
volatile unsigned long *addr)
{
- unsigned short bit = nr & SZLONG_MASK;
+ int bit = nr & SZLONG_MASK;
unsigned long res;
if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -333,49 +291,35 @@ static inline int test_and_set_bit_lock(unsigned long nr,
unsigned long temp;
__asm__ __volatile__(
- " .set mips3 \n"
+ " .set arch=r4000 \n"
"1: " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
" .set mips0 \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "r" (1UL << bit), "m" (*m)
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "r" (1UL << bit)
: "memory");
} else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
- __asm__ __volatile__(
- " .set push \n"
- " .set noreorder \n"
- " .set mips3 \n"
- "1: " __LL "%0, %1 # test_and_set_bit \n"
- " or %2, %0, %3 \n"
- " " __SC "%2, %1 \n"
- " beqz %2, 2f \n"
- " and %2, %0, %3 \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " nop \n"
- " .previous \n"
- " .set pop \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "r" (1UL << bit), "m" (*m)
- : "memory");
- } else {
- volatile unsigned long *a = addr;
- unsigned long mask;
- unsigned long flags;
-
- a += nr >> SZLONG_LOG;
- mask = 1UL << bit;
- raw_local_irq_save(flags);
- res = (mask & *a);
- *a |= mask;
- raw_local_irq_restore(flags);
- }
+ do {
+ __asm__ __volatile__(
+ " .set arch=r4000 \n"
+ " " __LL "%0, %1 # test_and_set_bit \n"
+ " or %2, %0, %3 \n"
+ " " __SC "%2, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "r" (1UL << bit)
+ : "memory");
+ } while (unlikely(!res));
+
+ res = temp & (1UL << bit);
+ } else
+ res = __mips_test_and_set_bit_lock(nr, addr);
smp_llsc_mb();
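A hedged usage sketch with hypothetical names: test_and_set_bit_lock() (acquire semantics, no leading barrier) pairs with clear_bit_unlock() (release semantics) to guard a simple lock bit; this only illustrates the intended pairing, not recommended kernel locking code:

	while (test_and_set_bit_lock(MY_LOCK_BIT, &my_flags))
		cpu_relax();		/* spin until the bit was clear	*/
	/* ... critical section ... */
	clear_bit_unlock(MY_LOCK_BIT, &my_flags);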
@@ -392,79 +336,63 @@ static inline int test_and_set_bit_lock(unsigned long nr,
static inline int test_and_clear_bit(unsigned long nr,
volatile unsigned long *addr)
{
- unsigned short bit = nr & SZLONG_MASK;
+ int bit = nr & SZLONG_MASK;
unsigned long res;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
__asm__ __volatile__(
- " .set mips3 \n"
+ " .set arch=r4000 \n"
"1: " __LL "%0, %1 # test_and_clear_bit \n"
" or %2, %0, %3 \n"
" xor %2, %3 \n"
- " " __SC "%2, %1 \n"
+ " " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
" .set mips0 \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "r" (1UL << bit), "m" (*m)
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "r" (1UL << bit)
: "memory");
#ifdef CONFIG_CPU_MIPSR2
} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
- __asm__ __volatile__(
- "1: " __LL "%0, %1 # test_and_clear_bit \n"
- " " __EXT "%2, %0, %3, 1 \n"
- " " __INS "%0, $0, %3, 1 \n"
- " " __SC "%0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "ir" (bit), "m" (*m)
- : "memory");
+ do {
+ __asm__ __volatile__(
+ " " __LL "%0, %1 # test_and_clear_bit \n"
+ " " __EXT "%2, %0, %3, 1 \n"
+ " " __INS "%0, $0, %3, 1 \n"
+ " " __SC "%0, %1 \n"
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "ir" (bit)
+ : "memory");
+ } while (unlikely(!temp));
#endif
} else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
- __asm__ __volatile__(
- " .set push \n"
- " .set noreorder \n"
- " .set mips3 \n"
- "1: " __LL "%0, %1 # test_and_clear_bit \n"
- " or %2, %0, %3 \n"
- " xor %2, %3 \n"
- " " __SC "%2, %1 \n"
- " beqz %2, 2f \n"
- " and %2, %0, %3 \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " nop \n"
- " .previous \n"
- " .set pop \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "r" (1UL << bit), "m" (*m)
- : "memory");
- } else {
- volatile unsigned long *a = addr;
- unsigned long mask;
- unsigned long flags;
-
- a += nr >> SZLONG_LOG;
- mask = 1UL << bit;
- raw_local_irq_save(flags);
- res = (mask & *a);
- *a &= ~mask;
- raw_local_irq_restore(flags);
- }
+ do {
+ __asm__ __volatile__(
+ " .set arch=r4000 \n"
+ " " __LL "%0, %1 # test_and_clear_bit \n"
+ " or %2, %0, %3 \n"
+ " xor %2, %3 \n"
+ " " __SC "%2, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "r" (1UL << bit)
+ : "memory");
+ } while (unlikely(!res));
+
+ res = temp & (1UL << bit);
+ } else
+ res = __mips_test_and_clear_bit(nr, addr);
smp_llsc_mb();
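In the MIPS R2 branch above, ext pulls the old bit out into res while ins with register $0 overwrites it with zero, so the test and the clear operate on the same loaded word; the plain-C meaning (illustrative only, real atomicity via ll/sc):

	res  = (temp >> bit) & 1UL;	/* ext %2, %0, %3(bit), 1	*/
	temp &= ~(1UL << bit);		/* ins %0, $0, %3(bit), 1	*/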
@@ -482,59 +410,45 @@ static inline int test_and_clear_bit(unsigned long nr,
static inline int test_and_change_bit(unsigned long nr,
volatile unsigned long *addr)
{
- unsigned short bit = nr & SZLONG_MASK;
+ int bit = nr & SZLONG_MASK;
unsigned long res;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
__asm__ __volatile__(
- " .set mips3 \n"
+ " .set arch=r4000 \n"
"1: " __LL "%0, %1 # test_and_change_bit \n"
" xor %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
" .set mips0 \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "r" (1UL << bit), "m" (*m)
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "r" (1UL << bit)
: "memory");
} else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
- __asm__ __volatile__(
- " .set push \n"
- " .set noreorder \n"
- " .set mips3 \n"
- "1: " __LL "%0, %1 # test_and_change_bit \n"
- " xor %2, %0, %3 \n"
- " " __SC "\t%2, %1 \n"
- " beqz %2, 2f \n"
- " and %2, %0, %3 \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " nop \n"
- " .previous \n"
- " .set pop \n"
- : "=&r" (temp), "=m" (*m), "=&r" (res)
- : "r" (1UL << bit), "m" (*m)
- : "memory");
- } else {
- volatile unsigned long *a = addr;
- unsigned long mask;
- unsigned long flags;
-
- a += nr >> SZLONG_LOG;
- mask = 1UL << bit;
- raw_local_irq_save(flags);
- res = (mask & *a);
- *a ^= mask;
- raw_local_irq_restore(flags);
- }
+ do {
+ __asm__ __volatile__(
+ " .set arch=r4000 \n"
+ " " __LL "%0, %1 # test_and_change_bit \n"
+ " xor %2, %0, %3 \n"
+ " " __SC "\t%2, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (temp), "+m" (*m), "=&r" (res)
+ : "r" (1UL << bit)
+ : "memory");
+ } while (unlikely(!res));
+
+ res = temp & (1UL << bit);
+ } else
+ res = __mips_test_and_change_bit(nr, addr);
smp_llsc_mb();
@@ -700,10 +614,12 @@ static inline int ffs(int word)
#ifdef __KERNEL__
#include <asm-generic/bitops/sched.h>
-#include <asm-generic/bitops/hweight.h>
-#include <asm-generic/bitops/ext2-non-atomic.h>
+
+#include <asm/arch_hweight.h>
+#include <asm-generic/bitops/const_hweight.h>
+
+#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>
-#include <asm-generic/bitops/minix.h>
#endif /* __KERNEL__ */
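The hweight includes now split constant folding from the runtime implementation: asm/arch_hweight.h supplies the MIPS __arch_hweight*() helpers, while the generic const_hweight wrappers evaluate compile-time-constant arguments at build time. A small hedged usage example:

	unsigned int bits = hweight32(0x0000F0F0UL);	/* population count == 8 */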