Diffstat (limited to 'arch/mips/include/asm/bitops.h')
-rw-r--r--	arch/mips/include/asm/bitops.h	185
1 file changed, 63 insertions(+), 122 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 50b4ef288c5..7c8816f7b7c 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -14,10 +14,8 @@
 #endif
 
 #include <linux/compiler.h>
-#include <linux/irqflags.h>
 #include <linux/types.h>
 #include <asm/barrier.h>
-#include <asm/bug.h>
 #include <asm/byteorder.h>		/* sigh ... */
 #include <asm/cpu-features.h>
 #include <asm/sgidefs.h>
@@ -28,22 +26,33 @@
 #define SZLONG_MASK 31UL
 #define __LL		"ll	"
 #define __SC		"sc	"
-#define __INS "ins "
-#define __EXT "ext "
+#define __INS		"ins	"
+#define __EXT		"ext	"
 #elif _MIPS_SZLONG == 64
 #define SZLONG_LOG 6
 #define SZLONG_MASK 63UL
 #define __LL		"lld	"
 #define __SC		"scd	"
-#define __INS "dins "
-#define __EXT "dext "
+#define __INS		"dins	"
+#define __EXT		"dext	"
 #endif
 
 /*
- * clear_bit() doesn't provide any barrier for the compiler.
+ * These are the "slower" versions of the functions and are in bitops.c.
+ * These functions call raw_local_irq_{save,restore}().
  */
-#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
-#define smp_mb__after_clear_bit()	smp_llsc_mb()
+void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
+void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
+void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
+int __mips_test_and_set_bit(unsigned long nr,
+			    volatile unsigned long *addr);
+int __mips_test_and_set_bit_lock(unsigned long nr,
+				 volatile unsigned long *addr);
+int __mips_test_and_clear_bit(unsigned long nr,
+			      volatile unsigned long *addr);
+int __mips_test_and_change_bit(unsigned long nr,
+			       volatile unsigned long *addr);
+
 
 /*
  * set_bit - Atomically set a bit in memory
@@ -58,12 +67,12 @@
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1			# set_bit	\n"
 		"	or	%0, %2					\n"
 		"	" __SC	"%0, %1					\n"
@@ -85,7 +94,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 	} else if (kernel_uses_llsc) {
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1		# set_bit	\n"
 			"	or	%0, %2				\n"
 			"	" __SC	"%0, %1				\n"
@@ -93,17 +102,8 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (1UL << bit));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_set_bit(nr, addr);
 }
 
 /*
@@ -113,18 +113,18 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
  *
  * clear_bit() is atomic and may not be reordered.  However, it does
  * not contain a memory barrier, so if it is used for locking purposes,
- * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
+ * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
  * in order to ensure changes are visible on other processors.
  */
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1			# clear_bit	\n"
 		"	and	%0, %2					\n"
 		"	" __SC "%0, %1					\n"
@@ -146,7 +146,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 	} else if (kernel_uses_llsc) {
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1		# clear_bit	\n"
 			"	and	%0, %2				\n"
 			"	" __SC "%0, %1				\n"
@@ -154,17 +154,8 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (~(1UL << bit)));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a &= ~mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_clear_bit(nr, addr);
 }
 
 /*
@@ -177,7 +168,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
 {
-	smp_mb__before_clear_bit();
+	smp_mb__before_atomic();
 	clear_bit(nr, addr);
 }
 
@@ -192,14 +183,14 @@ static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1		# change_bit	\n"
 		"	xor	%0, %2					\n"
 		"	" __SC	"%0, %1					\n"
@@ -213,7 +204,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1		# change_bit	\n"
 			"	xor	%0, %2				\n"
 			"	" __SC	"%0, %1				\n"
@@ -221,17 +212,8 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (1UL << bit));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a ^= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_change_bit(nr, addr);
 }
 
 /*
@@ -245,7 +227,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -255,7 +237,7 @@
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
 		"	or	%2, %0, %3				\n"
 		"	" __SC	"%2, %1					\n"
@@ -271,7 +253,7 @@
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1	# test_and_set_bit	\n"
 			"	or	%2, %0, %3			\n"
 			"	" __SC	"%2, %1				\n"
@@ -282,18 +264,8 @@ static inline int test_and_set_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_set_bit(nr, addr);
 
 	smp_llsc_mb();
 
@@ -311,7 +283,7 @@
 static inline int test_and_set_bit_lock(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -319,7 +291,7 @@
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
 		"	or	%2, %0, %3				\n"
 		"	" __SC	"%2, %1					\n"
@@ -335,7 +307,7 @@
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1	# test_and_set_bit	\n"
 			"	or	%2, %0, %3			\n"
 			"	" __SC	"%2, %1				\n"
@@ -346,18 +318,8 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_set_bit_lock(nr, addr);
 
 	smp_llsc_mb();
 
@@ -374,7 +336,7 @@
 static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -384,11 +346,11 @@
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
 		"	or	%2, %0, %3				\n"
 		"	xor	%2, %3					\n"
-		"	" __SC 	"%2, %1					\n"
+		"	" __SC	"%2, %1					\n"
 		"	beqzl	%2, 1b					\n"
 		"	and	%2, %0, %3				\n"
 		"	.set	mips0					\n"
@@ -402,10 +364,10 @@
 
 		do {
 			__asm__ __volatile__(
-			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
+			"	" __LL "%0, %1	# test_and_clear_bit	\n"
 			"	" __EXT "%2, %0, %3, 1			\n"
-			"	" __INS	"%0, $0, %3, 1			\n"
-			"	" __SC 	"%0, %1				\n"
+			"	" __INS "%0, $0, %3, 1			\n"
+			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "ir" (bit)
			: "memory");
@@ -417,11 +379,11 @@
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
-			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
+			"	.set	arch=r4000			\n"
+			"	" __LL "%0, %1	# test_and_clear_bit	\n"
 			"	or	%2, %0, %3			\n"
 			"	xor	%2, %3				\n"
-			"	" __SC 	"%2, %1				\n"
+			"	" __SC	"%2, %1				\n"
 			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "r" (1UL << bit)
@@ -429,18 +391,8 @@ static inline int test_and_clear_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a &= ~mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_clear_bit(nr, addr);
 
 	smp_llsc_mb();
 
@@ -458,7 +410,7 @@
 static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -468,7 +420,7 @@
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
 		"	xor	%2, %0, %3				\n"
 		"	" __SC	"%2, %1					\n"
@@ -484,8 +436,8 @@ static inline int test_and_change_bit(unsigned long nr,
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
-			"	" __LL	"%0, %1	# test_and_change_bit	\n"
+			"	.set	arch=r4000			\n"
+			"	" __LL "%0, %1	# test_and_change_bit	\n"
 			"	xor	%2, %0, %3			\n"
 			"	" __SC	"\t%2, %1			\n"
 			"	.set	mips0				\n"
@@ -495,18 +447,8 @@ static inline int test_and_change_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a ^= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_change_bit(nr, addr);
 
 	smp_llsc_mb();
 
@@ -676,9 +618,8 @@ static inline int ffs(int word)
 
 #include <asm/arch_hweight.h>
 #include <asm-generic/bitops/const_hweight.h>
 
-#include <asm-generic/bitops/ext2-non-atomic.h>
+#include <asm-generic/bitops/le.h>
 #include <asm-generic/bitops/ext2-atomic.h>
-#include <asm-generic/bitops/minix.h>
 
 #endif /* __KERNEL__ */
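
The bodies of the new __mips_* helpers are not part of this diff; per the new header comment they live in bitops.c and take the raw_local_irq_{save,restore}() path instead of an LL/SC loop. As a rough sketch of what moves out of line, reconstructed from the inline else-branches deleted above (the actual bitops.c code may differ in detail; the result is normalised to 0/1 here so it survives the int return type):

	/* Out-of-line fallbacks for CPUs without LL/SC: atomicity comes
	 * from disabling interrupts around the read-modify-write.
	 */
	#include <linux/irqflags.h>	/* raw_local_irq_{save,restore}() */
	#include <linux/types.h>
	#include <asm/bitops.h>		/* SZLONG_LOG, SZLONG_MASK */

	void __mips_set_bit(unsigned long nr, volatile unsigned long *addr)
	{
		volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
		unsigned long mask = 1UL << (nr & SZLONG_MASK);
		unsigned long flags;

		raw_local_irq_save(flags);
		*a |= mask;
		raw_local_irq_restore(flags);
	}

	int __mips_test_and_set_bit(unsigned long nr,
				    volatile unsigned long *addr)
	{
		volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
		unsigned long mask = 1UL << (nr & SZLONG_MASK);
		unsigned long flags;
		int res;

		raw_local_irq_save(flags);
		res = (*a & mask) != 0;		/* capture the old bit value */
		*a |= mask;
		raw_local_irq_restore(flags);
		return res;
	}

Note that the smp_mb__before_llsc()/smp_llsc_mb() barriers stay in the inline wrappers in bitops.h, which is why the out-of-line helpers need no barriers of their own.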
