Diffstat (limited to 'arch/mips/include/asm/bitops.h')
 arch/mips/include/asm/bitops.h | 39 ++++++++++++-----------------------
 1 file changed, 16 insertions(+), 23 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 71305a8b3d7..7c8816f7b7c 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -38,13 +38,6 @@
 #endif
 
 /*
- * clear_bit() doesn't provide any barrier for the compiler.
- */
-#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
-#define smp_mb__after_clear_bit()	smp_llsc_mb()
-
-
-/*
  * These are the "slower" versions of the functions and are in bitops.c.
  * These functions call raw_local_irq_{save,restore}().
  */
@@ -79,7 +72,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1			# set_bit	\n"
 		"	or	%0, %2					\n"
 		"	" __SC	"%0, %1					\n"
@@ -101,7 +94,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 	} else if (kernel_uses_llsc) {
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1		# set_bit	\n"
 			"	or	%0, %2				\n"
 			"	" __SC	"%0, %1				\n"
@@ -120,7 +113,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
  *
  * clear_bit() is atomic and may not be reordered.  However, it does
  * not contain a memory barrier, so if it is used for locking purposes,
- * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
+ * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
  * in order to ensure changes are visible on other processors.
  */
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
@@ -131,7 +124,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1			# clear_bit	\n"
 		"	and	%0, %2					\n"
 		"	" __SC "%0, %1					\n"
@@ -153,7 +146,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 	} else if (kernel_uses_llsc) {
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1		# clear_bit	\n"
 			"	and	%0, %2				\n"
 			"	" __SC "%0, %1				\n"
@@ -175,7 +168,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
 {
-	smp_mb__before_clear_bit();
+	smp_mb__before_atomic();
 	clear_bit(nr, addr);
 }
 
@@ -197,7 +190,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3				\n"
+		"	.set	arch=r4000			\n"
 		"1:	" __LL "%0, %1		# change_bit	\n"
 		"	xor	%0, %2				\n"
 		"	" __SC	"%0, %1				\n"
@@ -211,7 +204,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1		# change_bit	\n"
 			"	xor	%0, %2				\n"
 			"	" __SC	"%0, %1				\n"
@@ -244,7 +237,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
 		"	or	%2, %0, %3				\n"
 		"	" __SC	"%2, %1					\n"
@@ -260,7 +253,7 @@ static inline int test_and_set_bit(unsigned long nr,
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1	# test_and_set_bit	\n"
 			"	or	%2, %0, %3			\n"
 			"	" __SC	"%2, %1				\n"
@@ -298,7 +291,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
 		"	or	%2, %0, %3				\n"
 		"	" __SC	"%2, %1					\n"
@@ -314,7 +307,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL "%0, %1	# test_and_set_bit	\n"
 			"	or	%2, %0, %3			\n"
 			"	" __SC	"%2, %1				\n"
@@ -353,7 +346,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
 		"	or	%2, %0, %3				\n"
 		"	xor	%2, %3					\n"
@@ -386,7 +379,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
 			"	or	%2, %0, %3			\n"
 			"	xor	%2, %3				\n"
@@ -427,7 +420,7 @@ static inline int test_and_change_bit(unsigned long nr,
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips3					\n"
+		"	.set	arch=r4000				\n"
 		"1:	" __LL	"%0, %1		# test_and_change_bit	\n"
 		"	xor	%2, %0, %3				\n"
 		"	" __SC	"%2, %1					\n"
@@ -443,7 +436,7 @@ static inline int test_and_change_bit(unsigned long nr,
 
 		do {
 			__asm__ __volatile__(
-			"	.set	mips3				\n"
+			"	.set	arch=r4000			\n"
 			"	" __LL	"%0, %1 # test_and_change_bit	\n"
 			"	xor	%2, %0, %3			\n"
 			"	" __SC	"\t%2, %1			\n"
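
The barrier hunks above track an interface rename: the MIPS-private smp_mb__before_clear_bit()/smp_mb__after_clear_bit() macros are deleted, and both the clear_bit() comment and clear_bit_unlock() now refer to the generic smp_mb__before_atomic()/smp_mb__after_atomic(). A minimal caller-side sketch of the new usage, mirroring clear_bit_unlock() from the diff (the function name unlock_flag is invented for illustration and is not kernel API):

	#include <linux/bitops.h>

	/*
	 * Hypothetical caller: clear_bit() is atomic but supplies no
	 * memory barrier of its own, so a release-style use must order
	 * earlier stores explicitly with the generic
	 * smp_mb__before_atomic() (formerly smp_mb__before_clear_bit()
	 * on MIPS).
	 */
	static inline void unlock_flag(unsigned long nr, volatile unsigned long *addr)
	{
		smp_mb__before_atomic();	/* order prior stores before the RMW */
		clear_bit(nr, addr);		/* atomic clear, no implied barrier */
	}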
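The remaining hunks only change the assembler directive guarding each LL/SC block: ".set mips3" named a bare ISA level, while ".set arch=r4000" expresses the same override (letting ll/sc and their 64-bit forms assemble regardless of the ISA the file is otherwise built for) as a processor selection. A standalone sketch of the retry loop all of these functions share, written for 32-bit words with an invented name (atomic_or_sketch) instead of the kernel's __LL/__SC width macros:

	/*
	 * Hypothetical illustration, not kernel code.  ll loads a word
	 * and opens a link; sc stores only if the link is still intact,
	 * writing 1 (success) or 0 (failure) back into its source
	 * register, so a zero result means another CPU intervened and
	 * the operation must retry.  ".set mips0" restores the default
	 * ISA, as the kernel's own blocks do.
	 */
	static inline void atomic_or_sketch(unsigned int mask,
					    volatile unsigned int *addr)
	{
		unsigned int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	ll	%0, %1		# load-linked	\n"
		"	or	%0, %2		# apply mask	\n"
		"	sc	%0, %1		# store-cond.	\n"
		"	beqz	%0, 1b		# retry on fail	\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+m" (*addr)
		: "ir" (mask));
	}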
