Diffstat (limited to 'arch/sparc/lib/bitops.S')
| -rw-r--r-- | arch/sparc/lib/bitops.S | 220 |
1 file changed, 120 insertions, 100 deletions
diff --git a/arch/sparc/lib/bitops.S b/arch/sparc/lib/bitops.S
index 3e939976907..36f72cc0e67 100644
--- a/arch/sparc/lib/bitops.S
+++ b/arch/sparc/lib/bitops.S
@@ -1,110 +1,130 @@
-/* bitops.S: Low level assembler bit operations.
+/* bitops.S: Sparc64 atomic bit operations.
  *
- * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
+ * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net)
  */
 
-#include <linux/config.h>
-#include <asm/ptrace.h>
-#include <asm/psr.h>
+#include <linux/linkage.h>
+#include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text
-	.align	4
-	.globl	__bitops_begin
-__bitops_begin:
 
+ENTRY(test_and_set_bit)	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
+	srlx	%o0, 6, %g1
+	mov	1, %o2
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%o2, %g2, %o2
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	or	%g7, %o2, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
+	 and	%g7, %o2, %g2
+	clr	%o0
+	movrne	%g2, 1, %o0
+	retl
+	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
+ENDPROC(test_and_set_bit)
 
-	/* Take bits in %g2 and set them in word at %g1,
-	 * return whether bits were set in original value
-	 * in %g2.  %g4 holds value to restore into %o7
-	 * in delay slot of jmpl return, %g3 + %g5 + %g7 can be
-	 * used as temporaries and thus is considered clobbered
-	 * by all callers.
-	 */
-	.globl	___set_bit
-___set_bit:
-	rd	%psr, %g3
-	nop; nop; nop;
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0	! Did we get it?
-	bne	2b		! Nope...
-#endif
-	ld	[%g1], %g7
-	or	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
+ENTRY(test_and_clear_bit)	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
+	srlx	%o0, 6, %g1
+	mov	1, %o2
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%o2, %g2, %o2
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	andn	%g7, %o2, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
+	 and	%g7, %o2, %g2
+	clr	%o0
+	movrne	%g2, 1, %o0
+	retl
+	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
+ENDPROC(test_and_clear_bit)
 
-	/* Same as above, but clears the bits from %g2 instead. */
-	.globl	___clear_bit
-___clear_bit:
-	rd	%psr, %g3
-	nop; nop; nop
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0	! Did we get it?
-	bne	2b		! Nope...
-#endif
-	ld	[%g1], %g7
-	andn	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
+ENTRY(test_and_change_bit)	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
+	srlx	%o0, 6, %g1
+	mov	1, %o2
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%o2, %g2, %o2
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	xor	%g7, %o2, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
+	 and	%g7, %o2, %g2
+	clr	%o0
+	movrne	%g2, 1, %o0
+	retl
+	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
+ENDPROC(test_and_change_bit)
 
-	/* Same thing again, but this time toggles the bits from %g2. */
-	.globl	___change_bit
-___change_bit:
-	rd	%psr, %g3
-	nop; nop; nop
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0	! Did we get it?
-	bne	2b		! Nope...
-#endif
-	ld	[%g1], %g7
-	xor	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
+ENTRY(set_bit)	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
+	srlx	%o0, 6, %g1
+	mov	1, %o2
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%o2, %g2, %o2
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	or	%g7, %o2, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
+	 nop
+	retl
+	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
+ENDPROC(set_bit)
 
-	.globl	__bitops_end
-__bitops_end:
+ENTRY(clear_bit)	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
+	srlx	%o0, 6, %g1
+	mov	1, %o2
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%o2, %g2, %o2
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	andn	%g7, %o2, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
+	 nop
+	retl
+	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
+ENDPROC(clear_bit)
+
+ENTRY(change_bit)	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
+	srlx	%o0, 6, %g1
+	mov	1, %o2
+	sllx	%g1, 3, %g3
+	and	%o0, 63, %g2
+	sllx	%o2, %g2, %o2
+	add	%o1, %g3, %o1
+1:	ldx	[%o1], %g7
+	xor	%g7, %o2, %g1
+	casx	[%o1], %g7, %g1
+	cmp	%g7, %g1
+	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
+	 nop
+	retl
+	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
+ENDPROC(change_bit)
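All six new routines share one shape: compute the 64-bit word that holds bit nr, build a one-bit mask, then retry casx (compare-and-swap) until the update lands; on SMP kernels the BACKOFF_* macros from asm/backoff.h add a spin delay before each retry instead of hammering the cache line. A minimal C sketch of that logic, not taken from the patch: the function name is illustrative, GCC's __sync_val_compare_and_swap builtin stands in for casx, and the backoff delay is elided.

	/* Sketch only: mirrors the srlx/and/sllx index math and the
	 * ldx/casx retry loop of the new test_and_set_bit above. */
	static int test_and_set_bit_sketch(unsigned long nr, unsigned long *addr)
	{
		unsigned long *p = addr + (nr >> 6);	/* srlx %o0, 6; sllx ..., 3; add */
		unsigned long mask = 1UL << (nr & 63);	/* and %o0, 63; sllx %o2, %g2 */
		unsigned long old, seen;

		for (;;) {
			old = *p;			/* ldx [%o1], %g7 */
			seen = __sync_val_compare_and_swap(p, old, old | mask);
			if (seen == old)		/* casx won: cmp/bne falls through */
				return (old & mask) != 0; /* prior bit value (clr + movrne) */
			/* casx lost a race with another CPU; the assembly
			 * spins via BACKOFF_SPIN before branching back to 1b. */
		}
	}

The plain set_bit/clear_bit/change_bit variants are the same loop without the return-value computation, and test_and_clear_bit/test_and_change_bit substitute andn/xor for the or.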
