diff options
Diffstat (limited to 'arch/sparc/lib/atomic_64.S')
| -rw-r--r-- | arch/sparc/lib/atomic_64.S | 133 |
1 file changed, 133 insertions, 0 deletions
diff --git a/arch/sparc/lib/atomic_64.S b/arch/sparc/lib/atomic_64.S new file mode 100644 index 00000000000..85c233d0a34 --- /dev/null +++ b/arch/sparc/lib/atomic_64.S @@ -0,0 +1,133 @@ +/* atomic.S: These things are too big to do inline. + * + * Copyright (C) 1999, 2007 2012 David S. Miller (davem@davemloft.net) + */ + +#include <linux/linkage.h> +#include <asm/asi.h> +#include <asm/backoff.h> + + .text + + /* Two versions of the atomic routines, one that + * does not return a value and does not perform + * memory barriers, and a second which returns + * a value and does the barriers. + */ +ENTRY(atomic_add) /* %o0 = increment, %o1 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: lduw [%o1], %g1 + add %g1, %o0, %g7 + cas [%o1], %g1, %g7 + cmp %g1, %g7 + bne,pn %icc, BACKOFF_LABEL(2f, 1b) + nop + retl + nop +2: BACKOFF_SPIN(%o2, %o3, 1b) +ENDPROC(atomic_add) + +ENTRY(atomic_sub) /* %o0 = decrement, %o1 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: lduw [%o1], %g1 + sub %g1, %o0, %g7 + cas [%o1], %g1, %g7 + cmp %g1, %g7 + bne,pn %icc, BACKOFF_LABEL(2f, 1b) + nop + retl + nop +2: BACKOFF_SPIN(%o2, %o3, 1b) +ENDPROC(atomic_sub) + +ENTRY(atomic_add_ret) /* %o0 = increment, %o1 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: lduw [%o1], %g1 + add %g1, %o0, %g7 + cas [%o1], %g1, %g7 + cmp %g1, %g7 + bne,pn %icc, BACKOFF_LABEL(2f, 1b) + add %g1, %o0, %g1 + retl + sra %g1, 0, %o0 +2: BACKOFF_SPIN(%o2, %o3, 1b) +ENDPROC(atomic_add_ret) + +ENTRY(atomic_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: lduw [%o1], %g1 + sub %g1, %o0, %g7 + cas [%o1], %g1, %g7 + cmp %g1, %g7 + bne,pn %icc, BACKOFF_LABEL(2f, 1b) + sub %g1, %o0, %g1 + retl + sra %g1, 0, %o0 +2: BACKOFF_SPIN(%o2, %o3, 1b) +ENDPROC(atomic_sub_ret) + +ENTRY(atomic64_add) /* %o0 = increment, %o1 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: ldx [%o1], %g1 + add %g1, %o0, %g7 + casx [%o1], %g1, %g7 + cmp %g1, %g7 + bne,pn %xcc, BACKOFF_LABEL(2f, 1b) + nop + retl + nop +2: BACKOFF_SPIN(%o2, %o3, 1b) 
+ENDPROC(atomic64_add) + +ENTRY(atomic64_sub) /* %o0 = decrement, %o1 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: ldx [%o1], %g1 + sub %g1, %o0, %g7 + casx [%o1], %g1, %g7 + cmp %g1, %g7 + bne,pn %xcc, BACKOFF_LABEL(2f, 1b) + nop + retl + nop +2: BACKOFF_SPIN(%o2, %o3, 1b) +ENDPROC(atomic64_sub) + +ENTRY(atomic64_add_ret) /* %o0 = increment, %o1 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: ldx [%o1], %g1 + add %g1, %o0, %g7 + casx [%o1], %g1, %g7 + cmp %g1, %g7 + bne,pn %xcc, BACKOFF_LABEL(2f, 1b) + nop + retl + add %g1, %o0, %o0 +2: BACKOFF_SPIN(%o2, %o3, 1b) +ENDPROC(atomic64_add_ret) + +ENTRY(atomic64_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: ldx [%o1], %g1 + sub %g1, %o0, %g7 + casx [%o1], %g1, %g7 + cmp %g1, %g7 + bne,pn %xcc, BACKOFF_LABEL(2f, 1b) + nop + retl + sub %g1, %o0, %o0 +2: BACKOFF_SPIN(%o2, %o3, 1b) +ENDPROC(atomic64_sub_ret) + +ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */ + BACKOFF_SETUP(%o2) +1: ldx [%o0], %g1 + brlez,pn %g1, 3f + sub %g1, 1, %g7 + casx [%o0], %g1, %g7 + cmp %g1, %g7 + bne,pn %xcc, BACKOFF_LABEL(2f, 1b) + nop +3: retl + sub %g1, 1, %o0 +2: BACKOFF_SPIN(%o2, %o3, 1b) +ENDPROC(atomic64_dec_if_positive)
