Diffstat (limited to 'arch/sparc64/lib/atomic.S')
-rw-r--r-- | arch/sparc64/lib/atomic.S | 42
1 files changed, 26 insertions, 16 deletions
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index e528b8d..faf87c3 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -7,18 +7,6 @@
 #include <linux/config.h>
 #include <asm/asi.h>
 
-	/* On SMP we need to use memory barriers to ensure
-	 * correct memory operation ordering, nop these out
-	 * for uniprocessor.
-	 */
-#ifdef CONFIG_SMP
-#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad
-#define ATOMIC_POST_BARRIER	membar #StoreLoad | #StoreStore
-#else
-#define ATOMIC_PRE_BARRIER	nop
-#define ATOMIC_POST_BARRIER	nop
-#endif
-
 	.text
 
 	/* Two versions of the atomic routines, one that
@@ -52,6 +40,24 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	 nop
 	.size	atomic_sub, .-atomic_sub
 
+	/* On SMP we need to use memory barriers to ensure
+	 * correct memory operation ordering, nop these out
+	 * for uniprocessor.
+	 */
+#ifdef CONFIG_SMP
+
+#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;
+#define ATOMIC_POST_BARRIER	\
+	ba,pt %xcc, 80b;	\
+	membar #StoreLoad | #StoreStore
+
+80:	retl
+	 nop
+#else
+#define ATOMIC_PRE_BARRIER
+#define ATOMIC_POST_BARRIER
+#endif
+
 	.globl	atomic_add_ret
 	.type	atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
@@ -62,9 +68,10 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	cmp	%g1, %g7
 	bne,pn	%icc, 1b
 	 add	%g7, %o0, %g7
+	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
-	 sra	%g7, 0, %o0
+	 nop
 	.size	atomic_add_ret, .-atomic_add_ret
 
 	.globl	atomic_sub_ret
@@ -77,9 +84,10 @@ atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	cmp	%g1, %g7
 	bne,pn	%icc, 1b
 	 sub	%g7, %o0, %g7
+	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
-	 sra	%g7, 0, %o0
+	 nop
 	.size	atomic_sub_ret, .-atomic_sub_ret
 
 	.globl	atomic64_add
@@ -118,9 +126,10 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	cmp	%g1, %g7
 	bne,pn	%xcc, 1b
 	 add	%g7, %o0, %g7
+	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
-	 mov	%g7, %o0
+	 nop
 	.size	atomic64_add_ret, .-atomic64_add_ret
 
 	.globl	atomic64_sub_ret
@@ -133,7 +142,8 @@ atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	cmp	%g1, %g7
 	bne,pn	%xcc, 1b
 	 sub	%g7, %o0, %g7
+	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
-	 mov	%g7, %o0
+	 nop
 	.size	atomic64_sub_ret, .-atomic64_sub_ret
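Reading note (not part of the commit itself): after this change each *_ret routine computes its return value before the post-barrier, and on CONFIG_SMP the new ATOMIC_POST_BARRIER no longer expands to a bare membar. It branches to the shared 80: label defined between atomic_sub and atomic_add_ret, with the membar placed in the delay slot of that branch, so the membar never ends up in the delay slot of the final retl. A hand-expanded sketch of the tail of atomic_add_ret on an SMP build, reconstructed from the hunks above rather than taken from an actual assembler listing:

	sra	%g7, 0, %o0			! return value set up before the barrier
	ba,pt	%xcc, 80b			! ATOMIC_POST_BARRIER on SMP: jump to the shared return sequence
	 membar	#StoreLoad | #StoreStore	! post-barrier executes in the ba,pt delay slot
	retl					! not reached on SMP (the ba,pt above branches away)
	 nop

	! shared return sequence, emitted once between atomic_sub and atomic_add_ret:
80:	retl
	 nop

On a uniprocessor build both macros expand to nothing, so the same source assembles to just the sra followed by the trailing retl/nop.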