| field | value | date |
|---|---|---|
| author | Anton Blanchard <anton@samba.org> | 2012-02-29 21:12:16 +0000 |
| committer | Benjamin Herrenschmidt <benh@kernel.crashing.org> | 2012-03-07 17:06:08 +1100 |
| commit | a6cf7ed5119fb22f54584a9f867b638edd3c4384 | |
| tree | 5a81f3e6cf92caa6e806e2a5b945406902e63e07 /arch/powerpc/include/asm/atomic.h | |
| parent | b1ada6010e234d6c53bfcd5a617610c9284846b8 | |
powerpc/atomic: Implement atomic*_inc_not_zero
Implement atomic_inc_not_zero and atomic64_inc_not_zero as dedicated
primitives. At the moment we fall back on atomic*_add_unless, which
requires us to load the constants 0 and 1 into registers. We can also
avoid a subtract by saving the original value in a second temporary.
This removes 3 instructions from fget:
- c0000000001b63c0: 39 00 00 00 li r8,0
- c0000000001b63c4: 39 40 00 01 li r10,1
...
- c0000000001b63e8: 7c 0a 00 50 subf r0,r10,r0
Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
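
Context for the change (editor's sketch, not part of the commit): atomic*_inc_not_zero previously mapped onto atomic*_add_unless(v, 1, 0), as in the "-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)" line removed in the diff below. That is why the constants 0 and 1 had to be materialised in registers, and why an extra subtract was needed to recover the pre-add value. At the C level, the behaviour the new lwarx/stwcx. (resp. ldarx/stdcx.) loops implement is roughly the following; inc_not_zero_model and the GCC __atomic builtins are used purely for illustration and appear nowhere in the patch:

#include <stdbool.h>

/* Illustrative model of inc-not-zero: increment *v only if it is non-zero,
 * and report whether the increment happened.  The kernel code below does
 * this with a load-reserve/store-conditional loop plus entry/exit barriers;
 * this sketch uses a compare-and-swap loop to show the same logic. */
static bool inc_not_zero_model(int *v)
{
	/* Snapshot the current value; the CAS below keeps it up to date. */
	int old = __atomic_load_n(v, __ATOMIC_RELAXED);

	while (old != 0) {
		/* On failure, 'old' is refreshed with the current value of *v. */
		if (__atomic_compare_exchange_n(v, &old, old + 1, false,
						__ATOMIC_SEQ_CST,
						__ATOMIC_RELAXED))
			return true;	/* was non-zero, now incremented */
	}
	return false;			/* observed zero; *v left untouched */
}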
Diffstat (limited to 'arch/powerpc/include/asm/atomic.h')
-rw-r--r--   arch/powerpc/include/asm/atomic.h   59
1 file changed, 58 insertions(+), 1 deletion(-)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 02e41b5..14174e8 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -212,6 +212,36 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 	return t;
 }
 
+/**
+ * atomic_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1, so long as @v is non-zero.
+ * Returns non-zero if @v was non-zero, and zero otherwise.
+ */
+static __inline__ int atomic_inc_not_zero(atomic_t *v)
+{
+	int t1, t2;
+
+	__asm__ __volatile__ (
+	PPC_ATOMIC_ENTRY_BARRIER
+"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
+	cmpwi	0,%0,0\n\
+	beq-	2f\n\
+	addic	%1,%0,1\n"
+	PPC405_ERR77(0,%2)
+"	stwcx.	%1,0,%2\n\
+	bne-	1b\n"
+	PPC_ATOMIC_EXIT_BARRIER
+	"\n\
+2:"
+	: "=&r" (t1), "=&r" (t2)
+	: "r" (&v->counter)
+	: "cc", "xer", "memory");
+
+	return t1;
+}
+#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
 
 #define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
 #define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
 
@@ -467,7 +497,34 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 	return t != u;
 }
 
-#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
+/**
+ * atomic_inc64_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1, so long as @v is non-zero.
+ * Returns non-zero if @v was non-zero, and zero otherwise.
+ */
+static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
+{
+	long t1, t2;
+
+	__asm__ __volatile__ (
+	PPC_ATOMIC_ENTRY_BARRIER
+"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
+	cmpdi	0,%0,0\n\
+	beq-	2f\n\
+	addic	%1,%0,1\n\
+	stdcx.	%1,0,%2\n\
+	bne-	1b\n"
+	PPC_ATOMIC_EXIT_BARRIER
+	"\n\
+2:"
+	: "=&r" (t1), "=&r" (t2)
+	: "r" (&v->counter)
+	: "cc", "xer", "memory");
+
+	return t1;
+}
 
 #endif /* __powerpc64__ */
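
Usage note (editor's sketch, not part of the commit): the fget() win quoted in the commit message comes from the common pattern of taking a reference only while an object is still live, e.g. during an RCU-protected lookup. The struct and helper names below are hypothetical and exist only to show how the new primitive is typically called:

#include <linux/atomic.h>

/* Hypothetical refcounted object, for illustration only. */
struct obj {
	atomic_t refcount;
};

/*
 * Grab a reference only if the object has not already dropped to zero
 * references (i.e. is not being torn down).  With this patch the
 * atomic_inc_not_zero() call compiles to the dedicated lwarx/stwcx.
 * loop instead of the generic add_unless sequence.
 */
static struct obj *obj_get_live(struct obj *o)
{
	if (!atomic_inc_not_zero(&o->refcount))
		return NULL;	/* already dead: caller must not use it */
	return o;
}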