diff options
author | Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca> | 2007-01-25 11:15:52 -0500 |
---|---|---|
committer | Paul Mackerras <paulus@samba.org> | 2007-02-16 14:00:20 +1100 |
commit | 41806ef4bfacbe5c4e520d8da2fcedcda335c922 (patch) | |
tree | 1443a7e100c5f5a4715d604b8a9de8715ca15afb /include/asm-powerpc | |
parent | 8c0238b3f1a7849b89707ac6b7b0c84e1ed2df70 (diff) | |
download | op-kernel-dev-41806ef4bfacbe5c4e520d8da2fcedcda335c922.zip op-kernel-dev-41806ef4bfacbe5c4e520d8da2fcedcda335c922.tar.gz |
[POWERPC] atomic.h: Add atomic64 cmpxchg, xchg and add_unless to powerpc
atomic.h : Add atomic64 cmpxchg, xchg and add_unless to powerpc
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'include/asm-powerpc')
-rw-r--r-- | include/asm-powerpc/atomic.h | 40 |
1 file changed, 39 insertions(+), 1 deletion(-)
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h index f038e33..2ce4b6b 100644 --- a/include/asm-powerpc/atomic.h +++ b/include/asm-powerpc/atomic.h @@ -165,7 +165,8 @@ static __inline__ int atomic_dec_return(atomic_t *v) return t; } -#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n))) +#define atomic_cmpxchg(v, o, n) \ + ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n))) #define atomic_xchg(v, new) (xchg(&((v)->counter), new)) /** @@ -413,6 +414,43 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v) return t; } +#define atomic64_cmpxchg(v, o, n) \ + ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n))) +#define atomic64_xchg(v, new) (xchg(&((v)->counter), new)) + +/** + * atomic64_add_unless - add unless the number is a given value + * @v: pointer of type atomic64_t + * @a: the amount to add to v... + * @u: ...unless v is equal to u. + * + * Atomically adds @a to @v, so long as it was not @u. + * Returns non-zero if @v was not @u, and zero otherwise. + */ +static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u) +{ + long t; + + __asm__ __volatile__ ( + LWSYNC_ON_SMP +"1: ldarx %0,0,%1 # atomic64_add_unless\n\ + cmpd 0,%0,%3 \n\ + beq- 2f \n\ + add %0,%2,%0 \n" +" stdcx. %0,0,%1 \n\ + bne- 1b \n" + ISYNC_ON_SMP +" subf %0,%2,%0 \n\ +2:" + : "=&r" (t) + : "r" (&v->counter), "r" (a), "r" (u) + : "cc", "memory"); + + return t != u; +} + +#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0) + #endif /* __powerpc64__ */ #include <asm-generic/atomic.h> |