author    Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>    2007-05-08 00:34:36 -0700
committer Linus Torvalds <torvalds@woody.linux-foundation.org>    2007-05-08 11:15:19 -0700
commit    79d365a306c3af53d8a732fec79b76c0b285d816 (patch)
tree      1fa4e6fd55f46b715e7b3286e65e935dd2faff5f /include
parent    2549c8589cc0550f0714d32720877d7af133ae40 (diff)
atomic.h: add atomic64 cmpxchg, xchg and add_unless to x86_64
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: Andi Kleen <ak@muc.de>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
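The core of the atomic64_add_unless() macro added by this patch is a classic compare-and-swap retry loop. As a rough illustration only (not part of the patch), here is a minimal userspace sketch of the same pattern in C, substituting GCC's __sync_val_compare_and_swap builtin for the kernel's cmpxchg; on x86_64 both boil down to lock cmpxchgq. The retry is needed because another CPU may change the counter between the read and the cmpxchg; the loop simply re-reads and tries again.

#include <stdio.h>

/* Userspace approximation of atomic64_add_unless(v, a, u):
 * atomically add 'a' to '*v' unless '*v' equals 'u'.
 * Returns non-zero if the add was performed, zero otherwise. */
static long add_unless(volatile long *v, long a, long u)
{
	long c = *v;

	for (;;) {
		long old;

		if (c == u)	/* forbidden value reached: give up */
			break;
		/* try to swap in c + a; returns the value seen before */
		old = __sync_val_compare_and_swap(v, c, c + a);
		if (old == c)	/* nobody raced us: add succeeded */
			break;
		c = old;	/* lost a race: retry with the fresh value */
	}
	return c != u;
}

int main(void)
{
	volatile long counter = 1;

	/* equivalent of atomic64_inc_not_zero(): succeeds at 1, fails at 0 */
	printf("%ld (counter = %ld)\n", add_unless(&counter, 1, 0), counter);
	counter = 0;
	printf("%ld (counter = %ld)\n", add_unless(&counter, 1, 0), counter);
	return 0;
}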
Diffstat (limited to 'include')
-rw-r--r--  include/asm-x86_64/atomic.h  36
1 file changed, 32 insertions(+), 4 deletions(-)
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 706ca4b..80e4fdb 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -375,8 +375,8 @@ static __inline__ long atomic64_add_return(long i, atomic64_t *v)
 	long __i = i;
 	__asm__ __volatile__(
 		LOCK_PREFIX "xaddq %0, %1;"
-		:"=r"(i)
-		:"m"(v->counter), "0"(i));
+		:"+r" (i), "+m" (v->counter)
+		: : "memory");
 	return i + __i;
 }
 
@@ -388,7 +388,10 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 #define atomic64_inc_return(v) (atomic64_add_return(1,v))
 #define atomic64_dec_return(v) (atomic64_sub_return(1,v))
 
-#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
@@ -402,7 +405,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
  */
 #define atomic_add_unless(v, a, u) \
 ({ \
-	int c, old; \
+	__typeof__((v)->counter) c, old; \
 	c = atomic_read(v); \
 	for (;;) { \
 		if (unlikely(c == (u))) \
@@ -416,6 +419,31 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic64_add_unless(v, a, u) \
+({ \
+	__typeof__((v)->counter) c, old; \
+	c = atomic64_read(v); \
+	for (;;) { \
+		if (unlikely(c == (u))) \
+			break; \
+		old = atomic64_cmpxchg((v), c, c + (a)); \
+		if (likely(old == c)) \
+			break; \
+		c = old; \
+	} \
+	c != (u); \
+})
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
 __asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
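A note on the first hunk: xaddq exchanges its register operand with the memory operand while adding, so after the instruction i holds the old counter value and "return i + __i" yields the new one. The switch to "+r"/"+m" read-write constraints tells GCC that both operands are modified, and the "memory" clobber keeps the compiler from reordering accesses around the atomic. A rough userspace equivalent of atomic64_add_return(), again illustrative only, using GCC's __sync_add_and_fetch builtin, which typically compiles to the same lock xaddq on x86_64:

#include <stdio.h>

/* Userspace approximation of atomic64_add_return(i, v):
 * atomically add 'i' to '*v' and return the resulting value. */
static long add_return(volatile long *v, long i)
{
	return __sync_add_and_fetch(v, i);
}

int main(void)
{
	volatile long counter = 40;

	printf("add_return -> %ld\n", add_return(&counter, 2));	/* prints 42 */
	printf("counter    -> %ld\n", counter);
	return 0;
}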