author		mjg <mjg@FreeBSD.org>	2017-03-16 06:00:27 +0000
committer	mjg <mjg@FreeBSD.org>	2017-03-16 06:00:27 +0000
commit		b8af6b78be197b6a24bf372442dcb6e044d729b3 (patch)
tree		0496f4e01ec297971a9b02afb62b80c63b0dd969
parent		3100d6f6dfd7bea82a2fe7136cf72d8423b715bd (diff)
download	FreeBSD-src-b8af6b78be197b6a24bf372442dcb6e044d729b3.zip
		FreeBSD-src-b8af6b78be197b6a24bf372442dcb6e044d729b3.tar.gz
MFC r311169,r311898,r312925,r312973,r312975,r313007,r313040,r313080,
r313254,r313341

amd64: add atomic_fcmpset

==

sparc64: add atomic_fcmpset

==

Implement atomic_fcmpset_* for arm and arm64.

==

Add atomic_fcmpset_*() inlines for powerpc

Summary: atomic_fcmpset_*() is analogous to atomic_cmpset(), but saves
off the read value from the target memory location into the 'old'
pointer in the case of failure.

==

i386: add atomic_fcmpset

==

Don't retry a lost reservation in atomic_fcmpset()

The desired behavior of atomic_fcmpset_() is to always exit on error.
Instead of retrying on lost reservation, leave the retry to the caller,
and return.

==

Add atomic_fcmpset_*() inlines for MIPS

atomic_fcmpset_*() is analogous to atomic_cmpset(), but saves off the
read value from the target memory location into the 'old' pointer.

==

i386: fixup fcmpset

An incorrect output specifier was used which worked with clang by
accident, but breaks with the in-tree gcc version. While here plug a
whitespace nit.

==

Implement atomic_fcmpset_*() for RISC-V.

==

Use 64bit store instruction in atomic_fcmpset_64.
-rw-r--r--	sys/amd64/include/atomic.h	 51
-rw-r--r--	sys/arm/include/atomic-v4.h	 51
-rw-r--r--	sys/arm/include/atomic-v6.h	110
-rw-r--r--	sys/arm/include/atomic.h	  6
-rw-r--r--	sys/arm64/include/atomic.h	 64
-rw-r--r--	sys/i386/include/atomic.h	 34
-rw-r--r--	sys/mips/include/atomic.h	103
-rw-r--r--	sys/powerpc/include/atomic.h	123
-rw-r--r--	sys/riscv/include/atomic.h	101
-rw-r--r--	sys/sparc64/include/atomic.h	 34
10 files changed, 675 insertions, 2 deletions
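The log above describes the fcmpset contract in prose; a short usage
sketch may make it concrete. This is an editorial illustration, not code
from the commit — counter_add() is a hypothetical helper — and it shows
the point of the failure-path write-back: because a failed
atomic_fcmpset_int() refreshes 'old' itself, the retry loop performs only
one explicit read of the target.

#include <sys/types.h>
#include <machine/atomic.h>

static void
counter_add(volatile u_int *counter, u_int delta)
{
	u_int old;

	old = *counter;		/* single explicit read of the target */
	while (atomic_fcmpset_int(counter, &old, old + delta) == 0)
		;	/* on failure, 'old' already holds the fresh value */
}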
diff --git a/sys/amd64/include/atomic.h b/sys/amd64/include/atomic.h
index c9526f4..09a5f9c 100644
--- a/sys/amd64/include/atomic.h
+++ b/sys/amd64/include/atomic.h
@@ -99,6 +99,8 @@ void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
int atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
int atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src);
+int atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
+int atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src);
u_int atomic_fetchadd_int(volatile u_int *p, u_int v);
u_long atomic_fetchadd_long(volatile u_long *p, u_long v);
int atomic_testandset_int(volatile u_int *p, u_int v);
@@ -196,6 +198,42 @@ atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
return (res);
}
+static __inline int
+atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src)
+{
+ u_char res;
+
+ __asm __volatile(
+ " " MPLOCKED " "
+ " cmpxchgl %3,%1 ; "
+ " sete %0 ; "
+ "# atomic_fcmpset_int"
+ : "=r" (res), /* 0 */
+ "+m" (*dst), /* 1 */
+ "+a" (*expect) /* 2 */
+ : "r" (src) /* 3 */
+ : "memory", "cc");
+ return (res);
+}
+
+static __inline int
+atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
+{
+ u_char res;
+
+ __asm __volatile(
+ " " MPLOCKED " "
+ " cmpxchgq %3,%1 ; "
+ " sete %0 ; "
+ "# atomic_fcmpset_long"
+ : "=r" (res), /* 0 */
+ "+m" (*dst), /* 1 */
+ "+a" (*expect) /* 2 */
+ : "r" (src) /* 3 */
+ : "memory", "cc");
+ return (res);
+}
+
/*
* Atomically add the value of v to the integer pointed to by p and return
* the previous value of *p.
@@ -504,6 +542,8 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_subtract_rel_int atomic_subtract_barr_int
#define atomic_cmpset_acq_int atomic_cmpset_int
#define atomic_cmpset_rel_int atomic_cmpset_int
+#define atomic_fcmpset_acq_int atomic_fcmpset_int
+#define atomic_fcmpset_rel_int atomic_fcmpset_int
#define atomic_set_acq_long atomic_set_barr_long
#define atomic_set_rel_long atomic_set_barr_long
@@ -515,6 +555,8 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_subtract_rel_long atomic_subtract_barr_long
#define atomic_cmpset_acq_long atomic_cmpset_long
#define atomic_cmpset_rel_long atomic_cmpset_long
+#define atomic_fcmpset_acq_long atomic_fcmpset_long
+#define atomic_fcmpset_rel_long atomic_fcmpset_long
#define atomic_readandclear_int(p) atomic_swap_int(p, 0)
#define atomic_readandclear_long(p) atomic_swap_long(p, 0)
@@ -569,6 +611,9 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_cmpset_32 atomic_cmpset_int
#define atomic_cmpset_acq_32 atomic_cmpset_acq_int
#define atomic_cmpset_rel_32 atomic_cmpset_rel_int
+#define atomic_fcmpset_32 atomic_fcmpset_int
+#define atomic_fcmpset_acq_32 atomic_fcmpset_acq_int
+#define atomic_fcmpset_rel_32 atomic_fcmpset_rel_int
#define atomic_swap_32 atomic_swap_int
#define atomic_readandclear_32 atomic_readandclear_int
#define atomic_fetchadd_32 atomic_fetchadd_int
@@ -593,6 +638,9 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_cmpset_64 atomic_cmpset_long
#define atomic_cmpset_acq_64 atomic_cmpset_acq_long
#define atomic_cmpset_rel_64 atomic_cmpset_rel_long
+#define atomic_fcmpset_64 atomic_fcmpset_long
+#define atomic_fcmpset_acq_64 atomic_fcmpset_acq_long
+#define atomic_fcmpset_rel_64 atomic_fcmpset_rel_long
#define atomic_swap_64 atomic_swap_long
#define atomic_readandclear_64 atomic_readandclear_long
#define atomic_fetchadd_64 atomic_fetchadd_long
@@ -617,6 +665,9 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_cmpset_ptr atomic_cmpset_long
#define atomic_cmpset_acq_ptr atomic_cmpset_acq_long
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_long
+#define atomic_fcmpset_ptr atomic_fcmpset_long
+#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_long
+#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_long
#define atomic_swap_ptr atomic_swap_long
#define atomic_readandclear_ptr atomic_readandclear_long
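On amd64 the mapping to hardware is direct: cmpxchg leaves the observed
value in %eax/%rax when the compare fails, and the "+a" (*expect) operand
writes it straight back to the caller — exactly the fcmpset contract. For
comparison only (the commit uses the inline assembly above, not this
builtin), the same semantics expressed with the GCC/Clang __atomic
builtin, which likewise updates *expect on failure:

#include <sys/types.h>

static __inline int
fcmpset_int_builtin(volatile u_int *dst, u_int *expect, u_int src)
{

	/* strong exchange: fails only on a genuine value mismatch */
	return (__atomic_compare_exchange_n(dst, expect, src, 0,
	    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
}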
diff --git a/sys/arm/include/atomic-v4.h b/sys/arm/include/atomic-v4.h
index b4ff5bf..80f3f94 100644
--- a/sys/arm/include/atomic-v4.h
+++ b/sys/arm/include/atomic-v4.h
@@ -112,6 +112,43 @@ atomic_clear_64(volatile uint64_t *address, uint64_t clearmask)
__with_interrupts_disabled(*address &= ~clearmask);
}
+static __inline int
+atomic_fcmpset_32(volatile u_int32_t *p, volatile u_int32_t *cmpval, volatile u_int32_t newval)
+{
+ u_int32_t ret;
+
+ __with_interrupts_disabled(
+ {
+ ret = *p;
+ if (*p == *cmpval) {
+ *p = newval;
+ ret = 1;
+ } else {
+ *cmpval = *p;
+ ret = 0;
+ }
+ });
+ return (ret);
+}
+
+static __inline int
+atomic_fcmpset_64(volatile u_int64_t *p, volatile u_int64_t *cmpval, volatile u_int64_t newval)
+{
+ u_int64_t ret;
+
+ __with_interrupts_disabled(
+ {
+ if (*p == *cmpval) {
+ *p = newval;
+ ret = 1;
+ } else {
+ *cmpval = *p;
+ ret = 0;
+ }
+ });
+ return (ret);
+}
+
static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
@@ -370,6 +407,12 @@ atomic_swap_32(volatile u_int32_t *p, u_int32_t v)
return (__swp(v, p));
}
+#define atomic_fcmpset_rel_32 atomic_fcmpset_32
+#define atomic_fcmpset_acq_32 atomic_fcmpset_32
+#define atomic_fcmpset_rel_64 atomic_fcmpset_64
+#define atomic_fcmpset_acq_64 atomic_fcmpset_64
+#define atomic_fcmpset_acq_long atomic_fcmpset_long
+#define atomic_fcmpset_rel_long atomic_fcmpset_long
#define atomic_cmpset_rel_32 atomic_cmpset_32
#define atomic_cmpset_acq_32 atomic_cmpset_32
#define atomic_cmpset_rel_64 atomic_cmpset_64
@@ -421,6 +464,14 @@ atomic_cmpset_long(volatile u_long *dst, u_long old, u_long newe)
}
static __inline u_long
+atomic_fcmpset_long(volatile u_long *dst, u_long *old, u_long newe)
+{
+
+ return (atomic_fcmpset_32((volatile uint32_t *)dst,
+ (uint32_t *)old, newe));
+}
+
+static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
diff --git a/sys/arm/include/atomic-v6.h b/sys/arm/include/atomic-v6.h
index 507a1d081..287ade9 100644
--- a/sys/arm/include/atomic-v6.h
+++ b/sys/arm/include/atomic-v6.h
@@ -190,6 +190,116 @@ ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL_LONG(clear)
+static __inline int
+atomic_fcmpset_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+ uint32_t tmp;
+ uint32_t _cmpval = *cmpval;
+ int ret;
+
+ __asm __volatile(
+ "1: mov %0, #1 \n"
+ " ldrex %1, [%2] \n"
+ " cmp %1, %3 \n"
+ " it ne \n"
+ " bne 2f \n"
+ " strex %0, %4, [%2] \n"
+ "2:"
+ : "=&r" (ret), "=&r" (tmp), "+r" (p), "+r" (_cmpval), "+r" (newval)
+ : : "cc", "memory");
+ *cmpval = tmp;
+ return (!ret);
+}
+
+static __inline uint64_t
+atomic_fcmpset_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+ uint64_t tmp;
+ uint64_t _cmpval = *cmpval;
+ int ret;
+
+ __asm __volatile(
+ "1: mov %[ret], #1 \n"
+ " ldrexd %Q[tmp], %R[tmp], [%[ptr]] \n"
+ " teq %Q[tmp], %Q[_cmpval] \n"
+ " itee eq \n"
+ " teqeq %R[tmp], %R[_cmpval] \n"
+ " bne 2f \n"
+ " strexd %[ret], %Q[newval], %R[newval], [%[ptr]]\n"
+ "2: \n"
+ : [ret] "=&r" (ret),
+ [tmp] "=&r" (tmp)
+ : [ptr] "r" (p),
+ [_cmpval] "r" (_cmpval),
+ [newval] "r" (newval)
+ : "cc", "memory");
+ *cmpval = tmp;
+ return (!ret);
+}
+
+static __inline u_long
+atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
+{
+
+ return (atomic_fcmpset_32((volatile uint32_t *)p,
+ (uint32_t *)cmpval, newval));
+}
+
+static __inline uint64_t
+atomic_fcmpset_acq_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+ uint64_t ret;
+
+ ret = atomic_fcmpset_64(p, cmpval, newval);
+ dmb();
+ return (ret);
+}
+
+static __inline u_long
+atomic_fcmpset_acq_long(volatile u_long *p, u_long *cmpval, u_long newval)
+{
+ u_long ret;
+
+ ret = atomic_fcmpset_long(p, cmpval, newval);
+ dmb();
+ return (ret);
+}
+
+static __inline uint32_t
+atomic_fcmpset_acq_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+
+ uint32_t ret;
+
+ ret = atomic_fcmpset_32(p, cmpval, newval);
+ dmb();
+ return (ret);
+}
+
+static __inline uint32_t
+atomic_fcmpset_rel_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+
+ dmb();
+ return (atomic_fcmpset_32(p, cmpval, newval));
+}
+
+static __inline uint64_t
+atomic_fcmpset_rel_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+
+ dmb();
+ return (atomic_fcmpset_64(p, cmpval, newval));
+}
+
+static __inline u_long
+atomic_fcmpset_rel_long(volatile u_long *p, u_long *cmpval, u_long newval)
+{
+
+ dmb();
+ return (atomic_fcmpset_long(p, cmpval, newval));
+}
+
static __inline uint32_t
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
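Note that the ldrex/strex and ldrexd/strexd sequences above make a single
attempt: a store-exclusive that fails because the reservation was lost
simply falls through and reports failure, consistent with the "Don't
retry a lost reservation" entry in the log. The control flow, sketched as
C pseudocode (load_exclusive() and store_exclusive() are hypothetical
wrappers, not kernel primitives):

#include <stdint.h>

/* Hypothetical wrappers standing in for ldrex/strex. */
uint32_t load_exclusive(volatile uint32_t *p);
int store_exclusive(volatile uint32_t *p, uint32_t v);	/* 0 on success */

static int
fcmpset_llsc_shape(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	uint32_t observed;
	int failed = 1;			/* mov %0, #1: preset to fail */

	observed = load_exclusive(p);	/* ldrex */
	if (observed == *cmpval)
		failed = store_exclusive(p, newval);	/* strex, no retry */
	*cmpval = observed;		/* unconditional write-back */
	return (!failed);
}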
diff --git a/sys/arm/include/atomic.h b/sys/arm/include/atomic.h
index 091e3c1..93b202d 100644
--- a/sys/arm/include/atomic.h
+++ b/sys/arm/include/atomic.h
@@ -79,6 +79,9 @@ atomic_store_long(volatile u_long *dst, u_long src)
#define atomic_clear_ptr atomic_clear_32
#define atomic_set_ptr atomic_set_32
+#define atomic_fcmpset_ptr atomic_fcmpset_32
+#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_32
+#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_32
#define atomic_cmpset_ptr atomic_cmpset_32
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_32
#define atomic_cmpset_acq_ptr atomic_cmpset_acq_32
@@ -97,6 +100,9 @@ atomic_store_long(volatile u_long *dst, u_long src)
#define atomic_set_int atomic_set_32
#define atomic_set_acq_int atomic_set_acq_32
#define atomic_set_rel_int atomic_set_rel_32
+#define atomic_fcmpset_int atomic_fcmpset_32
+#define atomic_fcmpset_acq_int atomic_fcmpset_acq_32
+#define atomic_fcmpset_rel_int atomic_fcmpset_rel_32
#define atomic_cmpset_int atomic_cmpset_32
#define atomic_cmpset_acq_int atomic_cmpset_acq_32
#define atomic_cmpset_rel_int atomic_cmpset_rel_32
diff --git a/sys/arm64/include/atomic.h b/sys/arm64/include/atomic.h
index 41ad6cc..9318de1 100644
--- a/sys/arm64/include/atomic.h
+++ b/sys/arm64/include/atomic.h
@@ -98,6 +98,61 @@ ATOMIC(clear, bic)
ATOMIC(set, orr)
ATOMIC(subtract, sub)
+#define ATOMIC_FCMPSET(bar, a, l) \
+static __inline int \
+atomic_fcmpset_##bar##32(volatile uint32_t *p, uint32_t *cmpval, \
+ uint32_t newval) \
+{ \
+ uint32_t tmp; \
+ uint32_t _cmpval = *cmpval; \
+ int res; \
+ \
+ __asm __volatile( \
+ "1: mov %w1, #1 \n" \
+ " ld"#a"xr %w0, [%2] \n" \
+ " cmp %w0, %w3 \n" \
+ " b.ne 2f \n" \
+ " st"#l"xr %w1, %w4, [%2] \n" \
+ "2:" \
+ : "=&r"(tmp), "=&r"(res) \
+ : "r" (p), "r" (_cmpval), "r" (newval) \
+ : "cc", "memory" \
+ ); \
+ *cmpval = tmp; \
+ \
+ return (!res); \
+} \
+ \
+static __inline int \
+atomic_fcmpset_##bar##64(volatile uint64_t *p, uint64_t *cmpval, \
+ uint64_t newval) \
+{ \
+ uint64_t tmp; \
+ uint64_t _cmpval = *cmpval; \
+ int res; \
+ \
+ __asm __volatile( \
+ "1: mov %w1, #1 \n" \
+ " ld"#a"xr %0, [%2] \n" \
+ " cmp %0, %3 \n" \
+ " b.ne 2f \n" \
+ " st"#l"xr %w1, %4, [%2] \n" \
+ "2:" \
+ : "=&r"(tmp), "=&r"(res) \
+ : "r" (p), "r" (_cmpval), "r" (newval) \
+ : "cc", "memory" \
+ ); \
+ *cmpval = tmp; \
+ \
+ return (!res); \
+}
+
+ATOMIC_FCMPSET( , , )
+ATOMIC_FCMPSET(acq_, a, )
+ATOMIC_FCMPSET(rel_, ,l)
+
+#undef ATOMIC_FCMPSET
+
#define ATOMIC_CMPSET(bar, a, l) \
static __inline int \
atomic_cmpset_##bar##32(volatile uint32_t *p, uint32_t cmpval, \
@@ -311,6 +366,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_add_int atomic_add_32
+#define atomic_fcmpset_int atomic_fcmpset_32
#define atomic_clear_int atomic_clear_32
#define atomic_cmpset_int atomic_cmpset_32
#define atomic_fetchadd_int atomic_fetchadd_32
@@ -320,6 +376,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_int atomic_subtract_32
#define atomic_add_acq_int atomic_add_acq_32
+#define atomic_fcmpset_acq_int atomic_fcmpset_acq_32
#define atomic_clear_acq_int atomic_clear_acq_32
#define atomic_cmpset_acq_int atomic_cmpset_acq_32
#define atomic_load_acq_int atomic_load_acq_32
@@ -327,6 +384,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_acq_int atomic_subtract_acq_32
#define atomic_add_rel_int atomic_add_rel_32
+#define atomic_fcmpset_rel_int atomic_fcmpset_rel_32
#define atomic_clear_rel_int atomic_add_rel_32
#define atomic_cmpset_rel_int atomic_cmpset_rel_32
#define atomic_set_rel_int atomic_set_rel_32
@@ -334,6 +392,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_store_rel_int atomic_store_rel_32
#define atomic_add_long atomic_add_64
+#define atomic_fcmpset_long atomic_fcmpset_64
#define atomic_clear_long atomic_clear_64
#define atomic_cmpset_long atomic_cmpset_64
#define atomic_fetchadd_long atomic_fetchadd_64
@@ -343,6 +402,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_long atomic_subtract_64
#define atomic_add_ptr atomic_add_64
+#define atomic_fcmpset_ptr atomic_fcmpset_64
#define atomic_clear_ptr atomic_clear_64
#define atomic_cmpset_ptr atomic_cmpset_64
#define atomic_fetchadd_ptr atomic_fetchadd_64
@@ -352,6 +412,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_ptr atomic_subtract_64
#define atomic_add_acq_long atomic_add_acq_64
+#define atomic_fcmpset_acq_long atomic_fcmpset_acq_64
#define atomic_clear_acq_long atomic_add_acq_64
#define atomic_cmpset_acq_long atomic_cmpset_acq_64
#define atomic_load_acq_long atomic_load_acq_64
@@ -359,6 +420,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_acq_long atomic_subtract_acq_64
#define atomic_add_acq_ptr atomic_add_acq_64
+#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_64
#define atomic_clear_acq_ptr atomic_add_acq_64
#define atomic_cmpset_acq_ptr atomic_cmpset_acq_64
#define atomic_load_acq_ptr atomic_load_acq_64
@@ -366,6 +428,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_acq_ptr atomic_subtract_acq_64
#define atomic_add_rel_long atomic_add_rel_64
+#define atomic_fcmpset_rel_long atomic_fcmpset_rel_64
#define atomic_clear_rel_long atomic_clear_rel_64
#define atomic_cmpset_rel_long atomic_cmpset_rel_64
#define atomic_set_rel_long atomic_set_rel_64
@@ -373,6 +436,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_store_rel_long atomic_store_rel_64
#define atomic_add_rel_ptr atomic_add_rel_64
+#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_64
#define atomic_clear_rel_ptr atomic_clear_rel_64
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_64
#define atomic_set_rel_ptr atomic_set_rel_64
diff --git a/sys/i386/include/atomic.h b/sys/i386/include/atomic.h
index e92e94b..cb1b6a3 100644
--- a/sys/i386/include/atomic.h
+++ b/sys/i386/include/atomic.h
@@ -106,6 +106,7 @@ void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v); \
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
int atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
+int atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
u_int atomic_fetchadd_int(volatile u_int *p, u_int v);
int atomic_testandset_int(volatile u_int *p, u_int v);
int atomic_testandclear_int(volatile u_int *p, u_int v);
@@ -187,6 +188,24 @@ atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
return (res);
}
+static __inline int
+atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src)
+{
+ u_char res;
+
+ __asm __volatile(
+ " " MPLOCKED " "
+ " cmpxchgl %3,%1 ; "
+ " sete %0 ; "
+ "# atomic_cmpset_int"
+ : "=q" (res), /* 0 */
+ "+m" (*dst), /* 1 */
+ "+a" (*expect) /* 2 */
+ : "r" (src) /* 3 */
+ : "memory", "cc");
+ return (res);
+}
+
/*
* Atomically add the value of v to the integer pointed to by p and return
* the previous value of *p.
@@ -655,6 +674,8 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_subtract_rel_int atomic_subtract_barr_int
#define atomic_cmpset_acq_int atomic_cmpset_int
#define atomic_cmpset_rel_int atomic_cmpset_int
+#define atomic_fcmpset_acq_int atomic_fcmpset_int
+#define atomic_fcmpset_rel_int atomic_fcmpset_int
#define atomic_set_acq_long atomic_set_barr_long
#define atomic_set_rel_long atomic_set_barr_long
@@ -666,6 +687,8 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_subtract_rel_long atomic_subtract_barr_long
#define atomic_cmpset_acq_long atomic_cmpset_long
#define atomic_cmpset_rel_long atomic_cmpset_long
+#define atomic_fcmpset_acq_long atomic_fcmpset_long
+#define atomic_fcmpset_rel_long atomic_fcmpset_long
#define atomic_readandclear_int(p) atomic_swap_int(p, 0)
#define atomic_readandclear_long(p) atomic_swap_long(p, 0)
@@ -720,6 +743,9 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_cmpset_32 atomic_cmpset_int
#define atomic_cmpset_acq_32 atomic_cmpset_acq_int
#define atomic_cmpset_rel_32 atomic_cmpset_rel_int
+#define atomic_fcmpset_32 atomic_fcmpset_int
+#define atomic_fcmpset_acq_32 atomic_fcmpset_acq_int
+#define atomic_fcmpset_rel_32 atomic_fcmpset_rel_int
#define atomic_swap_32 atomic_swap_int
#define atomic_readandclear_32 atomic_readandclear_int
#define atomic_fetchadd_32 atomic_fetchadd_int
@@ -763,6 +789,14 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_cmpset_rel_ptr(dst, old, new) \
atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
(u_int)(new))
+#define atomic_fcmpset_ptr(dst, old, new) \
+ atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
+#define atomic_fcmpset_acq_ptr(dst, old, new) \
+ atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
+ (u_int)(new))
+#define atomic_fcmpset_rel_ptr(dst, old, new) \
+ atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
+ (u_int)(new))
#define atomic_swap_ptr(p, v) \
atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define atomic_readandclear_ptr(p) \
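The "=q" output constraint in atomic_fcmpset_int() above is the substance
of the "i386: fixup fcmpset" entry in the log: sete writes a byte
register, and on i386 only %eax-%edx have byte subregisters, so "q" is
required where plain "r" merely happened to work under clang (amd64 can
keep "=r" because every general-purpose register is byte-addressable in
64-bit mode). A standalone illustration of the constraint — a sketch, not
committed code:

#include <sys/types.h>

static __inline int
flag_from_compare(u_int a, u_int b)
{
	u_char res;

	__asm __volatile(
	    "	cmpl	%2,%1 ;	"
	    "	sete	%0 ;	"
	    : "=q" (res)	/* sete needs a byte-capable register */
	    : "r" (a), "r" (b)
	    : "cc");
	return (res);
}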
diff --git a/sys/mips/include/atomic.h b/sys/mips/include/atomic.h
index d4b951e..f29ad8c 100644
--- a/sys/mips/include/atomic.h
+++ b/sys/mips/include/atomic.h
@@ -362,7 +362,7 @@ atomic_load_64(__volatile uint64_t *p, uint64_t *v)
* zero if the compare failed, nonzero otherwise.
*/
static __inline uint32_t
-atomic_cmpset_32(__volatile uint32_t* p, uint32_t cmpval, uint32_t newval)
+atomic_cmpset_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
uint32_t ret;
@@ -405,6 +405,46 @@ atomic_cmpset_rel_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
return (atomic_cmpset_32(p, cmpval, newval));
}
+static __inline uint32_t
+atomic_fcmpset_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+ uint32_t ret;
+
+ __asm __volatile (
+ "1:\n\t"
+ "ll %0, %1\n\t" /* load old value */
+ "bne %0, %4, 2f\n\t" /* compare */
+ "move %0, %3\n\t" /* value to store */
+ "sc %0, %1\n\t" /* attempt to store */
+ "beqz %0, 1b\n\t" /* if it failed, spin */
+ "j 3f\n\t"
+ "2:\n\t"
+ "sw %0, %2\n\t" /* save old value */
+ "li %0, 0\n\t"
+ "3:\n"
+ : "=&r" (ret), "+m" (*p), "=m" (*cmpval)
+ : "r" (newval), "r" (*cmpval)
+ : "memory");
+ return ret;
+}
+
+static __inline uint32_t
+atomic_fcmpset_acq_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+ int retval;
+
+ retval = atomic_fcmpset_32(p, cmpval, newval);
+ mips_sync();
+ return (retval);
+}
+
+static __inline uint32_t
+atomic_fcmpset_rel_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+ mips_sync();
+ return (atomic_fcmpset_32(p, cmpval, newval));
+}
+
/*
* Atomically add the value of v to the integer pointed to by p and return
* the previous value of *p.
@@ -431,7 +471,7 @@ atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v)
* zero if the compare failed, nonzero otherwise.
*/
static __inline uint64_t
-atomic_cmpset_64(__volatile uint64_t* p, uint64_t cmpval, uint64_t newval)
+atomic_cmpset_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
uint64_t ret;
@@ -475,6 +515,47 @@ atomic_cmpset_rel_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
return (atomic_cmpset_64(p, cmpval, newval));
}
+static __inline uint32_t
+atomic_fcmpset_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+ uint32_t ret;
+
+ __asm __volatile (
+ "1:\n\t"
+ "lld %0, %1\n\t" /* load old value */
+ "bne %0, %4, 2f\n\t" /* compare */
+ "move %0, %3\n\t" /* value to store */
+ "scd %0, %1\n\t" /* attempt to store */
+ "beqz %0, 1b\n\t" /* if it failed, spin */
+ "j 3f\n\t"
+ "2:\n\t"
+ "sd %0, %2\n\t" /* save old value */
+ "li %0, 0\n\t"
+ "3:\n"
+ : "=&r" (ret), "+m" (*p), "=m" (*cmpval)
+ : "r" (newval), "r" (*cmpval)
+ : "memory");
+
+ return ret;
+}
+
+static __inline uint64_t
+atomic_fcmpset_acq_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+ int retval;
+
+ retval = atomic_fcmpset_64(p, cmpval, newval);
+ mips_sync();
+ return (retval);
+}
+
+static __inline uint64_t
+atomic_fcmpset_rel_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+ mips_sync();
+ return (atomic_fcmpset_64(p, cmpval, newval));
+}
+
/*
* Atomically add the value of v to the integer pointed to by p and return
* the previous value of *p.
@@ -568,6 +649,9 @@ atomic_thread_fence_seq_cst(void)
#define atomic_cmpset_int atomic_cmpset_32
#define atomic_cmpset_acq_int atomic_cmpset_acq_32
#define atomic_cmpset_rel_int atomic_cmpset_rel_32
+#define atomic_fcmpset_int atomic_fcmpset_32
+#define atomic_fcmpset_acq_int atomic_fcmpset_acq_32
+#define atomic_fcmpset_rel_int atomic_fcmpset_rel_32
#define atomic_load_acq_int atomic_load_acq_32
#define atomic_store_rel_int atomic_store_rel_32
#define atomic_readandclear_int atomic_readandclear_32
@@ -597,6 +681,9 @@ atomic_thread_fence_seq_cst(void)
#define atomic_cmpset_long atomic_cmpset_64
#define atomic_cmpset_acq_long atomic_cmpset_acq_64
#define atomic_cmpset_rel_long atomic_cmpset_rel_64
+#define atomic_fcmpset_long atomic_fcmpset_64
+#define atomic_fcmpset_acq_long atomic_fcmpset_acq_64
+#define atomic_fcmpset_rel_long atomic_fcmpset_rel_64
#define atomic_load_acq_long atomic_load_acq_64
#define atomic_store_rel_long atomic_store_rel_64
#define atomic_fetchadd_long atomic_fetchadd_64
@@ -638,6 +725,15 @@ atomic_thread_fence_seq_cst(void)
#define atomic_cmpset_rel_long(p, cmpval, newval) \
atomic_cmpset_rel_32((volatile u_int *)(p), (u_int)(cmpval), \
(u_int)(newval))
+#define atomic_fcmpset_long(p, cmpval, newval) \
+ atomic_fcmpset_32((volatile u_int *)(p), (u_int *)(cmpval), \
+ (u_int)(newval))
+#define atomic_fcmpset_acq_long(p, cmpval, newval) \
+ atomic_fcmpset_acq_32((volatile u_int *)(p), (u_int *)(cmpval), \
+ (u_int)(newval))
+#define atomic_fcmpset_rel_long(p, cmpval, newval) \
+ atomic_fcmpset_rel_32((volatile u_int *)(p), (u_int *)(cmpval), \
+ (u_int)(newval))
#define atomic_load_acq_long(p) \
(u_long)atomic_load_acq_32((volatile u_int *)(p))
#define atomic_store_rel_long(p, v) \
@@ -665,6 +761,9 @@ atomic_thread_fence_seq_cst(void)
#define atomic_cmpset_ptr atomic_cmpset_long
#define atomic_cmpset_acq_ptr atomic_cmpset_acq_long
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_long
+#define atomic_fcmpset_ptr atomic_fcmpset_long
+#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_long
+#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_long
#define atomic_load_acq_ptr atomic_load_acq_long
#define atomic_store_rel_ptr atomic_store_rel_long
#define atomic_readandclear_ptr atomic_readandclear_long
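One behavioral difference worth flagging (an editorial observation, not
part of the commit message): the MIPS sequences above branch back on a
failed sc/scd ("beqz %0, 1b"), so a lost reservation is retried
internally and only a genuine value mismatch reaches the caller, whereas
the arm, arm64, powerpc, and RISC-V versions report a lost reservation as
a failure, per the "Don't retry a lost reservation" log entry. The MIPS
loop shape as C pseudocode (load_linked() and store_conditional() are
hypothetical wrappers):

#include <stdint.h>

/* Hypothetical wrappers standing in for ll/sc. */
uint32_t load_linked(volatile uint32_t *p);
int store_conditional(volatile uint32_t *p, uint32_t v);	/* 1 on success */

static int
fcmpset_mips_shape(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	uint32_t observed;

	for (;;) {
		observed = load_linked(p);		/* ll */
		if (observed != *cmpval) {
			*cmpval = observed;		/* sw: save old value */
			return (0);
		}
		if (store_conditional(p, newval))	/* sc */
			return (1);
		/* reservation lost: retry the whole sequence */
	}
}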
diff --git a/sys/powerpc/include/atomic.h b/sys/powerpc/include/atomic.h
index c4db5ff..cd9fa0d 100644
--- a/sys/powerpc/include/atomic.h
+++ b/sys/powerpc/include/atomic.h
@@ -674,6 +674,129 @@ atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_int
#endif
+/*
+ * Atomically compare the value stored at *p with *cmpval and if the
+ * two values are equal, update the value of *p with newval. Returns
+ * zero if the compare failed and sets *cmpval to the read value from *p,
+ * nonzero otherwise.
+ */
+static __inline int
+atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
+{
+ int ret;
+
+#ifdef __GNUCLIKE_ASM
+ __asm __volatile (
+ "lwarx %0, 0, %3\n\t" /* load old value */
+ "cmplw %4, %0\n\t" /* compare */
+ "bne 1f\n\t" /* exit if not equal */
+ "stwcx. %5, 0, %3\n\t" /* attempt to store */
+ "bne- 1f\n\t" /* exit if failed */
+ "li %0, 1\n\t" /* success - retval = 1 */
+ "b 2f\n\t" /* we've succeeded */
+ "1:\n\t"
+ "stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
+ "stwx %0, 0, %7\n\t"
+ "li %0, 0\n\t" /* failure - retval = 0 */
+ "2:\n\t"
+ : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
+ : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
+ : "cr0", "memory");
+#endif
+
+ return (ret);
+}
+static __inline int
+atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
+{
+ int ret;
+
+#ifdef __GNUCLIKE_ASM
+ __asm __volatile (
+ #ifdef __powerpc64__
+ "ldarx %0, 0, %3\n\t" /* load old value */
+ "cmpld %4, %0\n\t" /* compare */
+ "bne 1f\n\t" /* exit if not equal */
+ "stdcx. %5, 0, %3\n\t" /* attempt to store */
+ #else
+ "lwarx %0, 0, %3\n\t" /* load old value */
+ "cmplw %4, %0\n\t" /* compare */
+ "bne 1f\n\t" /* exit if not equal */
+ "stwcx. %5, 0, %3\n\t" /* attempt to store */
+ #endif
+ "bne- 1f\n\t" /* exit if failed */
+ "li %0, 1\n\t" /* success - retval = 1 */
+ "b 2f\n\t" /* we've succeeded */
+ "1:\n\t"
+ #ifdef __powerpc64__
+ "stdcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
+ "stdx %0, 0, %7\n\t"
+ #else
+ "stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
+ "stwx %0, 0, %7\n\t"
+ #endif
+ "li %0, 0\n\t" /* failure - retval = 0 */
+ "2:\n\t"
+ : "=&r" (ret), "=m" (*p), "=m" (*cmpval)
+ : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
+ : "cr0", "memory");
+#endif
+
+ return (ret);
+}
+
+static __inline int
+atomic_fcmpset_acq_int(volatile u_int *p, u_int *cmpval, u_int newval)
+{
+ int retval;
+
+ retval = atomic_fcmpset_int(p, cmpval, newval);
+ __ATOMIC_ACQ();
+ return (retval);
+}
+
+static __inline int
+atomic_fcmpset_rel_int(volatile u_int *p, u_int *cmpval, u_int newval)
+{
+ __ATOMIC_REL();
+ return (atomic_fcmpset_int(p, cmpval, newval));
+}
+
+static __inline int
+atomic_fcmpset_acq_long(volatile u_long *p, u_long *cmpval, u_long newval)
+{
+ u_long retval;
+
+ retval = atomic_fcmpset_long(p, cmpval, newval);
+ __ATOMIC_ACQ();
+ return (retval);
+}
+
+static __inline int
+atomic_fcmpset_rel_long(volatile u_long *p, u_long *cmpval, u_long newval)
+{
+ __ATOMIC_REL();
+ return (atomic_fcmpset_long(p, cmpval, newval));
+}
+
+#define atomic_fcmpset_32 atomic_fcmpset_int
+#define atomic_fcmpset_acq_32 atomic_fcmpset_acq_int
+#define atomic_fcmpset_rel_32 atomic_fcmpset_rel_int
+
+#ifdef __powerpc64__
+#define atomic_fcmpset_64 atomic_fcmpset_long
+#define atomic_fcmpset_acq_64 atomic_fcmpset_acq_long
+#define atomic_fcmpset_rel_64 atomic_fcmpset_rel_long
+
+#define atomic_fcmpset_ptr atomic_fcmpset_long
+#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_long
+#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_long
+#else
+#define atomic_fcmpset_ptr atomic_fcmpset_int
+#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_int
+#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_int
+#endif
+
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
diff --git a/sys/riscv/include/atomic.h b/sys/riscv/include/atomic.h
index e26b082..33f3d5b 100644
--- a/sys/riscv/include/atomic.h
+++ b/sys/riscv/include/atomic.h
@@ -120,6 +120,31 @@ atomic_cmpset_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
return (!res);
}
+static __inline int
+atomic_fcmpset_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+ uint32_t tmp;
+ int res;
+
+ res = 0;
+
+ __asm __volatile(
+ "0:"
+ "li %1, 1\n" /* Preset to fail */
+ "lr.w %0, %2\n" /* Load old value */
+ "bne %0, %z4, 1f\n" /* Compare */
+ "sc.w %1, %z5, %2\n" /* Try to store new value */
+ "j 2f\n"
+ "1:"
+ "sw %0, %3\n" /* Save old value */
+ "2:"
+ : "=&r" (tmp), "=&r" (res), "+A" (*p), "+A" (*cmpval)
+ : "rJ" (*cmpval), "rJ" (newval)
+ : "memory");
+
+ return (!res);
+}
+
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
@@ -152,6 +177,7 @@ atomic_readandclear_32(volatile uint32_t *p)
#define atomic_add_int atomic_add_32
#define atomic_clear_int atomic_clear_32
#define atomic_cmpset_int atomic_cmpset_32
+#define atomic_fcmpset_int atomic_fcmpset_32
#define atomic_fetchadd_int atomic_fetchadd_32
#define atomic_readandclear_int atomic_readandclear_32
#define atomic_set_int atomic_set_32
@@ -183,6 +209,27 @@ atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
return (atomic_cmpset_32(p, cmpval, newval));
}
+static __inline int
+atomic_fcmpset_acq_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+ int res;
+
+ res = atomic_fcmpset_32(p, cmpval, newval);
+
+ fence();
+
+ return (res);
+}
+
+static __inline int
+atomic_fcmpset_rel_32(volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
+{
+
+ fence();
+
+ return (atomic_fcmpset_32(p, cmpval, newval));
+}
+
static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
@@ -207,6 +254,7 @@ atomic_store_rel_32(volatile uint32_t *p, uint32_t val)
#define atomic_add_acq_int atomic_add_acq_32
#define atomic_clear_acq_int atomic_clear_acq_32
#define atomic_cmpset_acq_int atomic_cmpset_acq_32
+#define atomic_fcmpset_acq_int atomic_fcmpset_acq_32
#define atomic_load_acq_int atomic_load_acq_32
#define atomic_set_acq_int atomic_set_acq_32
#define atomic_subtract_acq_int atomic_subtract_acq_32
@@ -214,6 +262,7 @@ atomic_store_rel_32(volatile uint32_t *p, uint32_t val)
#define atomic_add_rel_int atomic_add_rel_32
#define atomic_clear_rel_int atomic_add_rel_32
#define atomic_cmpset_rel_int atomic_cmpset_rel_32
+#define atomic_fcmpset_rel_int atomic_fcmpset_rel_32
#define atomic_set_rel_int atomic_set_rel_32
#define atomic_subtract_rel_int atomic_subtract_rel_32
#define atomic_store_rel_int atomic_store_rel_32
@@ -281,6 +330,31 @@ atomic_cmpset_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
return (!res);
}
+static __inline int
+atomic_fcmpset_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+ uint64_t tmp;
+ int res;
+
+ res = 0;
+
+ __asm __volatile(
+ "0:"
+ "li %1, 1\n" /* Preset to fail */
+ "lr.d %0, %2\n" /* Load old value */
+ "bne %0, %z4, 1f\n" /* Compare */
+ "sc.d %1, %z5, %2\n" /* Try to store new value */
+ "j 2f\n"
+ "1:"
+ "sd %0, %3\n" /* Save old value */
+ "2:"
+ : "=&r" (tmp), "=&r" (res), "+A" (*p), "+A" (*cmpval)
+ : "rJ" (*cmpval), "rJ" (newval)
+ : "memory");
+
+ return (!res);
+}
+
static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t val)
{
@@ -339,6 +413,7 @@ atomic_swap_64(volatile uint64_t *p, uint64_t val)
#define atomic_add_long atomic_add_64
#define atomic_clear_long atomic_clear_64
#define atomic_cmpset_long atomic_cmpset_64
+#define atomic_fcmpset_long atomic_fcmpset_64
#define atomic_fetchadd_long atomic_fetchadd_64
#define atomic_readandclear_long atomic_readandclear_64
#define atomic_set_long atomic_set_64
@@ -347,6 +422,7 @@ atomic_swap_64(volatile uint64_t *p, uint64_t val)
#define atomic_add_ptr atomic_add_64
#define atomic_clear_ptr atomic_clear_64
#define atomic_cmpset_ptr atomic_cmpset_64
+#define atomic_fcmpset_ptr atomic_fcmpset_64
#define atomic_fetchadd_ptr atomic_fetchadd_64
#define atomic_readandclear_ptr atomic_readandclear_64
#define atomic_set_ptr atomic_set_64
@@ -378,6 +454,27 @@ atomic_cmpset_rel_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
return (atomic_cmpset_64(p, cmpval, newval));
}
+static __inline int
+atomic_fcmpset_acq_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+ int res;
+
+ res = atomic_fcmpset_64(p, cmpval, newval);
+
+ fence();
+
+ return (res);
+}
+
+static __inline int
+atomic_fcmpset_rel_64(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+
+ fence();
+
+ return (atomic_fcmpset_64(p, cmpval, newval));
+}
+
static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{
@@ -402,6 +499,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_add_acq_long atomic_add_acq_64
#define atomic_clear_acq_long atomic_add_acq_64
#define atomic_cmpset_acq_long atomic_cmpset_acq_64
+#define atomic_fcmpset_acq_long atomic_fcmpset_acq_64
#define atomic_load_acq_long atomic_load_acq_64
#define atomic_set_acq_long atomic_set_acq_64
#define atomic_subtract_acq_long atomic_subtract_acq_64
@@ -409,6 +507,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_add_acq_ptr atomic_add_acq_64
#define atomic_clear_acq_ptr atomic_add_acq_64
#define atomic_cmpset_acq_ptr atomic_cmpset_acq_64
+#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_64
#define atomic_load_acq_ptr atomic_load_acq_64
#define atomic_set_acq_ptr atomic_set_acq_64
#define atomic_subtract_acq_ptr atomic_subtract_acq_64
@@ -447,6 +546,7 @@ atomic_thread_fence_seq_cst(void)
#define atomic_add_rel_long atomic_add_rel_64
#define atomic_clear_rel_long atomic_clear_rel_64
#define atomic_cmpset_rel_long atomic_cmpset_rel_64
+#define atomic_fcmpset_rel_long atomic_fcmpset_rel_64
#define atomic_set_rel_long atomic_set_rel_64
#define atomic_subtract_rel_long atomic_subtract_rel_64
#define atomic_store_rel_long atomic_store_rel_64
@@ -454,6 +554,7 @@ atomic_thread_fence_seq_cst(void)
#define atomic_add_rel_ptr atomic_add_rel_64
#define atomic_clear_rel_ptr atomic_clear_rel_64
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_64
+#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_64
#define atomic_set_rel_ptr atomic_set_rel_64
#define atomic_subtract_rel_ptr atomic_subtract_rel_64
#define atomic_store_rel_ptr atomic_store_rel_64
diff --git a/sys/sparc64/include/atomic.h b/sys/sparc64/include/atomic.h
index c70e99a..b2fd8cc 100644
--- a/sys/sparc64/include/atomic.h
+++ b/sys/sparc64/include/atomic.h
@@ -219,6 +219,40 @@ atomic_cmpset_rel_ ## name(volatile ptype p, vtype e, vtype s) \
return (((vtype)atomic_cas_rel((p), (e), (s), sz)) == (e)); \
} \
\
+static __inline int \
+atomic_fcmpset_ ## name(volatile ptype p, vtype *ep, vtype s) \
+{ \
+ vtype t; \
+ \
+ t = (vtype)atomic_cas((p), (*ep), (s), sz); \
+ if (t == (*ep)) \
+ return (1); \
+ *ep = t; \
+ return (0); \
+} \
+static __inline int \
+atomic_fcmpset_acq_ ## name(volatile ptype p, vtype *ep, vtype s) \
+{ \
+ vtype t; \
+ \
+ t = (vtype)atomic_cas_acq((p), (*ep), (s), sz); \
+ if (t == (*ep)) \
+ return (1); \
+ *ep = t; \
+ return (0); \
+} \
+static __inline int \
+atomic_fcmpset_rel_ ## name(volatile ptype p, vtype *ep, vtype s) \
+{ \
+ vtype t; \
+ \
+ t = (vtype)atomic_cas_rel((p), (*ep), (s), sz); \
+ if (t == (*ep)) \
+ return (1); \
+ *ep = t; \
+ return (0); \
+} \
+ \
static __inline vtype \
atomic_load_ ## name(volatile ptype p) \
{ \
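The sparc64 macros above derive all three fcmpset variants from the
value-returning, casa-based atomic_cas primitives. Expanded out of the
macro machinery, the pattern reads as follows (a generic sketch; cas() is
a hypothetical stand-in for the atomic_cas() family):

#include <sys/types.h>

/* Hypothetical value-returning CAS: returns the value observed at *p. */
u_long cas(volatile u_long *p, u_long expected, u_long newval);

static __inline int
fcmpset_from_cas(volatile u_long *p, u_long *ep, u_long s)
{
	u_long t;

	t = cas(p, *ep, s);
	if (t == *ep)
		return (1);	/* observed == expected: swap happened */
	*ep = t;		/* report the observed value on failure */
	return (0);
}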