summaryrefslogtreecommitdiffstats
path: root/sys/arm64
diff options
context:
space:
mode:
authormjg <mjg@FreeBSD.org>2017-03-16 06:00:27 +0000
committermjg <mjg@FreeBSD.org>2017-03-16 06:00:27 +0000
commitb8af6b78be197b6a24bf372442dcb6e044d729b3 (patch)
tree0496f4e01ec297971a9b02afb62b80c63b0dd969 /sys/arm64
parent3100d6f6dfd7bea82a2fe7136cf72d8423b715bd (diff)
downloadFreeBSD-src-b8af6b78be197b6a24bf372442dcb6e044d729b3.zip
FreeBSD-src-b8af6b78be197b6a24bf372442dcb6e044d729b3.tar.gz
MFC r311169,r311898,r312925,r312973,r312975,r313007,r313040,r313080,
r313254,r313341

amd64: add atomic_fcmpset
==
sparc64: add atomic_fcmpset
==
Implement atomic_fcmpset_* for arm and arm64.
==
Add atomic_fcmpset_*() inlines for powerpc

Summary: atomic_fcmpset_*() is analogous to atomic_cmpset(), but saves off
the read value from the target memory location into the 'old' pointer in the
case of failure.
==
i386: add atomic_fcmpset
==
Don't retry a lost reservation in atomic_fcmpset()

The desired behavior of atomic_fcmpset_() is to always exit on error.
Instead of retrying on lost reservation, leave the retry to the caller, and
return failure.
==
Add atomic_fcmpset_*() inlines for MIPS

atomic_fcmpset_*() is analogous to atomic_cmpset(), but saves off the read
value from the target memory location into the 'old' pointer.
==
i386: fixup fcmpset

An incorrect output specifier was used which worked with clang by accident,
but breaks with the in-tree gcc version. While here plug a whitespace nit.
==
Implement atomic_fcmpset_*() for RISC-V.
==
Use 64bit store instruction in atomic_fcmpset_64.
Diffstat (limited to 'sys/arm64')
-rw-r--r--sys/arm64/include/atomic.h64
1 files changed, 64 insertions, 0 deletions
diff --git a/sys/arm64/include/atomic.h b/sys/arm64/include/atomic.h
index 41ad6cc..9318de1 100644
--- a/sys/arm64/include/atomic.h
+++ b/sys/arm64/include/atomic.h
@@ -98,6 +98,61 @@ ATOMIC(clear, bic)
ATOMIC(set, orr)
ATOMIC(subtract, sub)
+#define ATOMIC_FCMPSET(bar, a, l) \
+static __inline int \
+atomic_fcmpset_##bar##32(volatile uint32_t *p, uint32_t *cmpval, \
+ uint32_t newval) \
+{ \
+ uint32_t tmp; \
+ uint32_t _cmpval = *cmpval; \
+ int res; \
+ \
+ __asm __volatile( \
+ "1: mov %w1, #1 \n" \
+ " ld"#a"xr %w0, [%2] \n" \
+ " cmp %w0, %w3 \n" \
+ " b.ne 2f \n" \
+ " st"#l"xr %w1, %w4, [%2] \n" \
+ "2:" \
+ : "=&r"(tmp), "=&r"(res) \
+ : "r" (p), "r" (_cmpval), "r" (newval) \
+ : "cc", "memory" \
+ ); \
+ *cmpval = tmp; \
+ \
+ return (!res); \
+} \
+ \
+static __inline int \
+atomic_fcmpset_##bar##64(volatile uint64_t *p, uint64_t *cmpval, \
+ uint64_t newval) \
+{ \
+ uint64_t tmp; \
+ uint64_t _cmpval = *cmpval; \
+ int res; \
+ \
+ __asm __volatile( \
+ "1: mov %w1, #1 \n" \
+ " ld"#a"xr %0, [%2] \n" \
+ " cmp %0, %3 \n" \
+ " b.ne 2f \n" \
+ " st"#l"xr %w1, %4, [%2] \n" \
+ "2:" \
+ : "=&r"(tmp), "=&r"(res) \
+ : "r" (p), "r" (_cmpval), "r" (newval) \
+ : "cc", "memory" \
+ ); \
+ *cmpval = tmp; \
+ \
+ return (!res); \
+}
+
+ATOMIC_FCMPSET( , , )
+ATOMIC_FCMPSET(acq_, a, )
+ATOMIC_FCMPSET(rel_, ,l)
+
+#undef ATOMIC_FCMPSET
+
#define ATOMIC_CMPSET(bar, a, l) \
static __inline int \
atomic_cmpset_##bar##32(volatile uint32_t *p, uint32_t cmpval, \
@@ -311,6 +366,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_add_int atomic_add_32
+#define atomic_fcmpset_int atomic_fcmpset_32
#define atomic_clear_int atomic_clear_32
#define atomic_cmpset_int atomic_cmpset_32
#define atomic_fetchadd_int atomic_fetchadd_32
@@ -320,6 +376,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_int atomic_subtract_32
#define atomic_add_acq_int atomic_add_acq_32
+#define atomic_fcmpset_acq_int atomic_fcmpset_acq_32
#define atomic_clear_acq_int atomic_clear_acq_32
#define atomic_cmpset_acq_int atomic_cmpset_acq_32
#define atomic_load_acq_int atomic_load_acq_32
@@ -327,6 +384,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_acq_int atomic_subtract_acq_32
/* Release-semantics "int" operations alias the 32-bit implementations. */
#define atomic_add_rel_int atomic_add_rel_32
+#define atomic_fcmpset_rel_int atomic_fcmpset_rel_32
/* BUGFIX: previously aliased atomic_add_rel_32; clear must map to clear. */
#define atomic_clear_rel_int atomic_clear_rel_32
#define atomic_cmpset_rel_int atomic_cmpset_rel_32
#define atomic_set_rel_int atomic_set_rel_32
@@ -334,6 +392,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_store_rel_int atomic_store_rel_32
#define atomic_add_long atomic_add_64
+#define atomic_fcmpset_long atomic_fcmpset_64
#define atomic_clear_long atomic_clear_64
#define atomic_cmpset_long atomic_cmpset_64
#define atomic_fetchadd_long atomic_fetchadd_64
@@ -343,6 +402,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_long atomic_subtract_64
#define atomic_add_ptr atomic_add_64
+#define atomic_fcmpset_ptr atomic_fcmpset_64
#define atomic_clear_ptr atomic_clear_64
#define atomic_cmpset_ptr atomic_cmpset_64
#define atomic_fetchadd_ptr atomic_fetchadd_64
@@ -352,6 +412,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_ptr atomic_subtract_64
/* Acquire-semantics "long" operations alias the 64-bit implementations. */
#define atomic_add_acq_long atomic_add_acq_64
+#define atomic_fcmpset_acq_long atomic_fcmpset_acq_64
/* BUGFIX: previously aliased atomic_add_acq_64; clear must map to clear. */
#define atomic_clear_acq_long atomic_clear_acq_64
#define atomic_cmpset_acq_long atomic_cmpset_acq_64
#define atomic_load_acq_long atomic_load_acq_64
@@ -359,6 +420,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_acq_long atomic_subtract_acq_64
/* Acquire-semantics pointer operations alias the 64-bit implementations. */
#define atomic_add_acq_ptr atomic_add_acq_64
+#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_64
/* BUGFIX: previously aliased atomic_add_acq_64; clear must map to clear. */
#define atomic_clear_acq_ptr atomic_clear_acq_64
#define atomic_cmpset_acq_ptr atomic_cmpset_acq_64
#define atomic_load_acq_ptr atomic_load_acq_64
@@ -366,6 +428,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_subtract_acq_ptr atomic_subtract_acq_64
#define atomic_add_rel_long atomic_add_rel_64
+#define atomic_fcmpset_rel_long atomic_fcmpset_rel_64
#define atomic_clear_rel_long atomic_clear_rel_64
#define atomic_cmpset_rel_long atomic_cmpset_rel_64
#define atomic_set_rel_long atomic_set_rel_64
@@ -373,6 +436,7 @@ atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
#define atomic_store_rel_long atomic_store_rel_64
#define atomic_add_rel_ptr atomic_add_rel_64
+#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_64
#define atomic_clear_rel_ptr atomic_clear_rel_64
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_64
#define atomic_set_rel_ptr atomic_set_rel_64
OpenPOWER on IntegriCloud