author     phk <phk@FreeBSD.org>    2000-09-06 11:21:14 +0000
committer  phk <phk@FreeBSD.org>    2000-09-06 11:21:14 +0000
commit     6c07bccbf798848cb394146e35ef94c64121accd (patch)
tree       bb9380dc43e99fb9dd5abf7fdae1d22ac81d97c1
parent     75d7416f31678a75fbee4c150d761d197d6809da (diff)
Introduce atomic_cmpset_int() and atomic_cmpset_long() from SMPng a
few hours earlier than the rest.

The next DEVFS commit needs these functions.

Alpha versions by:      dfr
i386 versions by:       jakeb
Approved by:            SMPng
-rw-r--r--  sys/alpha/include/atomic.h  105
-rw-r--r--  sys/amd64/amd64/atomic.c      1
-rw-r--r--  sys/amd64/include/atomic.h   72
-rw-r--r--  sys/i386/i386/atomic.c        1
-rw-r--r--  sys/i386/include/atomic.h    72
5 files changed, 232 insertions(+), 19 deletions(-)
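
Both headers document the same contract for the new primitives: compare the value at *dst with exp, store src only on a match, and return nonzero on success, zero on failure. Expressed as ordinary C purely for illustration (this sequence is NOT atomic, and the function name cmpset_contract is made up for the example; the real versions below use the ldl_l/stl_c load-locked/store-conditional pair on Alpha and cmpxchgl on i386):

	/*
	 * Illustration only: the compare-and-set contract written as
	 * plain C.  This sequence is not atomic by itself; the
	 * implementations in the diff below make it so.
	 */
	static int
	cmpset_contract(volatile u_int *dst, u_int exp, u_int src)
	{
		if (*dst == exp) {
			*dst = src;	/* store only if the old value matched */
			return (1);	/* nonzero: success */
		}
		return (0);		/* zero: compare failed */
	}
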
diff --git a/sys/alpha/include/atomic.h b/sys/alpha/include/atomic.h
index d8b0499..5cc2c3a 100644
--- a/sys/alpha/include/atomic.h
+++ b/sys/alpha/include/atomic.h
@@ -34,25 +34,25 @@
* of interrupts and SMP safe.
*/
-void atomic_set_8(u_int8_t *, u_int8_t);
-void atomic_clear_8(u_int8_t *, u_int8_t);
-void atomic_add_8(u_int8_t *, u_int8_t);
-void atomic_subtract_8(u_int8_t *, u_int8_t);
-
-void atomic_set_16(u_int16_t *, u_int16_t);
-void atomic_clear_16(u_int16_t *, u_int16_t);
-void atomic_add_16(u_int16_t *, u_int16_t);
-void atomic_subtract_16(u_int16_t *, u_int16_t);
-
-void atomic_set_32(u_int32_t *, u_int32_t);
-void atomic_clear_32(u_int32_t *, u_int32_t);
-void atomic_add_32(u_int32_t *, u_int32_t);
-void atomic_subtract_32(u_int32_t *, u_int32_t);
-
-void atomic_set_64(u_int64_t *, u_int64_t);
-void atomic_clear_64(u_int64_t *, u_int64_t);
-void atomic_add_64(u_int64_t *, u_int64_t);
-void atomic_subtract_64(u_int64_t *, u_int64_t);
+void atomic_set_8(volatile u_int8_t *, u_int8_t);
+void atomic_clear_8(volatile u_int8_t *, u_int8_t);
+void atomic_add_8(volatile u_int8_t *, u_int8_t);
+void atomic_subtract_8(volatile u_int8_t *, u_int8_t);
+
+void atomic_set_16(volatile u_int16_t *, u_int16_t);
+void atomic_clear_16(volatile u_int16_t *, u_int16_t);
+void atomic_add_16(volatile u_int16_t *, u_int16_t);
+void atomic_subtract_16(volatile u_int16_t *, u_int16_t);
+
+void atomic_set_32(volatile u_int32_t *, u_int32_t);
+void atomic_clear_32(volatile u_int32_t *, u_int32_t);
+void atomic_add_32(volatile u_int32_t *, u_int32_t);
+void atomic_subtract_32(volatile u_int32_t *, u_int32_t);
+
+void atomic_set_64(volatile u_int64_t *, u_int64_t);
+void atomic_clear_64(volatile u_int64_t *, u_int64_t);
+void atomic_add_64(volatile u_int64_t *, u_int64_t);
+void atomic_subtract_64(volatile u_int64_t *, u_int64_t);
#define atomic_set_char atomic_set_8
#define atomic_clear_char atomic_clear_8
@@ -74,4 +74,71 @@ void atomic_subtract_64(u_int64_t *, u_int64_t);
#define atomic_add_long atomic_add_64
#define atomic_subtract_long atomic_subtract_64
+/*
+ * Atomically compare the value stored at *p with cmpval and if the
+ * two values are equal, update the value of *p with newval. Returns
+ * zero if the compare failed, nonzero otherwise.
+ */
+static __inline u_int32_t
+atomic_cmpset_32(volatile u_int32_t* p, u_int32_t cmpval, u_int32_t newval)
+{
+ u_int32_t ret, temp;
+
+ __asm __volatile (
+ "1:\tldl_l %1, %5\n\t" /* load old value */
+ "cmpeq %1, %3, %0\n\t" /* compare */
+ "beq %0, 2f\n\t" /* exit if not equal */
+ "mov %4, %1\n\t" /* value to store */
+ "stl_c %1, %2\n\t" /* attempt to store */
+ "beq %1, 3f\n\t" /* if it failed, spin */
+ "2:\n" /* done */
+ ".section .text3,\"ax\"\n" /* improve branch prediction */
+ "3:\tbr 1b\n" /* try again */
+ ".previous\n"
+ : "=&r" (ret), "=r" (temp), "=m" (*p)
+ : "r" (cmpval), "r" (newval), "m" (*p)
+ : "memory");
+
+ return ret;
+}
+
+/*
+ * Atomically compare the value stored at *p with cmpval and if the
+ * two values are equal, update the value of *p with newval. Returns
+ * zero if the compare failed, nonzero otherwise.
+ */
+static __inline u_int64_t
+atomic_cmpset_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
+{
+ u_int64_t ret, temp;
+
+ __asm __volatile (
+ "1:\tldq_l %1, %5\n\t" /* load old value */
+ "cmpeq %1, %3, %0\n\t" /* compare */
+ "beq %0, 2f\n\t" /* exit if not equal */
+ "mov %4, %1\n\t" /* value to store */
+ "stq_c %1, %2\n\t" /* attempt to store */
+ "beq %1, 3f\n\t" /* if it failed, spin */
+ "2:\n" /* done */
+ ".section .text3,\"ax\"\n" /* improve branch prediction */
+ "3:\tbr 1b\n" /* try again */
+ ".previous\n"
+ : "=&r" (ret), "=r" (temp), "=m" (*p)
+ : "r" (cmpval), "r" (newval), "m" (*p)
+ : "memory");
+
+ return ret;
+}
+
+#define atomic_cmpset_int atomic_cmpset_32
+#define atomic_cmpset_long atomic_cmpset_64
+
+static __inline int
+atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
+{
+
+ return (
+ atomic_cmpset_long((volatile u_long *)dst, (u_long)exp, (u_long)src));
+}
+
#endif /* ! _MACHINE_ATOMIC_H_ */
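
The usual consumer wraps atomic_cmpset_32() in a retry loop: read the current value, compute the new one, and retry if another CPU raced in between. A minimal sketch of that idiom (fetch_and_add_32 is a hypothetical helper, not part of this commit; atomic_add_32() already covers plain increments, this only shows how the new primitive is consumed):

	/*
	 * Sketch of the standard cmpset retry idiom.  Hypothetical
	 * helper for illustration; not part of this commit.
	 */
	static __inline u_int32_t
	fetch_and_add_32(volatile u_int32_t *p, u_int32_t v)
	{
		u_int32_t old;

		do {
			old = *p;	/* snapshot the current value */
		} while (atomic_cmpset_32(p, old, old + v) == 0);

		return (old);		/* value observed before the add */
	}
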
diff --git a/sys/amd64/amd64/atomic.c b/sys/amd64/amd64/atomic.c
index c19ba9e..ee1333b 100644
--- a/sys/amd64/amd64/atomic.c
+++ b/sys/amd64/amd64/atomic.c
@@ -40,6 +40,7 @@
#undef ATOMIC_ASM
/* Make atomic.h generate public functions */
+#define WANT_FUNCTIONS
#define static
#undef __inline
#define __inline
diff --git a/sys/amd64/include/atomic.h b/sys/amd64/include/atomic.h
index e6af303..35a710e 100644
--- a/sys/amd64/include/atomic.h
+++ b/sys/amd64/include/atomic.h
@@ -65,9 +65,15 @@
#define ATOMIC_ASM(NAME, TYPE, OP, V) \
extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);
+extern int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
+
#else /* !KLD_MODULE */
#if defined(SMP)
+#if defined(LOCORE)
+#define MPLOCKED lock ;
+#else
#define MPLOCKED "lock ; "
+#endif
#else
#define MPLOCKED
#endif
@@ -87,6 +93,62 @@ atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
: "0" (*p), "ir" (V)); \
}
+/*
+ * Atomic compare and set, used by the mutex functions
+ *
+ * if (*dst == exp) *dst = src (all 32 bit words)
+ *
+ * Returns 0 on failure, non-zero on success
+ */
+
+#if defined(I386_CPU)
+static __inline int
+atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
+{
+ int res = exp;
+
+ __asm __volatile(
+ " pushfl ; "
+ " cli ; "
+ " cmpl %1,%3 ; "
+ " jne 1f ; "
+ " movl %2,%3 ; "
+ "1: "
+ " sete %%al; "
+ " movzbl %%al,%0 ; "
+ " popfl ; "
+ "# atomic_cmpset_int"
+ : "=a" (res) /* 0 (result) */
+ : "0" (exp), /* 1 */
+ "r" (src), /* 2 */
+ "m" (*(dst)) /* 3 */
+ : "memory");
+
+ return (res);
+}
+#else /* defined(I386_CPU) */
+static __inline int
+atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
+{
+ int res = exp;
+
+ __asm __volatile (
+ " " MPLOCKED " "
+ " cmpxchgl %2,%3 ; "
+ " setz %%al ; "
+ " movzbl %%al,%0 ; "
+ "1: "
+ "# atomic_cmpset_int"
+ : "=a" (res) /* 0 (result) */
+ : "0" (exp), /* 1 */
+ "r" (src), /* 2 */
+ "m" (*(dst)) /* 3 */
+ : "memory");
+
+ return (res);
+}
+#endif /* defined(I386_CPU) */
+
#else
/* gcc <= 2.8 version */
#define ATOMIC_ASM(NAME, TYPE, OP, V) \
@@ -148,4 +210,14 @@ ATOMIC_ASM(subtract, long, "subl %1,%0", v)
#endif
+#ifndef WANT_FUNCTIONS
+static __inline int
+atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
+{
+
+ return (
+ atomic_cmpset_int((volatile u_int *)dst, (u_int)exp, (u_int)src));
+}
+#endif
+
#endif /* ! _MACHINE_ATOMIC_H_ */
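
The header comment above notes that the primitive is "used by the mutex functions". A deliberately simplified sketch of that pattern follows (toy names, and a fraction of what the real SMPng mutexes do; it is shown only to illustrate why a failed compare returns 0 so the caller can spin):

	/*
	 * Toy spin lock built on atomic_cmpset_int().  Hypothetical
	 * illustration; the real SMPng mutex code adds owner tracking,
	 * sleeping, and priority handling.
	 */
	static __inline void
	toy_spin_lock(volatile u_int *lk)
	{
		while (atomic_cmpset_int(lk, 0, 1) == 0)
			;		/* spin until the 0 -> 1 transition wins */
	}

	static __inline void
	toy_spin_unlock(volatile u_int *lk)
	{
		*lk = 0;	/* plain store; assumes ia32's ordered stores suffice here */
	}
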
diff --git a/sys/i386/i386/atomic.c b/sys/i386/i386/atomic.c
index c19ba9e..ee1333b 100644
--- a/sys/i386/i386/atomic.c
+++ b/sys/i386/i386/atomic.c
@@ -40,6 +40,7 @@
#undef ATOMIC_ASM
/* Make atomic.h generate public functions */
+#define WANT_FUNCTIONS
#define static
#undef __inline
#define __inline
diff --git a/sys/i386/include/atomic.h b/sys/i386/include/atomic.h
index e6af303..35a710e 100644
--- a/sys/i386/include/atomic.h
+++ b/sys/i386/include/atomic.h
@@ -65,9 +65,15 @@
#define ATOMIC_ASM(NAME, TYPE, OP, V) \
extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);
+extern int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
+
#else /* !KLD_MODULE */
#if defined(SMP)
+#if defined(LOCORE)
+#define MPLOCKED lock ;
+#else
#define MPLOCKED "lock ; "
+#endif
#else
#define MPLOCKED
#endif
@@ -87,6 +93,62 @@ atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
: "0" (*p), "ir" (V)); \
}
+/*
+ * Atomic compare and set, used by the mutex functions
+ *
+ * if (*dst == exp) *dst = src (all 32 bit words)
+ *
+ * Returns 0 on failure, non-zero on success
+ */
+
+#if defined(I386_CPU)
+static __inline int
+atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
+{
+ int res = exp;
+
+ __asm __volatile(
+ " pushfl ; "
+ " cli ; "
+ " cmpl %1,%3 ; "
+ " jne 1f ; "
+ " movl %2,%3 ; "
+ "1: "
+ " sete %%al; "
+ " movzbl %%al,%0 ; "
+ " popfl ; "
+ "# atomic_cmpset_int"
+ : "=a" (res) /* 0 (result) */
+ : "0" (exp), /* 1 */
+ "r" (src), /* 2 */
+ "m" (*(dst)) /* 3 */
+ : "memory");
+
+ return (res);
+}
+#else /* defined(I386_CPU) */
+static __inline int
+atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
+{
+ int res = exp;
+
+ __asm __volatile (
+ " " MPLOCKED " "
+ " cmpxchgl %2,%3 ; "
+ " setz %%al ; "
+ " movzbl %%al,%0 ; "
+ "1: "
+ "# atomic_cmpset_int"
+ : "=a" (res) /* 0 (result) */
+ : "0" (exp), /* 1 */
+ "r" (src), /* 2 */
+ "m" (*(dst)) /* 3 */
+ : "memory");
+
+ return (res);
+}
+#endif /* defined(I386_CPU) */
+
#else
/* gcc <= 2.8 version */
#define ATOMIC_ASM(NAME, TYPE, OP, V) \
@@ -148,4 +210,14 @@ ATOMIC_ASM(subtract, long, "subl %1,%0", v)
#endif
+#ifndef WANT_FUNCTIONS
+static __inline int
+atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
+{
+
+ return (
+ atomic_cmpset_int((volatile u_int *)dst, (u_int)exp, (u_int)src));
+}
+#endif
+
#endif /* ! _MACHINE_ATOMIC_H_ */
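
atomic_cmpset_ptr() exists so pointer-sized fields can be updated with the same primitive; on i386 it simply casts through to atomic_cmpset_int(). A classic application is a lock-free push onto a singly-linked stack, sketched here with a hypothetical node type that is not part of this commit:

	/* Hypothetical node type, for illustration only. */
	struct node {
		struct node	*next;
	};

	/*
	 * Lock-free push: link the new node to the current head and
	 * publish it with a compare-and-set, retrying if another CPU
	 * changed the head in between.
	 */
	static __inline void
	lockfree_push(struct node *volatile *head, struct node *n)
	{
		do {
			n->next = *head;	/* link to the current top */
		} while (atomic_cmpset_ptr((volatile void *)head,
		    n->next, n) == 0);
	}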