summaryrefslogtreecommitdiffstats
path: root/lib/libpthread/arch/amd64
diff options
context:
space:
mode:
authordeischen <deischen@FreeBSD.org>2003-09-03 17:56:26 +0000
committerdeischen <deischen@FreeBSD.org>2003-09-03 17:56:26 +0000
commit919bc52171f32bfe264b987934e1055b6901ac6d (patch)
tree4224af62a2f45f6a320c58acdd7f40a2692ca686 /lib/libpthread/arch/amd64
parent43632098e791da21a8b261c5b05b55cba97ae911 (diff)
downloadFreeBSD-src-919bc52171f32bfe264b987934e1055b6901ac6d.zip
FreeBSD-src-919bc52171f32bfe264b987934e1055b6901ac6d.tar.gz
Don't assume sizeof(long) = sizeof(int) on x86; use int
instead of long types for low-level locks. Add prototypes for some internal libc functions that are wrapped by the library as cancellation points. Add memory barriers to alpha atomic swap functions (submitted by davidxu). Requested by: bde
Diffstat (limited to 'lib/libpthread/arch/amd64')
-rw-r--r--lib/libpthread/arch/amd64/include/atomic_ops.h14
1 file changed, 11 insertions, 3 deletions
diff --git a/lib/libpthread/arch/amd64/include/atomic_ops.h b/lib/libpthread/arch/amd64/include/atomic_ops.h
index 5edb533..980eb8e 100644
--- a/lib/libpthread/arch/amd64/include/atomic_ops.h
+++ b/lib/libpthread/arch/amd64/include/atomic_ops.h
@@ -33,17 +33,25 @@
* Atomic swap:
* Atomic (tmp = *dst, *dst = val), then *res = tmp
*
- * void atomic_swap_long(long *dst, long val, long *res);
+ * void atomic_swap64(intptr_t *dst, intptr_t val, intptr_t *res);
*/
static inline void
-atomic_swap_long(long *dst, long val, long *res)
+atomic_swap64(intptr_t *dst, intptr_t val, intptr_t *res)
{
__asm __volatile(
"xchgq %2, %1; movq %2, %0"
: "=m" (*res) : "m" (*dst), "r" (val) : "memory");
}
+static inline void
+atomic_swap_int(int *dst, int val, int *res)
+{
+ __asm __volatile(
+ "xchgl %2, %1; movl %2, %0"
+ : "=m" (*res) : "m" (*dst), "r" (val) : "memory");
+}
+
#define atomic_swap_ptr(d, v, r) \
- atomic_swap_long((long *)(d), (long)(v), (long *)(r))
+ atomic_swap64((intptr_t *)(d), (intptr_t)(v), (intptr_t *)(r))
#endif
OpenPOWER on IntegriCloud