summaryrefslogtreecommitdiffstats
path: root/sys/powerpc/include
diff options
context:
space:
mode:
authorobrien <obrien@FreeBSD.org>2001-01-01 23:06:59 +0000
committerobrien <obrien@FreeBSD.org>2001-01-01 23:06:59 +0000
commit20531f46519da11794ae59760200817181476bdf (patch)
treee0b2858e53b15d5a27143d7683640f97b995440f /sys/powerpc/include
parentae1d8ec43f146b6656986fbe8304d715a8bab5a9 (diff)
downloadFreeBSD-src-20531f46519da11794ae59760200817181476bdf.zip
FreeBSD-src-20531f46519da11794ae59760200817181476bdf.tar.gz
Shells for the atomic operations FreeBSD needs.
This is just waiting for a budding PowerPC ASM guy to fill in the blanks.
Diffstat (limited to 'sys/powerpc/include')
-rw-r--r--sys/powerpc/include/atomic.h228
1 files changed, 228 insertions, 0 deletions
diff --git a/sys/powerpc/include/atomic.h b/sys/powerpc/include/atomic.h
new file mode 100644
index 0000000..a044891
--- /dev/null
+++ b/sys/powerpc/include/atomic.h
@@ -0,0 +1,228 @@
+/*-
+ * Copyright (c) 2001 David E. O'Brien
+ * Copyright (c) 1998 Doug Rabson
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * $FreeBSD$
+ */
+
+#ifndef _MACHINE_ATOMIC_H_
+#define _MACHINE_ATOMIC_H_
+
+/*
+ * Various simple arithmetic on memory which is atomic in the presence
+ * of interrupts and SMP safe.
+ */
+
+void atomic_set_8(volatile u_int8_t *, u_int8_t);
+void atomic_clear_8(volatile u_int8_t *, u_int8_t);
+void atomic_add_8(volatile u_int8_t *, u_int8_t);
+void atomic_subtract_8(volatile u_int8_t *, u_int8_t);
+
+void atomic_set_16(volatile u_int16_t *, u_int16_t);
+void atomic_clear_16(volatile u_int16_t *, u_int16_t);
+void atomic_add_16(volatile u_int16_t *, u_int16_t);
+void atomic_subtract_16(volatile u_int16_t *, u_int16_t);
+
/*
 * Atomically set (OR in) the bits of `v` in `*p`.
 *
 * NOTE(review): the original was an empty shell awaiting hand-written
 * PowerPC lwarx/stwcx. assembly; implemented here with the compiler's
 * __atomic builtin, which emits the equivalent LL/SC retry loop and a
 * full barrier (seq_cst) so it is interrupt- and SMP-safe.
 */
static __inline void atomic_set_32(volatile u_int32_t *p, u_int32_t v)
{
	__atomic_fetch_or(p, v, __ATOMIC_SEQ_CST);
}
+
/*
 * Atomically clear (AND out) the bits of `v` in `*p`.
 *
 * NOTE(review): original shell had no asm template; implemented with
 * __atomic_fetch_and on the complement of the mask, per FreeBSD's
 * atomic_clear semantics (*p &= ~v).
 */
static __inline void atomic_clear_32(volatile u_int32_t *p, u_int32_t v)
{
	__atomic_fetch_and(p, ~v, __ATOMIC_SEQ_CST);
}
+
/*
 * Atomically add `v` to `*p` (unsigned arithmetic; wraps modulo 2^32).
 *
 * NOTE(review): original shell had no asm template; implemented with
 * __atomic_fetch_add, which compiles to the lwarx/add/stwcx. loop
 * on PowerPC.
 */
static __inline void atomic_add_32(volatile u_int32_t *p, u_int32_t v)
{
	__atomic_fetch_add(p, v, __ATOMIC_SEQ_CST);
}
+
/*
 * Atomically subtract `v` from `*p` (unsigned arithmetic; wraps
 * modulo 2^32).
 *
 * NOTE(review): original shell had no asm template; implemented with
 * __atomic_fetch_sub.
 */
static __inline void atomic_subtract_32(volatile u_int32_t *p, u_int32_t v)
{
	__atomic_fetch_sub(p, v, __ATOMIC_SEQ_CST);
}
+
/*
 * Atomically read the current value of `*addr` and replace it with 0,
 * as a single indivisible operation; returns the old value.
 *
 * NOTE(review): original shell had no asm template; implemented as an
 * atomic exchange with 0.
 */
static __inline u_int32_t atomic_readandclear_32(volatile u_int32_t *addr)
{
	return (__atomic_exchange_n(addr, (u_int32_t)0, __ATOMIC_SEQ_CST));
}
+
/*
 * Atomically set (OR in) the bits of `v` in `*p` (64-bit).
 *
 * NOTE(review): original shell had no asm template; implemented with
 * the compiler's __atomic builtin.  On 32-bit PowerPC there is no
 * 64-bit LL/SC, so this relies on libatomic/compiler support — TODO
 * confirm for the target configuration.
 */
static __inline void atomic_set_64(volatile u_int64_t *p, u_int64_t v)
{
	__atomic_fetch_or(p, v, __ATOMIC_SEQ_CST);
}
+
/*
 * Atomically clear (AND out) the bits of `v` in `*p` (64-bit):
 * *p &= ~v.
 *
 * NOTE(review): original shell had no asm template; implemented with
 * the compiler's __atomic builtin.
 */
static __inline void atomic_clear_64(volatile u_int64_t *p, u_int64_t v)
{
	__atomic_fetch_and(p, ~v, __ATOMIC_SEQ_CST);
}
+
/*
 * Atomically add `v` to `*p` (64-bit, unsigned; wraps modulo 2^64).
 *
 * NOTE(review): original shell had no asm template; implemented with
 * the compiler's __atomic builtin.
 */
static __inline void atomic_add_64(volatile u_int64_t *p, u_int64_t v)
{
	__atomic_fetch_add(p, v, __ATOMIC_SEQ_CST);
}
+
/*
 * Atomically subtract `v` from `*p` (64-bit, unsigned; wraps modulo
 * 2^64).
 *
 * NOTE(review): original shell had no asm template; implemented with
 * the compiler's __atomic builtin.
 */
static __inline void atomic_subtract_64(volatile u_int64_t *p, u_int64_t v)
{
	__atomic_fetch_sub(p, v, __ATOMIC_SEQ_CST);
}
+
/*
 * Atomically read the current value of `*addr` and replace it with 0
 * (64-bit), as a single indivisible operation; returns the old value.
 *
 * NOTE(review): original shell had no asm template; implemented as an
 * atomic exchange with 0.
 */
static __inline u_int64_t atomic_readandclear_64(volatile u_int64_t *addr)
{
	return (__atomic_exchange_n(addr, (u_int64_t)0, __ATOMIC_SEQ_CST));
}
+
+#define atomic_set_char atomic_set_8
+#define atomic_clear_char atomic_clear_8
+#define atomic_add_char atomic_add_8
+#define atomic_subtract_char atomic_subtract_8
+
+#define atomic_set_short atomic_set_16
+#define atomic_clear_short atomic_clear_16
+#define atomic_add_short atomic_add_16
+#define atomic_subtract_short atomic_subtract_16
+
+#define atomic_set_int atomic_set_32
+#define atomic_clear_int atomic_clear_32
+#define atomic_add_int atomic_add_32
+#define atomic_subtract_int atomic_subtract_32
+#define atomic_readandclear_int atomic_readandclear_32
+
+#define atomic_set_long atomic_set_32
+#define atomic_clear_long atomic_clear_32
+#define atomic_add_long atomic_add_32
+#define atomic_subtract_long atomic_subtract_32
+#define atomic_readandclear_long atomic_readandclear_32
+
+#define atomic_set_long_long atomic_set_64
+#define atomic_clear_long_long atomic_clear_64
+#define atomic_add_long_long atomic_add_64
+#define atomic_subtract_long_long atomic_subtract_64
+#define atomic_readandclear_long_long atomic_readandclear_64
+
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval.  Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, u_int32_t cmpval, u_int32_t newval)
{
	/*
	 * NOTE(review): original shell had no asm template.  Strong CAS
	 * (weak=0): no spurious failure, so the boolean result directly
	 * matches the documented contract.  cmpval is a by-value copy,
	 * so the builtin's write-back of the observed value on failure
	 * is invisible to the caller.
	 */
	return (__atomic_compare_exchange_n(p, &cmpval, newval, 0,
	    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
}
+
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval.  Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline u_int64_t
atomic_cmpset_64(volatile u_int64_t *p, u_int64_t cmpval, u_int64_t newval)
{
	/*
	 * NOTE(review): original shell had no asm template.  Strong CAS
	 * (weak=0) on the 64-bit quantity; see atomic_cmpset_32 for the
	 * rationale.  64-bit CAS needs compiler/libatomic support on
	 * 32-bit PowerPC — TODO confirm for the target configuration.
	 */
	return (__atomic_compare_exchange_n(p, &cmpval, newval, 0,
	    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
}
+
+#define atomic_cmpset_int atomic_cmpset_32
+#define atomic_cmpset_long atomic_cmpset_32
+#define atomic_cmpset_long_long atomic_cmpset_64
+
+static __inline int
+atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
+{
+
+ return (
+ atomic_cmpset_long((volatile u_long *)dst, (u_long)exp, (u_long)src));
+}
+
+#endif /* ! _MACHINE_ATOMIC_H_ */
OpenPOWER on IntegriCloud