author     cognet <cognet@FreeBSD.org>   2005-04-07 22:03:04 +0000
committer  cognet <cognet@FreeBSD.org>   2005-04-07 22:03:04 +0000
commit     64c6be3ab3957c3d915832744d0aebfa4b30fd08 (patch)
tree       723347bcaefa967ae7ce5ab1cc1227521850fc57
parent     0eff5864a9b8ec46d3f6a458fb10ad7b10c18cbe (diff)
Import a basic implementation of restartable atomic sequences (RAS) to provide
atomic operations to userland (this is OK for UP only, but SMP is still far off).
-rw-r--r--  sys/arm/include/asmacros.h  |  16
-rw-r--r--  sys/arm/include/atomic.h    | 170
2 files changed, 148 insertions, 38 deletions
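
For context: a restartable atomic sequence brackets a short load-modify-store window by writing the window's start and end addresses to two magic locations (0xe0000004 and 0xe0000008 here). On a uniprocessor the only way such a window can lose atomicity is by being preempted, so the kernel simply rewinds the saved program counter to the start of the window whenever it interrupts a thread inside it, and the sequence re-executes from the top. A minimal usage sketch of the cmpset primitive this patch adds is shown below; the counter and function names are illustrative and not part of the commit.

```c
#include <sys/types.h>
#include <machine/atomic.h>	/* atomic_cmpset_32() from the patch below */

/*
 * Illustrative only: increment a shared counter with the RAS-backed cmpset.
 * If the sequence inside atomic_cmpset_32() is preempted, the kernel restarts
 * it, so the compare-and-store pair behaves atomically on a uniprocessor.
 */
static void
counter_inc(volatile u_int32_t *counter)
{
	u_int32_t old;

	do {
		old = *counter;
	} while (atomic_cmpset_32(counter, old, old + 1) == 0);
}
```
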
diff --git a/sys/arm/include/asmacros.h b/sys/arm/include/asmacros.h
index bf5795c..6054080 100644
--- a/sys/arm/include/asmacros.h
+++ b/sys/arm/include/asmacros.h
@@ -113,6 +113,21 @@
add r0, sp, #(4*13); /* Adjust the stack pointer */ \
stmia r0, {r13-r14}^; /* Push the user mode registers */ \
mov r0, r0; /* NOP for previous instruction */ \
+ ldr r5, =0xe0000004; /* Check if there's any RAS */ \
+ ldr r3, [r5]; \
+ cmp r3, #0; /* Is the update needed ? */ \
+ beq 1f; \
+ ldr lr, [r0, #16]; \
+ ldr r1, =0xe0000008; \
+ ldr r4, [r1]; /* Get the end of the RAS */ \
+ mov r2, #0; /* Reset the magic addresses */ \
+ str r2, [r5]; \
+ str r2, [r1]; \
+ cmp lr, r3; /* Were we in the RAS ? */ \
+ blt 1f; \
+ cmp lr, r4; \
+ strlt r3, [r0, #16]; /* Yes, update the pc */ \
+ 1: \
mrs r0, spsr_all; /* Put the SPSR on the stack */ \
str r0, [sp, #-4]!
@@ -168,7 +183,6 @@ name:
#define AST_LOCALS ;\
.Lcurthread: ;\
.word _C_LABEL(__pcpu) + PC_CURTHREAD
-
#endif /* LOCORE */
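
The hunk above is the kernel half of the scheme: on every exception taken from user mode, the entry macro loads the RAS start and end words, clears them, and, if the saved user PC falls inside the registered window, rewrites the saved PC so the sequence restarts from its first instruction on return to userland. A C restatement of that check is sketched below; it is purely illustrative (the real code is the assembly above, and ras_check is a hypothetical name).

```c
#include <sys/types.h>

/*
 * Illustrative restatement of the assembly check above.  The two pointers
 * mirror the magic words at 0xe0000004/0xe0000008; frame_pc points at the
 * saved user PC slot in the trapframe.
 */
#define ARM_RAS_START_ADDR	((volatile u_int32_t *)0xe0000004)
#define ARM_RAS_END_ADDR	((volatile u_int32_t *)0xe0000008)

static void
ras_check(u_int32_t *frame_pc)
{
	u_int32_t start = *ARM_RAS_START_ADDR;
	u_int32_t end = *ARM_RAS_END_ADDR;

	if (start == 0)
		return;			/* No sequence registered. */
	*ARM_RAS_START_ADDR = 0;	/* Reset the magic words. */
	*ARM_RAS_END_ADDR = 0;
	if (*frame_pc >= start && *frame_pc < end)
		*frame_pc = start;	/* Restart the interrupted sequence. */
}
```
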
diff --git a/sys/arm/include/atomic.h b/sys/arm/include/atomic.h
index 2eaa066..e44a25c 100644
--- a/sys/arm/include/atomic.h
+++ b/sys/arm/include/atomic.h
@@ -71,6 +71,9 @@
: "cc" ); \
} while(0)
+#define ARM_RAS_START 0xe0000004
+#define ARM_RAS_END 0xe0000008
+
static __inline uint32_t
__swp(uint32_t val, volatile uint32_t *ptr)
{
@@ -80,76 +83,169 @@ __swp(uint32_t val, volatile uint32_t *ptr)
}
-#define atomic_op(v, op, p) ({ \
- uint32_t e, r, s; \
- for (e = *(volatile uint32_t *)p;; e = r) { \
- s = e op v; \
- r = __swp(s, p); \
- if (r == e) \
- break; \
- } \
- e; \
-})
+#ifdef _KERNEL
static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
- atomic_op(setmask, |, address);
+ __with_interrupts_disabled(*address |= setmask);
}
static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
- atomic_op(clearmask, &~, address);
+ __with_interrupts_disabled(*address &= ~clearmask);
}
-static __inline int
-atomic_load_32(volatile uint32_t *v)
+static __inline u_int32_t
+atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
-
- return (atomic_op(0, +, v));
+ int ret;
+
+ __with_interrupts_disabled(
+ {
+ if (*p == cmpval) {
+ *p = newval;
+ ret = 1;
+ } else {
+ ret = 0;
+ }
+ });
+ return (ret);
}
static __inline void
-atomic_store_32(volatile uint32_t *dst, uint32_t src)
+atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
- __swp(src, dst);
+ __with_interrupts_disabled(*p += val);
}
-static __inline uint32_t
-atomic_readandclear_32(volatile u_int32_t *p)
+static __inline void
+atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
-
- return (__swp(0, p));
+ __with_interrupts_disabled(*p -= val);
}
-#ifdef _KERNEL
+#else /* !_KERNEL */
+
static __inline u_int32_t
-atomic_cmpset_32(volatile u_int32_t *p, u_int32_t cmpval, u_int32_t newval)
+atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
- int done;
-
- __with_interrupts_disabled(
- {
- if (*p == cmpval) {
- *p = newval;
- done = 1;
- } else
- done = 0;
- });
+ register int done, ras_start;
+
+ __asm __volatile("1:\n"
+ "mov %0, #0xe0000008\n"
+ "adr %1, 2f\n"
+ "str %1, [%0]\n"
+ "adr %1, 1b\n"
+ "mov %0, #0xe0000004\n"
+ "str %1, [%0]\n"
+ "ldr %1, [%2]\n"
+ "cmp %1, %3\n"
+ "streq %4, [%2]\n"
+ "2:\n"
+ "moveq %1, #1\n"
+ "movne %1, #0\n"
+ : "=r" (ras_start), "=r" (done)
+ ,"+r" (p), "+r" (cmpval), "+r" (newval));
return (done);
}
-#endif
static __inline void
atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
- atomic_op(val, +, p);
+ int ras_start, start;
+
+ __asm __volatile("1:\n"
+ "mov %0, #0xe0000008\n"
+ "adr %1, 2f\n"
+ "str %1, [%0]\n"
+ "adr %1, 1b\n"
+ "mov %0, #0xe0000004\n"
+ "str %1, [%0]\n"
+ "ldr %1, [%2]\n"
+ "add %1, %1, %3\n"
+ "str %1, [%2]\n"
+ "2:\n"
+ : "=r" (ras_start), "=r" (start), "+r" (p), "+r" (val));
}
static __inline void
atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
- atomic_op(val, -, p);
+ int ras_start, start;
+
+ __asm __volatile("1:\n"
+ "mov %0, #0xe0000008\n"
+ "adr %1, 2f\n"
+ "str %1, [%0]\n"
+ "adr %1, 1b\n"
+ "mov %0, #0xe0000004\n"
+ "str %1, [%0]\n"
+ "ldr %1, [%2]\n"
+ "sub %1, %1, %3\n"
+ "str %1, [%2]\n"
+ "2:\n"
+ : "=r" (ras_start), "=r" (start), "+r" (p), "+r" (val));
+}
+
+static __inline void
+atomic_set_32(volatile uint32_t *address, uint32_t setmask)
+{
+ int ras_start, start;
+
+ __asm __volatile("1:\n"
+ "mov %0, #0xe0000008\n"
+ "adr %1, 2f\n"
+ "str %1, [%0]\n"
+ "adr %1, 1b\n"
+ "mov %0, #0xe0000004\n"
+ "str %1, [%0]\n"
+ "ldr %1, [%2]\n"
+ "orr %1, %1, %3\n"
+ "str %1, [%2]\n"
+ "2:\n"
+ : "=r" (ras_start), "=r" (start), "+r" (address), "+r" (setmask));
+}
+
+static __inline void
+atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
+{
+ int ras_start, start;
+
+ __asm __volatile("1:\n"
+ "mov %0, #0xe0000008\n"
+ "adr %1, 2f\n"
+ "str %1, [%0]\n"
+ "adr %1, 1b\n"
+ "mov %0, #0xe0000004\n"
+ "str %1, [%0]\n"
+ "ldr %1, [%2]\n"
+ "bic %1, %1, %3\n"
+ "str %1, [%2]\n"
+ "2:\n"
+ : "=r" (ras_start), "=r" (start), "+r" (address), "+r" (clearmask));
+
+}
+#endif /* _KERNEL */
+
+static __inline int
+atomic_load_32(volatile uint32_t *v)
+{
+
+ return (*v);
+}
+
+static __inline void
+atomic_store_32(volatile uint32_t *dst, uint32_t src)
+{
+ *dst = src;
+}
+
+static __inline uint32_t
+atomic_readandclear_32(volatile u_int32_t *p)
+{
+
+ return (__swp(0, p));
}
#undef __with_interrupts_disabled
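
In the new atomic.h, the kernel variants simply wrap the read-modify-write in __with_interrupts_disabled(), while the userland variants register a RAS: each one stores the address of its trailing label into ARM_RAS_END (0xe0000008) and the address of its leading label into ARM_RAS_START (0xe0000004), then performs a plain load, operation, and store. If the thread is preempted inside that window, the kernel check shown earlier rewinds it to the start, so the sequence completes atomically on UP without interlocked instructions. atomic_load_32(), atomic_store_32(), and the __swp-based atomic_readandclear_32() are shared by both sides. A small userland usage sketch follows; the counter and function names are illustrative.

```c
#include <sys/types.h>
#include <machine/atomic.h>

/* Illustrative only: a shared event counter maintained with the new ops. */
static volatile u_int32_t events_seen;

static void
record_event(void)
{
	/* RAS-protected load/add/store; atomic w.r.t. preemption on UP. */
	atomic_add_32(&events_seen, 1);
}

static u_int32_t
drain_events(void)
{
	/* __swp-based read-and-clear, usable from kernel or userland. */
	return (atomic_readandclear_32(&events_seen));
}
```
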