author | Vineet Gupta <vgupta@synopsys.com> | 2015-08-05 19:10:02 +0530 |
---|---|---|
committer | Vineet Gupta <vgupta@synopsys.com> | 2015-08-20 18:15:59 +0530 |
commit | 31d30c8208a38a0442cc01a9c7f6542489c76353 (patch) | |
tree | 24c482f1ba8dfac5de3c7dd8588fb1c5c76b7acc /arch/arc | |
parent | 1648c70d301e669ba03aa1c70fff46ec2c400414 (diff) | |
download | op-kernel-dev-31d30c8208a38a0442cc01a9c7f6542489c76353.zip op-kernel-dev-31d30c8208a38a0442cc01a9c7f6542489c76353.tar.gz |
ARC: add barriers to futex code
The atomic ops on futex need to provide the full barrier, just like the
regular atomics in the kernel.
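To make the ordering requirement concrete, here is a minimal sketch (illustrative only, not the ARC code in this patch; the helper name is made up) of what "full barrier" semantics around a read-modify-write mean: an smp_mb() on each side of the operation, which is where the patch below places the barriers around the futex inline-asm sequences.

```c
#include <linux/atomic.h>
#include <asm/barrier.h>

/*
 * Hypothetical helper for illustration: a fully ordered fetch-add built
 * from a relaxed RMW by bracketing it with full barriers, mirroring the
 * ordering the futex ops are expected to provide.
 */
static inline int fully_ordered_fetch_add(int i, atomic_t *v)
{
	int old;

	smp_mb();				/* order earlier accesses before the RMW */
	old = atomic_fetch_add_relaxed(i, v);	/* the RMW itself, no ordering */
	smp_mb();				/* order the RMW before later accesses */

	return old;
}
```

The diff below applies the same bracketing around the inline-asm sequences in __futex_atomic_op() and futex_atomic_cmpxchg_inatomic().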
Also remove the pagefault_enable()/pagefault_disable() calls in
futex_atomic_cmpxchg_inatomic(), as the core code already does that.
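For context, a rough paraphrase (simplified here, not part of this patch) of how the generic futex code in kernel/futex.c already wraps the arch hook with pagefault_disable()/pagefault_enable(), which is why the arch routine can drop them:

```c
#include <linux/types.h>
#include <linux/uaccess.h>
#include <asm/futex.h>

/*
 * Simplified paraphrase of the caller in kernel/futex.c; the _sketch
 * suffix is ours, the real function is cmpxchg_futex_value_locked().
 */
static int cmpxchg_futex_value_locked_sketch(u32 *curval, u32 __user *uaddr,
					     u32 uval, u32 newval)
{
	int ret;

	pagefault_disable();	/* done by the core, not by arch code */
	ret = futex_atomic_cmpxchg_inatomic(curval, uaddr, uval, newval);
	pagefault_enable();

	return ret;
}
```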
Cc: David Hildenbrand <dahi@linux.vnet.ibm.com>
Cc: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Michel Lespinasse <walken@google.com>
Signed-off-by: Vineet Gupta <vgupta@synopsys.com>
Diffstat (limited to 'arch/arc')
-rw-r--r-- | arch/arc/include/asm/futex.h | 21 |
1 file changed, 10 insertions, 11 deletions
```diff
diff --git a/arch/arc/include/asm/futex.h b/arch/arc/include/asm/futex.h
index 70cfe16..9de18a5 100644
--- a/arch/arc/include/asm/futex.h
+++ b/arch/arc/include/asm/futex.h
@@ -20,6 +20,7 @@
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
 							\
+	smp_mb();					\
 	__asm__ __volatile__(				\
 	"1:	llock	%1, [%2]		\n"	\
 		insn				"\n"	\
@@ -40,12 +41,14 @@
 							\
 	: "=&r" (ret), "=&r" (oldval)			\
 	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
-	: "cc", "memory")
+	: "cc", "memory");				\
+	smp_mb()					\
 
 #else	/* !CONFIG_ARC_HAS_LLSC */
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
 							\
+	smp_mb();					\
 	__asm__ __volatile__(				\
 	"1:	ld	%1, [%2]		\n"	\
 		insn				"\n"	\
@@ -65,7 +68,8 @@
 							\
 	: "=&r" (ret), "=&r" (oldval)			\
 	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
-	: "cc", "memory")
+	: "cc", "memory");				\
+	smp_mb()					\
 
 #endif
 
@@ -134,13 +138,8 @@ static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
 	return ret;
 }
 
-/* Compare-xchg with pagefaults disabled.
- *  Notes:
- *   -Best-Effort: Exchg happens only if compare succeeds.
- *    If compare fails, returns; leaving retry/looping to upper layers
- *   -successful cmp-xchg: return orig value in @addr (same as cmp val)
- *   -Compare fails: return orig value in @addr
- *   -user access r/w fails: return -EFAULT
+/*
+ * cmpxchg of futex (pagefaults disabled by caller)
  */
 static inline int
 futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
@@ -151,7 +150,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
 	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
 		return -EFAULT;
 
-	pagefault_disable();
+	smp_mb();
 
 	__asm__ __volatile__(
 #ifdef CONFIG_ARC_HAS_LLSC
@@ -178,7 +177,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
 	: "r"(oldval), "r"(newval), "r"(uaddr), "ir"(-EFAULT)
 	: "cc", "memory");
 
-	pagefault_enable();
+	smp_mb();
 
 	*uval = val;
 	return val;
```