author    | Peter Zijlstra <peterz@infradead.org> | 2014-04-23 19:32:50 +0200
committer | Thomas Gleixner <tglx@linutronix.de>  | 2015-07-27 14:06:24 +0200
commit    | e6942b7de2dfe44ebde9bae57dadece5abca9de8 (patch)
tree      | b235317c5f6d200bcc25b43a406237a0d15319cf /arch
parent    | 2957c035395e492463d7f589af9dd32388967bbb (diff)
atomic: Provide atomic_{or,xor,and}
Implement atomic logic ops -- atomic_{or,xor,and}.
These will replace the atomic_{set,clear}_mask functions that are
available on some archs.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
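
For orientation only (not part of the patch or its commit message): a minimal sketch of how callers are expected to migrate from the old mask helpers to the new logic ops introduced here. The flag name EXAMPLE_FLAG and the variable example_state are hypothetical, invented purely for illustration.

#include <linux/atomic.h>

/* Hypothetical flag bit and atomic word, for illustration only. */
#define EXAMPLE_FLAG	0x01
static atomic_t example_state = ATOMIC_INIT(0);

static void example_usage(void)
{
	/* Replaces atomic_set_mask() on the archs that provided it. */
	atomic_or(EXAMPLE_FLAG, &example_state);	/* atomically set bits */

	/* Replaces atomic_clear_mask() on the archs that provided it. */
	atomic_and(~EXAMPLE_FLAG, &example_state);	/* atomically clear bits */

	/* Newly available alongside or/and: atomically flip bits. */
	atomic_xor(EXAMPLE_FLAG, &example_state);
}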
Diffstat (limited to 'arch')
25 files changed, 0 insertions, 46 deletions
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 0eff853..e8c9560 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -110,7 +110,6 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_andnot atomic_andnot
 #define atomic64_andnot atomic64_andnot
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index e90b701..2a84782 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -144,7 +144,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add, +=, add)
 ATOMIC_OPS(sub, -=, sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_andnot atomic_andnot
 ATOMIC_OP(and, &=, and)
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index ff214ba..82b75a7 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -194,7 +194,6 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 ATOMIC_OPS(add, +=, add)
 ATOMIC_OPS(sub, -=, sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_andnot atomic_andnot
 ATOMIC_OP(and, &=, and)
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index 2876173..866a71f 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -85,7 +85,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add, add)
 ATOMIC_OPS(sub, sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_andnot atomic_andnot
 ATOMIC_OP(and, and)
diff --git a/arch/avr32/include/asm/atomic.h b/arch/avr32/include/asm/atomic.h
index 115d300..97c9bdf 100644
--- a/arch/avr32/include/asm/atomic.h
+++ b/arch/avr32/include/asm/atomic.h
@@ -51,8 +51,6 @@ static inline void atomic_##op(int i, atomic_t *v) \
 	(void)__atomic_##op##_return(i, v); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and, and)
 ATOMIC_OP(or, or)
 ATOMIC_OP(xor, eor)
diff --git a/arch/blackfin/include/asm/atomic.h b/arch/blackfin/include/asm/atomic.h
index eafa55b..2d6a7a3 100644
--- a/arch/blackfin/include/asm/atomic.h
+++ b/arch/blackfin/include/asm/atomic.h
@@ -28,8 +28,6 @@ asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
 #define atomic_add_return(i, v) __raw_atomic_add_asm(&(v)->counter, i)
 #define atomic_sub_return(i, v) __raw_atomic_add_asm(&(v)->counter, -(i))
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 #define atomic_or(i, v) (void)__raw_atomic_or_asm(&(v)->counter, i)
 #define atomic_and(i, v) (void)__raw_atomic_and_asm(&(v)->counter, i)
 #define atomic_xor(i, v) (void)__raw_atomic_xor_asm(&(v)->counter, i)
diff --git a/arch/frv/include/asm/atomic.h b/arch/frv/include/asm/atomic.h
index 74d2245..fc48bea 100644
--- a/arch/frv/include/asm/atomic.h
+++ b/arch/frv/include/asm/atomic.h
@@ -192,8 +192,6 @@ static inline void atomic64_##op(long long i, atomic64_t *v) \
 	(void)__atomic64_fetch_##op(i, &v->counter); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(or)
 ATOMIC_OP(and)
 ATOMIC_OP(xor)
diff --git a/arch/h8300/include/asm/atomic.h b/arch/h8300/include/asm/atomic.h
index f181f82..c4d061f 100644
--- a/arch/h8300/include/asm/atomic.h
+++ b/arch/h8300/include/asm/atomic.h
@@ -41,8 +41,6 @@ static inline void atomic_##op(int i, atomic_t *v) \
 ATOMIC_OP_RETURN(add, +=)
 ATOMIC_OP_RETURN(sub, -=)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and, &=)
 ATOMIC_OP(or, |=)
 ATOMIC_OP(xor, ^=)
diff --git a/arch/hexagon/include/asm/atomic.h b/arch/hexagon/include/asm/atomic.h
index 4efe2c7..811d61f 100644
--- a/arch/hexagon/include/asm/atomic.h
+++ b/arch/hexagon/include/asm/atomic.h
@@ -132,8 +132,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
index 0809ef5..be4beeb 100644
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -69,8 +69,6 @@ ATOMIC_OP(sub, -)
 		: ia64_atomic_sub(__ia64_asr_i, v); \
 })
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and, &)
 ATOMIC_OP(or, |)
 ATOMIC_OP(xor, ^)
diff --git a/arch/m32r/include/asm/atomic.h b/arch/m32r/include/asm/atomic.h
index 7245463..b2a13fb 100644
--- a/arch/m32r/include/asm/atomic.h
+++ b/arch/m32r/include/asm/atomic.h
@@ -94,8 +94,6 @@ static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
diff --git a/arch/m68k/include/asm/atomic.h b/arch/m68k/include/asm/atomic.h
index c30e43e..93ebd96 100644
--- a/arch/m68k/include/asm/atomic.h
+++ b/arch/m68k/include/asm/atomic.h
@@ -77,8 +77,6 @@ static inline int atomic_##op##_return(int i, atomic_t * v) \
 ATOMIC_OPS(add, +=, add)
 ATOMIC_OPS(sub, -=, sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and, &=, and)
 ATOMIC_OP(or, |=, or)
 ATOMIC_OP(xor, ^=, eor)
diff --git a/arch/metag/include/asm/atomic_lnkget.h b/arch/metag/include/asm/atomic_lnkget.h
index 930c12c..0642606 100644
--- a/arch/metag/include/asm/atomic_lnkget.h
+++ b/arch/metag/include/asm/atomic_lnkget.h
@@ -74,8 +74,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 0430ba6..4c42fd9 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -137,8 +137,6 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \
 ATOMIC_OPS(add, +=, addu)
 ATOMIC_OPS(sub, -=, subu)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and, &=, and)
 ATOMIC_OP(or, |=, or)
 ATOMIC_OP(xor, ^=, xor)
diff --git a/arch/mn10300/include/asm/atomic.h b/arch/mn10300/include/asm/atomic.h
index 03eea81..f5a63f0 100644
--- a/arch/mn10300/include/asm/atomic.h
+++ b/arch/mn10300/include/asm/atomic.h
@@ -89,8 +89,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index be2c50d..2536965 100644
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -126,8 +126,6 @@ static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add, +=)
 ATOMIC_OPS(sub, -=)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and, &=)
 ATOMIC_OP(or, |=)
 ATOMIC_OP(xor, ^=)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 6ca89e2..55f106e 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -67,8 +67,6 @@ static __inline__ int atomic_##op##_return(int a, atomic_t *v) \
 ATOMIC_OPS(add, add)
 ATOMIC_OPS(sub, subf)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and, and)
 ATOMIC_OP(or, or)
 ATOMIC_OP(xor, xor)
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index b3859d8..d761aef 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -282,8 +282,6 @@ static inline void atomic64_##op(long i, atomic64_t *v) \
 	__ATOMIC64_LOOP(v, i, __ATOMIC64_##OP, __ATOMIC64_NO_BARRIER); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC64_OP(and, AND)
 ATOMIC64_OP(or, OR)
 ATOMIC64_OP(xor, XOR)
diff --git a/arch/sh/include/asm/atomic-grb.h b/arch/sh/include/asm/atomic-grb.h
index 4b03830..b94df40 100644
--- a/arch/sh/include/asm/atomic-grb.h
+++ b/arch/sh/include/asm/atomic-grb.h
@@ -48,8 +48,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
diff --git a/arch/sparc/include/asm/atomic_32.h b/arch/sparc/include/asm/atomic_32.h
index e19d888..7dcbebb 100644
--- a/arch/sparc/include/asm/atomic_32.h
+++ b/arch/sparc/include/asm/atomic_32.h
@@ -17,8 +17,6 @@
 #include <asm/barrier.h>
 #include <asm-generic/atomic64.h>
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 #define ATOMIC_INIT(i) { (i) }
 int atomic_add_return(int, atomic_t *);
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h
index d6af27c..917084a 100644
--- a/arch/sparc/include/asm/atomic_64.h
+++ b/arch/sparc/include/asm/atomic_64.h
@@ -33,8 +33,6 @@ long atomic64_##op##_return(long, atomic64_t *);
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
diff --git a/arch/tile/include/asm/atomic_32.h b/arch/tile/include/asm/atomic_32.h
index 9423792..d320ce2 100644
--- a/arch/tile/include/asm/atomic_32.h
+++ b/arch/tile/include/asm/atomic_32.h
@@ -41,8 +41,6 @@ static inline void atomic_##op(int i, atomic_t *v) \
 	_atomic_##op((unsigned long *)&v->counter, i); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
diff --git a/arch/tile/include/asm/atomic_64.h b/arch/tile/include/asm/atomic_64.h
index d07d9fc..096a56d 100644
--- a/arch/tile/include/asm/atomic_64.h
+++ b/arch/tile/include/asm/atomic_64.h
@@ -58,8 +58,6 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 	return oldval;
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 static inline void atomic_and(int i, atomic_t *v)
 {
 	__insn_fetchand4((void *)&v->counter, i);
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index f3a3ec0..b349302 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -191,8 +191,6 @@ static inline void atomic_##op(int i, atomic_t *v) \
 			: "memory"); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
diff --git a/arch/xtensa/include/asm/atomic.h b/arch/xtensa/include/asm/atomic.h
index 4dd2450..31371f4 100644
--- a/arch/xtensa/include/asm/atomic.h
+++ b/arch/xtensa/include/asm/atomic.h
@@ -145,8 +145,6 @@ static inline int atomic_##op##_return(int i, atomic_t * v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)