author     Tejun Heo <tj@kernel.org>    2014-06-17 19:12:39 -0400
committer  Tejun Heo <tj@kernel.org>    2014-06-17 19:12:39 -0400
commit     47b69ad673d9aa53c1d6032a6a522fc0ce8d6fc1 (patch)
tree       73f5da096b50009412498af666e828a8297e87d5 /include/asm-generic/percpu.h
parent     dcba4333683c3a0642fd575e475c6c740122a037 (diff)
percpu: move generic {raw|this}_cpu_*_N() definitions to include/asm-generic/percpu.h
{raw|this}_cpu_*_N() operations are expected to be provided by archs, and the generic definitions are provided as fallbacks. As such, these firmly belong to include/asm-generic/percpu.h.

Move the generic definitions to include/asm-generic/percpu.h. The code is moved mostly verbatim; however, raw_cpu_*_N() are placed above this_cpu_*_N(), which is more conventional as the raw operations may be used to define other variants.

This is pure reorganization.

Signed-off-by: Tejun Heo <tj@kernel.org>
Acked-by: Christoph Lameter <cl@linux.com>
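To illustrate the fallback mechanism the message describes (a minimal, hypothetical sketch -- the arch path and helper name are invented and not part of this patch): an arch that has a faster 4-byte per-cpu read defines raw_cpu_read_4() before including the generic header, and the #ifndef guards below then skip only that size:

/* arch/foo/include/asm/percpu.h -- hypothetical arch header */
#define raw_cpu_read_4(pcp)  arch_fast_percpu_read_4(&(pcp))  /* invented helper */

#include <asm-generic/percpu.h>  /* still supplies the _1, _2 and _8 variants */

The size-generic accessors (raw_cpu_read(), this_cpu_read(), ...) dispatch on sizeof(pcp), so 4-byte accesses use the arch version while every other size falls back to the generic definitions.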
Diffstat (limited to 'include/asm-generic/percpu.h')
-rw-r--r--  include/asm-generic/percpu.h  341
1 file changed, 341 insertions, 0 deletions
diff --git a/include/asm-generic/percpu.h b/include/asm-generic/percpu.h
index e5ace4d..932ce60 100644
--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -65,4 +65,345 @@ extern void setup_per_cpu_areas(void);
#define PER_CPU_DEF_ATTRIBUTES
#endif
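+/*
+ * Fallback read ops: an arch that can do better defines raw_cpu_read_N()
+ * itself before this point; each #ifndef below only fills in the sizes
+ * (1, 2, 4 and 8 bytes) that are still missing.
+ */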
+# ifndef raw_cpu_read_1
+# define raw_cpu_read_1(pcp) (*raw_cpu_ptr(&(pcp)))
+# endif
+# ifndef raw_cpu_read_2
+# define raw_cpu_read_2(pcp) (*raw_cpu_ptr(&(pcp)))
+# endif
+# ifndef raw_cpu_read_4
+# define raw_cpu_read_4(pcp) (*raw_cpu_ptr(&(pcp)))
+# endif
+# ifndef raw_cpu_read_8
+# define raw_cpu_read_8(pcp) (*raw_cpu_ptr(&(pcp)))
+# endif
+
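+/*
+ * Apply the C assignment operator @op (=, +=, &=, |=) with @val to this
+ * CPU's instance of @pcp.  "raw" means no preemption or IRQ protection;
+ * the caller guarantees it cannot migrate or race.
+ */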
+#define raw_cpu_generic_to_op(pcp, val, op) \
+do { \
+ *raw_cpu_ptr(&(pcp)) op val; \
+} while (0)
+
+# ifndef raw_cpu_write_1
+# define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef raw_cpu_write_2
+# define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef raw_cpu_write_4
+# define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef raw_cpu_write_8
+# define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), =)
+# endif
+
+# ifndef raw_cpu_add_1
+# define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef raw_cpu_add_2
+# define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef raw_cpu_add_4
+# define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef raw_cpu_add_8
+# define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=)
+# endif
+
+# ifndef raw_cpu_and_1
+# define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef raw_cpu_and_2
+# define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef raw_cpu_and_4
+# define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef raw_cpu_and_8
+# define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=)
+# endif
+
+# ifndef raw_cpu_or_1
+# define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef raw_cpu_or_2
+# define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef raw_cpu_or_4
+# define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef raw_cpu_or_8
+# define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=)
+# endif
+
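+/* Add @val to this CPU's @pcp and return the new value (non-atomic). */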
+#define raw_cpu_generic_add_return(pcp, val) \
+({ \
+ raw_cpu_add(pcp, val); \
+ raw_cpu_read(pcp); \
+})
+
+# ifndef raw_cpu_add_return_1
+# define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef raw_cpu_add_return_2
+# define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef raw_cpu_add_return_4
+# define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef raw_cpu_add_return_8
+# define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
+# endif
+
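+/* Return the old value of this CPU's @pcp and store @nval (non-atomic). */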
+#define raw_cpu_generic_xchg(pcp, nval) \
+({ typeof(pcp) ret__; \
+ ret__ = raw_cpu_read(pcp); \
+ raw_cpu_write(pcp, nval); \
+ ret__; \
+})
+
+# ifndef raw_cpu_xchg_1
+# define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef raw_cpu_xchg_2
+# define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef raw_cpu_xchg_4
+# define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef raw_cpu_xchg_8
+# define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
+# endif
+
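+/*
+ * Store @nval into this CPU's @pcp only if it currently equals @oval;
+ * either way, return the value that was read.  Non-atomic: safe only
+ * with preemption and interrupts excluded by the caller.
+ */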
+#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+ typeof(pcp) ret__; \
+ ret__ = raw_cpu_read(pcp); \
+ if (ret__ == (oval)) \
+ raw_cpu_write(pcp, nval); \
+ ret__; \
+})
+
+# ifndef raw_cpu_cmpxchg_1
+# define raw_cpu_cmpxchg_1(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef raw_cpu_cmpxchg_2
+# define raw_cpu_cmpxchg_2(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef raw_cpu_cmpxchg_4
+# define raw_cpu_cmpxchg_4(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef raw_cpu_cmpxchg_8
+# define raw_cpu_cmpxchg_8(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+
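+/*
+ * Update two per-cpu variables as a pair: both are written only if both
+ * match their expected old values.  Returns 1 on success, 0 otherwise.
+ */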
+#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({ \
+ int __ret = 0; \
+ if (raw_cpu_read(pcp1) == (oval1) && \
+ raw_cpu_read(pcp2) == (oval2)) { \
+ raw_cpu_write(pcp1, (nval1)); \
+ raw_cpu_write(pcp2, (nval2)); \
+ __ret = 1; \
+ } \
+ (__ret); \
+})
+
+# ifndef raw_cpu_cmpxchg_double_1
+# define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+ raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef raw_cpu_cmpxchg_double_2
+# define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+ raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef raw_cpu_cmpxchg_double_4
+# define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+ raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef raw_cpu_cmpxchg_double_8
+# define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+ raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+
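+/*
+ * this_cpu_*() fallbacks: unlike the raw versions above, these must be
+ * safe against preemption.  A plain read only needs preemption disabled
+ * around the access.
+ */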
+#define _this_cpu_generic_read(pcp) \
+({ typeof(pcp) ret__; \
+ preempt_disable(); \
+ ret__ = *this_cpu_ptr(&(pcp)); \
+ preempt_enable(); \
+ ret__; \
+})
+
+# ifndef this_cpu_read_1
+# define this_cpu_read_1(pcp) _this_cpu_generic_read(pcp)
+# endif
+# ifndef this_cpu_read_2
+# define this_cpu_read_2(pcp) _this_cpu_generic_read(pcp)
+# endif
+# ifndef this_cpu_read_4
+# define this_cpu_read_4(pcp) _this_cpu_generic_read(pcp)
+# endif
+# ifndef this_cpu_read_8
+# define this_cpu_read_8(pcp) _this_cpu_generic_read(pcp)
+# endif
+
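+/*
+ * Read-modify-write ops must also be safe against interrupt handlers
+ * touching the same variable, so disable IRQs around the update.
+ */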
+#define _this_cpu_generic_to_op(pcp, val, op) \
+do { \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ *raw_cpu_ptr(&(pcp)) op val; \
+ raw_local_irq_restore(flags); \
+} while (0)
+
+# ifndef this_cpu_write_1
+# define this_cpu_write_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef this_cpu_write_2
+# define this_cpu_write_2(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef this_cpu_write_4
+# define this_cpu_write_4(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
+# endif
+# ifndef this_cpu_write_8
+# define this_cpu_write_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
+# endif
+
+# ifndef this_cpu_add_1
+# define this_cpu_add_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef this_cpu_add_2
+# define this_cpu_add_2(pcp, val) _this_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef this_cpu_add_4
+# define this_cpu_add_4(pcp, val) _this_cpu_generic_to_op((pcp), (val), +=)
+# endif
+# ifndef this_cpu_add_8
+# define this_cpu_add_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), +=)
+# endif
+
+# ifndef this_cpu_and_1
+# define this_cpu_and_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef this_cpu_and_2
+# define this_cpu_and_2(pcp, val) _this_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef this_cpu_and_4
+# define this_cpu_and_4(pcp, val) _this_cpu_generic_to_op((pcp), (val), &=)
+# endif
+# ifndef this_cpu_and_8
+# define this_cpu_and_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), &=)
+# endif
+
+# ifndef this_cpu_or_1
+# define this_cpu_or_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef this_cpu_or_2
+# define this_cpu_or_2(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef this_cpu_or_4
+# define this_cpu_or_4(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
+# endif
+# ifndef this_cpu_or_8
+# define this_cpu_or_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
+# endif
+
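+/* IRQ-safe add-and-return built on the raw ops. */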
+#define _this_cpu_generic_add_return(pcp, val) \
+({ \
+ typeof(pcp) ret__; \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ raw_cpu_add(pcp, val); \
+ ret__ = raw_cpu_read(pcp); \
+ raw_local_irq_restore(flags); \
+ ret__; \
+})
+
+# ifndef this_cpu_add_return_1
+# define this_cpu_add_return_1(pcp, val) _this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_2
+# define this_cpu_add_return_2(pcp, val) _this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_4
+# define this_cpu_add_return_4(pcp, val) _this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_8
+# define this_cpu_add_return_8(pcp, val) _this_cpu_generic_add_return(pcp, val)
+# endif
+
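+/* IRQ-safe exchange built on the raw ops. */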
+#define _this_cpu_generic_xchg(pcp, nval) \
+({ typeof(pcp) ret__; \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ ret__ = raw_cpu_read(pcp); \
+ raw_cpu_write(pcp, nval); \
+ raw_local_irq_restore(flags); \
+ ret__; \
+})
+
+# ifndef this_cpu_xchg_1
+# define this_cpu_xchg_1(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_2
+# define this_cpu_xchg_2(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_4
+# define this_cpu_xchg_4(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_8
+# define this_cpu_xchg_8(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
+# endif
+
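+/* IRQ-safe compare-and-exchange built on the raw ops. */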
+#define _this_cpu_generic_cmpxchg(pcp, oval, nval) \
+({ \
+ typeof(pcp) ret__; \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ ret__ = raw_cpu_read(pcp); \
+ if (ret__ == (oval)) \
+ raw_cpu_write(pcp, nval); \
+ raw_local_irq_restore(flags); \
+ ret__; \
+})
+
+# ifndef this_cpu_cmpxchg_1
+# define this_cpu_cmpxchg_1(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_2
+# define this_cpu_cmpxchg_2(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_4
+# define this_cpu_cmpxchg_4(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_8
+# define this_cpu_cmpxchg_8(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+
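+/* IRQ-safe double compare-and-exchange; wraps the raw variant. */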
+#define _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+({ \
+ int ret__; \
+ unsigned long flags; \
+ raw_local_irq_save(flags); \
+ ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
+ oval1, oval2, nval1, nval2); \
+ raw_local_irq_restore(flags); \
+ ret__; \
+})
+
+# ifndef this_cpu_cmpxchg_double_1
+# define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+ _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef this_cpu_cmpxchg_double_2
+# define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+ _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef this_cpu_cmpxchg_double_4
+# define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+ _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+# ifndef this_cpu_cmpxchg_double_8
+# define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
+ _this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
+# endif
+
#endif /* _ASM_GENERIC_PERCPU_H_ */