author     Paul Mundt <lethal@linux-sh.org>   2006-09-27 16:05:56 +0900
committer  Paul Mundt <lethal@linux-sh.org>   2006-09-27 16:05:56 +0900
commit     00b3aa3fc9bd827caaa859de90d9eba831b77d40 (patch)
tree       303ec8e19c9289b1a54d2c67005ac44f68adcd9d /include
parent     bc8bff63bacea47561de34e04a17c79846ecfe91 (diff)
sh: xchg()/__xchg() always_inline fixes for gcc4.
Make __xchg() a macro, so that gcc 4.0 doesn't blow up thanks to always_inline..

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
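For illustration, below is a minimal, standalone C sketch of the size-dispatching xchg() pattern this patch adopts. It is not the asm-sh header itself: the xchg_u32()/xchg_u8() bodies are plain placeholder swaps (the real SH versions mask interrupts), and main() exists only to exercise the macro. The names __xchg, xchg_u32, xchg_u8 and __xchg_called_with_bad_pointer mirror the patch; the undefined extern turns an unsupported operand size into a link-time error rather than a silent fallback, and the statement-expression macro keeps the same xchg(ptr, x) call syntax without relying on always_inline.

/*
 * Standalone sketch of the macro-based __xchg() dispatch (illustrative,
 * not the kernel header): sizeof(*(ptr)) selects the helper at each call
 * site, so no always_inline function body is required.
 */
#include <stdio.h>

/* Never defined anywhere: an unsupported size becomes a link error. */
extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long xchg_u32(volatile unsigned int *m, unsigned long val)
{
        unsigned long retval = *m;      /* placeholder; real code masks IRQs */
        *m = val;
        return retval;
}

static inline unsigned long xchg_u8(volatile unsigned char *m, unsigned long val)
{
        unsigned long retval = *m;      /* placeholder; real code masks IRQs */
        *m = val & 0xff;
        return retval;
}

#define __xchg(ptr, x, size)                                    \
({                                                              \
        unsigned long __xchg__res;                              \
        volatile void *__xchg_ptr = (ptr);                      \
        switch (size) {                                         \
        case 4:                                                 \
                __xchg__res = xchg_u32(__xchg_ptr, x);          \
                break;                                          \
        case 1:                                                 \
                __xchg__res = xchg_u8(__xchg_ptr, x);           \
                break;                                          \
        default:                                                \
                __xchg_called_with_bad_pointer();               \
                __xchg__res = x;                                \
                break;                                          \
        }                                                       \
        __xchg__res;                                            \
})

#define xchg(ptr,x) \
        ((__typeof__(*(ptr)))__xchg((ptr),(unsigned long)(x), sizeof(*(ptr))))

int main(void)
{
        volatile unsigned int word = 5;

        /* Exchanges 9 into 'word' and returns the previous value, 5. */
        unsigned int old = xchg(&word, 9);
        printf("old=%u new=%u\n", old, (unsigned int)word);
        return 0;
}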
Diffstat (limited to 'include')
-rw-r--r--   include/asm-sh/system.h | 57
1 file changed, 32 insertions(+), 25 deletions(-)
diff --git a/include/asm-sh/system.h b/include/asm-sh/system.h
index 198d17e..bd7dc05 100644
--- a/include/asm-sh/system.h
+++ b/include/asm-sh/system.h
@@ -79,10 +79,8 @@ static inline void sched_cacheflush(void)
}
#endif
-#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
-
static __inline__ unsigned long tas(volatile int *m)
-{ /* #define tas(ptr) (xchg((ptr),1)) */
+{
unsigned long retval;
__asm__ __volatile__ ("tas.b @%1\n\t"
@@ -91,8 +89,6 @@ static __inline__ unsigned long tas(volatile int *m)
return retval;
}
-extern void __xchg_called_with_bad_pointer(void);
-
/*
* A brief note on ctrl_barrier(), the control register write barrier.
*
@@ -220,17 +216,17 @@ static __inline__ void local_irq_restore(unsigned long x)
}
}
#else
-#define local_irq_restore(x) do { \
+#define local_irq_restore(x) do { \
if ((x & 0x000000f0) != 0x000000f0) \
- local_irq_enable(); \
+ local_irq_enable(); \
} while (0)
#endif
-#define really_restore_flags(x) do { \
+#define really_restore_flags(x) do { \
if ((x & 0x000000f0) != 0x000000f0) \
- local_irq_enable(); \
+ local_irq_enable(); \
else \
- local_irq_disable(); \
+ local_irq_disable(); \
} while (0)
/*
@@ -272,7 +268,7 @@ do { \
/* For spinlocks etc */
#define local_irq_save(x) x = local_irq_save()
-static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
+static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
unsigned long flags, retval;
@@ -283,7 +279,7 @@ static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
return retval;
}
-static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned long val)
+static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
unsigned long flags, retval;
@@ -294,19 +290,30 @@ static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned long val)
return retval;
}
-static __inline__ unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
-{
- switch (size) {
- case 4:
- return xchg_u32(ptr, x);
- break;
- case 1:
- return xchg_u8(ptr, x);
- break;
- }
- __xchg_called_with_bad_pointer();
- return x;
-}
+extern void __xchg_called_with_bad_pointer(void);
+
+#define __xchg(ptr, x, size) \
+({ \
+ unsigned long __xchg__res; \
+ volatile void *__xchg_ptr = (ptr); \
+ switch (size) { \
+ case 4: \
+ __xchg__res = xchg_u32(__xchg_ptr, x); \
+ break; \
+ case 1: \
+ __xchg__res = xchg_u8(__xchg_ptr, x); \
+ break; \
+ default: \
+ __xchg_called_with_bad_pointer(); \
+ __xchg__res = x; \
+ break; \
+ } \
+ \
+ __xchg__res; \
+})
+
+#define xchg(ptr,x) \
+ ((__typeof__(*(ptr)))__xchg((ptr),(unsigned long)(x), sizeof(*(ptr))))
static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
unsigned long new)