Commit 00b3aa3f authored by Paul Mundt

sh: xchg()/__xchg() always_inline fixes for gcc4.

Make __xchg() a macro, so that gcc 4.0 doesn't blow up thanks to
always_inline.
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
parent bc8bff63
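
The shape of the fix, before the diff: the byte/word dispatch moves out of an inline function and into a GNU C statement-expression macro, so every call site expands to a switch on the compile-time constant sizeof(*(ptr)) and nothing depends on gcc 4.0's always_inline handling. Below is a minimal standalone sketch of that pattern, not kernel code: all demo_* names are invented, the swap is not atomic, and it should be built with optimization (e.g. gcc -O2) so the dead default branch and its deliberately undefined call are discarded.

	/* Statement-expression macro dispatching on operand size at compile time. */
	extern void demo_bad_size(void);	/* left undefined: bad sizes fail to link */

	static inline unsigned long demo_swap_u32(volatile unsigned int *m,
						  unsigned long val)
	{
		unsigned long old = *m;		/* plain load/store, not atomic */
		*m = val;
		return old;
	}

	#define demo_xchg(ptr, x, size)						\
	({									\
		unsigned long __res;						\
		switch (size) {							\
		case 4:								\
			__res = demo_swap_u32((volatile unsigned int *)(ptr), (x)); \
			break;							\
		default:							\
			demo_bad_size();	/* branch folds away for size == 4 */ \
			__res = (unsigned long)(x);				\
			break;							\
		}								\
		__res;								\
	})

	int main(void)
	{
		volatile unsigned int v = 2;
		/* size is a constant here, so the switch is resolved at compile
		 * time and no out-of-line __xchg()-style body is ever emitted. */
		return (int)demo_xchg(&v, 7, sizeof(v));	/* old value: 2 */
	}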
--- a/include/asm-sh/system.h
+++ b/include/asm-sh/system.h
@@ -79,10 +79,8 @@ static inline void sched_cacheflush(void)
 }
 #endif
 
-#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
-
 static __inline__ unsigned long tas(volatile int *m)
-{ /* #define tas(ptr) (xchg((ptr),1)) */
+{
 	unsigned long retval;
 
 	__asm__ __volatile__ ("tas.b	@%1\n\t"
@@ -91,8 +89,6 @@ static __inline__ unsigned long tas(volatile int *m)
 	return retval;
 }
 
-extern void __xchg_called_with_bad_pointer(void);
-
 /*
  * A brief note on ctrl_barrier(), the control register write barrier.
  *
@@ -272,7 +268,7 @@ do {							\
 /* For spinlocks etc */
 #define local_irq_save(x)	x = local_irq_save()
 
-static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
+static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
 {
 	unsigned long flags, retval;
 
@@ -283,7 +279,7 @@ static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
 	return retval;
 }
 
-static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned long val)
+static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
 {
 	unsigned long flags, retval;
 
@@ -294,19 +290,30 @@ static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned lon
 	return retval;
 }
 
-static __inline__ unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
-{
-	switch (size) {
-	case 4:
-		return xchg_u32(ptr, x);
-		break;
-	case 1:
-		return xchg_u8(ptr, x);
-		break;
-	}
-	__xchg_called_with_bad_pointer();
-	return x;
-}
+extern void __xchg_called_with_bad_pointer(void);
+
+#define __xchg(ptr, x, size)				\
+({							\
+	unsigned long __xchg__res;			\
+	volatile void *__xchg_ptr = (ptr);		\
+	switch (size) {					\
+	case 4:						\
+		__xchg__res = xchg_u32(__xchg_ptr, x);	\
+		break;					\
+	case 1:						\
+		__xchg__res = xchg_u8(__xchg_ptr, x);	\
+		break;					\
+	default:					\
+		__xchg_called_with_bad_pointer();	\
+		__xchg__res = x;			\
+		break;					\
+	}						\
+							\
+	__xchg__res;					\
+})
+
+#define xchg(ptr,x)	\
+	((__typeof__(*(ptr)))__xchg((ptr),(unsigned long)(x), sizeof(*(ptr))))
 
 static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 	unsigned long new)
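
Call sites are unchanged by this commit; a sketch of how the rewritten macros expand (demo_flag and demo() are hypothetical, not part of this diff):

	static volatile unsigned int demo_flag;

	void demo(void)
	{
		/* sizeof(*(&demo_flag)) is 4, so __xchg() selects xchg_u32();
		 * a one-byte pointer would select xchg_u8(); any other size
		 * leaves a reference to the undefined
		 * __xchg_called_with_bad_pointer() and fails at link time. */
		unsigned long old = xchg(&demo_flag, 1);
		(void)old;
	}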