sh: xchg()/__xchg() always_inline fixes for gcc4.

Make __xchg() a macro, so that gcc 4.0 doesn't blow up thanks to
always_inline.

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
This commit is contained in:
Paul Mundt 2006-09-27 16:05:56 +09:00
parent bc8bff63ba
commit 00b3aa3fc9

View file

@ -79,10 +79,8 @@ static inline void sched_cacheflush(void)
} }
#endif #endif
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
static __inline__ unsigned long tas(volatile int *m) static __inline__ unsigned long tas(volatile int *m)
{ /* #define tas(ptr) (xchg((ptr),1)) */ {
unsigned long retval; unsigned long retval;
__asm__ __volatile__ ("tas.b @%1\n\t" __asm__ __volatile__ ("tas.b @%1\n\t"
@ -91,8 +89,6 @@ static __inline__ unsigned long tas(volatile int *m)
return retval; return retval;
} }
extern void __xchg_called_with_bad_pointer(void);
/* /*
* A brief note on ctrl_barrier(), the control register write barrier. * A brief note on ctrl_barrier(), the control register write barrier.
* *
@ -220,17 +216,17 @@ static __inline__ void local_irq_restore(unsigned long x)
} }
} }
#else #else
/*
 * Re-enable interrupts only if the saved SR imask (bits 4-7) shows
 * they were enabled when the flags were captured; never disables.
 */
#define local_irq_restore(x) do {			\
	if ((x & 0x000000f0) != 0x000000f0)		\
		local_irq_enable();			\
} while (0)
#endif #endif
/*
 * Unconditionally force the interrupt state to match the saved flags:
 * enable if the saved imask field (bits 4-7) was clear, disable otherwise.
 */
#define really_restore_flags(x) do {			\
	if ((x & 0x000000f0) != 0x000000f0)		\
		local_irq_enable();			\
	else						\
		local_irq_disable();			\
} while (0)
/* /*
@ -272,7 +268,7 @@ do { \
/* For spinlocks etc: statement form wrapping the function variant. */
#define local_irq_save(x)	x = local_irq_save()
static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val) static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{ {
unsigned long flags, retval; unsigned long flags, retval;
@ -283,7 +279,7 @@ static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
return retval; return retval;
} }
static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned long val) static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{ {
unsigned long flags, retval; unsigned long flags, retval;
@ -294,19 +290,30 @@ static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned lon
return retval; return retval;
} }
/* Diagnostic: deliberately left undefined so a bad size is a link error. */
extern void __xchg_called_with_bad_pointer(void);

/*
 * Atomic exchange, dispatched on operand size.  Implemented as a
 * statement-expression macro (not an inline function) so gcc 4.0 does
 * not choke on always_inline; ptr is evaluated exactly once via
 * __xchg_ptr.  Sizes other than 1 and 4 fall through to the link-time
 * error above and return x unchanged.
 */
#define __xchg(ptr, x, size)				\
({							\
	unsigned long __xchg__res;			\
	volatile void *__xchg_ptr = (ptr);		\
	switch (size) {					\
	case 4:						\
		__xchg__res = xchg_u32(__xchg_ptr, x);	\
		break;					\
	case 1:						\
		__xchg__res = xchg_u8(__xchg_ptr, x);	\
		break;					\
	default:					\
		__xchg_called_with_bad_pointer();	\
		__xchg__res = x;			\
		break;					\
	}						\
							\
	__xchg__res;					\
})

/* Public entry point: result is cast back to the pointee's type. */
#define xchg(ptr,x)	\
	((__typeof__(*(ptr)))__xchg((ptr),(unsigned long)(x), sizeof(*(ptr))))
static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old, static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
unsigned long new) unsigned long new)