static inline int atomic_add_return(int i, atomic_t *v)
{
- unsigned long flags;
+ unsigned short ccr;
int ret;
- local_irq_save(flags);
- ret = v->counter += i;
- local_irq_restore(flags);
+ __asm__ __volatile__ (
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %1,%0\n\t"
+ "add.l %3,%0\n\t"
+ "mov.l %0,%1\n\t"
+ "ldc %w2,ccr"
+ : "=r"(ret), "+m"(v->counter), "=r"(ccr)
+ : "ri"(i));
return ret;
}
static inline int atomic_sub_return(int i, atomic_t *v)
{
- unsigned long flags;
+ unsigned short ccr;
int ret;
- local_irq_save(flags);
- ret = v->counter -= i;
- local_irq_restore(flags);
+ __asm__ __volatile__ (
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %1,%0\n\t"
+ "sub.l %3,%0\n\t"
+ "mov.l %0,%1\n\t"
+ "ldc %w2,ccr"
+ : "=r"(ret), "+m"(v->counter), "=r"(ccr)
+ : "ri"(i));
return ret;
}
static inline int atomic_inc_return(atomic_t *v)
{
- unsigned long flags;
+ unsigned short ccr;
int ret;
- local_irq_save(flags);
- v->counter++;
- ret = v->counter;
- local_irq_restore(flags);
+ __asm__ __volatile__ (
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %1,%0\n\t"
+ "inc.l #1,%0\n\t"
+ "mov.l %0,%1\n\t"
+ "ldc %w2,ccr"
+ : "=r"(ret), "+m"(v->counter), "=r"(ccr));
return ret;
}
static inline int atomic_dec_return(atomic_t *v)
{
- unsigned long flags;
+ unsigned short ccr;
int ret;
- local_irq_save(flags);
- --v->counter;
- ret = v->counter;
- local_irq_restore(flags);
+ __asm__ __volatile__ (
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %1,%0\n\t"
+ "dec.l #1,%0\n\t"
+ "mov.l %0,%1\n\t"
+ "ldc %w2,ccr"
+ : "=r"(ret), "+m"(v->counter), "=r"(ccr));
return ret;
}
static inline int atomic_dec_and_test(atomic_t *v)
{
- unsigned long flags;
+ unsigned short ccr;
int ret;
- local_irq_save(flags);
- --v->counter;
- ret = v->counter;
- local_irq_restore(flags);
+ __asm__ __volatile__ (
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %1,%0\n\t"
+ "dec.l #1,%0\n\t"
+ "mov.l %0,%1\n\t"
+ "ldc %w2,ccr"
+ : "=r"(ret), "+m"(v->counter), "=r"(ccr));
return ret == 0;
}
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
int ret;
- unsigned long flags;
-
- local_irq_save(flags);
- ret = v->counter;
- if (likely(ret == old))
- v->counter = new;
- local_irq_restore(flags);
+ unsigned short ccr;
+
+ __asm__ __volatile__ (
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %1,%0\n\t"
+ "cmp.l %3,%0\n\t"
+ "bne 1f\n\t"
+ "mov.l %4,%1\n"
+ "1:\tldc %w2,ccr"
+ : "=r"(ret), "+m"(v->counter), "=r"(ccr)
+ : "ri"(old), "r"(new));
return ret;
}
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
int ret;
- unsigned long flags;
+ unsigned short ccr;
- local_irq_save(flags);
- ret = v->counter;
- if (ret != u)
- v->counter += a;
- local_irq_restore(flags);
+ __asm__ __volatile__ (
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %1,%0\n\t"
+ "cmp.l %4,%0\n\t"
+ "beq 1f\n\t"
+ "add.l %0,%3\n\t"
+ "mov.l %3,%1\n"
+ "1:\tldc %w2,ccr"
+ : "=r"(ret), "+m"(v->counter), "=r"(ccr), "+r"(a)
+ : "ri"(u));
return ret;
}
unsigned char ccr;
unsigned long tmp;
- __asm__ __volatile__("stc ccr,%w3\n\t"
- "orc #0x80,ccr\n\t"
- "mov.l %0,%1\n\t"
- "and.l %2,%1\n\t"
- "mov.l %1,%0\n\t"
- "ldc %w3,ccr"
- : "=m"(*v), "=r"(tmp)
- : "g"(~(mask)), "r"(ccr));
+ __asm__ __volatile__(
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %0,%1\n\t"
+ "and.l %3,%1\n\t"
+ "mov.l %1,%0\n\t"
+ "ldc %w2,ccr"
+ : "+m"(*v), "=r"(tmp), "=&r"(ccr)
+ : "ri"(~(mask)));
}
static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
unsigned char ccr;
unsigned long tmp;
- __asm__ __volatile__("stc ccr,%w3\n\t"
- "orc #0x80,ccr\n\t"
- "mov.l %0,%1\n\t"
- "or.l %2,%1\n\t"
- "mov.l %1,%0\n\t"
- "ldc %w3,ccr"
- : "=m"(*v), "=r"(tmp)
- : "g"(~(mask)), "r"(ccr));
+ __asm__ __volatile__(
+ "stc ccr,%w2\n\t"
+ "orc #0x80,ccr\n\t"
+ "mov.l %0,%1\n\t"
+ "or.l %3,%1\n\t"
+ "mov.l %1,%0\n\t"
+ "ldc %w2,ccr"
+ : "+m"(*v), "=r"(tmp), "=&r"(ccr)
+ : "ri"(mask));
}
/* Atomic operations are already serializing */
return result;
}
-#define H8300_GEN_BITOP_CONST(OP, BIT) \
- case BIT: \
- __asm__(OP " #" #BIT ",@%0"::"r"(b_addr):"memory"); \
- break
-
-#define H8300_GEN_BITOP(FNAME, OP) \
-static inline void FNAME(int nr, volatile unsigned long *addr) \
-{ \
- volatile unsigned char *b_addr; \
- b_addr = (volatile unsigned char *)addr + ((nr >> 3) ^ 3); \
- if (__builtin_constant_p(nr)) { \
- switch(nr & 7) { \
- H8300_GEN_BITOP_CONST(OP, 0); \
- H8300_GEN_BITOP_CONST(OP, 1); \
- H8300_GEN_BITOP_CONST(OP, 2); \
- H8300_GEN_BITOP_CONST(OP, 3); \
- H8300_GEN_BITOP_CONST(OP, 4); \
- H8300_GEN_BITOP_CONST(OP, 5); \
- H8300_GEN_BITOP_CONST(OP, 6); \
- H8300_GEN_BITOP_CONST(OP, 7); \
- } \
- } else { \
- __asm__(OP " %w0,@%1"::"r"(nr), "r"(b_addr):"memory");\
- } \
+#define H8300_GEN_BITOP(FNAME, OP) \
+static inline void FNAME(int nr, volatile unsigned long *addr) \
+{ \
+ unsigned char *b_addr; \
+ unsigned char bit = nr & 7; \
+ \
+ b_addr = (unsigned char *)addr + ((nr >> 3) ^ 3); \
+ if (__builtin_constant_p(nr)) { \
+ __asm__(OP " %1,%0" : "+WU"(*b_addr) : "i"(nr & 7)); \
+ } else { \
+ __asm__(OP " %s1,%0" : "+WU"(*b_addr) : "r"(bit)); \
+ } \
}
/*
#define __change_bit(nr, addr) change_bit((nr), (addr))
#undef H8300_GEN_BITOP
-#undef H8300_GEN_BITOP_CONST
static inline int test_bit(int nr, const unsigned long *addr)
{
- return (*((volatile unsigned char *)addr +
- ((nr >> 3) ^ 3)) & (1UL << (nr & 7))) != 0;
+ int ret = 0;
+ unsigned char *b_addr;
+ unsigned char bit = nr & 7;
+
+ b_addr = (unsigned char *)addr + ((nr >> 3) ^ 3);
+ if (__builtin_constant_p(nr)) {
+ __asm__("bld %Z2,%1\n\t"
+ "rotxl %0\n\t"
+ : "=r"(ret)
+ : "WU"(*b_addr), "i"(nr & 7), "0"(ret) : "cc");
+ } else {
+ __asm__("btst %w2,%1\n\t"
+ "beq 1f\n\t"
+ "inc.l #1,%0\n"
+ "1:"
+ : "=r"(ret)
+ : "WU"(*b_addr), "r"(bit), "0"(ret) : "cc");
+ }
+ return ret;
}
#define __test_bit(nr, addr) test_bit(nr, addr)
-#define H8300_GEN_TEST_BITOP_CONST_INT(OP, BIT) \
- case BIT: \
- __asm__("stc ccr,%w1\n\t" \
- "orc #0x80,ccr\n\t" \
- "bld #" #BIT ",@%4\n\t" \
- OP " #" #BIT ",@%4\n\t" \
- "rotxl.l %0\n\t" \
- "ldc %w1,ccr" \
- : "=r"(retval), "=&r"(ccrsave), "=m"(*b_addr) \
- : "0" (retval), "r" (b_addr)); \
- break
-
-#define H8300_GEN_TEST_BITOP_CONST(OP, BIT) \
- case BIT: \
- __asm__("bld #" #BIT ",@%3\n\t" \
- OP " #" #BIT ",@%3\n\t" \
- "rotxl.l %0\n\t" \
- : "=r"(retval), "=m"(*b_addr) \
- : "0" (retval), "r" (b_addr)); \
- break
-
-#define H8300_GEN_TEST_BITOP(FNNAME, OP) \
-static inline int FNNAME(int nr, volatile void *addr) \
-{ \
- int retval = 0; \
- char ccrsave; \
- volatile unsigned char *b_addr; \
- b_addr = (volatile unsigned char *)addr + ((nr >> 3) ^ 3); \
- if (__builtin_constant_p(nr)) { \
- switch (nr & 7) { \
- H8300_GEN_TEST_BITOP_CONST_INT(OP, 0); \
- H8300_GEN_TEST_BITOP_CONST_INT(OP, 1); \
- H8300_GEN_TEST_BITOP_CONST_INT(OP, 2); \
- H8300_GEN_TEST_BITOP_CONST_INT(OP, 3); \
- H8300_GEN_TEST_BITOP_CONST_INT(OP, 4); \
- H8300_GEN_TEST_BITOP_CONST_INT(OP, 5); \
- H8300_GEN_TEST_BITOP_CONST_INT(OP, 6); \
- H8300_GEN_TEST_BITOP_CONST_INT(OP, 7); \
- } \
- } else { \
- __asm__("stc ccr,%w1\n\t" \
- "orc #0x80,ccr\n\t" \
- "btst %w5,@%4\n\t" \
- OP " %w5,@%4\n\t" \
- "beq 1f\n\t" \
- "inc.l #1,%0\n" \
- "1:\n\t" \
- "ldc %w1,ccr" \
- : "=r"(retval), "=&r"(ccrsave), "=m"(*b_addr)\
- : "0" (retval), "r" (b_addr), "r"(nr)); \
- } \
- return retval; \
-} \
- \
-static inline int __ ## FNNAME(int nr, volatile void *addr) \
-{ \
- int retval = 0; \
- volatile unsigned char *b_addr; \
- b_addr = (volatile unsigned char *)addr + ((nr >> 3) ^ 3); \
- if (__builtin_constant_p(nr)) { \
- switch (nr & 7) { \
- H8300_GEN_TEST_BITOP_CONST(OP, 0); \
- H8300_GEN_TEST_BITOP_CONST(OP, 1); \
- H8300_GEN_TEST_BITOP_CONST(OP, 2); \
- H8300_GEN_TEST_BITOP_CONST(OP, 3); \
- H8300_GEN_TEST_BITOP_CONST(OP, 4); \
- H8300_GEN_TEST_BITOP_CONST(OP, 5); \
- H8300_GEN_TEST_BITOP_CONST(OP, 6); \
- H8300_GEN_TEST_BITOP_CONST(OP, 7); \
- } \
- } else { \
- __asm__("btst %w4,@%3\n\t" \
- OP " %w4,@%3\n\t" \
- "beq 1f\n\t" \
- "inc.l #1,%0\n" \
- "1:" \
- : "=r"(retval), "=m"(*b_addr) \
- : "0" (retval), "r" (b_addr), "r"(nr) \
- : "memory"); \
- } \
- return retval; \
+#define H8300_GEN_TEST_BITOP(FNNAME, OP) \
+static inline int FNNAME(int nr, void *addr) \
+{ \
+ int retval = 0; \
+ char ccrsave; \
+ unsigned char *b_addr; \
+ unsigned char bit = nr & 7; \
+ \
+ b_addr = (unsigned char *)addr + ((nr >> 3) ^ 3); \
+ if (__builtin_constant_p(nr)) { \
+ __asm__("stc ccr,%s2\n\t" \
+ "orc #0x80,ccr\n\t" \
+ "bld %4,%1\n\t" \
+ OP " %4,%1\n\t" \
+ "rotxl.l %0\n\t" \
+ "ldc %s2,ccr" \
+ : "=r"(retval), "+WU" (*b_addr), "=&r"(ccrsave) \
+ : "0"(retval), "i"(nr & 7) : "cc"); \
+ } else { \
+ __asm__("stc ccr,%t3\n\t" \
+ "orc #0x80,ccr\n\t" \
+ "btst %s3,%1\n\t" \
+ OP " %s3,%1\n\t" \
+ "beq 1f\n\t" \
+ "inc.l #1,%0\n\t" \
+ "1:\n" \
+ "ldc %t3,ccr" \
+ : "=r"(retval), "+WU" (*b_addr) \
+ : "0" (retval), "r"(bit) : "cc"); \
+ } \
+ return retval; \
+} \
+ \
+static inline int __ ## FNNAME(int nr, void *addr) \
+{ \
+ int retval = 0; \
+ unsigned char *b_addr; \
+ unsigned char bit = nr & 7; \
+ \
+ b_addr = (unsigned char *)addr + ((nr >> 3) ^ 3); \
+ if (__builtin_constant_p(nr)) { \
+ __asm__("bld %3,%1\n\t" \
+ OP " %3,%1\n\t" \
+ "rotxl.l %0\n\t" \
+ : "=r"(retval), "+WU"(*b_addr) \
+ : "0" (retval), "i"(nr & 7)); \
+ } else { \
+ __asm__("btst %s3,%1\n\t" \
+ OP " %s3,%1\n\t" \
+ "beq 1f\n\t" \
+ "inc.l #1,%0\n\t" \
+ "1:" \
+ : "=r"(retval), "+WU"(*b_addr) \
+ : "0" (retval), "r"(bit)); \
+ } \
+ return retval; \
}
H8300_GEN_TEST_BITOP(test_and_set_bit, "bset")
H8300_GEN_TEST_BITOP(test_and_clear_bit, "bclr")
H8300_GEN_TEST_BITOP(test_and_change_bit, "bnot")
-#undef H8300_GEN_TEST_BITOP_CONST
-#undef H8300_GEN_TEST_BITOP_CONST_INT
#undef H8300_GEN_TEST_BITOP
#include <asm-generic/bitops/ffs.h>
static inline void ctrl_outl(unsigned long b, unsigned long addr)
{
- *(volatile unsigned long *)addr = b;
+ *(volatile unsigned long *)addr = b;
}
static inline void ctrl_bclr(int b, unsigned long addr)
{
if (__builtin_constant_p(b))
- switch (b) {
- case 0: __asm__("bclr #0,@%0" : : "m"(addr)); break;
- case 1: __asm__("bclr #1,@%0" : : "m"(addr)); break;
- case 2: __asm__("bclr #2,@%0" : : "m"(addr)); break;
- case 3: __asm__("bclr #3,@%0" : : "m"(addr)); break;
- case 4: __asm__("bclr #4,@%0" : : "m"(addr)); break;
- case 5: __asm__("bclr #5,@%0" : : "m"(addr)); break;
- case 6: __asm__("bclr #6,@%0" : : "m"(addr)); break;
- case 7: __asm__("bclr #7,@%0" : : "m"(addr)); break;
- }
+ __asm__("bclr %1,@%o0:8" : : "i"(addr & 0xff), "i"(b));
else
- __asm__("bclr %w0,@%1" : : "r"(b), "m"(addr));
+ __asm__("bclr %w1,@%o0:8" : : "i"(addr & 0xff), "r"(b));
}
static inline void ctrl_bset(int b, unsigned long addr)
{
if (__builtin_constant_p(b))
- switch (b) {
- case 0: __asm__("bset #0,@%0" : : "m"(addr)); break;
- case 1: __asm__("bset #1,@%0" : : "m"(addr)); break;
- case 2: __asm__("bset #2,@%0" : : "m"(addr)); break;
- case 3: __asm__("bset #3,@%0" : : "m"(addr)); break;
- case 4: __asm__("bset #4,@%0" : : "m"(addr)); break;
- case 5: __asm__("bset #5,@%0" : : "m"(addr)); break;
- case 6: __asm__("bset #6,@%0" : : "m"(addr)); break;
- case 7: __asm__("bset #7,@%0" : : "m"(addr)); break;
- }
+ __asm__("bset %1,@%o0:8" : : "i"(addr & 0xff), "i"(b));
else
- __asm__("bset %w0,@%1" : : "r"(b), "m"(addr));
+ __asm__("bset %w1,@%o0:8" : : "i"(addr & 0xff), "r"(b));
}
/*
{
unsigned char flags;
- asm volatile ("stc ccr,%w0" : "=r" (flags));
+ __asm__ volatile ("stc ccr,%w0" : "=r" (flags));
return flags;
}
static inline void arch_local_irq_disable(void)
{
- asm volatile ("orc #0xc0,ccr" : : : "cc", "memory");
+ __asm__ volatile ("orc #0xc0,ccr");
}
static inline void arch_local_irq_enable(void)
{
- asm volatile ("andc #0x3f,ccr" : : : "cc", "memory");
+ __asm__ volatile ("andc #0x3f,ccr");
}
static inline unsigned char arch_local_irq_save(void)
{
unsigned char flags;
- asm volatile ("stc ccr,%w0\n\t"
- "orc #0xc0,ccr" : "=r" (flags) : : "cc", "memory");
+ __asm__ volatile ("stc ccr,%w0\n\t"
+ "orc #0xc0,ccr" : "=r" (flags));
return flags;
}
static inline void arch_local_irq_restore(unsigned char flags)
{
- asm volatile ("ldc %w0,ccr" : : "r" (flags) : "cc","memory");
+ __asm__ volatile ("ldc %w0,ccr" : : "r" (flags) : "cc");
}
static inline int arch_irqs_disabled_flags(unsigned long flags)
}
#endif
#ifdef CONFIG_CPU_H8S
-static inline unsigned long arch_local_save_flags(void)
+static inline unsigned short arch_local_save_flags(void)
{
unsigned short flags;
- asm volatile ("stc ccr,%w0\n\tstc exr,%x0" : "=r" (flags));
+ __asm__ volatile ("stc ccr,%w0\n\tstc exr,%x0" : "=r" (flags));
return flags;
}
static inline void arch_local_irq_disable(void)
{
- asm volatile ("orc #0x80,ccr\n\t"
- "orc #0x07,exr" : : : "cc", "memory");
+ __asm__ volatile ("orc #0x80,ccr\n\t");
}
static inline void arch_local_irq_enable(void)
{
- asm volatile ("andc #0x7f,ccr\n\t"
- "andc #0xf0,exr\n\t": : : "cc", "memory");
+ __asm__ volatile ("andc #0x7f,ccr\n\t"
+ "andc #0xf0,exr\n\t");
}
-static inline unsigned long arch_local_irq_save(void)
+static inline unsigned short arch_local_irq_save(void)
{
unsigned short flags;
- asm volatile ("stc ccr,%w0\n\t"
+ __asm__ volatile ("stc ccr,%w0\n\t"
"stc exr,%x0\n\t"
"orc #0x80,ccr\n\t"
- "orc #0x07,exr"
- : "=r" (flags) : : "cc", "memory");
+ : "=r" (flags));
return flags;
}
-static inline void arch_local_irq_restore(unsigned long flags)
+static inline void arch_local_irq_restore(unsigned short flags)
{
- asm volatile ("ldc %w0,ccr\n\t"
+ __asm__ volatile ("ldc %w0,ccr\n\t"
"ldc %x0,exr"
- : : "r" (flags) : "cc", "memory");
+ : : "r" (flags) : "cc");
}
static inline int arch_irqs_disabled_flags(unsigned short flags)
{
- return (flags & 0x0780) == 0x0780;
+ return (flags & 0x0080) == 0x0080;
}
#endif