OSDN Git Service
(root)
/
uclinux-h8
/
linux.git
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
MIPS: asm: Rename GCC_OFF12_ASM to GCC_OFF_SMALL_ASM
[uclinux-h8/linux.git]
/
arch
/
mips
/
include
/
asm
/
cmpxchg.h
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index
28b1edf
..
68baa0c
100644
(file)
--- a/
arch/mips/include/asm/cmpxchg.h
+++ b/
arch/mips/include/asm/cmpxchg.h
@@
-31,8
+31,8
@@
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
" sc %2, %1 \n"
" beqzl %2, 1b \n"
" .set mips0 \n"
" sc %2, %1 \n"
" beqzl %2, 1b \n"
" .set mips0 \n"
-	: "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
-	: GCC_OFF12_ASM() (*m), "Jr" (val)
+	: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
+	: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory");
} else if (kernel_uses_llsc) {
unsigned long dummy;
: "memory");
} else if (kernel_uses_llsc) {
unsigned long dummy;
@@
-46,9
+46,9
@@
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
" .set arch=r4000 \n"
" sc %2, %1 \n"
" .set mips0 \n"
" .set arch=r4000 \n"
" sc %2, %1 \n"
" .set mips0 \n"
-	: "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
+	: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
"=&r" (dummy)
"=&r" (dummy)
-	: GCC_OFF12_ASM() (*m), "Jr" (val)
+	: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory");
} while (unlikely(!dummy));
} else {
: "memory");
} while (unlikely(!dummy));
} else {
@@
-82,8
+82,8
@@
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
" scd %2, %1 \n"
" beqzl %2, 1b \n"
" .set mips0 \n"
" scd %2, %1 \n"
" beqzl %2, 1b \n"
" .set mips0 \n"
-	: "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
-	: GCC_OFF12_ASM() (*m), "Jr" (val)
+	: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
+	: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory");
} else if (kernel_uses_llsc) {
unsigned long dummy;
: "memory");
} else if (kernel_uses_llsc) {
unsigned long dummy;
@@
-95,9
+95,9
@@
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
" move %2, %z4 \n"
" scd %2, %1 \n"
" .set mips0 \n"
" move %2, %z4 \n"
" scd %2, %1 \n"
" .set mips0 \n"
-	: "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
+	: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
"=&r" (dummy)
"=&r" (dummy)
-	: GCC_OFF12_ASM() (*m), "Jr" (val)
+	: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory");
} while (unlikely(!dummy));
} else {
: "memory");
} while (unlikely(!dummy));
} else {
@@
-158,8
+158,8
@@
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
" beqzl $1, 1b \n" \
"2: \n" \
" .set pop \n" \
" beqzl $1, 1b \n" \
"2: \n" \
" .set pop \n" \
-	: "=&r" (__ret), "=" GCC_OFF12_ASM() (*m)			\
-	: GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new)			\
+	: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)			\
+	: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)		\
: "memory"); \
} else if (kernel_uses_llsc) { \
__asm__ __volatile__( \
: "memory"); \
} else if (kernel_uses_llsc) { \
__asm__ __volatile__( \
@@
-175,8
+175,8
@@
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
" beqz $1, 1b \n" \
" .set pop \n" \
"2: \n" \
" beqz $1, 1b \n" \
" .set pop \n" \
"2: \n" \
-	: "=&r" (__ret), "=" GCC_OFF12_ASM() (*m)			\
-	: GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new)			\
+	: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)			\
+	: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)		\
: "memory"); \
} else { \
unsigned long __flags; \
: "memory"); \
} else { \
unsigned long __flags; \