MIPS: bitops: Use MIPS_ISA_REV, not #ifdefs
[tomoyo/tomoyo-test1.git] arch/mips/include/asm/bitops.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>              /* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/isa-rev.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
                            volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
                                 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
                              volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
                               volatile unsigned long *addr);


/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (!kernel_uses_llsc) {
                __mips_set_bit(nr, addr);
                return;
        }

        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m)
                : __LLSC_CLOBBER);
                return;
        }

        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       " __INS "%0, %3, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit), "r" (~0)
                        : __LLSC_CLOBBER);
                } while (unlikely(!temp));
                return;
        }

        loongson_llsc_mb();
        do {
                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                "       " __LL "%0, %1          # set_bit       \n"
                "       or      %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit)
                : __LLSC_CLOBBER);
        } while (unlikely(!temp));
}
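
/*
 * Usage sketch (illustrative only, not part of the original header): set_bit()
 * performs an atomic read-modify-write, so it is safe against concurrent
 * updates to other bits in the same word; __set_bit() from
 * asm-generic/bitops/non-atomic.h may be used when no concurrent access is
 * possible. The bitmap name and bit index below are hypothetical:
 *
 *      DECLARE_BITMAP(irq_pending, 64);
 *
 *      set_bit(5, irq_pending);        atomic RMW on word 0 of the bitmap
 *      __set_bit(5, irq_pending);      non-atomic variant, caller serialises
 */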

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (!kernel_uses_llsc) {
                __mips_clear_bit(nr, addr);
                return;
        }

        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (~(1UL << bit))
                : __LLSC_CLOBBER);
                return;
        }

        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       " __INS "%0, $0, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!temp));
                return;
        }

        loongson_llsc_mb();
        do {
                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                "       " __LL "%0, %1          # clear_bit     \n"
                "       and     %0, %2                          \n"
                "       " __SC "%0, %1                          \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (~(1UL << bit))
                : __LLSC_CLOBBER);
        } while (unlikely(!temp));
}

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_atomic();
        clear_bit(nr, addr);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (!kernel_uses_llsc) {
                __mips_change_bit(nr, addr);
                return;
        }

        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    arch=r4000                      \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit)
                : __LLSC_CLOBBER);
                return;
        }

        loongson_llsc_mb();
        do {
                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                "       " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit)
                : __LLSC_CLOBBER);
        } while (unlikely(!temp));
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long res, temp;

        smp_mb__before_llsc();

        if (!kernel_uses_llsc) {
                res = __mips_test_and_set_bit(nr, addr);
        } else if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : __LLSC_CLOBBER);
        } else {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        }

        smp_llsc_mb();

        return res != 0;
}
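
/*
 * Usage sketch (illustrative only, not part of the original header): the old
 * value returned by test_and_set_bit() lets racing callers agree on a single
 * winner, e.g. for one-time initialisation. The flag word, bit number and
 * helper below are hypothetical:
 *
 *      static unsigned long init_done;
 *
 *      if (!test_and_set_bit(0, &init_done))
 *              do_one_time_setup();    only the first caller to flip bit 0
 *                                      reaches the setup work
 */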

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long res, temp;

        if (!kernel_uses_llsc) {
                res = __mips_test_and_set_bit_lock(nr, addr);
        } else if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+m" (*m), "=&r" (res)
                : "r" (1UL << bit)
                : __LLSC_CLOBBER);
        } else {
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        }

        smp_llsc_mb();

        return res != 0;
}
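
/*
 * Usage sketch (illustrative only, not part of the original header):
 * test_and_set_bit_lock() and clear_bit_unlock() pair up as a simple bit
 * spinlock, with acquire ordering on the successful set and release ordering
 * on the clear. The lock bit and structure below are hypothetical:
 *
 *      while (test_and_set_bit_lock(BUCKET_LOCK_BIT, &bucket->flags))
 *              cpu_relax();
 *      ... critical section protected by the lock bit ...
 *      clear_bit_unlock(BUCKET_LOCK_BIT, &bucket->flags);
 */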

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long res, temp;

        smp_mb__before_llsc();

        if (!kernel_uses_llsc) {
                res = __mips_test_and_clear_bit(nr, addr);
        } else if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : __LLSC_CLOBBER);
        } else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       " __EXT "%2, %0, %3, 1                  \n"
                        "       " __INS "%0, $0, %3, 1                  \n"
                        "       " __SC  "%0, %1                         \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "ir" (bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!temp));
        } else {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       or      %2, %0, %3                      \n"
                        "       xor     %2, %3                          \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        }

        smp_llsc_mb();

        return res != 0;
}
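
/*
 * Usage sketch (illustrative only, not part of the original header):
 * test_and_clear_bit() is a natural fit for consuming a pending-event mask,
 * handling each event at most once even when a producer sets bits
 * concurrently. The mask, size and handler below are hypothetical:
 *
 *      for_each_set_bit(ev, pending, NR_EVENTS)
 *              if (test_and_clear_bit(ev, pending))
 *                      handle_event(ev);
 */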

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long res, temp;

        smp_mb__before_llsc();

        if (!kernel_uses_llsc) {
                res = __mips_test_and_change_bit(nr, addr);
        } else if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : __LLSC_CLOBBER);
        } else {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_change_bit   \n"
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "\t%2, %1                       \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : __LLSC_CLOBBER);
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        }

        smp_llsc_mb();

        return res != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_llsc();
        __clear_bit(nr, addr);
        nudge_writes();
}

/*
 * Return the bit position (0..63) of the most significant 1 bit in a word
 * Returns -1 if no 1 bit exists
 */
static __always_inline unsigned long __fls(unsigned long word)
{
        int num;

        if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 31 - num;
        }

        if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       dclz    %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 63 - num;
        }

        num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
        if (!(word & (~0ul << 32))) {
                num -= 32;
                word <<= 32;
        }
#endif
        if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
                num -= 16;
                word <<= 16;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
                num -= 8;
                word <<= 8;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
                num -= 4;
                word <<= 4;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
                num -= 2;
                word <<= 2;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-1))))
                num -= 1;
        return num;
}
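
/*
 * Worked example (illustrative only, not part of the original header) of the
 * software fallback above on a 32-bit kernel, for word = 0x00401000 (most
 * significant set bit is bit 22):
 *
 *      start:                   num = 31
 *      upper 16 bits nonzero    keep num and word
 *      upper 8 bits zero        num = 23, word <<= 8  (word becomes 0x40100000)
 *      upper 4 bits nonzero     keep
 *      upper 2 bits nonzero     keep
 *      top bit zero             num = 22
 *
 * so __fls(0x00401000) returns 22.
 */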

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
        return __fls(word & -word);
}
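
/*
 * The (word & -word) expression above isolates the lowest set bit using
 * two's complement arithmetic, so __fls() of the result is exactly the index
 * of that bit. Illustrative example: for word = 0b101000, word & -word =
 * 0b001000, and __ffs() returns 3.
 */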

/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
        int r;

        if (!__builtin_constant_p(x) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (x)
                : "r" (x));

                return 32 - x;
        }

        r = 32;
        if (!x)
                return 0;
        if (!(x & 0xffff0000u)) {
                x <<= 16;
                r -= 16;
        }
        if (!(x & 0xff000000u)) {
                x <<= 8;
                r -= 8;
        }
        if (!(x & 0xf0000000u)) {
                x <<= 4;
                r -= 4;
        }
        if (!(x & 0xc0000000u)) {
                x <<= 2;
                r -= 2;
        }
        if (!(x & 0x80000000u)) {
                x <<= 1;
                r -= 1;
        }
        return r;
}
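
/*
 * Boundary behaviour (illustrative only): with the convention stated above,
 * fls(0) == 0, fls(1) == 1 and fls(0x80000000) == 32. On CPUs with clz the
 * result is 32 - clz(x); e.g. clz(0x00010000) == 15, so fls(0x00010000) == 17.
 */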

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int word)
{
        if (!word)
                return 0;

        return fls(word & -word);
}

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */