/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>              /* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
                            volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
                                 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
                              volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
                               volatile unsigned long *addr);


/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       " __INS "%0, %3, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit), "r" (~0));
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       or      %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
                __mips_set_bit(nr, addr);
}
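
/*
 * Usage sketch (illustrative only; the bitmap and bit index below are
 * hypothetical, not defined in this header):
 *
 *      static DECLARE_BITMAP(pending, 128);
 *
 *      set_bit(71, pending);
 *
 * Bit 71 lands in the second word of the bitmap on 64-bit kernels, which
 * is why @nr is not restricted to a single-word quantity.
 */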

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       " __INS "%0, $0, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit));
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       and     %0, %2                          \n"
                        "       " __SC "%0, %1                          \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (~(1UL << bit)));
                } while (unlikely(!temp));
        } else
                __mips_clear_bit(nr, addr);
}
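
/*
 * A minimal sketch of the barrier pairing described above when clear_bit()
 * is used for unlocking (LOCK_BIT and word are hypothetical names):
 *
 *      smp_mb__before_atomic();
 *      clear_bit(LOCK_BIT, &word);
 *
 * The barrier orders the critical section's stores before the clearing of
 * the lock bit; clear_bit_unlock() below packages exactly this sequence.
 */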

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_atomic();
        clear_bit(nr, addr);
}
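
/*
 * Illustrative pairing (hypothetical names): a bit taken with
 * test_and_set_bit_lock() below is typically released with
 * clear_bit_unlock(), which supplies the release barrier itself:
 *
 *      clear_bit_unlock(LOCK_BIT, &word);
 */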

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    arch=r4000                      \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit));
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # change_bit    \n"
                        "       xor     %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
                __mips_change_bit(nr, addr);
}
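
/*
 * Usage sketch (hypothetical names): atomically toggling a state bit
 * without a separate read-modify-write race:
 *
 *      change_bit(POLARITY_BIT, &state);
 *
 * The bit flips 0 -> 1 or 1 -> 0 under the same LL/SC retry discipline
 * as the operations above.
 */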

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_set_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
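
/*
 * Usage sketch (hypothetical names): claiming a resource exactly once;
 * only the caller that observes the old bit value as 0 proceeds:
 *
 *      if (!test_and_set_bit(BUSY_BIT, &flags))
 *              do_initialization();
 */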

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit_lock \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+m" (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit_lock \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_set_bit_lock(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
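
/*
 * A minimal bit-spinlock sketch built from these primitives (the lock word
 * is hypothetical; real code should prefer <linux/bit_spinlock.h>):
 *
 *      while (test_and_set_bit_lock(0, &lock_word))
 *              cpu_relax();
 *      ... critical section ...
 *      clear_bit_unlock(0, &lock_word);
 */
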
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       " __EXT "%2, %0, %3, 1                  \n"
                        "       " __INS "%0, $0, %3, 1                  \n"
                        "       " __SC  "%0, %1                         \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "ir" (bit)
                        : "memory");
                } while (unlikely(!temp));
#endif
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       or      %2, %0, %3                      \n"
                        "       xor     %2, %3                          \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_clear_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
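
/*
 * Usage sketch (hypothetical names): consuming a pending-work flag so that
 * exactly one caller sees it set:
 *
 *      if (test_and_clear_bit(WORK_PENDING, &flags))
 *              process_work();
 */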

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_change_bit   \n"
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_change_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
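
/*
 * Usage sketch (hypothetical names): the returned old value tells the
 * caller which phase the toggle left behind:
 *
 *      old = test_and_change_bit(PHASE_BIT, &word);
 *
 * old is nonzero iff the bit was set before the atomic toggle.
 */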

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_llsc();
        __clear_bit(nr, addr);
        nudge_writes();
}
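
/*
 * Illustrative use (hypothetical names): valid only when no other CPU can
 * touch the other bits of the word concurrently, e.g. a lock bit in a word
 * whose remaining bits are written only by the lock holder:
 *
 *      __clear_bit_unlock(LOCK_BIT, &word);
 *
 * This avoids the LL/SC loop that the atomic clear_bit_unlock() needs.
 */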

/*
 * Return the bit position (0..BITS_PER_LONG-1) of the most significant 1
 * bit in a word. The result is undefined if no 1 bit exists, so callers
 * should check the word against zero first.
 */
static inline unsigned long __fls(unsigned long word)
{
        int num;

        if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 31 - num;
        }

        if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       dclz    %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 63 - num;
        }

        num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
        if (!(word & (~0ul << 32))) {
                num -= 32;
                word <<= 32;
        }
#endif
        if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
                num -= 16;
                word <<= 16;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
                num -= 8;
                word <<= 8;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
                num -= 4;
                word <<= 4;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
                num -= 2;
                word <<= 2;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-1))))
                num -= 1;
        return num;
}
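
/*
 * Worked examples for the fallback above: __fls(1) == 0, __fls(0x80) == 7,
 * and on 64-bit kernels __fls(1UL << 63) == 63. Each test halves the
 * remaining search window, so at most log2(BITS_PER_LONG) steps are taken.
 */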

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
        return __fls(word & -word);
}
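
/*
 * The word & -word identity isolates the lowest set bit, e.g.
 * __ffs(0x28) == __fls(0x08) == 3.
 */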

/*
 * fls - find last bit set.
 * @x: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
        int r;

        if (!__builtin_constant_p(x) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (x)
                : "r" (x));

                return 32 - x;
        }

        r = 32;
        if (!x)
                return 0;
        if (!(x & 0xffff0000u)) {
                x <<= 16;
                r -= 16;
        }
        if (!(x & 0xff000000u)) {
                x <<= 8;
                r -= 8;
        }
        if (!(x & 0xf0000000u)) {
                x <<= 4;
                r -= 4;
        }
        if (!(x & 0xc0000000u)) {
                x <<= 2;
                r -= 2;
        }
        if (!(x & 0x80000000u)) {
                x <<= 1;
                r -= 1;
        }
        return r;
}
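
/*
 * Worked examples: fls(0x10) == 5 and fls(0xffffffff) == 32. A common
 * idiom built on fls() is rounding up to a power of two, roughly
 * 1u << fls(n - 1) for n > 1 (the kernel's roundup_pow_of_two() wraps
 * the equivalent computation).
 */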

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from ffz (man ffs).
 */
static inline int ffs(int word)
{
        if (!word)
                return 0;

        return fls(word & -word);
}
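
/*
 * Examples matching the libc convention: ffs(0) == 0, ffs(1) == 1 and
 * ffs(0x8000) == 16; for nonzero @word, ffs(word) == __ffs(word) + 1.
 */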

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */