arch/mips/include/asm/bitops.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
                            volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
                                 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
                              volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
                               volatile unsigned long *addr);


/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       " __INS "%0, %3, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit), "r" (~0));
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       or      %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
                __mips_set_bit(nr, addr);
}
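
/*
 * Usage sketch (illustrative only; "pending" and NR_EVENTS are hypothetical
 * caller-side names, not part of this header):
 *
 *	DECLARE_BITMAP(pending, NR_EVENTS);
 *
 *	set_bit(17, pending);
 *
 * Bit 17 is set atomically in pending[17 / BITS_PER_LONG]; the word
 * selection above (nr >> SZLONG_LOG) is what allows @nr to exceed
 * BITS_PER_LONG.
 */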

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       " __INS "%0, $0, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit));
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       and     %0, %2                          \n"
                        "       " __SC "%0, %1                          \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (~(1UL << bit)));
                } while (unlikely(!temp));
        } else
                __mips_clear_bit(nr, addr);
}
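
/*
 * Usage sketch (illustrative only; "dev", "flags" and DATA_READY are
 * hypothetical). When clear_bit() is used for lock-style ordering, pair it
 * with the barriers named above:
 *
 *	dev->status = new_status;
 *	smp_mb__before_atomic();
 *	clear_bit(DATA_READY, flags);
 *
 * For a plain unlock, clear_bit_unlock() below already provides the
 * release barrier.
 */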

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_atomic();
        clear_bit(nr, addr);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                            \n"
                "       .set    arch=r4000                      \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    pop                             \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit));
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # change_bit    \n"
                        "       xor     %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
                __mips_change_bit(nr, addr);
}
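
/*
 * Usage sketch (illustrative only; LED0 and "led_state" are hypothetical):
 *
 *	change_bit(LED0, &led_state);
 *
 * The bit is toggled atomically with respect to the other *_bit()
 * operations on the same word.
 */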

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_set_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
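
/*
 * Usage sketch (illustrative only; "flags" and do_one_time_setup() are
 * hypothetical). Because the old value is returned, this is the usual way
 * to make sure a piece of work is performed exactly once:
 *
 *	if (!test_and_set_bit(0, &flags))
 *		do_one_time_setup();
 *
 * The implicit barriers (smp_mb__before_llsc()/smp_llsc_mb() above) make
 * the operation fully ordered.
 */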

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit_lock \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit_lock \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_set_bit_lock(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
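
/*
 * Usage sketch (illustrative only; LOCK_BIT and "word" are hypothetical).
 * Together with clear_bit_unlock() this gives a simple bit lock:
 *
 *	while (test_and_set_bit_lock(LOCK_BIT, &word))
 *		cpu_relax();
 *	... critical section ...
 *	clear_bit_unlock(LOCK_BIT, &word);
 */
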
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       " __EXT "%2, %0, %3, 1                  \n"
                        "       " __INS "%0, $0, %3, 1                  \n"
                        "       " __SC  "%0, %1                         \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "ir" (bit)
                        : "memory");
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       or      %2, %0, %3                      \n"
                        "       xor     %2, %3                          \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_clear_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
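
/*
 * Usage sketch (illustrative only; "pending" and handle_event() are
 * hypothetical). The returned old value makes "consume a flag" race-free:
 *
 *	if (test_and_clear_bit(event, pending))
 *		handle_event(event);
 */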

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                loongson_llsc_mb();
                do {
                        __asm__ __volatile__(
                        "       .set    push                            \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_change_bit   \n"
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    pop                             \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_change_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_llsc();
        __clear_bit(nr, addr);
        nudge_writes();
}
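
/*
 * Usage sketch (illustrative only; LOCK_BIT, USER_FLAG and "word" are
 * hypothetical). Safe only when no other CPU can modify the other bits of
 * the word concurrently, e.g. when the lock holder owns the whole word:
 *
 *	__set_bit(USER_FLAG, &word);	non-atomic update under the lock
 *	__clear_bit_unlock(LOCK_BIT, &word);
 */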

/*
 * __fls - find last (most-significant) set bit in a word
 * @word: The word to search
 *
 * Returns the bit position (0..BITS_PER_LONG-1) of the most significant
 * 1 bit in @word.
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __fls(unsigned long word)
{
        int num;

        if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 31 - num;
        }

        if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       dclz    %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 63 - num;
        }

        num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
        if (!(word & (~0ul << 32))) {
                num -= 32;
                word <<= 32;
        }
#endif
        if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
                num -= 16;
                word <<= 16;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
                num -= 8;
                word <<= 8;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
                num -= 4;
                word <<= 4;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
                num -= 2;
                word <<= 2;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-1))))
                num -= 1;
        return num;
}
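
/*
 * Examples (illustrative):
 *
 *	__fls(0x90UL) == 7	bits 4 and 7 are set, 7 is the highest
 *	__fls(1UL)    == 0
 *
 * With clz/dclz this is a single count-leading-zeros instruction; the
 * fallback narrows the position by repeatedly halving the search window.
 */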

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
        return __fls(word & -word);
}
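
/*
 * Example (illustrative): "word & -word" isolates the lowest set bit, so
 *
 *	__ffs(0x50UL) == 4	bits 4 and 6 are set, 4 is the lowest
 */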

/*
 * fls - find last bit set.
 * @x: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
        int r;

        if (!__builtin_constant_p(x) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (x)
                : "r" (x));

                return 32 - x;
        }

        r = 32;
        if (!x)
                return 0;
        if (!(x & 0xffff0000u)) {
                x <<= 16;
                r -= 16;
        }
        if (!(x & 0xff000000u)) {
                x <<= 8;
                r -= 8;
        }
        if (!(x & 0xf0000000u)) {
                x <<= 4;
                r -= 4;
        }
        if (!(x & 0xc0000000u)) {
                x <<= 2;
                r -= 2;
        }
        if (!(x & 0x80000000u)) {
                x <<= 1;
                r -= 1;
        }
        return r;
}
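
/*
 * Examples (illustrative): fls() is 1-based, so for x != 0 it equals
 * __fls(x) + 1:
 *
 *	fls(0x90) == 8
 *	fls(0)    == 0
 */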

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int word)
{
        if (!word)
                return 0;

        return fls(word & -word);
}
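
/*
 * Examples (illustrative): like the libc ffs(), the result is 1-based:
 *
 *	ffs(0x40) == 7
 *	ffs(0)    == 0
 */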

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */