Diffstat (limited to 'include/asm-arm26/bitops.h')
-rw-r--r-- | include/asm-arm26/bitops.h | 207
1 file changed, 0 insertions, 207 deletions
diff --git a/include/asm-arm26/bitops.h b/include/asm-arm26/bitops.h
deleted file mode 100644
index 19a69573a65..00000000000
--- a/include/asm-arm26/bitops.h
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Copyright 1995, Russell King.
- *
- * Based on the arm32 version by RMK (and others). Their copyrights apply to
- * Those parts.
- * Modified for arm26 by Ian Molton on 25/11/04
- *
- * bit 0 is the LSB of an "unsigned long" quantity.
- *
- * Please note that the code in this file should never be included
- * from user space. Many of these are not implemented in assembler
- * since they would be too costly. Also, they require privileged
- * instructions (which are not available from user mode) to ensure
- * that they are atomic.
- */
-
-#ifndef __ASM_ARM_BITOPS_H
-#define __ASM_ARM_BITOPS_H
-
-#ifdef __KERNEL__
-
-#include <linux/compiler.h>
-#include <asm/system.h>
-
-#define smp_mb__before_clear_bit()	do { } while (0)
-#define smp_mb__after_clear_bit()	do { } while (0)
-
-/*
- * These functions are the basis of our bit ops.
- *
- * First, the atomic bitops. These use native endian.
- */
-static inline void ____atomic_set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long flags;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	local_irq_save(flags);
-	*p |= mask;
-	local_irq_restore(flags);
-}
-
-static inline void ____atomic_clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long flags;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	local_irq_save(flags);
-	*p &= ~mask;
-	local_irq_restore(flags);
-}
-
-static inline void ____atomic_change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long flags;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	local_irq_save(flags);
-	*p ^= mask;
-	local_irq_restore(flags);
-}
-
-static inline int
-____atomic_test_and_set_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long flags;
-	unsigned int res;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	local_irq_save(flags);
-	res = *p;
-	*p = res | mask;
-	local_irq_restore(flags);
-
-	return res & mask;
-}
-
-static inline int
-____atomic_test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long flags;
-	unsigned int res;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	local_irq_save(flags);
-	res = *p;
-	*p = res & ~mask;
-	local_irq_restore(flags);
-
-	return res & mask;
-}
-
-static inline int
-____atomic_test_and_change_bit(unsigned int bit, volatile unsigned long *p)
-{
-	unsigned long flags;
-	unsigned int res;
-	unsigned long mask = 1UL << (bit & 31);
-
-	p += bit >> 5;
-
-	local_irq_save(flags);
-	res = *p;
-	*p = res ^ mask;
-	local_irq_restore(flags);
-
-	return res & mask;
-}
-
-#include <asm-generic/bitops/non-atomic.h>
-
-/*
- * Little endian assembly bitops. nr = 0 -> byte 0 bit 0.
- */
-extern void _set_bit_le(int nr, volatile unsigned long * p);
-extern void _clear_bit_le(int nr, volatile unsigned long * p);
-extern void _change_bit_le(int nr, volatile unsigned long * p);
-extern int _test_and_set_bit_le(int nr, volatile unsigned long * p);
-extern int _test_and_clear_bit_le(int nr, volatile unsigned long * p);
-extern int _test_and_change_bit_le(int nr, volatile unsigned long * p);
-extern int _find_first_zero_bit_le(const unsigned long * p, unsigned size);
-extern int _find_next_zero_bit_le(void * p, int size, int offset);
-extern int _find_first_bit_le(const unsigned long *p, unsigned size);
-extern int _find_next_bit_le(const unsigned long *p, int size, int offset);
-
-/*
- * The __* form of bitops are non-atomic and may be reordered.
- */
-#define ATOMIC_BITOP_LE(name,nr,p)		\
-	(__builtin_constant_p(nr) ?		\
-	 ____atomic_##name(nr, p) :		\
-	 _##name##_le(nr,p))
-
-#define NONATOMIC_BITOP(name,nr,p)		\
-	(____nonatomic_##name(nr, p))
-
-/*
- * These are the little endian, atomic definitions.
- */
-#define set_bit(nr,p)			ATOMIC_BITOP_LE(set_bit,nr,p)
-#define clear_bit(nr,p)			ATOMIC_BITOP_LE(clear_bit,nr,p)
-#define change_bit(nr,p)		ATOMIC_BITOP_LE(change_bit,nr,p)
-#define test_and_set_bit(nr,p)		ATOMIC_BITOP_LE(test_and_set_bit,nr,p)
-#define test_and_clear_bit(nr,p)	ATOMIC_BITOP_LE(test_and_clear_bit,nr,p)
-#define test_and_change_bit(nr,p)	ATOMIC_BITOP_LE(test_and_change_bit,nr,p)
-#define find_first_zero_bit(p,sz)	_find_first_zero_bit_le(p,sz)
-#define find_next_zero_bit(p,sz,off)	_find_next_zero_bit_le(p,sz,off)
-#define find_first_bit(p,sz)		_find_first_bit_le(p,sz)
-#define find_next_bit(p,sz,off)		_find_next_bit_le(p,sz,off)
-
-#define WORD_BITOFF_TO_LE(x)		((x))
-
-#include <asm-generic/bitops/ffz.h>
-#include <asm-generic/bitops/__ffs.h>
-#include <asm-generic/bitops/fls.h>
-#include <asm-generic/bitops/fls64.h>
-#include <asm-generic/bitops/ffs.h>
-#include <asm-generic/bitops/sched.h>
-#include <asm-generic/bitops/hweight.h>
-
-/*
- * Ext2 is defined to use little-endian byte ordering.
- * These do not need to be atomic.
- */
-#define ext2_set_bit(nr,p)			\
-		__test_and_set_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define ext2_set_bit_atomic(lock,nr,p)		\
-		test_and_set_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define ext2_clear_bit(nr,p)			\
-		__test_and_clear_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define ext2_clear_bit_atomic(lock,nr,p)	\
-		test_and_clear_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define ext2_test_bit(nr,p)			\
-		test_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define ext2_find_first_zero_bit(p,sz)		\
-		_find_first_zero_bit_le(p,sz)
-#define ext2_find_next_zero_bit(p,sz,off)	\
-		_find_next_zero_bit_le(p,sz,off)
-
-/*
- * Minix is defined to use little-endian byte ordering.
- * These do not need to be atomic.
- */
-#define minix_set_bit(nr,p)			\
-		__set_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define minix_test_bit(nr,p)			\
-		test_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define minix_test_and_set_bit(nr,p)		\
-		__test_and_set_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define minix_test_and_clear_bit(nr,p)		\
-		__test_and_clear_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
-#define minix_find_first_zero_bit(p,sz)		\
-		_find_first_zero_bit_le((unsigned long *)(p),sz)
-
-#endif /* __KERNEL__ */
-
-#endif /* _ARM_BITOPS_H */
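
For context, the deleted header made its "atomic" bitops safe by masking interrupts around a plain read-modify-write, which is sufficient on the uniprocessor ARM26 machines this port supported. The fragment below is a minimal user-space sketch of the same word/mask arithmetic (bit >> 5 selects the 32-bit word, bit & 31 builds the mask). It is not part of the patch: demo_set_bit() and bitmap[] are hypothetical names, uint32_t stands in for the 32-bit unsigned long of arm26, and the local_irq_save()/local_irq_restore() pair is only indicated in comments.

/*
 * Minimal sketch (not from this patch) of the pattern used by the
 * deleted ____atomic_set_bit(): index the word, build a mask, OR it in.
 * In the kernel the read-modify-write is bracketed by
 * local_irq_save()/local_irq_restore(), which gives atomicity on the
 * uniprocessor arm26 target.
 */
#include <stdio.h>
#include <stdint.h>

static uint32_t bitmap[2];	/* 64-bit bitmap; bit 0 is the LSB of word 0 */

static void demo_set_bit(unsigned int bit, volatile uint32_t *p)
{
	uint32_t mask = UINT32_C(1) << (bit & 31);	/* position within a word */

	p += bit >> 5;		/* select the 32-bit word holding the bit */
	/* local_irq_save(flags);    -- kernel version masks IRQs here */
	*p |= mask;
	/* local_irq_restore(flags); */
}

int main(void)
{
	demo_set_bit(5, bitmap);
	demo_set_bit(33, bitmap);	/* lands in word 1, bit 1 */
	printf("word0=%#x word1=%#x\n", (unsigned)bitmap[0], (unsigned)bitmap[1]);
	return 0;
}

Built with any hosted C compiler, this prints word0=0x20 word1=0x2, i.e. bit 5 of word 0 and bit 1 of word 1, mirroring how the kernel helpers address a little-endian bitmap.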