Diffstat (limited to 'include/asm-mips/bitops.h')
-rw-r--r--  include/asm-mips/bitops.h | 209
1 file changed, 163 insertions(+), 46 deletions(-)
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index eb8d79dba11..5496f9064a6 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -12,20 +12,21 @@
#include <linux/config.h>
#include <linux/compiler.h>
#include <linux/types.h>
+#include <asm/bug.h>
#include <asm/byteorder.h> /* sigh ... */
#include <asm/cpu-features.h>
#if (_MIPS_SZLONG == 32)
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
-#define __LL "ll "
-#define __SC "sc "
+#define __LL "ll "
+#define __SC "sc "
#define cpu_to_lelongp(x) cpu_to_le32p((__u32 *) (x))
#elif (_MIPS_SZLONG == 64)
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
-#define __LL "lld "
-#define __SC "scd "
+#define __LL "lld "
+#define __SC "scd "
#define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x))
#endif
@@ -72,18 +73,22 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
if (cpu_has_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__(
+ " .set mips3 \n"
"1: " __LL "%0, %1 # set_bit \n"
" or %0, %2 \n"
- " "__SC "%0, %1 \n"
+ " " __SC "%0, %1 \n"
" beqzl %0, 1b \n"
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m)
: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
} else if (cpu_has_llsc) {
__asm__ __volatile__(
+ " .set mips3 \n"
"1: " __LL "%0, %1 # set_bit \n"
" or %0, %2 \n"
- " "__SC "%0, %1 \n"
+ " " __SC "%0, %1 \n"
" beqz %0, 1b \n"
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m)
: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
} else {
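
set_bit() first locates the word that holds bit nr (addr + (nr >> SZLONG_LOG)) and builds the mask 1UL << (nr & SZLONG_MASK); the ll/or/sc sequence then retries until the store-conditional succeeds, so the OR is atomic against other CPUs. A minimal C sketch of the same semantics, using GCC's __sync_fetch_and_or builtin purely as a stand-in for the ll/sc loop (the builtin and the demo_ name are assumptions of the sketch, not part of the patch):

/* Illustration only: what set_bit(nr, addr) computes on a 32-bit kernel
 * (SZLONG_LOG == 5, SZLONG_MASK == 31UL).  Not the kernel implementation. */
static inline void demo_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *m = addr + (nr >> 5);	/* word holding bit nr */
	unsigned long mask = 1UL << (nr & 31UL);	/* bit within that word */

	__sync_fetch_and_or(m, mask);		/* atomic OR, like ll/or/sc */
}
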
@@ -132,18 +137,22 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
if (cpu_has_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__(
+ " .set mips3 \n"
"1: " __LL "%0, %1 # clear_bit \n"
" and %0, %2 \n"
" " __SC "%0, %1 \n"
" beqzl %0, 1b \n"
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m)
: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
} else if (cpu_has_llsc) {
__asm__ __volatile__(
+ " .set mips3 \n"
"1: " __LL "%0, %1 # clear_bit \n"
" and %0, %2 \n"
" " __SC "%0, %1 \n"
" beqz %0, 1b \n"
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m)
: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
} else {
@@ -191,10 +200,12 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
unsigned long temp;
__asm__ __volatile__(
+ " .set mips3 \n"
"1: " __LL "%0, %1 # change_bit \n"
" xor %0, %2 \n"
- " "__SC "%0, %1 \n"
+ " " __SC "%0, %1 \n"
" beqzl %0, 1b \n"
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m)
: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
} else if (cpu_has_llsc) {
@@ -202,10 +213,12 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
unsigned long temp;
__asm__ __volatile__(
+ " .set mips3 \n"
"1: " __LL "%0, %1 # change_bit \n"
" xor %0, %2 \n"
- " "__SC "%0, %1 \n"
+ " " __SC "%0, %1 \n"
" beqz %0, 1b \n"
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m)
: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
} else {
@@ -253,14 +266,16 @@ static inline int test_and_set_bit(unsigned long nr,
unsigned long temp, res;
__asm__ __volatile__(
+ " .set mips3 \n"
"1: " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
#ifdef CONFIG_SMP
- "sync \n"
+ " sync \n"
#endif
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
: "memory");
@@ -271,16 +286,18 @@ static inline int test_and_set_bit(unsigned long nr,
unsigned long temp, res;
__asm__ __volatile__(
- " .set noreorder # test_and_set_bit \n"
- "1: " __LL "%0, %1 \n"
+ " .set push \n"
+ " .set noreorder \n"
+ " .set mips3 \n"
+ "1: " __LL "%0, %1 # test_and_set_bit \n"
" or %2, %0, %3 \n"
" " __SC "%2, %1 \n"
" beqz %2, 1b \n"
" and %2, %0, %3 \n"
#ifdef CONFIG_SMP
- "sync \n"
+ " sync \n"
#endif
- ".set\treorder"
+ " .set pop \n"
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
: "memory");
@@ -343,15 +360,17 @@ static inline int test_and_clear_bit(unsigned long nr,
unsigned long temp, res;
__asm__ __volatile__(
+ " .set mips3 \n"
"1: " __LL "%0, %1 # test_and_clear_bit \n"
" or %2, %0, %3 \n"
" xor %2, %3 \n"
- __SC "%2, %1 \n"
+ " " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
#ifdef CONFIG_SMP
" sync \n"
#endif
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
: "memory");
@@ -362,17 +381,19 @@ static inline int test_and_clear_bit(unsigned long nr,
unsigned long temp, res;
__asm__ __volatile__(
- " .set noreorder # test_and_clear_bit \n"
- "1: " __LL "%0, %1 \n"
+ " .set push \n"
+ " .set noreorder \n"
+ " .set mips3 \n"
+ "1: " __LL "%0, %1 # test_and_clear_bit \n"
" or %2, %0, %3 \n"
" xor %2, %3 \n"
- __SC "%2, %1 \n"
+ " " __SC "%2, %1 \n"
" beqz %2, 1b \n"
" and %2, %0, %3 \n"
#ifdef CONFIG_SMP
" sync \n"
#endif
- " .set reorder \n"
+ " .set pop \n"
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
: "memory");
@@ -435,14 +456,16 @@ static inline int test_and_change_bit(unsigned long nr,
unsigned long temp, res;
__asm__ __volatile__(
- "1: " __LL " %0, %1 # test_and_change_bit \n"
+ " .set mips3 \n"
+ "1: " __LL "%0, %1 # test_and_change_bit \n"
" xor %2, %0, %3 \n"
- " "__SC "%2, %1 \n"
+ " " __SC "%2, %1 \n"
" beqzl %2, 1b \n"
" and %2, %0, %3 \n"
#ifdef CONFIG_SMP
" sync \n"
#endif
+ " .set mips0 \n"
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
: "memory");
@@ -453,16 +476,18 @@ static inline int test_and_change_bit(unsigned long nr,
unsigned long temp, res;
__asm__ __volatile__(
- " .set noreorder # test_and_change_bit \n"
- "1: " __LL " %0, %1 \n"
+ " .set push \n"
+ " .set noreorder \n"
+ " .set mips3 \n"
+ "1: " __LL "%0, %1 # test_and_change_bit \n"
" xor %2, %0, %3 \n"
- " "__SC "\t%2, %1 \n"
+ " " __SC "\t%2, %1 \n"
" beqz %2, 1b \n"
" and %2, %0, %3 \n"
#ifdef CONFIG_SMP
" sync \n"
#endif
- " .set reorder \n"
+ " .set pop \n"
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
: "memory");
@@ -523,22 +548,60 @@ static inline int test_bit(unsigned long nr, const volatile unsigned long *addr)
}
/*
- * ffz - find first zero in word.
+ * Return the bit position (0..63) of the most significant 1 bit in a word
+ * Returns -1 if no 1 bit exists
+ */
+static inline int __ilog2(unsigned long x)
+{
+ int lz;
+
+ if (sizeof(x) == 4) {
+ __asm__ (
+ " .set push \n"
+ " .set mips32 \n"
+ " clz %0, %1 \n"
+ " .set pop \n"
+ : "=r" (lz)
+ : "r" (x));
+
+ return 31 - lz;
+ }
+
+ BUG_ON(sizeof(x) != 8);
+
+ __asm__ (
+ " .set push \n"
+ " .set mips64 \n"
+ " dclz %0, %1 \n"
+ " .set pop \n"
+ : "=r" (lz)
+ : "r" (x));
+
+ return 63 - lz;
+}
+
+/*
+ * __ffs - find first bit in word.
* @word: The word to search
*
- * Undefined if no zero exists, so code should check against ~0UL first.
+ * Returns 0..SZLONG-1
+ * Undefined if no bit exists, so code should check against 0 first.
*/
-static inline unsigned long ffz(unsigned long word)
+static inline unsigned long __ffs(unsigned long word)
{
+#if defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64)
+ return __ilog2(word & -word);
+#else
int b = 0, s;
- word = ~word;
#ifdef CONFIG_32BIT
s = 16; if (word << 16 != 0) s = 0; b += s; word >>= s;
s = 8; if (word << 24 != 0) s = 0; b += s; word >>= s;
s = 4; if (word << 28 != 0) s = 0; b += s; word >>= s;
s = 2; if (word << 30 != 0) s = 0; b += s; word >>= s;
s = 1; if (word << 31 != 0) s = 0; b += s;
+
+ return b;
#endif
#ifdef CONFIG_64BIT
s = 32; if (word << 32 != 0) s = 0; b += s; word >>= s;
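
On MIPS32/MIPS64 CPUs the new __ilog2() is just 31 - clz(x) (or 63 - dclz(x)); because clz of zero is the word width, __ilog2(0) comes out as -1, matching the comment above. __ffs() then isolates the lowest set bit with word & -word before taking its log. A hedged host-side sketch of the same arithmetic, using GCC's __builtin_clzl in place of the clz/dclz instructions (an assumption of the sketch; demo_ names are hypothetical):

/* Illustration of the clz-based arithmetic in __ilog2()/__ffs(). */
static inline int demo_ilog2(unsigned long x)
{
	if (x == 0)
		return -1;	/* mirrors clz(0) == word size, giving -1 */
	return (8 * (int)sizeof(x) - 1) - __builtin_clzl(x);
}

static inline unsigned long demo_ffs0(unsigned long word)
{
	/* word & -word keeps only the lowest set bit, e.g.
	 * word = 0x28 (101000b) -> word & -word = 0x08 -> demo_ilog2 = 3 */
	return demo_ilog2(word & -word);
}
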
@@ -547,27 +610,92 @@ static inline unsigned long ffz(unsigned long word)
s = 4; if (word << 60 != 0) s = 0; b += s; word >>= s;
s = 2; if (word << 62 != 0) s = 0; b += s; word >>= s;
s = 1; if (word << 63 != 0) s = 0; b += s;
-#endif
return b;
+#endif
+#endif
}
/*
- * __ffs - find first bit in word.
+ * ffs - find first bit set.
* @word: The word to search
*
- * Undefined if no bit exists, so code should check against 0 first.
+ * Returns 1..SZLONG
+ * Returns 0 if no bit exists
*/
-static inline unsigned long __ffs(unsigned long word)
+
+static inline unsigned long ffs(unsigned long word)
{
- return ffz(~word);
+ if (!word)
+ return 0;
+
+ return __ffs(word) + 1;
}
/*
- * fls: find last bit set.
+ * ffz - find first zero in word.
+ * @word: The word to search
+ *
+ * Undefined if no zero exists, so code should check against ~0UL first.
+ */
+static inline unsigned long ffz(unsigned long word)
+{
+ return __ffs (~word);
+}
+
+/*
+ * flz - find last zero in word.
+ * @word: The word to search
+ *
+ * Returns 0..SZLONG-1
+ * Undefined if no zero exists, so code should check against ~0UL first.
+ */
+static inline unsigned long flz(unsigned long word)
+{
+#if defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64)
+ return __ilog2(~word);
+#else
+#ifdef CONFIG_32BIT
+ int r = 31, s;
+ word = ~word;
+ s = 16; if ((word & 0xffff0000)) s = 0; r -= s; word <<= s;
+ s = 8; if ((word & 0xff000000)) s = 0; r -= s; word <<= s;
+ s = 4; if ((word & 0xf0000000)) s = 0; r -= s; word <<= s;
+ s = 2; if ((word & 0xc0000000)) s = 0; r -= s; word <<= s;
+ s = 1; if ((word & 0x80000000)) s = 0; r -= s;
+
+ return r;
+#endif
+#ifdef CONFIG_64BIT
+ int r = 63, s;
+ word = ~word;
+ s = 32; if ((word & 0xffffffff00000000UL)) s = 0; r -= s; word <<= s;
+ s = 16; if ((word & 0xffff000000000000UL)) s = 0; r -= s; word <<= s;
+ s = 8; if ((word & 0xff00000000000000UL)) s = 0; r -= s; word <<= s;
+ s = 4; if ((word & 0xf000000000000000UL)) s = 0; r -= s; word <<= s;
+ s = 2; if ((word & 0xc000000000000000UL)) s = 0; r -= s; word <<= s;
+ s = 1; if ((word & 0x8000000000000000UL)) s = 0; r -= s;
+
+ return r;
+#endif
+#endif
+}
+
+/*
+ * fls - find last bit set.
+ * @word: The word to search
+ *
+ * Returns 1..SZLONG
+ * Returns 0 if no bit exists
*/
+static inline unsigned long fls(unsigned long word)
+{
+ if (word == 0)
+ return 0;
+
+ return flz(~word) + 1;
+}
-#define fls(x) generic_fls(x)
/*
* find_next_zero_bit - find the first zero bit in a memory region
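
With __ffs() and flz() in place, the remaining helpers are thin wrappers: ffs(x) is __ffs(x) + 1 (0 when no bit is set), ffz(x) is __ffs(~x), and fls(x) is flz(~x) + 1 (0 for a zero word), which is why the old generic_fls/generic_ffs defines can go. A few spot checks of those identities, written as a hedged sketch (the function name and sample values are made up):

/* Illustration: sanity checks of the wrapper identities on 32-bit values. */
static void demo_bitop_checks(void)
{
	BUG_ON(ffs(0x00000001UL) != 1);		/* lowest set bit, 1-based */
	BUG_ON(ffs(0x00008000UL) != 16);
	BUG_ON(ffs(0UL) != 0);			/* no bit set */

	BUG_ON(ffz(0x000000ffUL) != 8);		/* lowest clear bit, 0-based */

	BUG_ON(fls(0x00008000UL) != 16);	/* highest set bit, 1-based */
	BUG_ON(fls(0UL) != 0);			/* no bit set */
}
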
@@ -704,17 +832,6 @@ static inline int sched_find_first_bit(const unsigned long *b)
}
/*
- * ffs - find first bit set
- * @x: the word to search
- *
- * This is defined the same way as
- * the libc and compiler builtin ffs routines, therefore
- * differs in spirit from the above ffz (man ffs).
- */
-
-#define ffs(x) generic_ffs(x)
-
-/*
* hweightN - returns the hamming weight of a N-bit word
* @x: the word to weigh
*