author		Paul Mundt <lethal@linux-sh.org>	2008-08-07 17:36:12 +0900
committer	Paul Mundt <lethal@linux-sh.org>	2008-09-08 10:35:02 +0900
commit		742fd1bcfb475c702c9b1dd6afc79c08f8dbf7dd (patch)
tree		0d096e5ebea4d3d63d028bfacde7f4274928eab6 /arch/sh/include/asm/bitops-llsc.h
parent		7686ad5606f08d9dfb33a2087a36c8366366015b (diff)
sh: Provide movli.l/movco.l-based bitops.
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
Diffstat (limited to 'arch/sh/include/asm/bitops-llsc.h')
-rw-r--r--	arch/sh/include/asm/bitops-llsc.h	| 144
1 file changed, 144 insertions(+), 0 deletions(-)
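
The patch below implements the kernel's atomic bitops for SH-4A using the movli.l/movco.l instruction pair: movli.l loads a 32-bit word into r0 (GCC's "z" constraint on SH) and opens a reservation on the address, and movco.l writes r0 back only if the reservation is still intact, setting the T bit on success. The "bf 1b" branch retries the whole sequence whenever the store failed because another CPU or an interrupt broke the reservation. As a rough sketch of the same retry loop in portable C11 atomics (illustration only: it uses compare-exchange where SH-4A has a true load-locked/store-conditional pair, and sketch_set_bit is an invented name, not part of the patch):

#include <stdatomic.h>

static inline void sketch_set_bit(int nr, _Atomic unsigned int *addr)
{
	_Atomic unsigned int *a = addr + (nr >> 5);	/* word holding bit nr */
	unsigned int mask = 1u << (nr & 0x1f);		/* bit position inside it */
	unsigned int old = atomic_load(a);

	/* Retry until the read-modify-write lands without interference;
	 * this loop plays the role of the movli.l/movco.l/bf sequence. */
	while (!atomic_compare_exchange_weak(a, &old, old | mask))
		;	/* a failed exchange refreshes 'old' for the retry */
}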
diff --git a/arch/sh/include/asm/bitops-llsc.h b/arch/sh/include/asm/bitops-llsc.h
new file mode 100644
index 00000000000..43b8e1a8239
--- /dev/null
+++ b/arch/sh/include/asm/bitops-llsc.h
@@ -0,0 +1,144 @@
+#ifndef __ASM_SH_BITOPS_LLSC_H
+#define __ASM_SH_BITOPS_LLSC_H
+
+static inline void set_bit(int nr, volatile void * addr)
+{
+ int mask;
+ volatile unsigned int *a = addr;
+ unsigned long tmp;
+
+ a += nr >> 5;
+ mask = 1 << (nr & 0x1f);
+
+ __asm__ __volatile__ (
+ "1: \n\t"
+ "movli.l @%1, %0 ! set_bit \n\t"
+ "or %3, %0 \n\t"
+ "movco.l %0, @%1 \n\t"
+ "bf 1b \n\t"
+ : "=&z" (tmp), "=r" (a)
+ : "1" (a), "r" (mask)
+ : "t", "memory"
+ );
+}
+
+static inline void clear_bit(int nr, volatile void * addr)
+{
+ int mask;
+ volatile unsigned int *a = addr;
+ unsigned long tmp;
+
+ a += nr >> 5;
+ mask = 1 << (nr & 0x1f);
+
+ __asm__ __volatile__ (
+ "1: \n\t"
+ "movli.l @%1, %0 ! clear_bit \n\t"
+ "and %3, %0 \n\t"
+ "movco.l %0, @%1 \n\t"
+ "bf 1b \n\t"
+ : "=&z" (tmp), "=r" (a)
+ : "1" (a), "r" (~mask)
+ : "t", "memory"
+ );
+}
+
+static inline void change_bit(int nr, volatile void * addr)
+{
+ int mask;
+ volatile unsigned int *a = addr;
+ unsigned long tmp;
+
+ a += nr >> 5;
+ mask = 1 << (nr & 0x1f);
+
+ __asm__ __volatile__ (
+ "1: \n\t"
+ "movli.l @%1, %0 ! change_bit \n\t"
+ "xor %3, %0 \n\t"
+ "movco.l %0, @%1 \n\t"
+ "bf 1b \n\t"
+ : "=&z" (tmp), "=r" (a)
+ : "1" (a), "r" (mask)
+ : "t", "memory"
+ );
+}
+
+static inline int test_and_set_bit(int nr, volatile void * addr)
+{
+ int mask, retval;
+ volatile unsigned int *a = addr;
+ unsigned long tmp;
+
+ a += nr >> 5;
+ mask = 1 << (nr & 0x1f);
+
+ __asm__ __volatile__ (
+ "1: \n\t"
+ "movli.l @%1, %0 ! test_and_set_bit \n\t"
+ "mov %0, %2 \n\t"
+ "or %4, %0 \n\t"
+ "movco.l %0, @%1 \n\t"
+ "bf 1b \n\t"
+ "and %4, %2 \n\t"
+ : "=&z" (tmp), "=r" (a), "=&r" (retval)
+ : "1" (a), "r" (mask)
+ : "t", "memory"
+ );
+
+ return retval != 0;
+}
+
+static inline int test_and_clear_bit(int nr, volatile void * addr)
+{
+ int mask, retval;
+ volatile unsigned int *a = addr;
+ unsigned long tmp;
+
+ a += nr >> 5;
+ mask = 1 << (nr & 0x1f);
+
+ __asm__ __volatile__ (
+ "1: \n\t"
+ "movli.l @%1, %0 ! test_and_clear_bit \n\t"
+ "mov %0, %2 \n\t"
+ "and %5, %0 \n\t"
+ "movco.l %0, @%1 \n\t"
+ "bf 1b \n\t"
+ "and %4, %2 \n\t"
+ "synco \n\t"
+ : "=&z" (tmp), "=r" (a), "=&r" (retval)
+ : "1" (a), "r" (mask), "r" (~mask)
+ : "t", "memory"
+ );
+
+ return retval != 0;
+}
+
+static inline int test_and_change_bit(int nr, volatile void * addr)
+{
+ int mask, retval;
+ volatile unsigned int *a = addr;
+ unsigned long tmp;
+
+ a += nr >> 5;
+ mask = 1 << (nr & 0x1f);
+
+ __asm__ __volatile__ (
+ "1: \n\t"
+ "movli.l @%1, %0 ! test_and_change_bit \n\t"
+ "mov %0, %2 \n\t"
+ "xor %4, %0 \n\t"
+ "movco.l %0, @%1 \n\t"
+ "bf 1b \n\t"
+ "and %4, %2 \n\t"
+ "synco \n\t"
+ : "=&z" (tmp), "=r" (a), "=&r" (retval)
+ : "1" (a), "r" (mask)
+ : "t", "memory"
+ );
+
+ return retval != 0;
+}
+
+#endif /* __ASM_SH_BITOPS_LLSC_H */
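
For context, these helpers follow the generic kernel bitops calling convention (a bit number plus a pointer to a word-aligned bitmap), so existing callers need no changes. A minimal, hypothetical usage sketch; the bitmap name and bit number are invented for illustration:

/* 'pending' and bit 42 are made up for the example. */
static unsigned long pending[4];	/* a 128-bit bitmap */

static void mark_and_consume(void)
{
	set_bit(42, pending);			/* atomically set bit 42 */

	if (test_and_clear_bit(42, pending)) {	/* nonzero iff the bit was set */
		/* bit observed set and cleared atomically, exactly once */
	}
}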