author | Catalin Marinas <catalin.marinas@arm.com> | 2013-03-21 16:28:47 +0000
---|---|---
committer | Catalin Marinas <catalin.marinas@arm.com> | 2013-03-21 17:39:31 +0000
commit | 62479586532715b6da4777374a6f53b32453385e (patch) |
tree | 232004eb56bf36b966ea11ec8295a1b2893ac9e9 /arch |
parent | 2b8cac814cd5a0a305d62dcd1d589faccb705a4d (diff) |
arm64: klib: Optimised atomic bitops
This patch implements the AArch64-specific atomic bitops functions using
exclusive memory accesses to avoid locking.
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
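Each of the new routines splits the bit number into a 64-bit word offset and a bit offset within that word, then retries a load-exclusive/store-exclusive sequence until the store succeeds. As a rough sketch of the semantics only (the function name below is invented, and a GCC/Clang __atomic builtin stands in for the hand-written LDXR/STXR loop in the actual assembly):

```c
/*
 * Sketch of what the new test_and_set_bit() provides.  Illustration
 * only: the *_sketch name is not part of the patch, and the builtin
 * replaces the LDXR/STXR retry loop used by the real implementation.
 */
static inline int test_and_set_bit_sketch(int nr, volatile unsigned long *p)
{
	unsigned long mask = 1UL << (nr & 63);			/* bit within the 64-bit word */
	unsigned long *word = (unsigned long *)p + (nr >> 6);	/* word holding that bit */
	unsigned long old = __atomic_fetch_or(word, mask, __ATOMIC_SEQ_CST);

	return (old & mask) != 0;	/* return the previous value of the bit */
}
```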
Diffstat (limited to 'arch')
-rw-r--r-- | arch/arm64/include/asm/bitops.h | 18
-rw-r--r-- | arch/arm64/kernel/arm64ksyms.c | 13
-rw-r--r-- | arch/arm64/lib/bitops.S | 70
-rw-r--r-- | arch/arm64/lib/bitops.c | 25
4 files changed, 94 insertions, 32 deletions
diff --git a/arch/arm64/include/asm/bitops.h b/arch/arm64/include/asm/bitops.h
index 5e693073b03..aa5b59d6ba4 100644
--- a/arch/arm64/include/asm/bitops.h
+++ b/arch/arm64/include/asm/bitops.h
@@ -32,6 +32,16 @@
 #error only <linux/bitops.h> can be included directly
 #endif
 
+/*
+ * Little endian assembly atomic bitops.
+ */
+extern void set_bit(int nr, volatile unsigned long *p);
+extern void clear_bit(int nr, volatile unsigned long *p);
+extern void change_bit(int nr, volatile unsigned long *p);
+extern int test_and_set_bit(int nr, volatile unsigned long *p);
+extern int test_and_clear_bit(int nr, volatile unsigned long *p);
+extern int test_and_change_bit(int nr, volatile unsigned long *p);
+
 #include <asm-generic/bitops/builtin-__ffs.h>
 #include <asm-generic/bitops/builtin-ffs.h>
 #include <asm-generic/bitops/builtin-__fls.h>
@@ -45,9 +55,13 @@
 #include <asm-generic/bitops/hweight.h>
 #include <asm-generic/bitops/lock.h>
 
-#include <asm-generic/bitops/atomic.h>
 #include <asm-generic/bitops/non-atomic.h>
 #include <asm-generic/bitops/le.h>
-#include <asm-generic/bitops/ext2-atomic.h>
+
+/*
+ * Ext2 is defined to use little-endian byte ordering.
+ */
+#define ext2_set_bit_atomic(lock, nr, p)	test_and_set_bit_le(nr, p)
+#define ext2_clear_bit_atomic(lock, nr, p)	test_and_clear_bit_le(nr, p)
 
 #endif /* __ASM_BITOPS_H */
diff --git a/arch/arm64/kernel/arm64ksyms.c b/arch/arm64/kernel/arm64ksyms.c
index a5d0d464310..7df1aad29b6 100644
--- a/arch/arm64/kernel/arm64ksyms.c
+++ b/arch/arm64/kernel/arm64ksyms.c
@@ -39,11 +39,6 @@ EXPORT_SYMBOL(__copy_from_user);
 EXPORT_SYMBOL(__copy_to_user);
 EXPORT_SYMBOL(__clear_user);
 
-	/* bitops */
-#ifdef CONFIG_SMP
-EXPORT_SYMBOL(__atomic_hash);
-#endif
-
 	/* physical memory */
 EXPORT_SYMBOL(memstart_addr);
 
@@ -54,3 +49,11 @@ EXPORT_SYMBOL(memset);
 EXPORT_SYMBOL(memcpy);
 EXPORT_SYMBOL(memmove);
 EXPORT_SYMBOL(memchr);
+
+	/* atomic bitops */
+EXPORT_SYMBOL(set_bit);
+EXPORT_SYMBOL(test_and_set_bit);
+EXPORT_SYMBOL(clear_bit);
+EXPORT_SYMBOL(test_and_clear_bit);
+EXPORT_SYMBOL(change_bit);
+EXPORT_SYMBOL(test_and_change_bit);
diff --git a/arch/arm64/lib/bitops.S b/arch/arm64/lib/bitops.S
new file mode 100644
index 00000000000..fd1e801b53e
--- /dev/null
+++ b/arch/arm64/lib/bitops.S
@@ -0,0 +1,70 @@
+/*
+ * Based on arch/arm/lib/bitops.h
+ *
+ * Copyright (C) 2013 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <linux/linkage.h>
+#include <asm/assembler.h>
+
+/*
+ * x0: bits 5:0  bit offset
+ *     bits 63:6 word offset
+ * x1: address
+ */
+	.macro	bitop, name, instr
+ENTRY(	\name	)
+	and	x3, x0, #63		// Get bit offset
+	eor	x0, x0, x3		// Clear low bits
+	mov	x2, #1
+	add	x1, x1, x0, lsr #3	// Get word offset
+	lsl	x3, x2, x3		// Create mask
+1:	ldxr	x2, [x1]
+	\instr	x2, x2, x3
+	stxr	w0, x2, [x1]
+	cbnz	w0, 1b
+	ret
+ENDPROC(\name	)
+	.endm
+
+	.macro	testop, name, instr
+ENTRY(	\name	)
+	and	x3, x0, #63		// Get bit offset
+	eor	x0, x0, x3		// Clear low bits
+	mov	x2, #1
+	add	x1, x1, x0, lsr #3	// Get word offset
+	lsl	x4, x2, x3		// Create mask
+	smp_dmb	ish
+1:	ldxr	x2, [x1]
+	lsr	x0, x2, x3		// Save old value of bit
+	\instr	x2, x2, x4		// toggle bit
+	stxr	w2, x2, [x1]
+	cbnz	w2, 1b
+	smp_dmb	ish
+	and	x0, x0, #1
+3:	ret
+ENDPROC(\name	)
+	.endm
+
+/*
+ * Atomic bit operations.
+ */
+	bitop	change_bit, eor
+	bitop	clear_bit, bic
+	bitop	set_bit, orr
+
+	testop	test_and_change_bit, eor
+	testop	test_and_clear_bit, bic
+	testop	test_and_set_bit, orr
diff --git a/arch/arm64/lib/bitops.c b/arch/arm64/lib/bitops.c
deleted file mode 100644
index aa4965e60ac..00000000000
--- a/arch/arm64/lib/bitops.c
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (C) 2012 ARM Limited
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include <linux/kernel.h>
-#include <linux/spinlock.h>
-#include <linux/atomic.h>
-
-#ifdef CONFIG_SMP
-arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
-	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
-};
-#endif
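Note the barrier placement: the testop macro issues smp_dmb ish both before and after the exclusive-access loop, giving the value-returning test_and_* operations full-barrier semantics, while the plain bitop macro (set_bit, clear_bit, change_bit) provides no ordering guarantees. As an illustration of what the bitop set_bit, orr expansion does, here is a hedged inline-assembly rendering (the helper name is invented; the kernel uses the pure assembly above):

```c
/*
 * Illustrative C + inline-asm version of the "bitop set_bit, orr"
 * expansion.  Sketch only; set_bit_sketch() is not a kernel symbol.
 */
static inline void set_bit_sketch(int nr, volatile unsigned long *p)
{
	unsigned long mask = 1UL << (nr & 63);
	unsigned long *word = (unsigned long *)p + (nr >> 6);
	unsigned long tmp;
	unsigned int res;

	asm volatile(
	"1:	ldxr	%0, %2\n"	/* load-exclusive the 64-bit word  */
	"	orr	%0, %0, %3\n"	/* set the requested bit           */
	"	stxr	%w1, %0, %2\n"	/* store-exclusive, 0 on success   */
	"	cbnz	%w1, 1b\n"	/* retry if another CPU intervened */
	: "=&r" (tmp), "=&r" (res), "+Q" (*word)
	: "r" (mask));
}
```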