author    | Catalin Marinas <catalin.marinas@arm.com> | 2013-03-28 16:13:26 +0000
committer | Catalin Marinas <catalin.marinas@arm.com> | 2013-03-28 16:13:26 +0000
commit    | d4784be3b20ed0dd19bbde3b2d58df023ae1dc86 (patch)
tree      | 951aa2f45d2acd1b244bd3eabdf78c7d1e75d0f2 /arch
parent    | e851b58cb77b47a5c14267723bd6b76655d21840 (diff)
parent    | 62479586532715b6da4777374a6f53b32453385e (diff)
Merge branch 'arm64-klib' into upstream
* arm64-klib:
arm64: klib: Optimised atomic bitops
arm64: klib: Optimised string functions
arm64: klib: Optimised memory functions
Diffstat (limited to 'arch')
-rw-r--r-- | arch/arm64/include/asm/Kbuild | 1
-rw-r--r-- | arch/arm64/include/asm/bitops.h | 18
-rw-r--r-- | arch/arm64/include/asm/string.h | 37
-rw-r--r-- | arch/arm64/kernel/arm64ksyms.c | 21
-rw-r--r-- | arch/arm64/lib/Makefile | 4
-rw-r--r-- | arch/arm64/lib/bitops.S | 70
-rw-r--r-- | arch/arm64/lib/memchr.S (renamed from arch/arm64/lib/bitops.c) | 37
-rw-r--r-- | arch/arm64/lib/memcpy.S | 53
-rw-r--r-- | arch/arm64/lib/memmove.S | 57
-rw-r--r-- | arch/arm64/lib/memset.S | 53
-rw-r--r-- | arch/arm64/lib/strchr.S | 42
-rw-r--r-- | arch/arm64/lib/strrchr.S | 43
12 files changed, 418 insertions, 18 deletions
diff --git a/arch/arm64/include/asm/Kbuild b/arch/arm64/include/asm/Kbuild
index e5fe4f99fe1..a13a4a493d5 100644
--- a/arch/arm64/include/asm/Kbuild
+++ b/arch/arm64/include/asm/Kbuild
@@ -39,7 +39,6 @@ generic-y += shmbuf.h
 generic-y += sizes.h
 generic-y += socket.h
 generic-y += sockios.h
-generic-y += string.h
 generic-y += switch_to.h
 generic-y += swab.h
 generic-y += termbits.h
diff --git a/arch/arm64/include/asm/bitops.h b/arch/arm64/include/asm/bitops.h
index 5e693073b03..aa5b59d6ba4 100644
--- a/arch/arm64/include/asm/bitops.h
+++ b/arch/arm64/include/asm/bitops.h
@@ -32,6 +32,16 @@
 #error only <linux/bitops.h> can be included directly
 #endif
 
+/*
+ * Little endian assembly atomic bitops.
+ */
+extern void set_bit(int nr, volatile unsigned long *p);
+extern void clear_bit(int nr, volatile unsigned long *p);
+extern void change_bit(int nr, volatile unsigned long *p);
+extern int test_and_set_bit(int nr, volatile unsigned long *p);
+extern int test_and_clear_bit(int nr, volatile unsigned long *p);
+extern int test_and_change_bit(int nr, volatile unsigned long *p);
+
 #include <asm-generic/bitops/builtin-__ffs.h>
 #include <asm-generic/bitops/builtin-ffs.h>
 #include <asm-generic/bitops/builtin-__fls.h>
@@ -45,9 +55,13 @@
 #include <asm-generic/bitops/hweight.h>
 #include <asm-generic/bitops/lock.h>
 
-#include <asm-generic/bitops/atomic.h>
 #include <asm-generic/bitops/non-atomic.h>
 #include <asm-generic/bitops/le.h>
-#include <asm-generic/bitops/ext2-atomic.h>
+
+/*
+ * Ext2 is defined to use little-endian byte ordering.
+ */
+#define ext2_set_bit_atomic(lock, nr, p)	test_and_set_bit_le(nr, p)
+#define ext2_clear_bit_atomic(lock, nr, p)	test_and_clear_bit_le(nr, p)
 
 #endif	/* __ASM_BITOPS_H */
diff --git a/arch/arm64/include/asm/string.h b/arch/arm64/include/asm/string.h
new file mode 100644
index 00000000000..3ee8b303d9a
--- /dev/null
+++ b/arch/arm64/include/asm/string.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2013 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+#ifndef __ASM_STRING_H
+#define __ASM_STRING_H
+
+#define __HAVE_ARCH_STRRCHR
+extern char *strrchr(const char *, int c);
+
+#define __HAVE_ARCH_STRCHR
+extern char *strchr(const char *, int c);
+
+#define __HAVE_ARCH_MEMCPY
+extern void *memcpy(void *, const void *, __kernel_size_t);
+
+#define __HAVE_ARCH_MEMMOVE
+extern void *memmove(void *, const void *, __kernel_size_t);
+
+#define __HAVE_ARCH_MEMCHR
+extern void *memchr(const void *, int, __kernel_size_t);
+
+#define __HAVE_ARCH_MEMSET
+extern void *memset(void *, int, __kernel_size_t);
+
+#endif
diff --git a/arch/arm64/kernel/arm64ksyms.c b/arch/arm64/kernel/arm64ksyms.c
index aa3e948f788..7df1aad29b6 100644
--- a/arch/arm64/kernel/arm64ksyms.c
+++ b/arch/arm64/kernel/arm64ksyms.c
@@ -39,10 +39,21 @@ EXPORT_SYMBOL(__copy_from_user);
 EXPORT_SYMBOL(__copy_to_user);
 EXPORT_SYMBOL(__clear_user);
 
-	/* bitops */
-#ifdef CONFIG_SMP
-EXPORT_SYMBOL(__atomic_hash);
-#endif
-
 	/* physical memory */
 EXPORT_SYMBOL(memstart_addr);
+
+	/* string / mem functions */
+EXPORT_SYMBOL(strchr);
+EXPORT_SYMBOL(strrchr);
+EXPORT_SYMBOL(memset);
+EXPORT_SYMBOL(memcpy);
+EXPORT_SYMBOL(memmove);
+EXPORT_SYMBOL(memchr);
+
+	/* atomic bitops */
+EXPORT_SYMBOL(set_bit);
+EXPORT_SYMBOL(test_and_set_bit);
+EXPORT_SYMBOL(clear_bit);
+EXPORT_SYMBOL(test_and_clear_bit);
+EXPORT_SYMBOL(change_bit);
+EXPORT_SYMBOL(test_and_change_bit);
diff --git a/arch/arm64/lib/Makefile b/arch/arm64/lib/Makefile
index 2fb7f6092aa..59acc0ef046 100644
--- a/arch/arm64/lib/Makefile
+++ b/arch/arm64/lib/Makefile
@@ -1,4 +1,6 @@
 lib-y		:= bitops.o delay.o					\
 		   strncpy_from_user.o strnlen_user.o clear_user.o	\
 		   copy_from_user.o copy_to_user.o copy_in_user.o	\
-		   copy_page.o clear_page.o
+		   copy_page.o clear_page.o				\
+		   memchr.o memcpy.o memmove.o memset.o			\
+		   strchr.o strrchr.o
diff --git a/arch/arm64/lib/bitops.S b/arch/arm64/lib/bitops.S
new file mode 100644
index 00000000000..fd1e801b53e
--- /dev/null
+++ b/arch/arm64/lib/bitops.S
@@ -0,0 +1,70 @@
+/*
+ * Based on arch/arm/lib/bitops.h
+ *
+ * Copyright (C) 2013 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <linux/linkage.h>
+#include <asm/assembler.h>
+
+/*
+ * x0: bits 5:0  bit offset
+ *     bits 63:6 word offset
+ * x1: address
+ */
+	.macro	bitop, name, instr
+ENTRY(	\name	)
+	and	x3, x0, #63		// Get bit offset
+	eor	x0, x0, x3		// Clear low bits
+	mov	x2, #1
+	add	x1, x1, x0, lsr #3	// Get word offset
+	lsl	x3, x2, x3		// Create mask
+1:	ldxr	x2, [x1]
+	\instr	x2, x2, x3
+	stxr	w0, x2, [x1]
+	cbnz	w0, 1b
+	ret
+ENDPROC(\name	)
+	.endm
+
+	.macro	testop, name, instr
+ENTRY(	\name	)
+	and	x3, x0, #63		// Get bit offset
+	eor	x0, x0, x3		// Clear low bits
+	mov	x2, #1
+	add	x1, x1, x0, lsr #3	// Get word offset
+	lsl	x4, x2, x3		// Create mask
+	smp_dmb	ish
+1:	ldxr	x2, [x1]
+	lsr	x0, x2, x3		// Save old value of bit
+	\instr	x2, x2, x4		// toggle bit
+	stxr	w2, x2, [x1]
+	cbnz	w2, 1b
+	smp_dmb	ish
+	and	x0, x0, #1
+3:	ret
+ENDPROC(\name	)
+	.endm
+
+/*
+ * Atomic bit operations.
+ */
+	bitop	change_bit, eor
+	bitop	clear_bit, bic
+	bitop	set_bit, orr
+
+	testop	test_and_change_bit, eor
+	testop	test_and_clear_bit, bic
+	testop	test_and_set_bit, orr
diff --git a/arch/arm64/lib/bitops.c b/arch/arm64/lib/memchr.S
index aa4965e60ac..8636b754916 100644
--- a/arch/arm64/lib/bitops.c
+++ b/arch/arm64/lib/memchr.S
@@ -1,5 +1,8 @@
 /*
- * Copyright (C) 2012 ARM Limited
+ * Based on arch/arm/lib/memchr.S
+ *
+ * Copyright (C) 1995-2000 Russell King
+ * Copyright (C) 2013 ARM Ltd.
  *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License version 2 as
@@ -14,12 +17,28 @@
  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
  */
 
-#include <linux/kernel.h>
-#include <linux/spinlock.h>
-#include <linux/atomic.h>
+#include <linux/linkage.h>
+#include <asm/assembler.h>
 
-#ifdef CONFIG_SMP
-arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
-	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
-};
-#endif
+/*
+ * Find a character in an area of memory.
+ *
+ * Parameters:
+ *	x0 - buf
+ *	x1 - c
+ *	x2 - n
+ * Returns:
+ *	x0 - address of first occurrence of 'c' or 0
+ */
+ENTRY(memchr)
+	and	w1, w1, #0xff
+1:	subs	x2, x2, #1
+	b.mi	2f
+	ldrb	w3, [x0], #1
+	cmp	w3, w1
+	b.ne	1b
+	sub	x0, x0, #1
+	ret
+2:	mov	x0, #0
+	ret
+ENDPROC(memchr)
diff --git a/arch/arm64/lib/memcpy.S b/arch/arm64/lib/memcpy.S
new file mode 100644
index 00000000000..27b5003609b
--- /dev/null
+++ b/arch/arm64/lib/memcpy.S
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2013 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <linux/linkage.h>
+#include <asm/assembler.h>
+
+/*
+ * Copy a buffer from src to dest (alignment handled by the hardware)
+ *
+ * Parameters:
+ *	x0 - dest
+ *	x1 - src
+ *	x2 - n
+ * Returns:
+ *	x0 - dest
+ */
+ENTRY(memcpy)
+	mov	x4, x0
+	subs	x2, x2, #8
+	b.mi	2f
+1:	ldr	x3, [x1], #8
+	subs	x2, x2, #8
+	str	x3, [x4], #8
+	b.pl	1b
+2:	adds	x2, x2, #4
+	b.mi	3f
+	ldr	w3, [x1], #4
+	sub	x2, x2, #4
+	str	w3, [x4], #4
+3:	adds	x2, x2, #2
+	b.mi	4f
+	ldrh	w3, [x1], #2
+	sub	x2, x2, #2
+	strh	w3, [x4], #2
+4:	adds	x2, x2, #1
+	b.mi	5f
+	ldrb	w3, [x1]
+	strb	w3, [x4]
+5:	ret
+ENDPROC(memcpy)
diff --git a/arch/arm64/lib/memmove.S b/arch/arm64/lib/memmove.S
new file mode 100644
index 00000000000..b79fdfa42d3
--- /dev/null
+++ b/arch/arm64/lib/memmove.S
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2013 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <linux/linkage.h>
+#include <asm/assembler.h>
+
+/*
+ * Move a buffer from src to test (alignment handled by the hardware).
+ * If dest <= src, call memcpy, otherwise copy in reverse order.
+ *
+ * Parameters:
+ *	x0 - dest
+ *	x1 - src
+ *	x2 - n
+ * Returns:
+ *	x0 - dest
+ */
+ENTRY(memmove)
+	cmp	x0, x1
+	b.ls	memcpy
+	add	x4, x0, x2
+	add	x1, x1, x2
+	subs	x2, x2, #8
+	b.mi	2f
+1:	ldr	x3, [x1, #-8]!
+	subs	x2, x2, #8
+	str	x3, [x4, #-8]!
+	b.pl	1b
+2:	adds	x2, x2, #4
+	b.mi	3f
+	ldr	w3, [x1, #-4]!
+	sub	x2, x2, #4
+	str	w3, [x4, #-4]!
+3:	adds	x2, x2, #2
+	b.mi	4f
+	ldrh	w3, [x1, #-2]!
+	sub	x2, x2, #2
+	strh	w3, [x4, #-2]!
+4:	adds	x2, x2, #1
+	b.mi	5f
+	ldrb	w3, [x1, #-1]
+	strb	w3, [x4, #-1]
+5:	ret
+ENDPROC(memmove)
diff --git a/arch/arm64/lib/memset.S b/arch/arm64/lib/memset.S
new file mode 100644
index 00000000000..87e4a68fbbb
--- /dev/null
+++ b/arch/arm64/lib/memset.S
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2013 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <linux/linkage.h>
+#include <asm/assembler.h>
+
+/*
+ * Fill in the buffer with character c (alignment handled by the hardware)
+ *
+ * Parameters:
+ *	x0 - buf
+ *	x1 - c
+ *	x2 - n
+ * Returns:
+ *	x0 - buf
+ */
+ENTRY(memset)
+	mov	x4, x0
+	and	w1, w1, #0xff
+	orr	w1, w1, w1, lsl #8
+	orr	w1, w1, w1, lsl #16
+	orr	x1, x1, x1, lsl #32
+	subs	x2, x2, #8
+	b.mi	2f
+1:	str	x1, [x4], #8
+	subs	x2, x2, #8
+	b.pl	1b
+2:	adds	x2, x2, #4
+	b.mi	3f
+	sub	x2, x2, #4
+	str	w1, [x4], #4
+3:	adds	x2, x2, #2
+	b.mi	4f
+	sub	x2, x2, #2
+	strh	w1, [x4], #2
+4:	adds	x2, x2, #1
+	b.mi	5f
+	strb	w1, [x4]
+5:	ret
+ENDPROC(memset)
diff --git a/arch/arm64/lib/strchr.S b/arch/arm64/lib/strchr.S
new file mode 100644
index 00000000000..dae0cf5591f
--- /dev/null
+++ b/arch/arm64/lib/strchr.S
@@ -0,0 +1,42 @@
+/*
+ * Based on arch/arm/lib/strchr.S
+ *
+ * Copyright (C) 1995-2000 Russell King
+ * Copyright (C) 2013 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <linux/linkage.h>
+#include <asm/assembler.h>
+
+/*
+ * Find the first occurrence of a character in a string.
+ *
+ * Parameters:
+ *	x0 - str
+ *	x1 - c
+ * Returns:
+ *	x0 - address of first occurrence of 'c' or 0
+ */
+ENTRY(strchr)
+	and	w1, w1, #0xff
+1:	ldrb	w2, [x0], #1
+	cmp	w2, w1
+	ccmp	w2, wzr, #4, ne
+	b.ne	1b
+	sub	x0, x0, #1
+	cmp	w2, w1
+	csel	x0, x0, xzr, eq
+	ret
+ENDPROC(strchr)
diff --git a/arch/arm64/lib/strrchr.S b/arch/arm64/lib/strrchr.S
new file mode 100644
index 00000000000..61eabd9a289
--- /dev/null
+++ b/arch/arm64/lib/strrchr.S
@@ -0,0 +1,43 @@
+/*
+ * Based on arch/arm/lib/strrchr.S
+ *
+ * Copyright (C) 1995-2000 Russell King
+ * Copyright (C) 2013 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <linux/linkage.h>
+#include <asm/assembler.h>
+
+/*
+ * Find the last occurrence of a character in a string.
+ *
+ * Parameters:
+ *	x0 - str
+ *	x1 - c
+ * Returns:
+ *	x0 - address of last occurrence of 'c' or 0
+ */
+ENTRY(strrchr)
+	mov	x3, #0
+	and	w1, w1, #0xff
+1:	ldrb	w2, [x0], #1
+	cbz	w2, 2f
+	cmp	w2, w1
+	b.ne	1b
+	sub	x3, x0, #1
+	b	1b
+2:	mov	x0, x3
+	ret
+ENDPROC(strrchr)
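
A note on the bitop/testop macros in lib/bitops.S above: x0 carries the bit number, with bits 5:0 selecting the bit inside a 64-bit word and the remaining bits (shifted right by 3) giving the byte offset of that word. Below is a minimal, non-atomic C sketch of that address/mask arithmetic only; the helper and variable names are invented for illustration, and the real routines additionally wrap the update in an ldxr/stxr retry loop (plus dmb barriers in the test_and_* variants).

#include <stdio.h>

/* Illustrative only: mirrors the offset/mask computation of the bitop
 * macros in arch/arm64/lib/bitops.S, without the atomicity. */
static void bitop_decompose(unsigned long nr, unsigned long *base)
{
	unsigned long bit  = nr & 63;		/* and x3, x0, #63             */
	unsigned long byte = (nr ^ bit) >> 3;	/* eor; add x1, x1, x0, lsr #3 */
	unsigned long mask = 1UL << bit;	/* lsl x3, x2, x3              */
	unsigned long *word = (unsigned long *)((char *)base + byte);

	*word |= mask;				/* orr for set_bit; bic/eor for the others */
	printf("nr=%lu: word index %lu, mask %#lx\n",
	       nr, byte / sizeof(*word), mask);
}

int main(void)
{
	unsigned long bitmap[2] = { 0, 0 };

	bitop_decompose(70, bitmap);		/* sets bit 6 of bitmap[1] */
	return 0;
}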
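For completeness, a hypothetical kernel-side call site (the function and variable names below are made up and not part of the patch): the prototypes are exactly those added in asm/string.h and asm/bitops.h, and with this series they resolve to the assembly routines in arch/arm64/lib/ instead of the generic C versions.

#include <linux/bitops.h>	/* declares the new assembly atomic bitops */
#include <linux/string.h>	/* picks up the __HAVE_ARCH_* prototypes from asm/string.h */

static unsigned long example_flags;	/* invented for this sketch */

static char *example_user(char *buf, size_t len)
{
	/* arch/arm64/lib/memchr.S: returns 0 (NULL) if 'c' is not in the first 'len' bytes */
	if (!memchr(buf, '\0', len))
		return NULL;			/* not NUL-terminated within len */

	/* ldxr/stxr-based routine from arch/arm64/lib/bitops.S */
	if (test_and_set_bit(0, &example_flags))
		return NULL;			/* already claimed */

	return strrchr(buf, '/');		/* arch/arm64/lib/strrchr.S */
}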