Merge branch 'arm64-klib' into upstream

* arm64-klib:
  arm64: klib: Optimised atomic bitops
  arm64: klib: Optimised string functions
  arm64: klib: Optimised memory functions
Catalin Marinas 2013-03-28 16:13:26 +00:00
commit d4784be3b2
12 changed files with 418 additions and 18 deletions

arch/arm64/include/asm/Kbuild

@@ -39,7 +39,6 @@ generic-y += shmbuf.h
generic-y += sizes.h
generic-y += socket.h
generic-y += sockios.h
generic-y += string.h
generic-y += switch_to.h
generic-y += swab.h
generic-y += termbits.h

arch/arm64/include/asm/bitops.h

@@ -32,6 +32,16 @@
#error only <linux/bitops.h> can be included directly
#endif
/*
* Little endian assembly atomic bitops.
*/
extern void set_bit(int nr, volatile unsigned long *p);
extern void clear_bit(int nr, volatile unsigned long *p);
extern void change_bit(int nr, volatile unsigned long *p);
extern int test_and_set_bit(int nr, volatile unsigned long *p);
extern int test_and_clear_bit(int nr, volatile unsigned long *p);
extern int test_and_change_bit(int nr, volatile unsigned long *p);
#include <asm-generic/bitops/builtin-__ffs.h>
#include <asm-generic/bitops/builtin-ffs.h>
#include <asm-generic/bitops/builtin-__fls.h>
@@ -45,9 +55,13 @@
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/atomic.h>
#include <asm-generic/bitops/non-atomic.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>
/*
* Ext2 is defined to use little-endian byte ordering.
*/
#define ext2_set_bit_atomic(lock, nr, p) test_and_set_bit_le(nr, p)
#define ext2_clear_bit_atomic(lock, nr, p) test_and_clear_bit_le(nr, p)
#endif /* __ASM_BITOPS_H */
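
The externs above replace the generic spinlock-based bitops with the assembly routines added in arch/arm64/lib/bitops.S further down. A rough user-space sketch of the API's semantics, using GCC __atomic builtins in place of the kernel's ldxr/stxr loops (illustration only, not the kernel implementation; the demo_* names are made up):

#include <stdio.h>

#define BITS_PER_LONG (8 * sizeof(unsigned long))

/* set_bit(): atomically OR the bit into the word that holds it. */
static void demo_set_bit(int nr, volatile unsigned long *p)
{
        __atomic_fetch_or(p + nr / BITS_PER_LONG,
                          1UL << (nr % BITS_PER_LONG), __ATOMIC_RELAXED);
}

/* test_and_set_bit(): same, but also report the bit's previous value. */
static int demo_test_and_set_bit(int nr, volatile unsigned long *p)
{
        unsigned long mask = 1UL << (nr % BITS_PER_LONG);
        unsigned long old  = __atomic_fetch_or(p + nr / BITS_PER_LONG,
                                               mask, __ATOMIC_SEQ_CST);
        return (old & mask) != 0;
}

int main(void)
{
        unsigned long bitmap[2] = { 0, 0 };          /* 128-bit bitmap */

        demo_set_bit(3, bitmap);
        demo_set_bit(70, bitmap);                    /* lands in bitmap[1] */
        printf("bit 70 already set? %d\n", demo_test_and_set_bit(70, bitmap));
        printf("words: %#lx %#lx\n", bitmap[0], bitmap[1]);
        return 0;
}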

arch/arm64/include/asm/string.h Normal file

@@ -0,0 +1,37 @@
/*
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef __ASM_STRING_H
#define __ASM_STRING_H
#define __HAVE_ARCH_STRRCHR
extern char *strrchr(const char *, int c);
#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *, int c);
#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, __kernel_size_t);
#define __HAVE_ARCH_MEMMOVE
extern void *memmove(void *, const void *, __kernel_size_t);
#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *, int, __kernel_size_t);
#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, __kernel_size_t);
#endif
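
Each __HAVE_ARCH_* define above tells the kernel's generic string code to omit its portable C fallback for that routine, so the assembly versions below become the only definitions. The generic side follows roughly this pattern (a sketch of the mechanism, assuming the usual lib/string.c layout; not code from this commit):

#include <stddef.h>

/* Sketch: the portable fallback is compiled only when the architecture
 * has not claimed the function via __HAVE_ARCH_MEMCPY in <asm/string.h>. */
#ifndef __HAVE_ARCH_MEMCPY
void *memcpy(void *dest, const void *src, size_t count)
{
        char *d = dest;
        const char *s = src;

        while (count--)
                *d++ = *s++;
        return dest;
}
#endif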

arch/arm64/kernel/arm64ksyms.c

@@ -39,10 +39,21 @@ EXPORT_SYMBOL(__copy_from_user);
EXPORT_SYMBOL(__copy_to_user);
EXPORT_SYMBOL(__clear_user);
/* bitops */
#ifdef CONFIG_SMP
EXPORT_SYMBOL(__atomic_hash);
#endif
/* physical memory */
EXPORT_SYMBOL(memstart_addr);
/* string / mem functions */
EXPORT_SYMBOL(strchr);
EXPORT_SYMBOL(strrchr);
EXPORT_SYMBOL(memset);
EXPORT_SYMBOL(memcpy);
EXPORT_SYMBOL(memmove);
EXPORT_SYMBOL(memchr);
/* atomic bitops */
EXPORT_SYMBOL(set_bit);
EXPORT_SYMBOL(test_and_set_bit);
EXPORT_SYMBOL(clear_bit);
EXPORT_SYMBOL(test_and_clear_bit);
EXPORT_SYMBOL(change_bit);
EXPORT_SYMBOL(test_and_change_bit);
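
The exports make the new routines and atomic bitops available to loadable modules; built-in kernel code can reach them without EXPORT_SYMBOL. A hypothetical module that would rely on these exports to link (sketch only; the module name and code are illustrative):

#include <linux/module.h>
#include <linux/types.h>
#include <linux/bitops.h>
#include <linux/string.h>

static DECLARE_BITMAP(demo_flags, 64);
static char demo_buf[8];

static int __init klib_demo_init(void)
{
        set_bit(5, demo_flags);        /* resolved via EXPORT_SYMBOL(set_bit) */
        memcpy(demo_buf, "klib", 5);   /* resolved via EXPORT_SYMBOL(memcpy) when not inlined */
        pr_info("klib_demo: bit 5 is %d\n", test_bit(5, demo_flags));
        return 0;
}

static void __exit klib_demo_exit(void)
{
}

module_init(klib_demo_init);
module_exit(klib_demo_exit);
MODULE_LICENSE("GPL");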

arch/arm64/lib/Makefile

@@ -1,4 +1,6 @@
lib-y := bitops.o delay.o \
strncpy_from_user.o strnlen_user.o clear_user.o \
copy_from_user.o copy_to_user.o copy_in_user.o \
copy_page.o clear_page.o
copy_page.o clear_page.o \
memchr.o memcpy.o memmove.o memset.o \
strchr.o strrchr.o

arch/arm64/lib/bitops.S Normal file

@@ -0,0 +1,70 @@
/*
* Based on arch/arm/lib/bitops.h
*
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <linux/linkage.h>
#include <asm/assembler.h>
/*
* x0: bits 5:0 bit offset
* bits 63:6 word offset
* x1: address
*/
.macro bitop, name, instr
ENTRY( \name )
and x3, x0, #63 // Get bit offset
eor x0, x0, x3 // Clear low bits
mov x2, #1
add x1, x1, x0, lsr #3 // Get word offset
lsl x3, x2, x3 // Create mask
1: ldxr x2, [x1]
\instr x2, x2, x3
stxr w0, x2, [x1]
cbnz w0, 1b
ret
ENDPROC(\name )
.endm
.macro testop, name, instr
ENTRY( \name )
and x3, x0, #63 // Get bit offset
eor x0, x0, x3 // Clear low bits
mov x2, #1
add x1, x1, x0, lsr #3 // Get word offset
lsl x4, x2, x3 // Create mask
smp_dmb ish
1: ldxr x2, [x1]
lsr x0, x2, x3 // Save old value of bit
\instr x2, x2, x4 // toggle bit
stxr w2, x2, [x1]
cbnz w2, 1b
smp_dmb ish
and x0, x0, #1
3: ret
ENDPROC(\name )
.endm
/*
* Atomic bit operations.
*/
bitop change_bit, eor
bitop clear_bit, bic
bitop set_bit, orr
testop test_and_change_bit, eor
testop test_and_clear_bit, bic
testop test_and_set_bit, orr
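
Both macros derive a word address (the bit number divided by 64, scaled to a byte offset by the lsr #3) and a single-bit mask, then retry the exclusive load/store until it succeeds; testop additionally keeps the old word so it can return the bit's previous value. The same computation in C, with a compare-and-swap loop standing in for ldxr/stxr (illustrative sketch only, not the kernel routine):

#include <stdio.h>

static int sketch_test_and_set_bit(int nr, volatile unsigned long *p)
{
        unsigned long bit  = nr & 63;                  /* and x3, x0, #63      */
        unsigned long mask = 1UL << bit;               /* lsl x4, x2, x3       */
        volatile unsigned long *addr = p + (nr >> 6);  /* add x1, x1, x0, lsr #3 */
        unsigned long old, new;

        do {
                old = *addr;                           /* ldxr x2, [x1]        */
                new = old | mask;                      /* orr  x2, x2, x4      */
        } while (!__atomic_compare_exchange_n(addr, &old, new, 1,
                                              __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));

        return (old >> bit) & 1;                       /* lsr x0, x2, x3; and x0, x0, #1 */
}

int main(void)
{
        unsigned long map[2] = { 0, 0 };

        printf("first call:  %d\n", sketch_test_and_set_bit(70, map));
        printf("second call: %d\n", sketch_test_and_set_bit(70, map));
        return 0;
}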

arch/arm64/lib/memchr.S

@@ -1,5 +1,8 @@
/*
* Copyright (C) 2012 ARM Limited
* Based on arch/arm/lib/memchr.S
*
* Copyright (C) 1995-2000 Russell King
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
@@ -14,12 +17,28 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>
#include <linux/linkage.h>
#include <asm/assembler.h>
#ifdef CONFIG_SMP
arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
};
#endif
/*
* Find a character in an area of memory.
*
* Parameters:
* x0 - buf
* x1 - c
* x2 - n
* Returns:
* x0 - address of first occurrence of 'c' or 0
*/
ENTRY(memchr)
and w1, w1, #0xff
1: subs x2, x2, #1
b.mi 2f
ldrb w3, [x0], #1
cmp w3, w1
b.ne 1b
sub x0, x0, #1
ret
2: mov x0, #0
ret
ENDPROC(memchr)
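
The routine scans a byte at a time, decrementing the count before each load and returning 0 (NULL) once it underflows. A plain C equivalent of that loop (illustrative sketch, not the code added here):

#include <stdio.h>
#include <stddef.h>

static void *sketch_memchr(const void *buf, int c, size_t n)
{
        const unsigned char *p = buf;
        unsigned char target = (unsigned char)c;     /* and w1, w1, #0xff        */

        while (n--) {                                /* subs x2, x2, #1 / b.mi 2f */
                if (*p == target)
                        return (void *)p;            /* asm: sub x0, x0, #1 undoes
                                                        the post-increment        */
                p++;
        }
        return NULL;                                 /* 2: mov x0, #0            */
}

int main(void)
{
        const char buf[] = "arm64 klib";

        printf("'k' found at offset %td\n",
               (const char *)sketch_memchr(buf, 'k', sizeof(buf)) - buf);
        return 0;
}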

arch/arm64/lib/memcpy.S Normal file

@@ -0,0 +1,53 @@
/*
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <linux/linkage.h>
#include <asm/assembler.h>
/*
* Copy a buffer from src to dest (alignment handled by the hardware)
*
* Parameters:
* x0 - dest
* x1 - src
* x2 - n
* Returns:
* x0 - dest
*/
ENTRY(memcpy)
mov x4, x0
subs x2, x2, #8
b.mi 2f
1: ldr x3, [x1], #8
subs x2, x2, #8
str x3, [x4], #8
b.pl 1b
2: adds x2, x2, #4
b.mi 3f
ldr w3, [x1], #4
sub x2, x2, #4
str w3, [x4], #4
3: adds x2, x2, #2
b.mi 4f
ldrh w3, [x1], #2
sub x2, x2, #2
strh w3, [x4], #2
4: adds x2, x2, #1
b.mi 5f
ldrb w3, [x1]
strb w3, [x4]
5: ret
ENDPROC(memcpy)
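
The copy works through the remaining length in descending chunk sizes: 8-byte words while at least 8 bytes remain, then at most one 4-, 2- and 1-byte tail copy. An equivalent C sketch, where per-chunk memcpy() calls stand in for the unaligned ldr/str the hardware performs directly (illustration only):

#include <string.h>
#include <stdio.h>

static void *sketch_memcpy(void *dest, const void *src, size_t n)
{
        unsigned char *d = dest;
        const unsigned char *s = src;

        while (n >= 8) {                 /* 1: ldr x3 / str x3 loop */
                memcpy(d, s, 8);
                d += 8; s += 8; n -= 8;
        }
        if (n >= 4) {                    /* 2: single 4-byte tail   */
                memcpy(d, s, 4);
                d += 4; s += 4; n -= 4;
        }
        if (n >= 2) {                    /* 3: single 2-byte tail   */
                memcpy(d, s, 2);
                d += 2; s += 2; n -= 2;
        }
        if (n)                           /* 4: final byte           */
                *d = *s;
        return dest;
}

int main(void)
{
        char out[16] = { 0 };

        sketch_memcpy(out, "klib memcpy", 12);
        puts(out);
        return 0;
}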

arch/arm64/lib/memmove.S Normal file

@@ -0,0 +1,57 @@
/*
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <linux/linkage.h>
#include <asm/assembler.h>
/*
* Move a buffer from src to dest (alignment handled by the hardware).
* If dest <= src, call memcpy, otherwise copy in reverse order.
*
* Parameters:
* x0 - dest
* x1 - src
* x2 - n
* Returns:
* x0 - dest
*/
ENTRY(memmove)
cmp x0, x1
b.ls memcpy
add x4, x0, x2
add x1, x1, x2
subs x2, x2, #8
b.mi 2f
1: ldr x3, [x1, #-8]!
subs x2, x2, #8
str x3, [x4, #-8]!
b.pl 1b
2: adds x2, x2, #4
b.mi 3f
ldr w3, [x1, #-4]!
sub x2, x2, #4
str w3, [x4, #-4]!
3: adds x2, x2, #2
b.mi 4f
ldrh w3, [x1, #-2]!
sub x2, x2, #2
strh w3, [x4, #-2]!
4: adds x2, x2, #1
b.mi 5f
ldrb w3, [x1, #-1]
strb w3, [x4, #-1]
5: ret
ENDPROC(memmove)
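
When dest <= src the routine simply branches to memcpy; otherwise it copies from the end backwards so overlapping source bytes are not clobbered before they are read. The same choice in C (sketch; the kernel version copies backwards in 8/4/2/1-byte chunks rather than byte by byte):

#include <string.h>
#include <stdint.h>
#include <stdio.h>

static void *sketch_memmove(void *dest, const void *src, size_t n)
{
        unsigned char *d = dest;
        const unsigned char *s = src;

        if ((uintptr_t)d <= (uintptr_t)s)
                return memcpy(dest, src, n);     /* cmp x0, x1 / b.ls memcpy */

        while (n--)                              /* copy from the end backwards */
                d[n] = s[n];
        return dest;
}

int main(void)
{
        char buf[] = "abcdef";

        sketch_memmove(buf + 2, buf, 4);         /* overlapping, dest > src */
        puts(buf);                               /* prints "ababcd"         */
        return 0;
}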

arch/arm64/lib/memset.S Normal file

@@ -0,0 +1,53 @@
/*
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <linux/linkage.h>
#include <asm/assembler.h>
/*
* Fill in the buffer with character c (alignment handled by the hardware)
*
* Parameters:
* x0 - buf
* x1 - c
* x2 - n
* Returns:
* x0 - buf
*/
ENTRY(memset)
mov x4, x0
and w1, w1, #0xff
orr w1, w1, w1, lsl #8
orr w1, w1, w1, lsl #16
orr x1, x1, x1, lsl #32
subs x2, x2, #8
b.mi 2f
1: str x1, [x4], #8
subs x2, x2, #8
b.pl 1b
2: adds x2, x2, #4
b.mi 3f
sub x2, x2, #4
str w1, [x4], #4
3: adds x2, x2, #2
b.mi 4f
sub x2, x2, #2
strh w1, [x4], #2
4: adds x2, x2, #1
b.mi 5f
strb w1, [x4]
5: ret
ENDPROC(memset)
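
The fill byte is replicated into a full 64-bit pattern with three ORs (8, 16, 32, then 64 bits wide) and stored in the same 8/4/2/1 descending chunks as memcpy. A C sketch of the byte splat and chunked stores (illustration only):

#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void *sketch_memset(void *buf, int c, size_t n)
{
        unsigned char *p = buf;
        uint64_t pattern = (uint8_t)c;           /* and w1, w1, #0xff        */

        pattern |= pattern << 8;                 /* orr w1, w1, w1, lsl #8   */
        pattern |= pattern << 16;                /* orr w1, w1, w1, lsl #16  */
        pattern |= pattern << 32;                /* orr x1, x1, x1, lsl #32  */

        while (n >= 8) {                         /* 1: str x1 loop           */
                memcpy(p, &pattern, 8);
                p += 8; n -= 8;
        }
        if (n >= 4) { memcpy(p, &pattern, 4); p += 4; n -= 4; }
        if (n >= 2) { memcpy(p, &pattern, 2); p += 2; n -= 2; }
        if (n)
                *p = (unsigned char)pattern;
        return buf;
}

int main(void)
{
        char out[12];

        sketch_memset(out, 'x', 11);
        out[11] = '\0';
        puts(out);
        return 0;
}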

arch/arm64/lib/strchr.S Normal file

@@ -0,0 +1,42 @@
/*
* Based on arch/arm/lib/strchr.S
*
* Copyright (C) 1995-2000 Russell King
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <linux/linkage.h>
#include <asm/assembler.h>
/*
* Find the first occurrence of a character in a string.
*
* Parameters:
* x0 - str
* x1 - c
* Returns:
* x0 - address of first occurrence of 'c' or 0
*/
ENTRY(strchr)
and w1, w1, #0xff
1: ldrb w2, [x0], #1
cmp w2, w1
ccmp w2, wzr, #4, ne
b.ne 1b
sub x0, x0, #1
cmp w2, w1
csel x0, x0, xzr, eq
ret
ENDPROC(strchr)
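
The ccmp makes the loop exit on either a match or the terminating NUL; the final cmp/csel turns a terminator exit into a NULL return, except when the character searched for is '\0' itself, in which case the terminator counts as a match (standard strchr behaviour). The same logic in C (sketch, not the kernel code):

#include <stdio.h>

static char *sketch_strchr(const char *s, int c)
{
        unsigned char target = (unsigned char)c;     /* and w1, w1, #0xff    */
        unsigned char ch;

        do {
                ch = (unsigned char)*s++;            /* ldrb w2, [x0], #1    */
        } while (ch != target && ch != '\0');        /* ccmp w2, wzr, #4, ne */

        s--;                                         /* sub x0, x0, #1       */
        return ch == target ? (char *)s : NULL;      /* cmp / csel           */
}

int main(void)
{
        const char *str = "arm64";

        printf("%s\n", sketch_strchr(str, '6'));          /* prints "64" */
        printf("%p\n", (void *)sketch_strchr(str, 'z'));  /* NULL        */
        return 0;
}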

arch/arm64/lib/strrchr.S Normal file

@@ -0,0 +1,43 @@
/*
* Based on arch/arm/lib/strrchr.S
*
* Copyright (C) 1995-2000 Russell King
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <linux/linkage.h>
#include <asm/assembler.h>
/*
* Find the last occurrence of a character in a string.
*
* Parameters:
* x0 - str
* x1 - c
* Returns:
* x0 - address of last occurrence of 'c' or 0
*/
ENTRY(strrchr)
mov x3, #0
and w1, w1, #0xff
1: ldrb w2, [x0], #1
cbz w2, 2f
cmp w2, w1
b.ne 1b
sub x3, x0, #1
b 1b
2: mov x0, x3
ret
ENDPROC(strrchr)
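
A single forward pass keeps the address of the most recent match in x3 and returns it (or 0) once the terminator is reached, so the string is never walked twice. The same scan in C (illustrative sketch, mirroring the assembly rather than any particular libc):

#include <stdio.h>

static char *sketch_strrchr(const char *s, int c)
{
        unsigned char target = (unsigned char)c;     /* and w1, w1, #0xff */
        const char *last = NULL;                     /* mov x3, #0        */
        unsigned char ch;

        while ((ch = (unsigned char)*s++) != '\0') { /* ldrb / cbz w2, 2f */
                if (ch == target)
                        last = s - 1;                /* sub x3, x0, #1    */
        }
        return (char *)last;                         /* 2: mov x0, x3     */
}

int main(void)
{
        printf("%s\n", sketch_strrchr("a/b/c.txt", '/'));   /* prints "/c.txt" */
        return 0;
}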