
arm64: lib: Use modern annotations for assembly functions

In an effort to clarify and simplify the annotation of assembly functions
in the kernel, new macros have been introduced. These replace ENTRY and
ENDPROC, and also add a new annotation for static functions, which
previously had no ENTRY equivalent. Update the annotations in the library
code to the new macros.
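
A minimal sketch of the conversion pattern applied throughout ("func" is a
placeholder, not a symbol from this patch):

	/* Before */
	ENTRY(func)
		...
		ret
	ENDPROC(func)

	/* After */
	SYM_FUNC_START(func)
		...
		ret
	SYM_FUNC_END(func)

Weak position-independent routines follow the same shape with
SYM_FUNC_START_WEAK_PI/SYM_FUNC_END_PI replacing WEAK/ENDPIPROC, and
aliased entry points such as __memcpy use SYM_FUNC_START_ALIAS and
SYM_FUNC_END_ALIAS.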

Signed-off-by: Mark Brown <broonie@kernel.org>
[will: Use SYM_FUNC_START_WEAK_PI]
Signed-off-by: Will Deacon <will@kernel.org>
Mark Brown 2020-01-06 19:58:17 +00:00 committed by Will Deacon
parent 35e61c77ef
commit 3ac0f4526d
19 changed files with 50 additions and 50 deletions

arch/arm64/lib/clear_page.S

@@ -14,7 +14,7 @@
  * Parameters:
  *	x0 - dest
  */
-ENTRY(clear_page)
+SYM_FUNC_START(clear_page)
 	mrs	x1, dczid_el0
 	and	w1, w1, #0xf
 	mov	x2, #4
@@ -25,5 +25,5 @@ ENTRY(clear_page)
 	tst	x0, #(PAGE_SIZE - 1)
 	b.ne	1b
 	ret
-ENDPROC(clear_page)
+SYM_FUNC_END(clear_page)
 EXPORT_SYMBOL(clear_page)

arch/arm64/lib/clear_user.S

@@ -19,7 +19,7 @@
  *
  * Alignment fixed up by hardware.
  */
-ENTRY(__arch_clear_user)
+SYM_FUNC_START(__arch_clear_user)
 	mov	x2, x1			// save the size for fixup return
 	subs	x1, x1, #8
 	b.mi	2f
@@ -40,7 +40,7 @@ uao_user_alternative 9f, strh, sttrh, wzr, x0, 2
 uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
 5:	mov	x0, #0
 	ret
-ENDPROC(__arch_clear_user)
+SYM_FUNC_END(__arch_clear_user)
 EXPORT_SYMBOL(__arch_clear_user)
 
 	.section .fixup,"ax"

arch/arm64/lib/copy_from_user.S

@@ -53,12 +53,12 @@
 	.endm
 
 end	.req	x5
-ENTRY(__arch_copy_from_user)
+SYM_FUNC_START(__arch_copy_from_user)
 	add	end, x0, x2
 #include "copy_template.S"
 	mov	x0, #0				// Nothing to copy
 	ret
-ENDPROC(__arch_copy_from_user)
+SYM_FUNC_END(__arch_copy_from_user)
 EXPORT_SYMBOL(__arch_copy_from_user)
 
 	.section .fixup,"ax"

arch/arm64/lib/copy_in_user.S

@@ -55,12 +55,12 @@
 	.endm
 
 end	.req	x5
-ENTRY(__arch_copy_in_user)
+SYM_FUNC_START(__arch_copy_in_user)
 	add	end, x0, x2
 #include "copy_template.S"
 	mov	x0, #0
 	ret
-ENDPROC(__arch_copy_in_user)
+SYM_FUNC_END(__arch_copy_in_user)
 EXPORT_SYMBOL(__arch_copy_in_user)
 
 	.section .fixup,"ax"

arch/arm64/lib/copy_page.S

@@ -17,7 +17,7 @@
  *	x0 - dest
  *	x1 - src
  */
-ENTRY(copy_page)
+SYM_FUNC_START(copy_page)
 alternative_if ARM64_HAS_NO_HW_PREFETCH
 	// Prefetch three cache lines ahead.
 	prfm	pldl1strm, [x1, #128]
@@ -75,5 +75,5 @@ alternative_else_nop_endif
 	stnp	x16, x17, [x0, #112]
 
 	ret
-ENDPROC(copy_page)
+SYM_FUNC_END(copy_page)
 EXPORT_SYMBOL(copy_page)

arch/arm64/lib/copy_to_user.S

@@ -52,12 +52,12 @@
 	.endm
 
 end	.req	x5
-ENTRY(__arch_copy_to_user)
+SYM_FUNC_START(__arch_copy_to_user)
 	add	end, x0, x2
 #include "copy_template.S"
 	mov	x0, #0
 	ret
-ENDPROC(__arch_copy_to_user)
+SYM_FUNC_END(__arch_copy_to_user)
 EXPORT_SYMBOL(__arch_copy_to_user)
 
 	.section .fixup,"ax"

arch/arm64/lib/crc32.S

@@ -85,17 +85,17 @@ CPU_BE(	rev16	w3, w3	)
 	.endm
 
 	.align	5
-ENTRY(crc32_le)
+SYM_FUNC_START(crc32_le)
 alternative_if_not ARM64_HAS_CRC32
 	b	crc32_le_base
 alternative_else_nop_endif
 	__crc32
-ENDPROC(crc32_le)
+SYM_FUNC_END(crc32_le)
 
 	.align	5
-ENTRY(__crc32c_le)
+SYM_FUNC_START(__crc32c_le)
 alternative_if_not ARM64_HAS_CRC32
 	b	__crc32c_le_base
 alternative_else_nop_endif
 	__crc32	c
-ENDPROC(__crc32c_le)
+SYM_FUNC_END(__crc32c_le)

arch/arm64/lib/memchr.S

@@ -19,7 +19,7 @@
  * Returns:
  *	x0 - address of first occurrence of 'c' or 0
  */
-WEAK(memchr)
+SYM_FUNC_START_WEAK_PI(memchr)
 	and	w1, w1, #0xff
 1:	subs	x2, x2, #1
 	b.mi	2f
@@ -30,5 +30,5 @@ WEAK(memchr)
 	ret
 2:	mov	x0, #0
 	ret
-ENDPIPROC(memchr)
+SYM_FUNC_END_PI(memchr)
 EXPORT_SYMBOL_NOKASAN(memchr)

arch/arm64/lib/memcmp.S

@@ -46,7 +46,7 @@ pos		.req	x11
 limit_wd	.req	x12
 mask		.req	x13
 
-WEAK(memcmp)
+SYM_FUNC_START_WEAK_PI(memcmp)
 	cbz	limit, .Lret0
 	eor	tmp1, src1, src2
 	tst	tmp1, #7
@@ -243,5 +243,5 @@ CPU_LE( rev	data2, data2 )
 .Lret0:
 	mov	result, #0
 	ret
-ENDPIPROC(memcmp)
+SYM_FUNC_END_PI(memcmp)
 EXPORT_SYMBOL_NOKASAN(memcmp)

arch/arm64/lib/memcpy.S

@@ -57,11 +57,11 @@
 	.endm
 
 	.weak memcpy
-ENTRY(__memcpy)
-ENTRY(memcpy)
+SYM_FUNC_START_ALIAS(__memcpy)
+SYM_FUNC_START_PI(memcpy)
 #include "copy_template.S"
 	ret
-ENDPIPROC(memcpy)
+SYM_FUNC_END_PI(memcpy)
 EXPORT_SYMBOL(memcpy)
-ENDPROC(__memcpy)
+SYM_FUNC_END_ALIAS(__memcpy)
 EXPORT_SYMBOL(__memcpy)

arch/arm64/lib/memmove.S

@@ -46,8 +46,8 @@ D_l		.req	x13
 D_h		.req	x14
 
 	.weak memmove
-ENTRY(__memmove)
-ENTRY(memmove)
+SYM_FUNC_START_ALIAS(__memmove)
+SYM_FUNC_START_PI(memmove)
 	cmp	dstin, src
 	b.lo	__memcpy
 	add	tmp1, src, count
@@ -184,7 +184,7 @@ ENTRY(memmove)
 	tst	count, #0x3f
 	b.ne	.Ltail63
 	ret
-ENDPIPROC(memmove)
+SYM_FUNC_END_PI(memmove)
 EXPORT_SYMBOL(memmove)
-ENDPROC(__memmove)
+SYM_FUNC_END_ALIAS(__memmove)
 EXPORT_SYMBOL(__memmove)

arch/arm64/lib/memset.S

@@ -43,8 +43,8 @@ tmp3w		.req	w9
 tmp3		.req	x9
 
 	.weak memset
-ENTRY(__memset)
-ENTRY(memset)
+SYM_FUNC_START_ALIAS(__memset)
+SYM_FUNC_START_PI(memset)
 	mov	dst, dstin	/* Preserve return value.  */
 	and	A_lw, val, #255
 	orr	A_lw, A_lw, A_lw, lsl #8
@@ -203,7 +203,7 @@ ENTRY(memset)
 	ands	count, count, zva_bits_x
 	b.ne	.Ltail_maybe_long
 	ret
-ENDPIPROC(memset)
+SYM_FUNC_END_PI(memset)
 EXPORT_SYMBOL(memset)
-ENDPROC(__memset)
+SYM_FUNC_END_ALIAS(__memset)
 EXPORT_SYMBOL(__memset)

arch/arm64/lib/strchr.S

@@ -18,7 +18,7 @@
  * Returns:
  *	x0 - address of first occurrence of 'c' or 0
  */
-WEAK(strchr)
+SYM_FUNC_START_WEAK(strchr)
 	and	w1, w1, #0xff
 1:	ldrb	w2, [x0], #1
 	cmp	w2, w1
@@ -28,5 +28,5 @@ WEAK(strchr)
 	cmp	w2, w1
 	csel	x0, x0, xzr, eq
 	ret
-ENDPROC(strchr)
+SYM_FUNC_END(strchr)
 EXPORT_SYMBOL_NOKASAN(strchr)

arch/arm64/lib/strcmp.S

@@ -48,7 +48,7 @@ tmp3		.req	x9
 zeroones	.req	x10
 pos		.req	x11
 
-WEAK(strcmp)
+SYM_FUNC_START_WEAK_PI(strcmp)
 	eor	tmp1, src1, src2
 	mov	zeroones, #REP8_01
 	tst	tmp1, #7
@@ -219,5 +219,5 @@ CPU_BE(	orr	syndrome, diff, has_nul )
 	lsr	data1, data1, #56
 	sub	result, data1, data2, lsr #56
 	ret
-ENDPIPROC(strcmp)
+SYM_FUNC_END_PI(strcmp)
 EXPORT_SYMBOL_NOKASAN(strcmp)

arch/arm64/lib/strlen.S

@@ -44,7 +44,7 @@ pos		.req	x12
 #define REP8_7f	0x7f7f7f7f7f7f7f7f
 #define REP8_80	0x8080808080808080
 
-WEAK(strlen)
+SYM_FUNC_START_WEAK_PI(strlen)
 	mov	zeroones, #REP8_01
 	bic	src, srcin, #15
 	ands	tmp1, srcin, #15
@@ -111,5 +111,5 @@ CPU_LE( lsr	tmp2, tmp2, tmp1 )	/* Shift (tmp1 & 63).  */
 	csinv	data1, data1, xzr, le
 	csel	data2, data2, data2a, le
 	b	.Lrealigned
-ENDPIPROC(strlen)
+SYM_FUNC_END_PI(strlen)
 EXPORT_SYMBOL_NOKASAN(strlen)

arch/arm64/lib/strncmp.S

@@ -52,7 +52,7 @@ limit_wd	.req	x13
 mask		.req	x14
 endloop		.req	x15
 
-WEAK(strncmp)
+SYM_FUNC_START_WEAK_PI(strncmp)
 	cbz	limit, .Lret0
 	eor	tmp1, src1, src2
 	mov	zeroones, #REP8_01
@@ -295,5 +295,5 @@ CPU_BE( orr	syndrome, diff, has_nul )
 .Lret0:
 	mov	result, #0
 	ret
-ENDPIPROC(strncmp)
+SYM_FUNC_END_PI(strncmp)
 EXPORT_SYMBOL_NOKASAN(strncmp)

arch/arm64/lib/strnlen.S

@@ -47,7 +47,7 @@ limit_wd	.req	x14
 #define REP8_7f	0x7f7f7f7f7f7f7f7f
 #define REP8_80	0x8080808080808080
 
-WEAK(strnlen)
+SYM_FUNC_START_WEAK_PI(strnlen)
 	cbz	limit, .Lhit_limit
 	mov	zeroones, #REP8_01
 	bic	src, srcin, #15
@@ -156,5 +156,5 @@ CPU_LE( lsr	tmp2, tmp2, tmp4 )	/* Shift (tmp1 & 63).  */
 .Lhit_limit:
 	mov	len, limit
 	ret
-ENDPIPROC(strnlen)
+SYM_FUNC_END_PI(strnlen)
 EXPORT_SYMBOL_NOKASAN(strnlen)

arch/arm64/lib/strrchr.S

@@ -18,7 +18,7 @@
  * Returns:
  *	x0 - address of last occurrence of 'c' or 0
  */
-WEAK(strrchr)
+SYM_FUNC_START_WEAK_PI(strrchr)
 	mov	x3, #0
 	and	w1, w1, #0xff
 1:	ldrb	w2, [x0], #1
@@ -29,5 +29,5 @@ WEAK(strrchr)
 	b	1b
 2:	mov	x0, x3
 	ret
-ENDPIPROC(strrchr)
+SYM_FUNC_END_PI(strrchr)
 EXPORT_SYMBOL_NOKASAN(strrchr)

arch/arm64/lib/tishift.S

@@ -7,7 +7,7 @@
 
 #include <asm/assembler.h>
 
-ENTRY(__ashlti3)
+SYM_FUNC_START(__ashlti3)
 	cbz	x2, 1f
 	mov	x3, #64
 	sub	x3, x3, x2
@@ -26,10 +26,10 @@ ENTRY(__ashlti3)
 	lsl	x1, x0, x1
 	mov	x0, x2
 	ret
-ENDPROC(__ashlti3)
+SYM_FUNC_END(__ashlti3)
 EXPORT_SYMBOL(__ashlti3)
 
-ENTRY(__ashrti3)
+SYM_FUNC_START(__ashrti3)
 	cbz	x2, 1f
 	mov	x3, #64
 	sub	x3, x3, x2
@@ -48,10 +48,10 @@ ENTRY(__ashrti3)
 	asr	x0, x1, x0
 	mov	x1, x2
 	ret
-ENDPROC(__ashrti3)
+SYM_FUNC_END(__ashrti3)
 EXPORT_SYMBOL(__ashrti3)
 
-ENTRY(__lshrti3)
+SYM_FUNC_START(__lshrti3)
 	cbz	x2, 1f
 	mov	x3, #64
 	sub	x3, x3, x2
@@ -70,5 +70,5 @@ ENTRY(__lshrti3)
 	lsr	x0, x1, x0
 	mov	x1, x2
 	ret
-ENDPROC(__lshrti3)
+SYM_FUNC_END(__lshrti3)
 EXPORT_SYMBOL(__lshrti3)