arm64: mask PAC bits of __builtin_return_address

Functions like vmap() record how much memory has been allocated by their
callers, and callers are identified using __builtin_return_address(). Once
the kernel is using pointer-auth the return address will be signed. This
means it will not match any kernel symbol, and will vary between threads
even for the same caller.

The output of /proc/vmallocinfo in this case may look like,
0x(____ptrval____)-0x(____ptrval____)   20480 0x86e28000100e7c60 pages=4 vmalloc N0=4
0x(____ptrval____)-0x(____ptrval____)   20480 0x86e28000100e7c60 pages=4 vmalloc N0=4
0x(____ptrval____)-0x(____ptrval____)   20480 0xc5c78000100e7c60 pages=4 vmalloc N0=4

The above three 64-bit values should all resolve to one and the same
symbol name, not to differing signed LR values.
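
For context, the pattern that breaks looks roughly like the sketch
below (illustration only; track_alloc() is a hypothetical helper, not
the kernel's actual vmap() plumbing). The allocator saves its caller's
return address for later reporting, and with pointer authentication
enabled that saved value carries a PAC in its upper bits:

	/*
	 * Hypothetical allocator wrapper. Under CONFIG_ARM64_PTR_AUTH the
	 * LR is signed on function entry, so the value returned by
	 * __builtin_return_address(0) carries a PAC in bits
	 * 63:vabits_actual and no longer matches any symbol address.
	 */
	static void *alloc_tracked(unsigned long size)
	{
		void *caller = __builtin_return_address(0);

		return track_alloc(size, caller);	/* hypothetical helper */
	}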

Use the pre-processor to add logic that clears the PAC for callers of
__builtin_return_address(). This patch adds a new file, asm/compiler.h,
which is transitively included via include/compiler_types.h on the
compiler command line, so it is guaranteed to be loaded and users of
this macro will not pick up a wrong version.

Helper macros ptrauth_kernel_pac_mask/ptrauth_clear_pac are created for
this purpose and added in this file. The existing macro
ptrauth_user_pac_mask is moved there from asm/pointer_auth.h.

Signed-off-by: Amit Daniel Kachhap <amit.kachhap@arm.com>
Reviewed-by: James Morse <james.morse@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Amit Daniel Kachhap 2020-03-13 14:34:58 +05:30 committed by Catalin Marinas
parent 2832158233
commit 689eae42af
3 changed files with 26 additions and 8 deletions

arch/arm64/Kconfig

@@ -118,6 +118,7 @@ config ARM64
 	select HAVE_ALIGNED_STRUCT_PAGE if SLUB
 	select HAVE_ARCH_AUDITSYSCALL
 	select HAVE_ARCH_BITREVERSE
+	select HAVE_ARCH_COMPILER_H
 	select HAVE_ARCH_HUGE_VMAP
 	select HAVE_ARCH_JUMP_LABEL
 	select HAVE_ARCH_JUMP_LABEL_RELATIVE
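
Selecting HAVE_ARCH_COMPILER_H is what pulls the new header into every
translation unit: include/linux/compiler_types.h, which the build
force-includes on the compiler command line, contains a hook roughly
like the following (pre-existing generic code, shown here only for
context, not part of this diff):

	/* include/linux/compiler_types.h */
	#ifdef CONFIG_HAVE_ARCH_COMPILER_H
	#include <asm/compiler.h>
	#endif

Because this is seen before any code in the translation unit itself,
the __builtin_return_address() override below is in effect everywhere.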

arch/arm64/include/asm/compiler.h (new file)

@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef __ASM_COMPILER_H
+#define __ASM_COMPILER_H
+
+#if defined(CONFIG_ARM64_PTR_AUTH)
+
+/*
+ * The EL0/EL1 pointer bits used by a pointer authentication code.
+ * This is dependent on TBI0/TBI1 being enabled, or bits 63:56 would also apply.
+ */
+#define ptrauth_user_pac_mask()		GENMASK_ULL(54, vabits_actual)
+#define ptrauth_kernel_pac_mask()	GENMASK_ULL(63, vabits_actual)
+
+/* Valid for EL0 TTBR0 and EL1 TTBR1 instruction pointers */
+#define ptrauth_clear_pac(ptr)						\
+	((ptr & BIT_ULL(55)) ? (ptr | ptrauth_kernel_pac_mask()) :	\
+			       (ptr & ~ptrauth_user_pac_mask()))
+
+#define __builtin_return_address(val)					\
+	(void *)(ptrauth_clear_pac((unsigned long)__builtin_return_address(val)))
+
+#endif /* CONFIG_ARM64_PTR_AUTH */
+
+#endif /* __ASM_COMPILER_H */
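
As a sanity check of the masking arithmetic, here is a standalone
userspace sketch (illustration only, assuming vabits_actual == 48, with
local stand-ins for the kernel's GENMASK_ULL()/BIT_ULL()) fed the two
distinct signed LR values from the vmallocinfo output above:

	#include <stdio.h>
	#include <stdint.h>

	/* local stand-ins for the kernel's bit helpers */
	#define GENMASK_ULL(h, l)	((~0ULL << (l)) & (~0ULL >> (63 - (h))))
	#define BIT_ULL(n)		(1ULL << (n))

	#define VABITS			48	/* assumed vabits_actual */
	#define user_pac_mask()		GENMASK_ULL(54, VABITS)
	#define kernel_pac_mask()	GENMASK_ULL(63, VABITS)

	/* same logic as ptrauth_clear_pac(): bit 55 picks the address space */
	#define clear_pac(ptr)						\
		(((ptr) & BIT_ULL(55)) ? ((ptr) | kernel_pac_mask())	\
				       : ((ptr) & ~user_pac_mask()))

	int main(void)
	{
		/* the two distinct signed LRs quoted in the commit message */
		uint64_t lr[2] = { 0x86e28000100e7c60ULL, 0xc5c78000100e7c60ULL };

		for (int i = 0; i < 2; i++)
			printf("%#018llx -> %#018llx\n",
			       (unsigned long long)lr[i],
			       (unsigned long long)clear_pac(lr[i]));
		return 0;
	}

Both inputs have bit 55 set, so they are kernel (TTBR1) pointers and
their PAC bits are refilled with ones: each prints 0xffff8000100e7c60,
i.e. one caller, one symbol again. A user (TTBR0) pointer, with bit 55
clear, would instead have bits 54:48 cleared to zero.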

arch/arm64/include/asm/pointer_auth.h

@@ -68,16 +68,9 @@ static __always_inline void ptrauth_keys_switch_kernel(struct ptrauth_keys_kerne
 
 extern int ptrauth_prctl_reset_keys(struct task_struct *tsk, unsigned long arg);
 
-/*
- * The EL0 pointer bits used by a pointer authentication code.
- * This is dependent on TBI0 being enabled, or bits 63:56 would also apply.
- */
-#define ptrauth_user_pac_mask()	GENMASK(54, vabits_actual)
-
-/* Only valid for EL0 TTBR0 instruction pointers */
 static inline unsigned long ptrauth_strip_insn_pac(unsigned long ptr)
 {
-	return ptr & ~ptrauth_user_pac_mask();
+	return ptrauth_clear_pac(ptr);
 }
 
 #define ptrauth_thread_init_user(tsk)					\