arm64: add support for building vmlinux as a relocatable PIE binary
This implements CONFIG_RELOCATABLE, which links the final vmlinux image with
a dynamic relocation section, allowing the early boot code to perform a
relocation to a different virtual address at runtime. This is a prerequisite
for KASLR (CONFIG_RANDOMIZE_BASE).

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
parent 6c94f27ac8
commit 1e48ef7fcc

5 changed files with 65 additions and 0 deletions
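For reference, the "dynamic relocation section" mentioned in the commit message is an array of 24-byte RELA entries. The sketch below shows their layout using plain stdint types rather than the kernel's elf.h typedefs; the struct name and field comments are illustrative, not part of this patch. Each field is consumed by the head.S loop in one of the hunks below.

#include <stdint.h>

/*
 * Sketch of one entry in the .rela section emitted for a -pie link; it
 * mirrors the standard Elf64_Rela layout (24 bytes per entry, which is
 * why the head.S loop below steps its cursor by #24).
 */
struct rela_entry {
	uint64_t r_offset;	/* virtual address of the 64-bit slot to patch */
	uint64_t r_info;	/* symbol index in the top 32 bits, relocation type in the bottom 32 */
	int64_t  r_addend;	/* constant addend used to compute the value to store */
};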
arch/arm64/Kconfig

@@ -787,6 +787,17 @@ config ARM64_MODULE_PLTS
 	select ARM64_MODULE_CMODEL_LARGE
 	select HAVE_MOD_ARCH_SPECIFIC
 
+config RELOCATABLE
+	bool
+	help
+	  This builds the kernel as a Position Independent Executable (PIE),
+	  which retains all relocation metadata required to relocate the
+	  kernel binary at runtime to a different virtual address than the
+	  address it was linked at.
+	  Since AArch64 uses the RELA relocation format, this requires a
+	  relocation pass at runtime even if the kernel is loaded at the
+	  same address it was linked at.
+
 endmenu
 
 menu "Boot options"
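The last sentence of the help text follows from the RELA format itself: unlike REL, RELA keeps the addend in the relocation entry instead of pre-storing it at the patched location, so the target slots in the image must be written even when no displacement is applied. A minimal sketch of one R_AARCH64_RELATIVE fixup is shown below; the function name and the delta parameter are illustrative, and in this patch the kernel is still placed at its link-time address (a nonzero displacement only arrives with CONFIG_RANDOMIZE_BASE).

#include <stdint.h>

#define R_AARCH64_RELATIVE	1027

/*
 * Minimal sketch (not the kernel code) of one R_AARCH64_RELATIVE fixup:
 * the value to install comes entirely from the entry's addend plus the
 * load displacement, so the store cannot be skipped even when delta == 0.
 */
static void apply_relative(uint64_t *place, int64_t addend, int64_t delta)
{
	*place = (uint64_t)(addend + delta);
}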
arch/arm64/Makefile

@@ -15,6 +15,10 @@ CPPFLAGS_vmlinux.lds = -DTEXT_OFFSET=$(TEXT_OFFSET)
 OBJCOPYFLAGS	:=-O binary -R .note -R .note.gnu.build-id -R .comment -S
 GZFLAGS		:=-9
 
+ifneq ($(CONFIG_RELOCATABLE),)
+LDFLAGS_vmlinux		+= -pie
+endif
+
 KBUILD_DEFCONFIG := defconfig
 
 # Check for binutils support for specific extensions
arch/arm64/include/asm/elf.h

@@ -77,6 +77,8 @@
 #define R_AARCH64_MOVW_PREL_G2_NC	292
 #define R_AARCH64_MOVW_PREL_G3		293
 
+#define R_AARCH64_RELATIVE		1027
+
 /*
  * These are used to set parameters in the core dumps.
  */
arch/arm64/kernel/head.S

@@ -29,6 +29,7 @@
 #include <asm/asm-offsets.h>
 #include <asm/cache.h>
 #include <asm/cputype.h>
+#include <asm/elf.h>
 #include <asm/kernel-pgtable.h>
 #include <asm/memory.h>
 #include <asm/pgtable-hwdef.h>
@@ -432,6 +433,37 @@ __mmap_switched:
 	bl	__pi_memset
 	dsb	ishst				// Make zero page visible to PTW
 
+#ifdef CONFIG_RELOCATABLE
+
+	/*
+	 * Iterate over each entry in the relocation table, and apply the
+	 * relocations in place.
+	 */
+	adr_l	x8, __dynsym_start		// start of symbol table
+	adr_l	x9, __reloc_start		// start of reloc table
+	adr_l	x10, __reloc_end		// end of reloc table
+
+0:	cmp	x9, x10
+	b.hs	2f
+	ldp	x11, x12, [x9], #24
+	ldr	x13, [x9, #-8]
+	cmp	w12, #R_AARCH64_RELATIVE
+	b.ne	1f
+	str	x13, [x11]
+	b	0b
+
+1:	cmp	w12, #R_AARCH64_ABS64
+	b.ne	0b
+	add	x12, x12, x12, lsl #1		// symtab offset: 24x top word
+	add	x12, x8, x12, lsr #(32 - 3)	// ... shifted into bottom word
+	ldr	x15, [x12, #8]			// Elf64_Sym::st_value
+	add	x15, x13, x15
+	str	x15, [x11]
+	b	0b
+
+2:
+#endif
+
 	adr_l	sp, initial_sp, x4
 	mov	x4, sp
 	and	x4, x4, #~(THREAD_SIZE - 1)
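A rough C rendering of the loop above may help; the three arguments correspond to the __reloc_start, __reloc_end and __dynsym_start symbols provided by the vmlinux.lds.S hunk further down, while the function and type names are illustrative only. It also spells out the symbol-table indexing trick: x12 + (x12 << 1) forms r_info * 3, and shifting that right by 32 - 3 yields 24 * (r_info >> 32), the byte offset of the 24-byte Elf64_Sym named by the top word of r_info (the low word, the relocation type, is small enough not to carry into the result).

#include <stdint.h>

#define R_AARCH64_ABS64		257
#define R_AARCH64_RELATIVE	1027

struct rela_entry {			/* 24 bytes, matching "ldp/ldr ... #24" */
	uint64_t r_offset, r_info;
	int64_t  r_addend;
};

struct sym_entry {			/* Elf64_Sym layout: st_value sits at offset 8 */
	uint32_t st_name;
	uint8_t  st_info, st_other;
	uint16_t st_shndx;
	uint64_t st_value, st_size;
};

/* Illustrative C equivalent of the assembly loop above (a sketch, not kernel code). */
static void apply_boot_relocs(struct rela_entry *rela, struct rela_entry *rela_end,
			      struct sym_entry *symtab)
{
	for (; rela < rela_end; rela++) {
		uint64_t *place = (uint64_t *)rela->r_offset;	/* x11 */
		uint32_t type = (uint32_t)rela->r_info;		/* cmp w12, #... */

		if (type == R_AARCH64_RELATIVE) {
			*place = (uint64_t)rela->r_addend;	/* str x13, [x11] */
		} else if (type == R_AARCH64_ABS64) {
			/*
			 * symtab + 24 * (r_info >> 32), computed in the asm
			 * as (r_info * 3) >> (32 - 3)
			 */
			struct sym_entry *s = &symtab[rela->r_info >> 32];
			*place = (uint64_t)rela->r_addend + s->st_value;
		}
	}
}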
arch/arm64/kernel/vmlinux.lds.S

@@ -87,6 +87,7 @@ SECTIONS
 		EXIT_CALL
 		*(.discard)
 		*(.discard.*)
+		*(.interp .dynamic)
 	}
 
 	. = KIMAGE_VADDR + TEXT_OFFSET;
@@ -149,6 +150,21 @@ SECTIONS
 	.altinstr_replacement : {
 		*(.altinstr_replacement)
 	}
+	.rela : ALIGN(8) {
+		__reloc_start = .;
+		*(.rela .rela*)
+		__reloc_end = .;
+	}
+	.dynsym : ALIGN(8) {
+		__dynsym_start = .;
+		*(.dynsym)
+	}
+	.dynstr : {
+		*(.dynstr)
+	}
+	.hash : {
+		*(.hash)
+	}
 
 	. = ALIGN(PAGE_SIZE);
 	__init_end = .;