author		Ard Biesheuvel <ardb@kernel.org>	2022-06-24 17:06:43 +0200
committer	Will Deacon <will@kernel.org>		2022-06-24 17:18:10 +0100
commit		d7bea550279db28cd154fd54843ebc858ffdf0b7 (patch)
tree		bbfb345ddc377ca5a003cf0b68c966eefc5211c2 /arch/arm64/kernel/head.S
parent		c3cee924bd855184d15bc4aa6088dcf8e2c1394c (diff)
download	linux-d7bea550279db28cd154fd54843ebc858ffdf0b7.tar.bz2
arm64: head: use relative references to the RELA and RELR tables
Formerly, we had to access the RELA and RELR tables via the kernel mapping that was being relocated, and so deriving the start and end addresses using ADRP/ADD references was not possible, as the relocation code runs from the ID map. Now that we map the entire kernel image via the ID map, we can simplify this, and just load the entries via the ID map as well.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20220624150651.1358849-14-ardb@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
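For reference, the adr_l macro used by this patch (from arch/arm64/include/asm/assembler.h) expands to a PC-relative ADRP/ADD pair, so the resulting address refers to whatever mapping the code is currently executing from (here, the ID map). A rough sketch of the expansion, for illustration only:

	// adr_l x9, __rela_start expands to approximately:
	adrp	x9, __rela_start		// page address of __rela_start, PC-relative
	add	x9, x9, :lo12:__rela_start	// plus the low 12 bits of its offset within the page

This is what the old sequence could not rely on: `ldr w9, =__rela_offset` loaded a link-time constant from a literal pool, and the table's virtual address then had to be reconstructed by hand by adding KIMAGE_VADDR plus the KASLR displacement.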
Diffstat (limited to 'arch/arm64/kernel/head.S')
-rw-r--r--	arch/arm64/kernel/head.S	13
1 file changed, 4 insertions(+), 9 deletions(-)
diff --git a/arch/arm64/kernel/head.S b/arch/arm64/kernel/head.S
index 93734c91a29a..f1497f7b4da0 100644
--- a/arch/arm64/kernel/head.S
+++ b/arch/arm64/kernel/head.S
@@ -757,13 +757,10 @@ SYM_FUNC_START_LOCAL(__relocate_kernel)
* Iterate over each entry in the relocation table, and apply the
* relocations in place.
*/
- ldr w9, =__rela_offset // offset to reloc table
- ldr w10, =__rela_size // size of reloc table
-
+ adr_l x9, __rela_start
+ adr_l x10, __rela_end
mov_q x11, KIMAGE_VADDR // default virtual offset
add x11, x11, x23 // actual virtual offset
- add x9, x9, x11 // __va(.rela)
- add x10, x9, x10 // __va(.rela) + sizeof(.rela)
0: cmp x9, x10
b.hs 1f
@@ -813,10 +810,8 @@ SYM_FUNC_START_LOCAL(__relocate_kernel)
* __relocate_kernel is called twice with non-zero displacements (i.e.
* if there is both a physical misalignment and a KASLR displacement).
*/
- ldr w9, =__relr_offset // offset to reloc table
- ldr w10, =__relr_size // size of reloc table
- add x9, x9, x11 // __va(.relr)
- add x10, x9, x10 // __va(.relr) + sizeof(.relr)
+ adr_l x9, __relr_start
+ adr_l x10, __relr_end
sub x15, x23, x24 // delta from previous offset
cbz x15, 7f // nothing to do if unchanged
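For context, the loop that consumes the addresses now produced by adr_l (unchanged by this patch) walks the Elf64_Rela entries bracketed by x9/x10 and applies each R_AARCH64_RELATIVE relocation. A minimal sketch of that operation, with register usage and labels chosen here for illustration and R_AARCH64_RELATIVE assumed to come from the ELF headers:

0:	cmp	x9, x10				// reached __rela_end?
	b.hs	1f				// yes, done
	ldp	x12, x13, [x9], #24		// x12 = r_offset, x13 = r_info (each entry is 24 bytes)
	ldr	x14, [x9, #-8]			// x14 = r_addend
	cmp	w13, #R_AARCH64_RELATIVE	// only RELATIVE entries are expected here
	b.ne	0b				// skip anything else
	add	x14, x14, x23			// addend plus the runtime displacement
	str	x14, [x12, x23]			// patch the displaced target location
	b	0b
1: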