Skip to content

Commit d481227

Browse files
author
Ard Biesheuvel
committed
arm64: kernel: perform relocation processing from ID map
Refactor the relocation processing so that the code executes from the ID map while accessing the relocation tables via the virtual mapping. This way, we can use literals containing virtual addresses as before, instead of having to use convoluted absolute expressions.

For symmetry with the secondary code path, the relocation code and the subsequent jump to the virtual entry point are implemented in a function called __primary_switch(), and __mmap_switched() is renamed to __primary_switched(). Also, the call sequence in stext() is aligned with the one in secondary_startup(), by replacing the awkward 'adr_l lr' and 'b cpu_setup' sequence with a simple branch and link.

Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Will Deacon <will.deacon@arm.com>
(cherry picked from commit 0cd3defe0af4153ffc5fe39bcfa4abfc301984e9)
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
1 parent f7e45d9 commit d481227

2 files changed

Lines changed: 55 additions & 48 deletions

File tree

arch/arm64/kernel/head.S

Lines changed: 51 additions & 45 deletions
Original file line number | Diff line number | Diff line change
@@ -221,13 +221,11 @@ ENTRY(stext)
221221
* On return, the CPU will be ready for the MMU to be turned on and
222222
* the TCR will have been set.
223223
*/
224-
ldr x27, 0f // address to jump to after
225-
neg x27, x27 // MMU has been enabled
226-
adr_l lr, __enable_mmu // return (PIC) address
227-
b __cpu_setup // initialise processor
224+
bl __cpu_setup // initialise processor
225+
adr_l x27, __primary_switch // address to jump to after
226+
// MMU has been enabled
227+
b __enable_mmu
228228
ENDPROC(stext)
229-
.align 3
230-
0: .quad (_text - TEXT_OFFSET) - __mmap_switched - KIMAGE_VADDR
231229

232230
/*
233231
* Preserve the arguments passed by the bootloader in x0 .. x3
@@ -419,7 +417,7 @@ ENDPROC(__create_page_tables)
419417
* The following fragment of code is executed with the MMU enabled.
420418
*/
421419
.set initial_sp, init_thread_union + THREAD_START_SP
422-
__mmap_switched:
420+
__primary_switched:
423421
mov x28, lr // preserve LR
424422
adr_l x8, vectors // load VBAR_EL1 with virtual
425423
msr vbar_el1, x8 // vector table address
@@ -433,42 +431,6 @@ __mmap_switched:
433431
bl __pi_memset
434432
dsb ishst // Make zero page visible to PTW
435433

436-
#ifdef CONFIG_RELOCATABLE
437-
438-
/*
439-
* Iterate over each entry in the relocation table, and apply the
440-
* relocations in place.
441-
*/
442-
adr_l x8, __dynsym_start // start of symbol table
443-
adr_l x9, __reloc_start // start of reloc table
444-
adr_l x10, __reloc_end // end of reloc table
445-
446-
0: cmp x9, x10
447-
b.hs 2f
448-
ldp x11, x12, [x9], #24
449-
ldr x13, [x9, #-8]
450-
cmp w12, #R_AARCH64_RELATIVE
451-
b.ne 1f
452-
add x13, x13, x23 // relocate
453-
str x13, [x11, x23]
454-
b 0b
455-
456-
1: cmp w12, #R_AARCH64_ABS64
457-
b.ne 0b
458-
add x12, x12, x12, lsl #1 // symtab offset: 24x top word
459-
add x12, x8, x12, lsr #(32 - 3) // ... shifted into bottom word
460-
ldrsh w14, [x12, #6] // Elf64_Sym::st_shndx
461-
ldr x15, [x12, #8] // Elf64_Sym::st_value
462-
cmp w14, #-0xf // SHN_ABS (0xfff1) ?
463-
add x14, x15, x23 // relocate
464-
csel x15, x14, x15, ne
465-
add x15, x13, x15
466-
str x15, [x11, x23]
467-
b 0b
468-
469-
2:
470-
#endif
471-
472434
adr_l sp, initial_sp, x4
473435
mov x4, sp
474436
and x4, x4, #~(THREAD_SIZE - 1)
@@ -494,7 +456,7 @@ __mmap_switched:
494456
0:
495457
#endif
496458
b start_kernel
497-
ENDPROC(__mmap_switched)
459+
ENDPROC(__primary_switched)
498460

499461
/*
500462
* end early head section, begin head code that is also used for
@@ -736,7 +698,6 @@ __enable_mmu:
736698
ic iallu // flush instructions fetched
737699
dsb nsh // via old mapping
738700
isb
739-
add x27, x27, x23 // relocated __mmap_switched
740701
#endif
741702
br x27
742703
ENDPROC(__enable_mmu)
@@ -746,6 +707,51 @@ __no_granule_support:
746707
b __no_granule_support
747708
ENDPROC(__no_granule_support)
748709

710+
__primary_switch:
711+
#ifdef CONFIG_RELOCATABLE
712+
/*
713+
* Iterate over each entry in the relocation table, and apply the
714+
* relocations in place.
715+
*/
716+
ldr w8, =__dynsym_offset // offset to symbol table
717+
ldr w9, =__rela_offset // offset to reloc table
718+
ldr w10, =__rela_size // size of reloc table
719+
720+
ldr x11, =KIMAGE_VADDR // default virtual offset
721+
add x11, x11, x23 // actual virtual offset
722+
add x8, x8, x11 // __va(.dynsym)
723+
add x9, x9, x11 // __va(.rela)
724+
add x10, x9, x10 // __va(.rela) + sizeof(.rela)
725+
726+
0: cmp x9, x10
727+
b.hs 2f
728+
ldp x11, x12, [x9], #24
729+
ldr x13, [x9, #-8]
730+
cmp w12, #R_AARCH64_RELATIVE
731+
b.ne 1f
732+
add x13, x13, x23 // relocate
733+
str x13, [x11, x23]
734+
b 0b
735+
736+
1: cmp w12, #R_AARCH64_ABS64
737+
b.ne 0b
738+
add x12, x12, x12, lsl #1 // symtab offset: 24x top word
739+
add x12, x8, x12, lsr #(32 - 3) // ... shifted into bottom word
740+
ldrsh w14, [x12, #6] // Elf64_Sym::st_shndx
741+
ldr x15, [x12, #8] // Elf64_Sym::st_value
742+
cmp w14, #-0xf // SHN_ABS (0xfff1) ?
743+
add x14, x15, x23 // relocate
744+
csel x15, x14, x15, ne
745+
add x15, x13, x15
746+
str x15, [x11, x23]
747+
b 0b
748+
749+
2:
750+
#endif
751+
ldr x8, =__primary_switched
752+
br x8
753+
ENDPROC(__primary_switch)
754+
749755
__secondary_switch:
750756
ldr x8, =__secondary_switched
751757
br x8

arch/arm64/kernel/vmlinux.lds.S

Lines changed: 4 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -156,12 +156,9 @@ SECTIONS
156156
*(.altinstr_replacement)
157157
}
158158
.rela : ALIGN(8) {
159-
__reloc_start = .;
160159
*(.rela .rela*)
161-
__reloc_end = .;
162160
}
163161
.dynsym : ALIGN(8) {
164-
__dynsym_start = .;
165162
*(.dynsym)
166163
}
167164
.dynstr : {
@@ -171,6 +168,10 @@ SECTIONS
171168
*(.hash)
172169
}
173170

171+
__rela_offset = ADDR(.rela) - KIMAGE_VADDR;
172+
__rela_size = SIZEOF(.rela);
173+
__dynsym_offset = ADDR(.dynsym) - KIMAGE_VADDR;
174+
174175
. = ALIGN(SEGMENT_ALIGN);
175176
__init_end = .;
176177

0 commit comments

Comments
 (0)