Change the assembly code to use absolute references for transitions
between address spaces and relative references when accessing global
variables within the same address space. This ensures a kernel built
with PIE references the correct addresses based on context.

Signed-off-by: Thomas Garnier <thgarnie@xxxxxxxxxxxx>
Reviewed-by: Kees Cook <keescook@xxxxxxxxxxxx>
---
 arch/x86/kernel/head_64.S | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/arch/x86/kernel/head_64.S b/arch/x86/kernel/head_64.S
index 4bbc770af632..40a467f8e116 100644
--- a/arch/x86/kernel/head_64.S
+++ b/arch/x86/kernel/head_64.S
@@ -87,7 +87,8 @@ SYM_CODE_START_NOALIGN(startup_64)
 	popq	%rsi
 
 	/* Form the CR3 value being sure to include the CR3 modifier */
-	addq	$(early_top_pgt - __START_KERNEL_map), %rax
+	movabs	$(early_top_pgt - __START_KERNEL_map), %rcx
+	addq	%rcx, %rax
 	jmp 1f
 SYM_CODE_END(startup_64)
 
@@ -119,7 +120,8 @@ SYM_CODE_START(secondary_startup_64)
 	popq	%rsi
 
 	/* Form the CR3 value being sure to include the CR3 modifier */
-	addq	$(init_top_pgt - __START_KERNEL_map), %rax
+	movabs	$(init_top_pgt - __START_KERNEL_map), %rcx
+	addq	%rcx, %rax
 
 1:
 	/* Enable PAE mode, PGE and LA57 */
@@ -137,7 +139,7 @@ SYM_CODE_START(secondary_startup_64)
 	movq	%rax, %cr3
 
 	/* Ensure I am executing from virtual addresses */
-	movq	$1f, %rax
+	movabs	$1f, %rax
 	ANNOTATE_RETPOLINE_SAFE
 	jmp	*%rax
 1:
@@ -234,11 +236,12 @@ SYM_CODE_START(secondary_startup_64)
 	 *	REX.W + FF /5 JMP m16:64 Jump far, absolute indirect,
 	 *		address given in m16:64.
 	 */
-	pushq	$.Lafter_lret	# put return address on stack for unwinder
+	movabs	$.Lafter_lret, %rax
+	pushq	%rax		# put return address on stack for unwinder
 	xorl	%ebp, %ebp	# clear frame pointer
-	movq	initial_code(%rip), %rax
+	leaq	initial_code(%rip), %rax
 	pushq	$__KERNEL_CS	# set correct cs
-	pushq	%rax		# target address in negative space
+	pushq	(%rax)		# target address in negative space
 	lretq
 .Lafter_lret:
 SYM_CODE_END(secondary_startup_64)
-- 
2.25.1.481.gfbce0eb801-goog
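
For reference, the difference between the addressing forms used above can
be sketched in a small standalone example. This is illustrative only and
not part of the patch; the symbols example_refs and example_var are made
up for the sketch:

	.text
	.globl	example_refs
example_refs:
	leaq	example_var(%rip), %rax		# RIP-relative: position-independent,
						# fine for globals in the same address space
	movabs	$example_var, %rcx		# 64-bit absolute immediate: stays valid
						# across the identity-mapped/kernel switch
	movq	$example_var, %rdx		# 32-bit sign-extended absolute: assumes the
						# symbol sits in the +/-2GB kernel mapping,
						# which PIE no longer guarantees
	ret

	.data
example_var:
	.quad	0

In the last hunk, leaq initial_code(%rip) followed by pushq (%rax) pushes
the value stored at initial_code, matching what the previous
movq initial_code(%rip), %rax and pushq %rax did, while keeping the
reference RIP-relative.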