Prepare for being able to deal with primary entry with the MMU and
caches enabled, by recording whether or not we entered with the MMU on
in register x22.

While at it, add pre_disable_mmu_workaround macro invocations to
init_kernel_el, as its manipulation of SCTLR_ELx may amount to actually
disabling the MMU after subsequent patches.

Signed-off-by: Ard Biesheuvel <ardb@xxxxxxxxxx>
---
 arch/arm64/kernel/head.S | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/arch/arm64/kernel/head.S b/arch/arm64/kernel/head.S
index cba9a5e8abb8..1ff474701e99 100644
--- a/arch/arm64/kernel/head.S
+++ b/arch/arm64/kernel/head.S
@@ -85,10 +85,12 @@
  *  x19        create_idmap() .. start_kernel()        ID map VA of the DT blob
  *  x20        primary_entry() .. __primary_switch()   CPU boot mode
  *  x21        primary_entry() .. start_kernel()       FDT pointer passed at boot in x0
+ *  x22        primary_entry() .. start_kernel()       whether we entered with the MMU on
  *  x23        __primary_switch() .. relocate_kernel() physical misalignment/KASLR offset
  *  x28        create_idmap(), remap_kernel_text()     callee preserved temp register
  */
 SYM_CODE_START(primary_entry)
+	bl	record_mmu_state
 	bl	preserve_boot_args
 	bl	init_kernel_el			// w0=cpu_boot_mode
 	mov	x20, x0
@@ -104,6 +106,17 @@ SYM_CODE_START(primary_entry)
 	b	__primary_switch
 SYM_CODE_END(primary_entry)
 
+SYM_CODE_START_LOCAL(record_mmu_state)
+	mrs	x22, CurrentEL
+	cmp	x22, #CurrentEL_EL2
+	mrs	x22, sctlr_el1
+	b.ne	0f
+	mrs	x22, sctlr_el2
+0:	tst	x22, #SCTLR_ELx_M
+	cset	w22, ne
+	ret
+SYM_CODE_END(record_mmu_state)
+
 /*
  * Preserve the arguments passed by the bootloader in x0 .. x3
  */
@@ -528,6 +541,7 @@ SYM_FUNC_START(init_kernel_el)
 
 SYM_INNER_LABEL(init_el1, SYM_L_LOCAL)
 	mov_q	x0, INIT_SCTLR_EL1_MMU_OFF
+	pre_disable_mmu_workaround
 	msr	sctlr_el1, x0
 	isb
 	mov_q	x0, INIT_PSTATE_EL1
@@ -559,6 +573,7 @@ SYM_INNER_LABEL(init_el2, SYM_L_LOCAL)
 
 	/* Switching to VHE requires a sane SCTLR_EL1 as a start */
 	mov_q	x0, INIT_SCTLR_EL1_MMU_OFF
+	pre_disable_mmu_workaround
 	msr_s	SYS_SCTLR_EL12, x0
 
 	/*
@@ -574,6 +589,7 @@ SYM_INNER_LABEL(init_el2, SYM_L_LOCAL)
 1:
 	mov_q	x0, INIT_SCTLR_EL1_MMU_OFF
+	pre_disable_mmu_workaround
 	msr	sctlr_el1, x0
 	msr	elr_el2, lr
-- 
2.30.2
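
P.S. Two illustrative notes, not part of the patch itself.

First, the logic of record_mmu_state, rendered as rough C for
readability. All names below are invented for this sketch; the real
routine has to run before any stack exists, which is why it is
assembly, and the register reads are only meaningful at EL1 or EL2:

  #define SCTLR_ELx_M     (1UL << 0)    /* SCTLR_ELx.M: MMU enable bit */
  #define CURRENTEL_EL2   (2UL << 2)    /* CurrentEL value when running at EL2 */

  static unsigned long mmu_enabled_at_entry(void)
  {
          unsigned long el, sctlr;

          asm volatile("mrs %0, CurrentEL" : "=r" (el));
          if (el == CURRENTEL_EL2)
                  asm volatile("mrs %0, sctlr_el2" : "=r" (sctlr));
          else
                  asm volatile("mrs %0, sctlr_el1" : "=r" (sctlr));

          /* mirrors 'tst x22, #SCTLR_ELx_M; cset w22, ne' */
          return (sctlr & SCTLR_ELx_M) ? 1 : 0;
  }

Note how the assembly avoids a second branch: it reads sctlr_el1
unconditionally and lets the sctlr_el2 read overwrite x22 when the
b.ne falls through, i.e. when CurrentEL reads EL2.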
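Second, a sketch of what the amended init_kernel_el call sites amount
to. This assumes my recollection of the macro is right -- as defined in
arch/arm64/include/asm/assembler.h, pre_disable_mmu_workaround expands
to an ISB when CONFIG_QCOM_FALKOR_ERRATUM_E1041 is enabled, so that an
ISB immediately precedes any MSR that may clear SCTLR_ELx.M:

  /* Hypothetical helper, for illustration only. */
  static inline void init_sctlr_el1_mmu_off(unsigned long init_sctlr)
  {
          asm volatile("isb");    /* pre_disable_mmu_workaround */
          asm volatile("msr sctlr_el1, %0" : : "r" (init_sctlr));
          asm volatile("isb");    /* synchronize the SCTLR_EL1 write */
  }

Today these writes are no-ops MMU-wise, since we always enter with the
MMU off; once later patches permit entry with the MMU on, they may
genuinely transition M from 1 to 0, which is why the workaround is
added now.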