Prepare for dealing with primary entry with the MMU and caches enabled,
by recording in register x19 whether or not we entered with the MMU on.

While at it, add pre_disable_mmu_workaround macro invocations to
init_kernel_el, as its manipulation of SCTLR_ELx may amount to disabling
the MMU after subsequent patches.

Signed-off-by: Ard Biesheuvel <ardb@xxxxxxxxxx>
---
 arch/arm64/kernel/head.S | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/arch/arm64/kernel/head.S b/arch/arm64/kernel/head.S
index cfc7ba25bf87..8e26f2deb78b 100644
--- a/arch/arm64/kernel/head.S
+++ b/arch/arm64/kernel/head.S
@@ -77,6 +77,7 @@
  * primary lowlevel boot path:
  *
  * Register   Scope                      Purpose
+ * x19        primary_entry() .. start_kernel()        whether we entered with the MMU on
  * x20        primary_entry() .. __primary_switch()    CPU boot mode
  * x21        primary_entry() .. start_kernel()        FDT pointer passed at boot in x0
  * x22        create_idmap() .. start_kernel()         ID map VA of the DT blob
@@ -86,6 +87,7 @@
  * x28        create_idmap()                           callee preserved temp register
  */
 SYM_CODE_START(primary_entry)
+	bl	record_mmu_state
 	bl	preserve_boot_args
 	bl	init_kernel_el			// w0=cpu_boot_mode
 	mov	x20, x0
@@ -109,6 +111,18 @@ SYM_CODE_START(primary_entry)
 	b	__primary_switch
 SYM_CODE_END(primary_entry)
 
+SYM_CODE_START_LOCAL(record_mmu_state)
+	mrs	x19, CurrentEL
+	cmp	x19, #CurrentEL_EL2
+	mrs	x19, sctlr_el1
+	b.ne	0f
+	mrs	x19, sctlr_el2
+0:	and	x19, x19, x19, lsr #2	// BIT(n) &= BIT(n + 2)
+	tst	x19, #SCTLR_ELx_M	// M (bit 0) and C (bit 2) both set?
+	cset	w19, ne
+	ret
+SYM_CODE_END(record_mmu_state)
+
 /*
  * Preserve the arguments passed by the bootloader in x0 .. x3
  */
@@ -495,6 +509,7 @@ SYM_FUNC_START(init_kernel_el)
 
 SYM_INNER_LABEL(init_el1, SYM_L_LOCAL)
 	mov_q	x0, INIT_SCTLR_EL1_MMU_OFF
+	pre_disable_mmu_workaround
 	msr	sctlr_el1, x0
 	isb
 	mov_q	x0, INIT_PSTATE_EL1
@@ -527,11 +542,13 @@ SYM_INNER_LABEL(init_el2, SYM_L_LOCAL)
 	cbz	x0, 1f
 
 	/* Set a sane SCTLR_EL1, the VHE way */
+	pre_disable_mmu_workaround
 	msr_s	SYS_SCTLR_EL12, x1
 	mov	x2, #BOOT_CPU_FLAG_E2H
 	b	2f
 
 1:
+	pre_disable_mmu_workaround
 	msr	sctlr_el1, x1
 	mov	x2, xzr
 2:
-- 
2.35.1
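
For reference, the check record_mmu_state performs can be expressed in C.
This is a minimal standalone sketch, not kernel code: the helper name
mmu_and_caches_on is hypothetical, and the bit definitions merely mirror
the kernel's SCTLR_ELx_M and SCTLR_ELx_C. The `and x19, x19, x19, lsr #2`
instruction folds each bit onto the bit two positions below it, so the C
bit (bit 2) lands on the M bit (bit 0); a single test of bit 0 then checks
that the MMU and the data cache are both enabled.

	#include <stdint.h>
	#include <stdio.h>

	#define SCTLR_ELx_M	(1UL << 0)	/* MMU enable */
	#define SCTLR_ELx_C	(1UL << 2)	/* data cache enable */

	/* Hypothetical helper mirroring record_mmu_state's bit trick. */
	static int mmu_and_caches_on(uint64_t sctlr)
	{
		/* BIT(n) &= BIT(n + 2): bit 0 of the result is M & C */
		return (sctlr & (sctlr >> 2) & SCTLR_ELx_M) != 0;
	}

	int main(void)
	{
		/* prints 1: both M and C set */
		printf("%d\n", mmu_and_caches_on(SCTLR_ELx_M | SCTLR_ELx_C));
		/* prints 0: MMU on but caches off does not count */
		printf("%d\n", mmu_and_caches_on(SCTLR_ELx_M));
		return 0;
	}

This is why x19 ends up as 1 only when we entered with the MMU *and*
caches on, matching the `cset w19, ne` following the `tst`.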