From b9293d457ff3de415fa07d7e35978bceb29c3827 Mon Sep 17 00:00:00 2001
From: Jamie Iles
Date: Tue, 13 Jun 2023 15:19:59 +0100
Subject: [PATCH] arm64/mm: remove now-superfluous ISBs from TTBR writes

At the time of authoring 7655abb95386 ("arm64: mm: Move ASID from TTBR0
to TTBR1"), the Arm ARM did not specify any ordering guarantees for
direct writes to TTBR0_ELx and TTBR1_ELx, so an ISB was required after
each write to ensure that TLBs would only be populated from the
expected (or reserved) tables.

In a recent update to the Arm ARM, the requirements have been relaxed
to reflect the implementation of current CPUs and the required
implementation of future CPUs to read (RDYDPX in D8.2.3 Translation
table base address register):

  Direct writes to TTBR0_ELx and TTBR1_ELx occur in program order
  relative to one another, without the need for explicit
  synchronization. For any one translation, all indirect reads of
  TTBR0_ELx and TTBR1_ELx that are made as part of the translation
  observe only one point in that order of direct writes.

Remove the superfluous ISBs to optimize the uaccess helpers and context
switch.

Cc: Will Deacon
Cc: Mark Rutland
Signed-off-by: Jamie Iles
Reviewed-by: Mark Rutland
Link: https://lore.kernel.org/r/20230613141959.92697-1-quic_jiles@quicinc.com
[catalin.marinas@arm.com: rename __cpu_set_reserved_ttbr0 to ..._nosync]
[catalin.marinas@arm.com: move the cpu_set_reserved_ttbr0_nosync() call to cpu_do_switch_mm()]
Signed-off-by: Catalin Marinas
---
 arch/arm64/include/asm/asm-uaccess.h | 2 --
 arch/arm64/include/asm/mmu_context.h | 8 ++++++--
 arch/arm64/include/asm/uaccess.h     | 2 --
 arch/arm64/mm/context.c              | 2 +-
 4 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/arch/arm64/include/asm/asm-uaccess.h b/arch/arm64/include/asm/asm-uaccess.h
index 75b211c98dea..5b6efe8abeeb 100644
--- a/arch/arm64/include/asm/asm-uaccess.h
+++ b/arch/arm64/include/asm/asm-uaccess.h
@@ -18,7 +18,6 @@
 	bic	\tmp1, \tmp1, #TTBR_ASID_MASK
 	sub	\tmp1, \tmp1, #RESERVED_SWAPPER_OFFSET	// reserved_pg_dir
 	msr	ttbr0_el1, \tmp1			// set reserved TTBR0_EL1
-	isb
 	add	\tmp1, \tmp1, #RESERVED_SWAPPER_OFFSET
 	msr	ttbr1_el1, \tmp1		// set reserved ASID
 	isb
@@ -31,7 +30,6 @@
 	extr	\tmp2, \tmp2, \tmp1, #48
 	ror	\tmp2, \tmp2, #16
 	msr	ttbr1_el1, \tmp2		// set the active ASID
-	isb
 	msr	ttbr0_el1, \tmp1		// set the non-PAN TTBR0_EL1
 	isb
 .endm
diff --git a/arch/arm64/include/asm/mmu_context.h b/arch/arm64/include/asm/mmu_context.h
index 56911691bef0..86f6870c64a6 100644
--- a/arch/arm64/include/asm/mmu_context.h
+++ b/arch/arm64/include/asm/mmu_context.h
@@ -39,11 +39,16 @@ static inline void contextidr_thread_switch(struct task_struct *next)
 /*
  * Set TTBR0 to reserved_pg_dir. No translations will be possible via TTBR0.
  */
-static inline void cpu_set_reserved_ttbr0(void)
+static inline void cpu_set_reserved_ttbr0_nosync(void)
 {
 	unsigned long ttbr = phys_to_ttbr(__pa_symbol(reserved_pg_dir));
 
 	write_sysreg(ttbr, ttbr0_el1);
+}
+
+static inline void cpu_set_reserved_ttbr0(void)
+{
+	cpu_set_reserved_ttbr0_nosync();
 	isb();
 }
 
@@ -52,7 +57,6 @@ void cpu_do_switch_mm(phys_addr_t pgd_phys, struct mm_struct *mm);
 static inline void cpu_switch_mm(pgd_t *pgd, struct mm_struct *mm)
 {
 	BUG_ON(pgd == swapper_pg_dir);
-	cpu_set_reserved_ttbr0();
 	cpu_do_switch_mm(virt_to_phys(pgd),mm);
 }
diff --git a/arch/arm64/include/asm/uaccess.h b/arch/arm64/include/asm/uaccess.h
index 05f4fc265428..14be5000c5a0 100644
--- a/arch/arm64/include/asm/uaccess.h
+++ b/arch/arm64/include/asm/uaccess.h
@@ -65,7 +65,6 @@ static inline void __uaccess_ttbr0_disable(void)
 	ttbr &= ~TTBR_ASID_MASK;
 	/* reserved_pg_dir placed before swapper_pg_dir */
 	write_sysreg(ttbr - RESERVED_SWAPPER_OFFSET, ttbr0_el1);
-	isb();
 	/* Set reserved ASID */
 	write_sysreg(ttbr, ttbr1_el1);
 	isb();
@@ -89,7 +88,6 @@ static inline void __uaccess_ttbr0_enable(void)
 	ttbr1 &= ~TTBR_ASID_MASK;		/* safety measure */
 	ttbr1 |= ttbr0 & TTBR_ASID_MASK;
 	write_sysreg(ttbr1, ttbr1_el1);
-	isb();
 
 	/* Restore user page table */
 	write_sysreg(ttbr0, ttbr0_el1);
diff --git a/arch/arm64/mm/context.c b/arch/arm64/mm/context.c
index e1e0dca01839..188197590fc9 100644
--- a/arch/arm64/mm/context.c
+++ b/arch/arm64/mm/context.c
@@ -364,8 +364,8 @@ void cpu_do_switch_mm(phys_addr_t pgd_phys, struct mm_struct *mm)
 	ttbr1 &= ~TTBR_ASID_MASK;
 	ttbr1 |= FIELD_PREP(TTBR_ASID_MASK, asid);
 
+	cpu_set_reserved_ttbr0_nosync();
 	write_sysreg(ttbr1, ttbr1_el1);
-	isb();
 	write_sysreg(ttbr0, ttbr0_el1);
 	isb();
 	post_ttbr_update_workaround();
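
For clarity, a sketch of the resulting TTBR update sequence in cpu_do_switch_mm()
after this patch, reconstructed from the context.c hunk above (only the lines shown
in the hunk are guaranteed verbatim; the comments are illustrative, not from the
source):

	cpu_set_reserved_ttbr0_nosync();	/* TTBR0_EL1 -> reserved_pg_dir, no ISB */
	write_sysreg(ttbr1, ttbr1_el1);		/* install new ASID in TTBR1_EL1 */
	write_sysreg(ttbr0, ttbr0_el1);		/* install new pgd in TTBR0_EL1 */
	isb();					/* single synchronization point for all three writes */
	post_ttbr_update_workaround();

Per the Arm ARM text quoted in the commit message, the direct TTBR writes are
ordered in program order with respect to one another and any one translation
observes only one point in that order, so the single trailing ISB is enough to
synchronize the new context.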