On Wed, May 26, 2021 at 11:12:40AM +0800, Guo Ren wrote:
> > static inline void local_flush_tlb_range_asid(unsigned long start,
> >				unsigned long size, unsigned long asid)
> > > +{
> > > +	unsigned long tmp = start & PAGE_MASK;
> > > +	unsigned long end = ALIGN(start + size, PAGE_SIZE);
> > > +
> > > +	if (size == -1) {
> > > +		__asm__ __volatile__ ("sfence.vma x0, %0" : : "r" (asid) : "memory");
> > > +		return;
> >
> > Please split the global (size == -1) case into separate helpers.
>
> Do you mean:

No.  Basically a

static inline void local_flush_tlb_all_asid(unsigned long asid)
{
	__asm__ __volatile__ ("sfence.vma x0, %0" : : "r" (asid) : "memory");
}

and

static inline void local_flush_tlb_range_asid(unsigned long start,
		unsigned long size, unsigned long asid)
{
	unsigned long end = ALIGN(start + size, PAGE_SIZE), tmp;

	for (tmp = start & PAGE_MASK; tmp < end; tmp += PAGE_SIZE) {
		__asm__ __volatile__ ("sfence.vma %0, %1"
				: : "r" (tmp), "r" (asid) : "memory");
	}
}