Utilize the alternative coding to return early from the D/I cache flush
functions with the bv,n instruction when the machine has no D- or
I-caches. Up to now, the code was replaced with a long branch
instruction to the end of the functions.

Signed-off-by: Helge Deller <deller@xxxxxx>

diff --git a/arch/parisc/include/asm/alternative.h b/arch/parisc/include/asm/alternative.h
index a3630442111d..754988ef4e8d 100644
--- a/arch/parisc/include/asm/alternative.h
+++ b/arch/parisc/include/asm/alternative.h
@@ -12,6 +12,7 @@
 #define INSN_PxTLB	0x02		/* modify pdtlb, pitlb */
 #define INSN_LDI_CPUs	0x34000000	/* ldi val,%reg */
 #define INSN_NOP	0x08000240	/* nop */
+#define INSN_RETURN	0xe840c002	/* bv,n r0(rp) */
 
 #ifndef __ASSEMBLY__
diff --git a/arch/parisc/kernel/pacache.S b/arch/parisc/kernel/pacache.S
index 187f032c9dd8..bb5da609b7d3 100644
--- a/arch/parisc/kernel/pacache.S
+++ b/arch/parisc/kernel/pacache.S
@@ -192,6 +192,7 @@ ENDPROC_CFI(flush_tlb_all_local)
 
 ENTRY_CFI(flush_instruction_cache_local)
 88:	load32		cache_info, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_ICACHE, INSN_RETURN)
 
 	/* Flush Instruction Cache */
 
@@ -244,7 +245,6 @@ fioneloop2:
 fisync:
 	sync
 	mtsm		%r22			/* restore I-bit */
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_instruction_cache_local)
@@ -253,6 +253,7 @@ ENDPROC_CFI(flush_instruction_cache_local)
 
 	.import cache_info, data
 ENTRY_CFI(flush_data_cache_local)
 88:	load32		cache_info, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_DCACHE, INSN_RETURN)
 
 	/* Flush Data Cache */
 
@@ -306,7 +307,6 @@ fdsync:
 	syncdma
 	sync
 	mtsm		%r22			/* restore I-bit */
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	bv		%r0(%r2)
 	nop
 ENDPROC_CFI(flush_data_cache_local)
@@ -996,6 +996,7 @@ ENDPROC_CFI(flush_icache_page_asm)
 
 ENTRY_CFI(flush_kernel_dcache_page_asm)
 88:	ldil		L%dcache_stride, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_DCACHE, INSN_RETURN)
 	ldw		R%dcache_stride(%r1), %r23
 
 #ifdef CONFIG_64BIT
@@ -1024,7 +1025,6 @@ ENTRY_CFI(flush_kernel_dcache_page_asm)
 	cmpb,COND(>>)	%r25, %r26, 1b /* predict taken */
 	fdc,m		%r23(%r26)
 
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
@@ -1032,6 +1032,7 @@ ENDPROC_CFI(flush_kernel_dcache_page_asm)
 
 ENTRY_CFI(purge_kernel_dcache_page_asm)
 88:	ldil		L%dcache_stride, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_DCACHE, INSN_RETURN)
 	ldw		R%dcache_stride(%r1), %r23
 
 #ifdef CONFIG_64BIT
@@ -1060,7 +1061,6 @@ ENTRY_CFI(purge_kernel_dcache_page_asm)
 	cmpb,COND(>>)	%r25, %r26, 1b /* predict taken */
 	pdc,m		%r23(%r26)
 
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
@@ -1068,6 +1068,7 @@ ENDPROC_CFI(purge_kernel_dcache_page_asm)
 
 ENTRY_CFI(flush_user_dcache_range_asm)
 88:	ldil		L%dcache_stride, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_DCACHE, INSN_RETURN)
 	ldw		R%dcache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
@@ -1101,7 +1102,6 @@ ENTRY_CFI(flush_user_dcache_range_asm)
 2:	cmpb,COND(>>),n	%r25, %r26, 2b
 	fdc,m		%r23(%sr3, %r26)
 
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
@@ -1109,6 +1109,7 @@ ENDPROC_CFI(flush_user_dcache_range_asm)
 
 ENTRY_CFI(flush_kernel_dcache_range_asm)
 88:	ldil		L%dcache_stride, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_DCACHE, INSN_RETURN)
 	ldw		R%dcache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
@@ -1143,7 +1144,6 @@ ENTRY_CFI(flush_kernel_dcache_range_asm)
 	fdc,m		%r23(%r26)
 	sync
 
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	syncdma
 	bv		%r0(%r2)
 	nop
@@ -1151,6 +1151,7 @@ ENDPROC_CFI(flush_kernel_dcache_range_asm)
 
 ENTRY_CFI(purge_kernel_dcache_range_asm)
 88:	ldil		L%dcache_stride, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_DCACHE, INSN_RETURN)
 	ldw		R%dcache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
@@ -1185,7 +1186,6 @@ ENTRY_CFI(purge_kernel_dcache_range_asm)
 	pdc,m		%r23(%r26)
 	sync
 
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
 	syncdma
 	bv		%r0(%r2)
 	nop
@@ -1193,6 +1193,7 @@ ENDPROC_CFI(purge_kernel_dcache_range_asm)
 
 ENTRY_CFI(flush_user_icache_range_asm)
 88:	ldil		L%icache_stride, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_ICACHE, INSN_RETURN)
 	ldw		R%icache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
@@ -1226,7 +1227,6 @@ ENTRY_CFI(flush_user_icache_range_asm)
 2:	cmpb,COND(>>),n	%r25, %r26, 2b
 	fic,m		%r23(%sr3, %r26)
 
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
@@ -1234,6 +1234,7 @@ ENDPROC_CFI(flush_user_icache_range_asm)
 
 ENTRY_CFI(flush_kernel_icache_page)
 88:	ldil		L%icache_stride, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_ICACHE, INSN_RETURN)
 	ldw		R%icache_stride(%r1), %r23
 
 #ifdef CONFIG_64BIT
@@ -1263,7 +1264,6 @@ ENTRY_CFI(flush_kernel_icache_page)
 	cmpb,COND(>>)	%r25, %r26, 1b /* predict taken */
 	fic,m		%r23(%sr4, %r26)
 
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
@@ -1271,6 +1271,7 @@ ENDPROC_CFI(flush_kernel_icache_page)
 
 ENTRY_CFI(flush_kernel_icache_range_asm)
 88:	ldil		L%icache_stride, %r1
+	ALTERNATIVE(88b, 88b+4, ALT_COND_NO_ICACHE, INSN_RETURN)
 	ldw		R%icache_stride(%r1), %r23
 	ldo		-1(%r23), %r21
 	ANDCM		%r26, %r21, %r26
@@ -1304,7 +1305,6 @@ ENTRY_CFI(flush_kernel_icache_range_asm)
2:	cmpb,COND(>>),n	%r25, %r26, 2b /* predict taken */
 	fic,m		%r23(%sr4, %r26)
 
-89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
 	sync
 	bv		%r0(%r2)
 	nop
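
A short note on the mechanism, for readers unfamiliar with the parisc
alternative patching: each ALTERNATIVE(from, to, cond, replacement) use
emits an entry into a patch table, and early boot code rewrites the
covered instruction word(s) with the replacement when the condition
(e.g. ALT_COND_NO_DCACHE) matches the running machine. With this patch
the flush routines get the word at 88b patched to INSN_RETURN, i.e.
bv,n r0(rp): branch to the return address in rp and nullify the
delay-slot instruction, so the routine returns immediately instead of
branching over the flush loop as the old INSN_NOP variant did. The C
fragment below is only a minimal sketch of that table-driven patching
idea; the struct layout and names (alt_entry, apply_alternatives,
nwords, machine_flags) are illustrative assumptions, not the actual
arch/parisc/kernel/alternative.c code.

/*
 * Minimal sketch of table-driven instruction patching, for illustration
 * only.  Names and struct layout are assumptions made for this example;
 * the real implementation lives in arch/parisc/kernel/alternative.c.
 */
#include <stdint.h>

#define ALT_COND_NO_DCACHE	0x01u		/* illustrative condition bits */
#define ALT_COND_NO_ICACHE	0x02u
#define INSN_RETURN		0xe840c002u	/* bv,n r0(rp) */

struct alt_entry {
	uint32_t *addr;		/* first instruction word to patch */
	unsigned int nwords;	/* how many 32-bit words are covered */
	uint32_t cond;		/* ALT_COND_* bits this entry needs */
	uint32_t replacement;	/* instruction word to write */
};

/* Walk the table once at boot and patch entries whose condition holds. */
static void apply_alternatives(struct alt_entry *start, struct alt_entry *end,
			       uint32_t machine_flags)
{
	struct alt_entry *e;
	unsigned int i;

	for (e = start; e < end; e++) {
		if (!(e->cond & machine_flags))
			continue;		/* condition not met */
		for (i = 0; i < e->nwords; i++)
			e->addr[i] = e->replacement;
		/* real code must also flush caches for the patched words */
	}
}

In this sketch, an entry such as { addr, nwords = 1, ALT_COND_NO_DCACHE,
INSN_RETURN } with addr pointing at the 88b label would roughly
correspond to what the new ALTERNATIVE(88b, 88b+4, ALT_COND_NO_DCACHE,
INSN_RETURN) lines request.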