[PATCH 02/12] powerpc/64s/hash: avoid the POWER5 < DD2.1 slb invalidate workaround on POWER8/9
Nicholas Piggin
npiggin at gmail.com
Sat Sep 15 01:30:46 AEST 2018
The POWER5 < DD2.1 workaround issues an extra slbie when invalidating SLB entries. I only have POWER8/9 to test, so rather than removing the workaround entirely, just avoid the extra slbie on those (CPU_FTR_ARCH_207S).
Signed-off-by: Nicholas Piggin <npiggin at gmail.com>
---
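Not part of the patch: for reviewers unfamiliar with the C-side gating, a
minimal sketch of the pattern the slb.c hunk below uses. On
CPU_FTR_ARCH_207S (ISA v2.07, i.e. POWER8/9) parts the extra slbie for the
POWER5 < DD2.1 erratum is skipped at runtime; the asm side achieves the
same thing at boot via BEGIN_FTR_SECTION/END_FTR_SECTION_IFCLR feature
fixups. The helper name slb_cache_workaround_slbie() and the header
choices are illustrative assumptions, not something this patch adds.

#include <asm/cputable.h>		/* CPU_FTR_ARCH_207S (assumed header) */
#include <asm/cpu_has_feature.h>	/* cpu_has_feature() (assumed header) */
#include <asm/paca.h>			/* SLB_CACHE_ENTRIES (assumed header) */

/* Hypothetical helper mirroring the gated slbie in switch_slb() */
static inline void slb_cache_workaround_slbie(unsigned long slbie_data,
					      unsigned long offset)
{
	/* ISA v2.07 parts do not need the POWER5 < DD2.1 double invalidate */
	if (cpu_has_feature(CPU_FTR_ARCH_207S))
		return;

	/* Workaround POWER5 < DD2.1 issue */
	if (offset == 1 || offset > SLB_CACHE_ENTRIES)
		asm volatile("slbie %0" : : "r" (slbie_data));
}
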
 arch/powerpc/kernel/entry_64.S | 2 ++
 arch/powerpc/mm/slb.c          | 8 +++++---
 2 files changed, 7 insertions(+), 3 deletions(-)
diff --git a/arch/powerpc/kernel/entry_64.S b/arch/powerpc/kernel/entry_64.S
index 2206912ea4f0..77a888bfcb53 100644
--- a/arch/powerpc/kernel/entry_64.S
+++ b/arch/powerpc/kernel/entry_64.S
@@ -672,7 +672,9 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_1T_SEGMENT)
 	isync
 	slbie	r6
+BEGIN_FTR_SECTION
 	slbie	r6		/* Workaround POWER5 < DD2.1 issue */
+END_FTR_SECTION_IFCLR(CPU_FTR_ARCH_207S)
 	slbmte	r7,r0
 	isync
 2:
diff --git a/arch/powerpc/mm/slb.c b/arch/powerpc/mm/slb.c
index 2f162c6e52d4..1c7128c63a4b 100644
--- a/arch/powerpc/mm/slb.c
+++ b/arch/powerpc/mm/slb.c
@@ -256,9 +256,11 @@ void switch_slb(struct task_struct *tsk, struct mm_struct *mm)
 		__slb_flush_and_rebolt();
 	}
-	/* Workaround POWER5 < DD2.1 issue */
-	if (offset == 1 || offset > SLB_CACHE_ENTRIES)
-		asm volatile("slbie %0" : : "r" (slbie_data));
+	if (!cpu_has_feature(CPU_FTR_ARCH_207S)) {
+		/* Workaround POWER5 < DD2.1 issue */
+		if (offset == 1 || offset > SLB_CACHE_ENTRIES)
+			asm volatile("slbie %0" : : "r" (slbie_data));
+	}
 	get_paca()->slb_cache_ptr = 0;
 	copy_mm_to_paca(mm);
--
2.18.0