[PATCH v2 7/9] powerpc/64s/hash: Fix false positive preempt warnings
Nicholas Piggin
npiggin at gmail.com
Wed Oct 3 00:27:57 AEST 2018
The SLB miss handlers run with interrupts hard-disabled, but the paca's
soft interrupt mask does not necessarily reflect that, because SLB misses
can be taken from soft-enabled code. Under CONFIG_DEBUG_PREEMPT, the
debug_smp_processor_id() check buried in get_paca() therefore sees what
looks like a preemptible context and warns, even though preemption is
impossible here. Use local_paca, which reads r13 directly and carries no
debug check.

Fixes: 5e46e29e6a97 ("powerpc/64s/hash: convert SLB miss handlers to C")
Signed-off-by: Nicholas Piggin <npiggin at gmail.com>
---
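For reference, the macro this patch avoids lives in
arch/powerpc/include/asm/paca.h; lightly trimmed, the relevant
definitions look like this:

register struct paca_struct *local_paca asm("r13");

#if defined(CONFIG_DEBUG_PREEMPT) && defined(CONFIG_SMP)
/*
 * Add standard checks that preemption cannot occur when using get_paca():
 * otherwise the paca_struct it points to may be the wrong one just after.
 */
#define get_paca()	((void) debug_smp_processor_id(), local_paca)
#else
#define get_paca()	local_paca
#endif

local_paca dereferences r13 directly, so it performs no preemption check.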
arch/powerpc/mm/slb.c | 24 ++++++++++++------------
1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/arch/powerpc/mm/slb.c b/arch/powerpc/mm/slb.c
index b5a33da875b5..c273d0baf4fc 100644
--- a/arch/powerpc/mm/slb.c
+++ b/arch/powerpc/mm/slb.c
@@ -558,21 +558,21 @@ static void slb_cache_update(unsigned long esid_data)
 	/*
 	 * Now update slb cache entries
 	 */
-	slb_cache_index = get_paca()->slb_cache_ptr;
+	slb_cache_index = local_paca->slb_cache_ptr;
 	if (slb_cache_index < SLB_CACHE_ENTRIES) {
 		/*
 		 * We have space in slb cache for optimized switch_slb().
 		 * Top 36 bits from esid_data as per ISA
 		 */
-		get_paca()->slb_cache[slb_cache_index++] = esid_data >> 28;
-		get_paca()->slb_cache_ptr++;
+		local_paca->slb_cache[slb_cache_index++] = esid_data >> 28;
+		local_paca->slb_cache_ptr++;
 	} else {
 		/*
 		 * Our cache is full and the current cache content strictly
 		 * doesn't indicate the active SLB contents. Bump the ptr
		 * so that switch_slb() will ignore the cache.
 		 */
-		get_paca()->slb_cache_ptr = SLB_CACHE_ENTRIES + 1;
+		local_paca->slb_cache_ptr = SLB_CACHE_ENTRIES + 1;
 	}
 }
 
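As an aside for readers following the logic rather than just the
conversion: the bookkeeping in this hunk can be sketched as stand-alone
C. Everything below is a hypothetical simplification for illustration
(demo_paca and demo_slb_cache_update() are made-up names, and
SLB_CACHE_ENTRIES is assumed to be 8, per asm/paca.h):

#include <stdint.h>

#define SLB_CACHE_ENTRIES	8	/* assumed value, per asm/paca.h */

struct demo_paca {			/* hypothetical stand-in for paca_struct */
	uint16_t slb_cache_ptr;
	uint32_t slb_cache[SLB_CACHE_ENTRIES];
};

static void demo_slb_cache_update(struct demo_paca *paca, uint64_t esid_data)
{
	uint16_t index = paca->slb_cache_ptr;

	if (index < SLB_CACHE_ENTRIES) {
		/* Room left: record the ESID (top 36 bits of esid_data). */
		paca->slb_cache[index] = (uint32_t)(esid_data >> 28);
		paca->slb_cache_ptr = index + 1;
	} else {
		/*
		 * Overflow: the cache no longer describes every SLB entry,
		 * so poison the pointer; switch_slb() then ignores the
		 * cache and flushes the whole SLB instead.
		 */
		paca->slb_cache_ptr = SLB_CACHE_ENTRIES + 1;
	}
}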
@@ -596,24 +596,24 @@ static enum slb_index alloc_slb_index(bool kernel)
 	 * POWER7/8/9 have 32 SLB entries; this could be expanded if a
 	 * future CPU has more.
 	 */
-	if (get_paca()->slb_used_bitmap != U32_MAX) {
-		index = ffz(get_paca()->slb_used_bitmap);
-		get_paca()->slb_used_bitmap |= 1U << index;
+	if (local_paca->slb_used_bitmap != U32_MAX) {
+		index = ffz(local_paca->slb_used_bitmap);
+		local_paca->slb_used_bitmap |= 1U << index;
 		if (kernel)
-			get_paca()->slb_kern_bitmap |= 1U << index;
+			local_paca->slb_kern_bitmap |= 1U << index;
 	} else {
 		/* round-robin replacement of slb starting at SLB_NUM_BOLTED. */
-		index = get_paca()->stab_rr;
+		index = local_paca->stab_rr;
 		if (index < (mmu_slb_size - 1))
 			index++;
 		else
 			index = SLB_NUM_BOLTED;
-		get_paca()->stab_rr = index;
+		local_paca->stab_rr = index;
 		if (index < 32) {
 			if (kernel)
-				get_paca()->slb_kern_bitmap |= 1U << index;
+				local_paca->slb_kern_bitmap |= 1U << index;
 			else
-				get_paca()->slb_kern_bitmap &= ~(1U << index);
+				local_paca->slb_kern_bitmap &= ~(1U << index);
 		}
 	}
 	BUG_ON(index < SLB_NUM_BOLTED);
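The allocation policy in this hunk can be sketched the same way; again a
hypothetical simplification, with ffz() open-coded as a count of trailing
one bits and the SLB_NUM_BOLTED value assumed rather than taken from this
series:

#include <stdint.h>

#define DEMO_SLB_NUM_BOLTED	2	/* assumption, not this series' value */

struct demo_paca {			/* hypothetical stand-in for paca_struct */
	uint32_t slb_used_bitmap;
	uint32_t slb_kern_bitmap;
	uint16_t stab_rr;
};

/* Prefer a free slot from the used-bitmap; else round-robin past the
 * bolted entries, wrapping back to the first replaceable index. */
static int demo_alloc_slb_index(struct demo_paca *paca,
				unsigned int slb_size, int kernel)
{
	int index;

	if (paca->slb_used_bitmap != UINT32_MAX) {
		/* ffz(x) is the lowest clear bit: __builtin_ctz(~x) (gcc/clang). */
		index = __builtin_ctz(~paca->slb_used_bitmap);
		paca->slb_used_bitmap |= 1U << index;
		if (kernel)
			paca->slb_kern_bitmap |= 1U << index;
	} else {
		index = paca->stab_rr;
		if (index < (int)(slb_size - 1))
			index++;
		else
			index = DEMO_SLB_NUM_BOLTED;
		paca->stab_rr = (uint16_t)index;
		if (index < 32) {
			/* Track whether the victim slot now holds a kernel entry. */
			if (kernel)
				paca->slb_kern_bitmap |= 1U << index;
			else
				paca->slb_kern_bitmap &= ~(1U << index);
		}
	}
	return index;
}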
--
2.18.0