[PATCH] powerpc/32: Remove memory clobber asm constraint on dcbX() functions
Christophe Leroy
christophe.leroy at c-s.fr
Tue Jan 9 17:57:59 AEDT 2018
Instead of just telling GCC that dcbz(), dcbi(), dcbf() and dcbst()
clobber memory, tell it what each of them actually touches (a stand-alone
sketch of the idiom follows the list):
* dcbz(), dcbi() and dcbf() clobber one cacheline as output
* dcbf() and dcbst() take one cacheline as input
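The trick is GCC's support for sized memory operands: dereferencing the
pointer after casting it to "char (*)[L1_CACHE_BYTES]" yields an "m"/"=m"
operand that covers exactly one cacheline, so the compiler knows precisely
which bytes each instruction touches instead of having to assume that all
of memory may have changed. Below is a minimal stand-alone sketch of the
idiom (illustration only, not part of the patch; LINE_SIZE, zero_line(),
writeback_line() and demo() are made-up names, LINE_SIZE stands in for
L1_CACHE_BYTES, and the snippet only builds for powerpc since it emits
dcbz/dcbst):

#define LINE_SIZE 32    /* stand-in for the kernel's L1_CACHE_BYTES */

/* Output constraint: GCC is told the asm writes exactly one
 * LINE_SIZE-byte object at addr, and nothing else. */
static inline void zero_line(void *addr)
{
        __asm__ __volatile__ ("dcbz 0, %1"
                              : "=m" (*(char (*)[LINE_SIZE])addr)
                              : "r" (addr));
}

/* Input constraint: the line is read (its contents must be up to date
 * before the writeback) but, from the program's point of view, nothing
 * in memory is modified. */
static inline void writeback_line(void *addr)
{
        __asm__ __volatile__ ("dcbst 0, %0"
                              : /* no outputs */
                              : "r" (addr), "m" (*(char (*)[LINE_SIZE])addr));
}

static char buf[LINE_SIZE] __attribute__((aligned(LINE_SIZE)));

void demo(void)
{
        zero_line(buf);         /* compiler sees a write of buf[0..LINE_SIZE-1] */
        writeback_line(buf);    /* and a read of the same bytes */
}

The empty clobber lists in the patch below then simply reflect that
nothing beyond the named operands is affected.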
Signed-off-by: Christophe Leroy <christophe.leroy@c-s.fr>
---
arch/powerpc/include/asm/cache.h | 17 +++++++++++++----
1 file changed, 13 insertions(+), 4 deletions(-)
diff --git a/arch/powerpc/include/asm/cache.h b/arch/powerpc/include/asm/cache.h
index c1d257aa4c2d..fc8fe18acf8c 100644
--- a/arch/powerpc/include/asm/cache.h
+++ b/arch/powerpc/include/asm/cache.h
@@ -82,22 +82,31 @@ extern void _set_L3CR(unsigned long);
static inline void dcbz(void *addr)
{
- __asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
+ __asm__ __volatile__ ("dcbz 0, %1" :
+ "=m"(*(char (*)[L1_CACHE_BYTES])addr) :
+ "r"(addr) :);
}
static inline void dcbi(void *addr)
{
- __asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
+ __asm__ __volatile__ ("dcbi 0, %1" :
+ "=m"(*(char (*)[L1_CACHE_BYTES])addr) :
+ "r"(addr) :);
}
static inline void dcbf(void *addr)
{
- __asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
+ __asm__ __volatile__ ("dcbf 0, %1" :
+ "=m"(*(char (*)[L1_CACHE_BYTES])addr) :
+ "r"(addr), "m"(*(char (*)[L1_CACHE_BYTES])addr) :
+ );
}
static inline void dcbst(void *addr)
{
- __asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
+ __asm__ __volatile__ ("dcbst 0, %0" : :
+ "r"(addr), "m"(*(char (*)[L1_CACHE_BYTES])addr) :
+ );
}
#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
--
2.13.3