[patch] powerpc: native atomic_add_unless

Nick Piggin npiggin at suse.de
Sat Jan 21 22:25:36 EST 2006


Hi list,

atomic_add_unless() (and its atomic_inc_not_zero() wrapper) is used in several
hot paths in the VFS, and I'm planning some uses in the memory manager, so it
should be as small and fast as possible.
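
For context, the kind of pattern this serves is the lockless "take a
reference only if the object is still live" check. This is just an
illustrative sketch with made-up names, not code from the tree:

	/*
	 * Illustrative only: obj and obj_get() are made-up names.
	 * atomic_inc_not_zero() fails if the refcount has already hit
	 * zero, i.e. the object is on its way to being freed.
	 */
	struct obj {
		atomic_t refcount;
		/* ... */
	};

	static struct obj *obj_get(struct obj *o)
	{
		if (!atomic_inc_not_zero(&o->refcount))
			return NULL;	/* lost the race with the last put */
		return o;
	}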

Joel had a good suggestion to save a register, but all bugs are mine.
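
For anyone who doesn't read ppc asm fluently, the new loop below is roughly
equivalent to the following C-level sketch. load_reserved() and
store_conditional() are just stand-ins for lwarx/stwcx., not real functions:

	/*
	 * Rough C rendering of the lwarx/stwcx. loop in the patch.
	 * load_reserved() and store_conditional() are placeholders for
	 * the ll/sc instructions; they are not real kernel interfaces.
	 */
	static int atomic_add_unless_sketch(atomic_t *v, int a, int u)
	{
		int t;

		do {
			t = load_reserved(&v->counter);		/* lwarx */
			if (t == u)				/* cmpw; beq- 2f */
				return 0;			/* counter untouched */
			t += a;					/* add */
		} while (!store_conditional(&v->counter, t));	/* stwcx.; bne- 1b */

		/*
		 * The asm subtracts a back out (subf) so the same register
		 * that held the new value ends up holding the old one for
		 * the final comparison against u.
		 */
		return (t - a) != u;
	}

The subf at the end is what lets a single temporary register serve both as
scratch for the new value and as the value compared against u on return.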

Comments?

Nick

--
Implement atomic_add_unless natively instead of using cmpxchg.
Improvements by Joel Schopp.

Signed-off-by: Nick Piggin <npiggin at suse.de>

Index: linux-2.6/include/asm-powerpc/atomic.h
===================================================================
--- linux-2.6.orig/include/asm-powerpc/atomic.h
+++ linux-2.6/include/asm-powerpc/atomic.h
@@ -8,6 +8,7 @@
 typedef struct { volatile int counter; } atomic_t;
 
 #ifdef __KERNEL__
+#include <linux/compiler.h>
 #include <asm/synch.h>
 #include <asm/asm-compat.h>
 
@@ -176,20 +177,29 @@ static __inline__ int atomic_dec_return(
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)			\
-({							\
-	int c, old;					\
-	c = atomic_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int t;
+
+	__asm__ __volatile__ (
+	LWSYNC_ON_SMP
+"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
+	cmpw	0,%0,%3 \n\
+	beq-	2f \n\
+	add	%0,%2,%0 \n"
+	PPC405_ERR77(0,%2)
+"	stwcx.	%0,0,%1 \n\
+	bne-	1b \n"
+	ISYNC_ON_SMP
+"	subf	%0,%2,%0 \n\
+2:"
+	: "=&r" (t)
+	: "r" (&v->counter), "r" (a), "r" (u)
+	: "cc", "memory");
+
+	return likely(t != u);
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)


