[PATCH] atomic: inc_not_zero
author	Nick Piggin <nickpiggin@yahoo.com.au>
Mon, 14 Nov 2005 00:07:25 +0000 (16:07 -0800)
committer	Linus Torvalds <torvalds@g5.osdl.org>
Mon, 14 Nov 2005 02:14:16 +0000 (18:14 -0800)
Introduce an atomic_inc_not_zero operation.  Make this a special case of
atomic_add_unless because lockless pagecache actually wants
atomic_inc_not_negativeone due to its offset refcount.

Signed-off-by: Nick Piggin <npiggin@suse.de>
Cc: "Paul E. McKenney" <paulmck@us.ibm.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
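
As an illustration of the intended use (not part of the patch itself): the
typical caller-side pattern for atomic_inc_not_zero() is a refcount
"tryget" during a lockless lookup -- the count may only be bumped while it
is still non-zero, because a zero count means the final put has already run
and the object may be freed at any moment.  A minimal kernel-style sketch,
where struct obj and the helper names are hypothetical:

	#include <asm/atomic.h>		/* atomic_t, atomic_inc_not_zero() */
	#include <linux/slab.h>		/* kfree() */

	struct obj {
		atomic_t refcount;	/* reaches zero when the object dies */
	};

	/* Succeeds (returns non-zero) only while another reference is held. */
	static inline int obj_tryget(struct obj *obj)
	{
		return atomic_inc_not_zero(&obj->refcount);
	}

	static inline void obj_put(struct obj *obj)
	{
		if (atomic_dec_and_test(&obj->refcount))
			kfree(obj);	/* last reference dropped */
	}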
23 files changed:
Documentation/atomic_ops.txt
arch/sparc/lib/atomic32.c
include/asm-alpha/atomic.h
include/asm-arm/atomic.h
include/asm-arm26/atomic.h
include/asm-cris/atomic.h
include/asm-frv/atomic.h
include/asm-h8300/atomic.h
include/asm-i386/atomic.h
include/asm-ia64/atomic.h
include/asm-m68k/atomic.h
include/asm-m68knommu/atomic.h
include/asm-mips/atomic.h
include/asm-parisc/atomic.h
include/asm-powerpc/atomic.h
include/asm-s390/atomic.h
include/asm-sh/atomic.h
include/asm-sh64/atomic.h
include/asm-sparc/atomic.h
include/asm-sparc64/atomic.h
include/asm-v850/atomic.h
include/asm-x86_64/atomic.h
include/asm-xtensa/atomic.h

index f174416..23a1c24 100644 (file)
@@ -115,7 +115,7 @@ boolean is return which indicates whether the resulting counter value
 is negative.  It requires explicit memory barrier semantics around the
 operation.
 
-Finally:
+Then:
 
        int atomic_cmpxchg(atomic_t *v, int old, int new);
 
@@ -129,6 +129,18 @@ atomic_cmpxchg requires explicit memory barriers around the operation.
 The semantics for atomic_cmpxchg are the same as those defined for 'cas'
 below.
 
+Finally:
+
+       int atomic_add_unless(atomic_t *v, int a, int u);
+
+If the atomic value v is not equal to u, this function adds a to v, and
+returns non zero. If v is equal to u then it returns zero. This is done as
+an atomic operation.
+
+atomic_add_unless requires explicit memory barriers around the operation.
+
+atomic_inc_not_zero, equivalent to atomic_add_unless(v, 1, 0)
+
 
 If a caller requires memory barrier semantics around an atomic_t
 operation which does not return a value, a set of interfaces are
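
The atomic_add_unless() semantics documented above can also be exercised
outside the kernel.  Below is a stand-alone sketch (illustration only, not
part of the patch) of the same cmpxchg retry loop written with C11 atomics,
so the return-value behaviour of atomic_add_unless()/atomic_inc_not_zero()
can be compiled and tested on its own:

	#include <stdatomic.h>
	#include <stdio.h>

	/*
	 * Same contract as atomic_add_unless(): add a to *v unless *v == u;
	 * return non-zero iff the add was performed.
	 */
	static int add_unless(atomic_int *v, int a, int u)
	{
		int c = atomic_load(v);

		while (c != u && !atomic_compare_exchange_weak(v, &c, c + a))
			;	/* c was reloaded by the failed cmpxchg; retry */
		return c != u;
	}

	#define inc_not_zero(v)	add_unless((v), 1, 0)

	int main(void)
	{
		atomic_int ref = 1;

		printf("%d\n", inc_not_zero(&ref));	/* 1: count went 1 -> 2 */
		atomic_store(&ref, 0);
		printf("%d\n", inc_not_zero(&ref));	/* 0: zero stays zero */
		return 0;
	}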
index be46f65..cb3cf0f 100644 (file)
@@ -53,6 +53,21 @@ int atomic_cmpxchg(atomic_t *v, int old, int new)
        return ret;
 }
 
+int atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int ret;
+       unsigned long flags;
+
+       spin_lock_irqsave(ATOMIC_HASH(v), flags);
+       ret = v->counter;
+       if (ret != u)
+               v->counter += a;
+       spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
+       return ret != u;
+}
+
 /* Atomic operations are already serializing */
 void atomic_set(atomic_t *v, int i)
 {
        unsigned long flags;
index a666080..36505bb 100644 (file)
@@ -179,6 +179,16 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic64_dec_return(v) atomic64_sub_return(1,(v))
 
index 8ab1689..75b8027 100644 (file)
@@ -173,6 +173,17 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 
 #endif /* __LINUX_ARM_ARCH__ */
 
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int c, old;
+
+       c = atomic_read(v);
+       while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
+               c = old;
+       return c != u;
+}
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_add(i, v)       (void) atomic_add_return(i, v)
 #define atomic_inc(v)          (void) atomic_add_return(1, v)
 #define atomic_sub(i, v)       (void) atomic_sub_return(i, v)
index 54b24ea..a47cadc 100644 (file)
@@ -76,6 +76,21 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
        return ret;
 }
 
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int ret;
+       unsigned long flags;
+
+       local_irq_save(flags);
+       ret = v->counter;
+       if (ret != u)
+               v->counter += a;
+       local_irq_restore(flags);
+
+       return ret != u;
+}
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 {
         unsigned long flags;
index 45891f7..683b05a 100644 (file)
@@ -136,6 +136,20 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
        return ret;
 }
 
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int ret;
+       unsigned long flags;
+
+       cris_atomic_save(v, flags);
+       ret = v->counter;
+       if (ret != u)
+               v->counter += a;
+       cris_atomic_restore(v, flags);
+       return ret != u;
+}
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 /* Atomic operations are already serializing */
 #define smp_mb__before_atomic_dec()    barrier()
 #define smp_mb__after_atomic_dec()     barrier()
index 55f06a0..f6539ff 100644 (file)
@@ -416,4 +416,14 @@ extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
 
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #endif /* _ASM_ATOMIC_H */
index d504392..f23d868 100644 (file)
@@ -95,6 +95,20 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
        return ret;
 }
 
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int ret;
+       unsigned long flags;
+
+       local_irq_save(flags);
+       ret = v->counter;
+       if (ret != u)
+               v->counter += a;
+       local_irq_restore(flags);
+       return ret != u;
+}
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
 {
        __asm__ __volatile__("stc ccr,r1l\n\t"
index 5ff698e..c68557a 100644 (file)
@@ -217,6 +217,25 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
 
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_inc_return(v)  (atomic_add_return(1,v))
 #define atomic_dec_return(v)  (atomic_sub_return(1,v))
 
index 593d3da..2fbebf8 100644 (file)
@@ -90,6 +90,16 @@ ia64_atomic64_sub (__s64 i, atomic64_t *v)
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
 
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_add_return(i,v)                                         \
 ({                                                                     \
        int __ia64_aar_i = (i);                                         \
index b821975..e3c962e 100644 (file)
@@ -141,6 +141,16 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 /* Atomic operations are already serializing */
 #define smp_mb__before_atomic_dec()    barrier()
 #define smp_mb__after_atomic_dec()     barrier()
index 2fd33a5..3c1cc15 100644 (file)
@@ -130,6 +130,16 @@ static inline int atomic_sub_return(int i, atomic_t * v)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic_inc_return(v) atomic_add_return(1,(v))
 
index 4fba0d0..2c87b41 100644 (file)
@@ -289,6 +289,25 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic_inc_return(v) atomic_add_return(1,(v))
 
index 52c9a45..983e9a2 100644 (file)
@@ -166,6 +166,25 @@ static __inline__ int atomic_read(const atomic_t *v)
 /* exported interface */
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_add(i,v)        ((void)(__atomic_add_return( ((int)i),(v))))
 #define atomic_sub(i,v)        ((void)(__atomic_add_return(-((int)i),(v))))
 #define atomic_inc(v)  ((void)(__atomic_add_return(   1,(v))))
index 37205fa..ec4b144 100644 (file)
@@ -166,6 +166,31 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       for (;;) {                                              \
+               if (unlikely(c == (u)))                         \
+                       break;                                  \
+               old = atomic_cmpxchg((v), c, c + (a));          \
+               if (likely(old == c))                           \
+                       break;                                  \
+               c = old;                                        \
+       }                                                       \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_sub_and_test(a, v)      (atomic_sub_return((a), (v)) == 0)
 #define atomic_dec_and_test(v)         (atomic_dec_return((v)) == 0)
 
index 631014d..b3bd4f6 100644 (file)
@@ -200,6 +200,16 @@ atomic_compare_and_swap(int expected_oldval,int new_val,atomic_t *v)
 
 #define atomic_cmpxchg(v, o, n) (atomic_compare_and_swap((o), (n), &((v)->counter)))
 
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define smp_mb__before_atomic_dec()    smp_mb()
 #define smp_mb__after_atomic_dec()     smp_mb()
 #define smp_mb__before_atomic_inc()    smp_mb()
index a148c76..aabfd33 100644 (file)
@@ -101,6 +101,21 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
        return ret;
 }
 
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int ret;
+       unsigned long flags;
+
+       local_irq_save(flags);
+       ret = v->counter;
+       if (ret != u)
+               v->counter += a;
+       local_irq_restore(flags);
+
+       return ret != u;
+}
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
        unsigned long flags;
index 6eeb57b..927a2bc 100644 (file)
@@ -113,6 +113,21 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
        return ret;
 }
 
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int ret;
+       unsigned long flags;
+
+       local_irq_save(flags);
+       ret = v->counter;
+       if (ret != u)
+               v->counter += a;
+       local_irq_restore(flags);
+
+       return ret != u;
+}
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
        unsigned long flags;
index 52bdd1a..62bec7a 100644 (file)
@@ -20,6 +20,7 @@ typedef struct { volatile int counter; } atomic_t;
 
 extern int __atomic_add_return(int, atomic_t *);
 extern int atomic_cmpxchg(atomic_t *, int, int);
+extern int atomic_add_unless(atomic_t *, int, int);
 extern void atomic_set(atomic_t *, int);
 
 #define atomic_read(v)          ((v)->counter)
@@ -49,6 +50,8 @@ extern void atomic_set(atomic_t *, int);
 #define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
 #define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
 
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 /* This is the old 24-bit implementation.  It's still used internally
  * by some sparc-specific code, notably the semaphore implementation.
  */
index 3a0b438..8198c3d 100644 (file)
@@ -72,6 +72,16 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 /* Atomic operations are already serializing */
 #ifdef CONFIG_SMP
 #define smp_mb__before_atomic_dec()    membar_storeload_loadload();
index e497166..bede317 100644 (file)
@@ -104,6 +104,22 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
        return ret;
 }
 
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int ret;
+       unsigned long flags;
+
+       local_irq_save(flags);
+       ret = v->counter;
+       if (ret != u)
+               v->counter += a;
+       local_irq_restore(flags);
+
+       return ret != u;
+}
+
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 /* Atomic operations are already serializing on ARM */
 #define smp_mb__before_atomic_dec()    barrier()
 #define smp_mb__after_atomic_dec()     barrier()
index 75c8a1e..0866ef6 100644 (file)
@@ -362,6 +362,25 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
 
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 #define atomic_inc_return(v)  (atomic_add_return(1,v))
 #define atomic_dec_return(v)  (atomic_sub_return(1,v))
 
index cd40c5e..3670cc7 100644 (file)
@@ -225,6 +225,25 @@ static inline int atomic_sub_return(int i, atomic_t * v)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)                             \
+({                                                             \
+       int c, old;                                             \
+       c = atomic_read(v);                                     \
+       while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+               c = old;                                        \
+       c != (u);                                               \
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
+
 static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
     unsigned int all_f = -1;