X-Git-Url: http://ftp.safe.ca/?a=blobdiff_plain;f=include%2Fasm-xtensa%2Fatomic.h;h=b3b23540f14d2c5087e4b8d09d519378e73e726a;hb=05345b0f006ac226d0d25d48fcb2d792ac44a071;hp=12b5732dc6e5986ef4966ca87afe7f019745ef3e;hpb=288a60cf4d7cc35f84f46cd8ffd0b34f9d8e7346;p=safe%2Fjmp%2Flinux-2.6

diff --git a/include/asm-xtensa/atomic.h b/include/asm-xtensa/atomic.h
index 12b5732..b3b2354 100644
--- a/include/asm-xtensa/atomic.h
+++ b/include/asm-xtensa/atomic.h
@@ -13,7 +13,6 @@
 #ifndef _XTENSA_ATOMIC_H
 #define _XTENSA_ATOMIC_H
 
-#include <linux/config.h>
 #include <linux/stringify.h>
 
 typedef struct { volatile int counter; } atomic_t;
@@ -223,6 +222,34 @@ static inline int atomic_sub_return(int i, atomic_t * v)
  */
 #define atomic_add_negative(i,v) (atomic_add_return((i),(v)) < 0)
 
+#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
@@ -266,6 +293,7 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc()	barrier()
 #define smp_mb__after_atomic_inc()	barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 
 #endif /* _XTENSA_ATOMIC_H */
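
Usage note (illustrative only, not part of the diff above): atomic_add_unless() spins on a cmpxchg loop until it either observes the value @u (and gives up) or manages to add @a, and atomic_inc_not_zero() is the common special case for reference counts that must never be revived once they reach zero. The sketch below shows that pattern; the struct and function names (pending_work, pending_work_init, pending_work_tryget, pending_work_put) are hypothetical, while atomic_set(), atomic_inc_not_zero(), atomic_dec_and_test() and kfree() are the real kernel interfaces.

#include <asm/atomic.h>
#include <linux/slab.h>

/* Hypothetical refcounted object, for illustration only. */
struct pending_work {
	atomic_t refcount;	/* drops to 0 exactly once, when the object dies */
};

static void pending_work_init(struct pending_work *w)
{
	atomic_set(&w->refcount, 1);	/* creator holds the initial reference */
}

/* Take a reference only if the object is still live; returns non-zero on success. */
static int pending_work_tryget(struct pending_work *w)
{
	return atomic_inc_not_zero(&w->refcount);
}

/* Drop a reference; whoever sees the count hit zero frees the object. */
static void pending_work_put(struct pending_work *w)
{
	if (atomic_dec_and_test(&w->refcount))
		kfree(w);	/* assumes the object was kmalloc()ed */
}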