[tip:locking/core] locking/atomic: Introduce inc/dec variants for the atomic_fetch_$op() API

From: tip-bot for Davidlohr Bueso
Date: Thu Jul 07 2016 - 04:34:15 EST


Commit-ID: f06628638cf6e75f179742b6c1b35076965b9fdd
Gitweb: http://git.kernel.org/tip/f06628638cf6e75f179742b6c1b35076965b9fdd
Author: Davidlohr Bueso <dave@xxxxxxxxxxxx>
AuthorDate: Tue, 28 Jun 2016 14:56:51 -0700
Committer: Ingo Molnar <mingo@xxxxxxxxxx>
CommitDate: Thu, 7 Jul 2016 09:16:20 +0200

locking/atomic: Introduce inc/dec variants for the atomic_fetch_$op() API

With the inclusion of the atomic FETCH-OP variants, many places in the
kernel can make use of atomic_fetch_$op() so that callers which need
the value/state from _before_ the operation no longer have to compute
it themselves.

Peter Zijlstra laid out the machinery, but we are still missing the
simpler inc()/dec() calls (which future patches will make use of).

This patch only deals with the generic code, as right now no arch
actually implements them -- which is similar to how the OP-RETURN
primitives are currently handled.
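
For reference, the new _fetch_ variants return the counter value from
_before_ the increment/decrement, whereas the existing inc_return()/
dec_return() primitives return the value _after_ it. A minimal
userspace sketch of that fetch-before behaviour, using C11
<stdatomic.h> as an analogue rather than the kernel API itself:

/*
 * Illustration only (not kernel code): C11 atomic_fetch_add() hands
 * back the old value, just as the atomic_fetch_inc()/_dec() helpers
 * introduced here do for atomic_t/atomic64_t/atomic_long_t.
 */
#include <stdatomic.h>
#include <stdio.h>

int main(void)
{
	atomic_int v = 41;

	int old = atomic_fetch_add(&v, 1);	/* value before the increment */

	printf("before: %d after: %d\n", old, atomic_load(&v));	/* 41 42 */
	return 0;
}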

Signed-off-by: Davidlohr Bueso <dbueso@xxxxxxx>
Signed-off-by: Peter Zijlstra (Intel) <peterz@xxxxxxxxxxxxx>
Cc: Andrew Morton <akpm@xxxxxxxxxxxxxxxxxxxx>
Cc: James.Bottomley@xxxxxxxxxxxxxxxxxxxxx
Cc: Linus Torvalds <torvalds@xxxxxxxxxxxxxxxxxxxx>
Cc: Paul E. McKenney <paulmck@xxxxxxxxxxxxxxxxxx>
Cc: Peter Zijlstra <peterz@xxxxxxxxxxxxx>
Cc: Thomas Gleixner <tglx@xxxxxxxxxxxxx>
Cc: awalls@xxxxxxxxxxxxxxxx
Cc: bp@xxxxxxxxx
Cc: cw00.choi@xxxxxxxxxxx
Cc: davem@xxxxxxxxxxxxx
Cc: dledford@xxxxxxxxxx
Cc: dougthompson@xxxxxxxxxxxx
Cc: gregkh@xxxxxxxxxxxxxxxxxxx
Cc: hans.verkuil@xxxxxxxxx
Cc: heiko.carstens@xxxxxxxxxx
Cc: jikos@xxxxxxxxxx
Cc: kys@xxxxxxxxxxxxx
Cc: mchehab@xxxxxxxxxxxxxxx
Cc: pfg@xxxxxxx
Cc: schwidefsky@xxxxxxxxxx
Cc: sean.hefty@xxxxxxxxx
Cc: sumit.semwal@xxxxxxxxxx
Link: http://lkml.kernel.org/r/20160628215651.GA20048@xxxxxxxxxxxxxxx
Signed-off-by: Ingo Molnar <mingo@xxxxxxxxxx>
---
include/asm-generic/atomic-long.h | 22 +++++++
include/linux/atomic.h | 128 ++++++++++++++++++++++++++++++++++++++
2 files changed, 150 insertions(+)

diff --git a/include/asm-generic/atomic-long.h b/include/asm-generic/atomic-long.h
index 2d0d3cf..288cc9e 100644
--- a/include/asm-generic/atomic-long.h
+++ b/include/asm-generic/atomic-long.h
@@ -146,6 +146,28 @@ ATOMIC_LONG_FETCH_OP(xor, _relaxed)
ATOMIC_LONG_FETCH_OP(xor, _acquire)
ATOMIC_LONG_FETCH_OP(xor, _release)

+#undef ATOMIC_LONG_FETCH_OP
+
+#define ATOMIC_LONG_FETCH_INC_DEC_OP(op, mo)			\
+static inline long						\
+atomic_long_fetch_##op##mo(atomic_long_t *l)			\
+{								\
+	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;	\
+								\
+	return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(v);	\
+}
+
+ATOMIC_LONG_FETCH_INC_DEC_OP(inc,)
+ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _relaxed)
+ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _acquire)
+ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _release)
+ATOMIC_LONG_FETCH_INC_DEC_OP(dec,)
+ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _relaxed)
+ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _acquire)
+ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _release)
+
+#undef ATOMIC_LONG_FETCH_INC_DEC_OP
+
#define ATOMIC_LONG_OP(op) \
static __always_inline void \
atomic_long_##op(long i, atomic_long_t *l) \
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 12d910d..e71835b 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -188,6 +188,38 @@
#endif
#endif /* atomic_fetch_add_relaxed */

+/* atomic_fetch_inc_relaxed */
+#ifndef atomic_fetch_inc_relaxed
+
+#ifndef atomic_fetch_inc
+#define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
+#define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
+#define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
+#define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
+#else /* atomic_fetch_inc */
+#define atomic_fetch_inc_relaxed atomic_fetch_inc
+#define atomic_fetch_inc_acquire atomic_fetch_inc
+#define atomic_fetch_inc_release atomic_fetch_inc
+#endif /* atomic_fetch_inc */
+
+#else /* atomic_fetch_inc_relaxed */
+
+#ifndef atomic_fetch_inc_acquire
+#define atomic_fetch_inc_acquire(...) \
+	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
+#endif
+
+#ifndef atomic_fetch_inc_release
+#define atomic_fetch_inc_release(...) \
+	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
+#endif
+
+#ifndef atomic_fetch_inc
+#define atomic_fetch_inc(...) \
+	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
+#endif
+#endif /* atomic_fetch_inc_relaxed */
+
/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed atomic_fetch_sub
@@ -212,6 +244,38 @@
#endif
#endif /* atomic_fetch_sub_relaxed */

+/* atomic_fetch_dec_relaxed */
+#ifndef atomic_fetch_dec_relaxed
+
+#ifndef atomic_fetch_dec
+#define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
+#define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
+#define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
+#define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
+#else /* atomic_fetch_dec */
+#define atomic_fetch_dec_relaxed atomic_fetch_dec
+#define atomic_fetch_dec_acquire atomic_fetch_dec
+#define atomic_fetch_dec_release atomic_fetch_dec
+#endif /* atomic_fetch_dec */
+
+#else /* atomic_fetch_dec_relaxed */
+
+#ifndef atomic_fetch_dec_acquire
+#define atomic_fetch_dec_acquire(...) \
+	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
+#endif
+
+#ifndef atomic_fetch_dec_release
+#define atomic_fetch_dec_release(...) \
+	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
+#endif
+
+#ifndef atomic_fetch_dec
+#define atomic_fetch_dec(...) \
+	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
+#endif
+#endif /* atomic_fetch_dec_relaxed */
+
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or
@@ -697,6 +761,38 @@ static inline int atomic_dec_if_positive(atomic_t *v)
#endif
#endif /* atomic64_fetch_add_relaxed */

+/* atomic64_fetch_inc_relaxed */
+#ifndef atomic64_fetch_inc_relaxed
+
+#ifndef atomic64_fetch_inc
+#define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
+#define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
+#define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
+#define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
+#else /* atomic64_fetch_inc */
+#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
+#define atomic64_fetch_inc_acquire atomic64_fetch_inc
+#define atomic64_fetch_inc_release atomic64_fetch_inc
+#endif /* atomic64_fetch_inc */
+
+#else /* atomic64_fetch_inc_relaxed */
+
+#ifndef atomic64_fetch_inc_acquire
+#define atomic64_fetch_inc_acquire(...) \
+	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_fetch_inc_release
+#define atomic64_fetch_inc_release(...) \
+	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_fetch_inc
+#define atomic64_fetch_inc(...) \
+	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
+#endif
+#endif /* atomic64_fetch_inc_relaxed */
+
/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
@@ -721,6 +817,38 @@ static inline int atomic_dec_if_positive(atomic_t *v)
#endif
#endif /* atomic64_fetch_sub_relaxed */

+/* atomic64_fetch_dec_relaxed */
+#ifndef atomic64_fetch_dec_relaxed
+
+#ifndef atomic64_fetch_dec
+#define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
+#define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
+#define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
+#define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
+#else /* atomic64_fetch_dec */
+#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
+#define atomic64_fetch_dec_acquire atomic64_fetch_dec
+#define atomic64_fetch_dec_release atomic64_fetch_dec
+#endif /* atomic64_fetch_dec */
+
+#else /* atomic64_fetch_dec_relaxed */
+
+#ifndef atomic64_fetch_dec_acquire
+#define atomic64_fetch_dec_acquire(...) \
+	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_fetch_dec_release
+#define atomic64_fetch_dec_release(...) \
+	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_fetch_dec
+#define atomic64_fetch_dec(...) \
+	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
+#endif
+#endif /* atomic64_fetch_dec_relaxed */
+
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed atomic64_fetch_or