[PATCH 07/14] tools: introduce SMALL_CONST() macro

From: Yury Norov
Date: Wed Feb 17 2021 - 23:09:25 EST


Many algorithms become simpler when they are passed relatively small
input values. One example is the bitmap operations: when the whole bitmap
fits into a single word, they collapse to plain word operations. To
implement such simplifications, linux/bitmap.h declares the
small_const_nbits() macro.

Other subsystems may also benefit from optimizations of this sort, such
as the find_bit API in the following patches, so it is useful to
generalize the macro and extend its visibility. The generalized
SMALL_CONST(n) tests n < BITS_PER_LONG rather than <=, which is why the
bitmap helpers below now pass nbits - 1.
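
For illustration only, here is a standalone userspace sketch of the
intended usage pattern. Everything except the SMALL_CONST() definition
(demo_weight(), the sample map, the fallback loop) is hypothetical and
not part of this series; it merely mimics how bitmap_weight() uses the
macro in the hunks below.

        /* build: gcc -O2 -o small_const_demo small_const_demo.c */
        #include <stdio.h>

        #define BITS_PER_LONG (8 * (int)sizeof(long))

        /* Same definition as added to bitsperlong.h below. */
        #define SMALL_CONST(n) \
                (__builtin_constant_p(n) && (unsigned long)(n) < BITS_PER_LONG)

        /*
         * Hypothetical single-word fast path in the spirit of bitmap_weight():
         * when nbits is a compile-time constant that fits in one word, the
         * compiler resolves SMALL_CONST(nbits - 1) to 1 after inlining and
         * drops the fallback loop entirely. For non-constant nbits,
         * __builtin_constant_p() folds to 0 and the generic path is taken.
         */
        static inline int demo_weight(const unsigned long *src, unsigned int nbits)
        {
                int w = 0;
                unsigned int i;

                if (SMALL_CONST(nbits - 1))
                        return __builtin_popcountl(*src & (~0UL >> (BITS_PER_LONG - nbits)));

                for (i = 0; i < nbits; i++)
                        if (src[i / BITS_PER_LONG] & (1UL << (i % BITS_PER_LONG)))
                                w++;
                return w;
        }

        int main(void)
        {
                unsigned long map[2] = { 0xf0f0UL, 0x3UL };

                printf("%d\n", demo_weight(map, 16));                /* fast path: 8 */
                printf("%d\n", demo_weight(map, 2 * BITS_PER_LONG)); /* loop: 10 */
                return 0;
        }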

Signed-off-by: Yury Norov <yury.norov@xxxxxxxxx>
---
tools/include/asm-generic/bitsperlong.h |  2 ++
tools/include/linux/bitmap.h            | 19 ++++++++-----------
2 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/tools/include/asm-generic/bitsperlong.h b/tools/include/asm-generic/bitsperlong.h
index 8f2283052333..432d272baf27 100644
--- a/tools/include/asm-generic/bitsperlong.h
+++ b/tools/include/asm-generic/bitsperlong.h
@@ -18,4 +18,6 @@
#define BITS_PER_LONG_LONG 64
#endif

+#define SMALL_CONST(n) (__builtin_constant_p(n) && (unsigned long)(n) < BITS_PER_LONG)
+
#endif /* __ASM_GENERIC_BITS_PER_LONG */
diff --git a/tools/include/linux/bitmap.h b/tools/include/linux/bitmap.h
index b6e8430c8bc9..fdc0b64bbdbf 100644
--- a/tools/include/linux/bitmap.h
+++ b/tools/include/linux/bitmap.h
@@ -19,12 +19,9 @@ int __bitmap_equal(const unsigned long *bitmap1,
const unsigned long *bitmap2, unsigned int bits);
void bitmap_clear(unsigned long *map, unsigned int start, int len);

-#define small_const_nbits(nbits) \
- (__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG)
-
static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
{
- if (small_const_nbits(nbits))
+ if (SMALL_CONST(nbits - 1))
*dst = 0UL;
else {
int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
@@ -35,7 +32,7 @@ static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
{
unsigned int nlongs = BITS_TO_LONGS(nbits);
- if (!small_const_nbits(nbits)) {
+ if (!SMALL_CONST(nbits - 1)) {
unsigned int len = (nlongs - 1) * sizeof(unsigned long);
memset(dst, 0xff, len);
}
@@ -44,7 +41,7 @@ static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)

static inline int bitmap_empty(const unsigned long *src, unsigned nbits)
{
- if (small_const_nbits(nbits))
+ if (SMALL_CONST(nbits - 1))
return !(*src & BITS_FIRST(nbits - 1));

return find_first_bit(src, nbits) == nbits;
@@ -52,7 +49,7 @@ static inline int bitmap_empty(const unsigned long *src, unsigned nbits)

static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
{
- if (small_const_nbits(nbits))
+ if (SMALL_CONST(nbits - 1))
return !(~(*src) & BITS_FIRST(nbits - 1));

return find_first_zero_bit(src, nbits) == nbits;
@@ -60,7 +57,7 @@ static inline int bitmap_full(const unsigned long *src, unsigned int nbits)

static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
{
- if (small_const_nbits(nbits))
+ if (SMALL_CONST(nbits - 1))
return hweight_long(*src & BITS_FIRST(nbits - 1));
return __bitmap_weight(src, nbits);
}
@@ -68,7 +65,7 @@ static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
const unsigned long *src2, unsigned int nbits)
{
- if (small_const_nbits(nbits))
+ if (SMALL_CONST(nbits - 1))
*dst = *src1 | *src2;
else
__bitmap_or(dst, src1, src2, nbits);
@@ -146,7 +143,7 @@ size_t bitmap_scnprintf(unsigned long *bitmap, unsigned int nbits,
static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
const unsigned long *src2, unsigned int nbits)
{
- if (small_const_nbits(nbits))
+ if (SMALL_CONST(nbits - 1))
return (*dst = *src1 & *src2 & BITS_FIRST(nbits - 1)) != 0;
return __bitmap_and(dst, src1, src2, nbits);
}
@@ -162,7 +159,7 @@ static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
static inline int bitmap_equal(const unsigned long *src1,
const unsigned long *src2, unsigned int nbits)
{
- if (small_const_nbits(nbits))
+ if (SMALL_CONST(nbits - 1))
return !((*src1 ^ *src2) & BITS_FIRST(nbits - 1));
if (__builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
--
2.25.1