Many algorithms become simpler when they are passed relatively small input
values. One example is bitmap operations when the whole bitmap fits into one
word. To implement such simplifications, linux/bitmap.h declares the
small_const_nbits() macro. Other subsystems may also benefit from
optimizations of this sort, like the find_bit API in the following patches,
so it looks helpful to generalize the macro and extend its visibility.

Signed-off-by: Yury Norov <yury.norov@xxxxxxxxx>
---
 include/asm-generic/bitsperlong.h       |  2 ++
 include/linux/bitmap.h                  | 33 +++++++++++--------------
 tools/include/asm-generic/bitsperlong.h |  2 ++
 tools/include/linux/bitmap.h            | 19 ++++++--------
 4 files changed, 27 insertions(+), 29 deletions(-)

diff --git a/include/asm-generic/bitsperlong.h b/include/asm-generic/bitsperlong.h
index 3905c1c93dc2..0eeb77544f1d 100644
--- a/include/asm-generic/bitsperlong.h
+++ b/include/asm-generic/bitsperlong.h
@@ -23,4 +23,6 @@
 #define BITS_PER_LONG_LONG 64
 #endif
 
+#define SMALL_CONST(n) (__builtin_constant_p(n) && (unsigned long)(n) < BITS_PER_LONG)
+
 #endif /* __ASM_GENERIC_BITS_PER_LONG */
diff --git a/include/linux/bitmap.h b/include/linux/bitmap.h
index adf7bd9f0467..e89f1dace846 100644
--- a/include/linux/bitmap.h
+++ b/include/linux/bitmap.h
@@ -224,9 +224,6 @@ extern int bitmap_print_to_pagebuf(bool list, char *buf,
  * so make such users (should any ever turn up) call the out-of-line
  * versions.
  */
-#define small_const_nbits(nbits) \
-        (__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG && (nbits) > 0)
-
 static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
 {
         unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
@@ -278,7 +275,7 @@ extern void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap,
 static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
                         const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return (*dst = *src1 & *src2 & BITS_FIRST(nbits - 1)) != 0;
         return __bitmap_and(dst, src1, src2, nbits);
 }
@@ -286,7 +283,7 @@ static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
 static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
                         const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 *dst = *src1 | *src2;
         else
                 __bitmap_or(dst, src1, src2, nbits);
@@ -295,7 +292,7 @@ static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
                         const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 *dst = *src1 ^ *src2;
         else
                 __bitmap_xor(dst, src1, src2, nbits);
@@ -304,7 +301,7 @@ static inline int bitmap_andnot(unsigned long *dst, const unsigned long *src1,
                         const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return (*dst = *src1 & ~(*src2) & BITS_FIRST(nbits - 1)) != 0;
         return __bitmap_andnot(dst, src1, src2, nbits);
 }
@@ -312,7 +309,7 @@ static inline int bitmap_andnot(unsigned long *dst, const unsigned long *src1,
 static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
                         unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 *dst = ~(*src);
         else
                 __bitmap_complement(dst, src, nbits);
@@ -328,7 +325,7 @@ static inline void bitmap_complement(unsigned long *dst, const unsigned long *sr
 static inline int bitmap_equal(const unsigned long *src1,
                         const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return !((*src1 ^ *src2) & BITS_FIRST(nbits - 1));
         if (__builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
             IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
@@ -350,7 +347,7 @@ static inline bool bitmap_or_equal(const unsigned long *src1,
                                    const unsigned long *src3,
                                    unsigned int nbits)
 {
-        if (!small_const_nbits(nbits))
+        if (!SMALL_CONST(nbits - 1))
                 return __bitmap_or_equal(src1, src2, src3, nbits);
 
         return !(((*src1 | *src2) ^ *src3) & BITS_FIRST(nbits - 1));
@@ -359,7 +356,7 @@ static inline bool bitmap_or_equal(const unsigned long *src1,
 static inline int bitmap_intersects(const unsigned long *src1,
                         const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return ((*src1 & *src2) & BITS_FIRST(nbits - 1)) != 0;
         else
                 return __bitmap_intersects(src1, src2, nbits);
@@ -368,7 +365,7 @@ static inline int bitmap_intersects(const unsigned long *src1,
 static inline int bitmap_subset(const unsigned long *src1,
                         const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return !((*src1 & ~(*src2)) & BITS_FIRST(nbits - 1));
         else
                 return __bitmap_subset(src1, src2, nbits);
@@ -376,7 +373,7 @@ static inline int bitmap_subset(const unsigned long *src1,
 static inline bool bitmap_empty(const unsigned long *src, unsigned nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return !(*src & BITS_FIRST(nbits - 1));
 
         return find_first_bit(src, nbits) == nbits;
@@ -384,7 +381,7 @@ static inline bool bitmap_empty(const unsigned long *src, unsigned nbits)
 static inline bool bitmap_full(const unsigned long *src, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return !(~(*src) & BITS_FIRST(nbits - 1));
 
         return find_first_zero_bit(src, nbits) == nbits;
@@ -392,7 +389,7 @@ static inline bool bitmap_full(const unsigned long *src, unsigned int nbits)
 static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return hweight_long(*src & BITS_FIRST(nbits - 1));
         return __bitmap_weight(src, nbits);
 }
@@ -428,7 +425,7 @@ static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
 static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                                 unsigned int shift, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 *dst = (*src & BITS_FIRST(nbits - 1)) >> shift;
         else
                 __bitmap_shift_right(dst, src, shift, nbits);
@@ -437,7 +434,7 @@ static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *s
 static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
                                 unsigned int shift, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 *dst = (*src << shift) & BITS_FIRST(nbits - 1);
         else
                 __bitmap_shift_left(dst, src, shift, nbits);
@@ -449,7 +446,7 @@ static inline void bitmap_replace(unsigned long *dst,
                                   const unsigned long *mask,
                                   unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 *dst = (*old & ~(*mask)) | (*new & *mask);
         else
                 __bitmap_replace(dst, old, new, mask, nbits);
diff --git a/tools/include/asm-generic/bitsperlong.h b/tools/include/asm-generic/bitsperlong.h
index 8f2283052333..432d272baf27 100644
--- a/tools/include/asm-generic/bitsperlong.h
+++ b/tools/include/asm-generic/bitsperlong.h
@@ -18,4 +18,6 @@
 #define BITS_PER_LONG_LONG 64
 #endif
 
+#define SMALL_CONST(n) (__builtin_constant_p(n) && (unsigned long)(n) < BITS_PER_LONG)
+
 #endif /* __ASM_GENERIC_BITS_PER_LONG */
diff --git a/tools/include/linux/bitmap.h b/tools/include/linux/bitmap.h
index b6e8430c8bc9..fdc0b64bbdbf 100644
--- a/tools/include/linux/bitmap.h
+++ b/tools/include/linux/bitmap.h
@@ -19,12 +19,9 @@ int __bitmap_equal(const unsigned long *bitmap1,
                    const unsigned long *bitmap2, unsigned int bits);
 void bitmap_clear(unsigned long *map, unsigned int start, int len);
 
-#define small_const_nbits(nbits) \
-        (__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG)
-
 static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 *dst = 0UL;
         else {
                 int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
@@ -35,7 +32,7 @@ static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
 static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
 {
         unsigned int nlongs = BITS_TO_LONGS(nbits);
-        if (!small_const_nbits(nbits)) {
+        if (!SMALL_CONST(nbits - 1)) {
                 unsigned int len = (nlongs - 1) * sizeof(unsigned long);
                 memset(dst, 0xff, len);
         }
@@ -44,7 +41,7 @@ static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
 static inline int bitmap_empty(const unsigned long *src, unsigned nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return !(*src & BITS_FIRST(nbits - 1));
 
         return find_first_bit(src, nbits) == nbits;
@@ -52,7 +49,7 @@ static inline int bitmap_empty(const unsigned long *src, unsigned nbits)
 static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return !(~(*src) & BITS_FIRST(nbits - 1));
 
         return find_first_zero_bit(src, nbits) == nbits;
@@ -60,7 +57,7 @@ static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
 static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return hweight_long(*src & BITS_FIRST(nbits - 1));
         return __bitmap_weight(src, nbits);
 }
@@ -68,7 +65,7 @@ static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
 static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
                              const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 *dst = *src1 | *src2;
         else
                 __bitmap_or(dst, src1, src2, nbits);
@@ -146,7 +143,7 @@ size_t bitmap_scnprintf(unsigned long *bitmap, unsigned int nbits,
 static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
                              const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return (*dst = *src1 & *src2 & BITS_FIRST(nbits - 1)) != 0;
         return __bitmap_and(dst, src1, src2, nbits);
 }
@@ -162,7 +159,7 @@ static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
 static inline int bitmap_equal(const unsigned long *src1,
                                const unsigned long *src2, unsigned int nbits)
 {
-        if (small_const_nbits(nbits))
+        if (SMALL_CONST(nbits - 1))
                 return !((*src1 ^ *src2) & BITS_FIRST(nbits - 1));
         if (__builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
             IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
-- 
2.25.1
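
[Editorial note, not part of the patch] A minimal user-space sketch of what
the SMALL_CONST(nbits - 1) test buys: when nbits is a non-zero compile-time
constant that fits in one word, the helper collapses to a single-word
operation; nbits == 0, large nbits, and non-constant nbits all fall through
to the out-of-line path, which is why the old "(nbits) > 0" check is no
longer needed. LOW_BITS(), weight() and slow_weight() below are hypothetical
stand-ins for BITS_FIRST(n - 1), bitmap_weight() and __bitmap_weight(), and
__builtin_popcountl() stands in for hweight_long().

#include <stdio.h>

#define BITS_PER_LONG   (8 * sizeof(long))

/* Same test the patch adds to asm-generic/bitsperlong.h. */
#define SMALL_CONST(n) \
        (__builtin_constant_p(n) && (unsigned long)(n) < BITS_PER_LONG)

/* Stand-in for the BITS_FIRST(n - 1) mask used in the patch: low n bits set. */
#define LOW_BITS(n)     (~0UL >> (BITS_PER_LONG - (n)))

/* Out-of-line fallback for zero, large, or non-constant nbits. */
static int slow_weight(const unsigned long *src, unsigned int nbits)
{
        unsigned int i, w = 0;

        for (i = 0; i < nbits; i++)
                w += (src[i / BITS_PER_LONG] >> (i % BITS_PER_LONG)) & 1;
        return w;
}

static inline int weight(const unsigned long *src, unsigned int nbits)
{
        /*
         * For constant 1 <= nbits <= BITS_PER_LONG the condition is true at
         * compile time and the call folds to one masked popcount; for
         * nbits == 0, (unsigned long)(nbits - 1) wraps to ULONG_MAX and the
         * slow path is taken instead.
         */
        if (SMALL_CONST(nbits - 1))
                return __builtin_popcountl(*src & LOW_BITS(nbits));
        return slow_weight(src, nbits);
}

int main(void)
{
        unsigned long map[2] = { 0xf0f0UL, 0x3UL };

        printf("%d\n", weight(map, 16));                 /* inline path: prints 8 */
        printf("%d\n", weight(map, 2 * BITS_PER_LONG));  /* fallback: prints 10 */
        return 0;
}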