[PATCH 5/6] lib: add fast path for find_next_*_bit()

Similarly to the bitmap functions, find_next_*_bit() users will
benefit if we handle the case of bitmaps that fit into a single word
inline. In the best case, the compiler can replace the function call
with a single ffs or ffz instruction.
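
For illustration, a hypothetical caller along these lines (the names
below are made up, not taken from the kernel tree) now compiles down
to a mask plus one bit-search instruction instead of an out-of-line
call to _find_next_bit():

  #include <linux/bitmap.h>
  #include <linux/bitops.h>

  static DECLARE_BITMAP(slot_busy, BITS_PER_LONG);

  /* Return the first free slot at or after 'from', or BITS_PER_LONG if none. */
  static unsigned long next_free_slot(unsigned long from)
  {
  	/*
  	 * The size is a compile-time constant no larger than one word,
  	 * so small_const_nbits() is true and find_next_zero_bit()
  	 * reduces to the inline ffz-based fast path.
  	 */
  	return find_next_zero_bit(slot_busy, BITS_PER_LONG, from);
  }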

Signed-off-by: Yury Norov <yury.norov@xxxxxxxxx>
---
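A note on the _le variants below: the fast path byte-swaps the word
with swab() so that little-endian bit numbering lines up with the
native layout on big-endian kernels, letting the same __ffs()/ffz()
sequence be reused. A minimal sketch of the idea (the helper name is
made up, for illustration only):

  #include <linux/bitmap.h>
  #include <linux/bitops.h>
  #include <linux/swab.h>

  /*
   * swab() reverses the bytes of the word while bit positions within
   * each byte are unchanged, so little-endian bit n ends up at bit n
   * of the swapped value and __ffs() applies directly.
   */
  static inline unsigned long first_bit_le_word(unsigned long word,
  					      unsigned long size)
  {
  	unsigned long val = swab(word) & BITMAP_LAST_WORD_MASK(size);

  	return val ? __ffs(val) : size;
  }
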
 include/asm-generic/bitops/find.h | 39 +++++++++++++++++++++++++++++++
 include/asm-generic/bitops/le.h   | 28 ++++++++++++++++++++++
 2 files changed, 67 insertions(+)

diff --git a/include/asm-generic/bitops/find.h b/include/asm-generic/bitops/find.h
index 7ad70dab8e93..d45096069011 100644
--- a/include/asm-generic/bitops/find.h
+++ b/include/asm-generic/bitops/find.h
@@ -20,6 +20,18 @@ static inline
 unsigned long find_next_bit(const unsigned long *addr, unsigned long size,
 			    unsigned long offset)
 {
+	if (small_const_nbits(size)) {
+		unsigned long val;
+
+		if (unlikely(offset >= size))
+			return size;
+
+		val = *addr & BITMAP_FIRST_WORD_MASK(offset)
+				& BITMAP_LAST_WORD_MASK(size);
+
+		return val ? __ffs(val) : size;
+	}
+
 	return _find_next_bit(addr, NULL, size, offset, 0UL, 0);
 }
 #endif
@@ -40,6 +52,18 @@ unsigned long find_next_and_bit(const unsigned long *addr1,
 		const unsigned long *addr2, unsigned long size,
 		unsigned long offset)
 {
+	if (small_const_nbits(size)) {
+		unsigned long val;
+
+		if (unlikely(offset >= size))
+			return size;
+
+		val = *addr1 & *addr2 & BITMAP_FIRST_WORD_MASK(offset)
+				      & BITMAP_LAST_WORD_MASK(size);
+
+		return val ? __ffs(val) : size;
+	}
+
 	return _find_next_bit(addr1, addr2, size, offset, 0UL, 0);
 }
 #endif
@@ -58,6 +82,21 @@ static inline
 unsigned long find_next_zero_bit(const unsigned long *addr, unsigned long size,
 				 unsigned long offset)
 {
+	if (small_const_nbits(size)) {
+		unsigned long val, idx;
+
+		if (unlikely(offset >= size))
+			return size;
+
+		val = *addr | ~BITMAP_FIRST_WORD_MASK(offset);
+		if (val == ~0UL)
+			return size;
+
+		idx = ffz(val);
+
+		return idx < size ? idx : size;
+	}
+
 	return _find_next_bit(addr, NULL, size, offset, ~0UL, 0);
 }
 #endif
diff --git a/include/asm-generic/bitops/le.h b/include/asm-generic/bitops/le.h
index 4cf44ea16ec0..f4a76d3d145f 100644
--- a/include/asm-generic/bitops/le.h
+++ b/include/asm-generic/bitops/le.h
@@ -5,6 +5,7 @@
 #include <asm/types.h>
 #include <asm/byteorder.h>
 #include <asm-generic/bitops/find.h>
+#include <linux/swab.h>
 
 #if defined(__LITTLE_ENDIAN)
 
@@ -37,6 +38,21 @@ static inline
 unsigned long find_next_zero_bit_le(const void *addr, unsigned
 		long size, unsigned long offset)
 {
+	if (small_const_nbits(size)) {
+		unsigned long val = *(const unsigned long *)addr, idx;
+
+		if (unlikely(offset >= size))
+			return size;
+
+		val = swab(val) | ~BITMAP_FIRST_WORD_MASK(offset);
+		if (val == ~0UL)
+			return size;
+
+		idx = ffz(val);
+
+		return idx < size ? idx : size;
+	}
+
 	return _find_next_bit(addr, NULL, size, offset, ~0UL, 1);
 }
 #endif
@@ -46,6 +62,18 @@ static inline
 unsigned long find_next_bit_le(const void *addr, unsigned
 		long size, unsigned long offset)
 {
+	if (small_const_nbits(size)) {
+		unsigned long val = *(const unsigned long *)addr;
+
+		if (unlikely(offset >= size))
+			return size;
+
+		val = swab(val) & BITMAP_FIRST_WORD_MASK(offset)
+				& BITMAP_LAST_WORD_MASK(size);
+
+		return val ? __ffs(val) : size;
+	}
+
 	return _find_next_bit(addr, NULL, size, offset, 0UL, 1);
 }
 #endif
-- 
2.25.1



