[RESEND PATCH v2] MIPS: crypto: Clean up useless assignment operations

When the "len & sizeof(u32)" branch is entered on a 64-bit CPU, len must
be less than 8, so after one u32 is consumed, len must be less than 4.
Since subtracting 4 only clears bit 2 of len, and the subsequent
"len & sizeof(u16)" and "len & sizeof(u8)" tests inspect bits 1 and 0
only, the "len -= sizeof(u32)" update is unnecessary for 64-bit CPUs.

While at it, replace the `while' loops with equivalent `for' loops to
make the code structure a little tidier.

Suggested-by: Maciej W. Rozycki <macro@xxxxxxxxxxx>
Link: https://lore.kernel.org/all/alpine.DEB.2.21.2406281713040.43454@xxxxxxxxxxxxxxxxx/
Signed-off-by: Guan Wentao <guanwentao@xxxxxxxxxxxxx>
Signed-off-by: WangYuli <wangyuli@xxxxxxxxxxxxx>
---
 arch/mips/crypto/crc32-mips.c | 27 +++++++--------------------
 1 file changed, 7 insertions(+), 20 deletions(-)
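
As a sanity check, here is a minimal userspace sketch (not part of the
patch; identifiers are illustrative) that exhaustively verifies the claim
above: for every residual length below 8, dropping the subtraction never
changes what the u16/u8 tail tests observe.

	#include <assert.h>
	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		/* After the u64 loop on 64-bit, 0 <= len < 8. */
		for (uint32_t len = 0; len < 8; len++) {
			if (len & sizeof(uint32_t)) {
				uint32_t rem = len - sizeof(uint32_t);

				/*
				 * Subtracting 4 only clears bit 2; bits 1
				 * and 0, which the u16/u8 tail tests look
				 * at, are unchanged.
				 */
				assert((rem & sizeof(uint16_t)) ==
				       (len & sizeof(uint16_t)));
				assert((rem & sizeof(uint8_t)) ==
				       (len & sizeof(uint8_t)));
			}
		}
		printf("dropping the u32 decrement never changes the tail tests\n");
		return 0;
	}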

diff --git a/arch/mips/crypto/crc32-mips.c b/arch/mips/crypto/crc32-mips.c
index ec6d58008f8e..3a80b7576ec3 100644
--- a/arch/mips/crypto/crc32-mips.c
+++ b/arch/mips/crypto/crc32-mips.c
@@ -77,36 +77,29 @@ static u32 crc32_mips_le_hw(u32 crc_, const u8 *p, unsigned int len)
 {
 	u32 crc = crc_;
 
-#ifdef CONFIG_64BIT
-	while (len >= sizeof(u64)) {
+#if IS_ENABLED(CONFIG_64BIT)
+	for (; len >= sizeof(u64); p += sizeof(u64), len -= sizeof(u64)) {
 		u64 value = get_unaligned_le64(p);
-
 		CRC32(crc, value, d);
-		p += sizeof(u64);
-		len -= sizeof(u64);
 	}
 
 	if (len & sizeof(u32)) {
 #else /* !CONFIG_64BIT */
-	while (len >= sizeof(u32)) {
+	for (; len >= sizeof(u32); len -= sizeof(u32)) {
 #endif
 		u32 value = get_unaligned_le32(p);
-
 		CRC32(crc, value, w);
 		p += sizeof(u32);
-		len -= sizeof(u32);
 	}
 
 	if (len & sizeof(u16)) {
 		u16 value = get_unaligned_le16(p);
-
 		CRC32(crc, value, h);
 		p += sizeof(u16);
 	}
 
 	if (len & sizeof(u8)) {
 		u8 value = *p++;
-
 		CRC32(crc, value, b);
 	}
 
@@ -117,38 +110,32 @@ static u32 crc32c_mips_le_hw(u32 crc_, const u8 *p, unsigned int len)
 {
 	u32 crc = crc_;
 
-#ifdef CONFIG_64BIT
-	while (len >= sizeof(u64)) {
+#if IS_ENABLED(CONFIG_64BIT)
+	for (; len >= sizeof(u64); p += sizeof(u64), len -= sizeof(u64)) {
 		u64 value = get_unaligned_le64(p);
-
 		CRC32C(crc, value, d);
-		p += sizeof(u64);
-		len -= sizeof(u64);
 	}
 
 	if (len & sizeof(u32)) {
 #else /* !CONFIG_64BIT */
-	while (len >= sizeof(u32)) {
+	for (; len >= sizeof(u32); len -= sizeof(u32)) {
 #endif
 		u32 value = get_unaligned_le32(p);
-
 		CRC32C(crc, value, w);
 		p += sizeof(u32);
-		len -= sizeof(u32);
 	}
 
 	if (len & sizeof(u16)) {
 		u16 value = get_unaligned_le16(p);
-
 		CRC32C(crc, value, h);
 		p += sizeof(u16);
 	}
 
 	if (len & sizeof(u8)) {
 		u8 value = *p++;
-
 		CRC32C(crc, value, b);
 	}
+
 	return crc;
 }
 
-- 
2.43.4