[PATCH] crypto: arm64/sm4-ccm - Rewrite skcipher walker loop

In the skcipher walker API, an error is indicated not only by a
non-zero return value, but also by walk->nbytes being zero. This
forces the layout of the skcipher walker loop to differ noticeably
from the usual one. That is not a problem in itself, but it is
likely to confuse readers and make the code harder to maintain.
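
For comparison, the usual convention in arm64 crypto glue code looks
roughly like the following (a minimal sketch, with the actual cipher
call elided; "walk" is assumed to be an already-initialized
struct skcipher_walk):

	while (walk.nbytes) {
		unsigned int nbytes = walk.nbytes;

		/* all but the last chunk must be block-aligned */
		if (nbytes < walk.total)
			nbytes = round_down(nbytes, SM4_BLOCK_SIZE);

		/* encrypt/decrypt nbytes bytes from walk.src.virt.addr
		 * into walk.dst.virt.addr here */

		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
	return err;

Here walk.nbytes doubles as the loop condition and as the
completion/error indicator after skcipher_walk_done(), which is the
convention the paragraph above refers to.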

This patch rewrites the skcipher walker loop and moves the handling
of the last chunk out of the loop, which avoids incorrect calls into
the skcipher walker API. Besides following the usual convention of
checking walk->nbytes, this also makes the execution logic of the
loop clearer and easier to understand.
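
In simplified form (the hunk below has the real code, including the
kernel_neon_begin()/kernel_neon_end() pairing around the walker
calls), the rewritten flow becomes:

	/* full chunks: always a whole number of blocks */
	while (walk->nbytes && walk->nbytes != walk->total) {
		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;

		sm4_ce_ccm_crypt(rkey_enc, ..., walk->nbytes - tail, mac);
		err = skcipher_walk_done(walk, tail);
	}

	/* last chunk, possibly a partial block, then the TAG */
	if (walk->nbytes) {
		sm4_ce_ccm_crypt(rkey_enc, ..., walk->nbytes, mac);
		sm4_ce_ccm_final(rkey_enc, ctr0, mac);
		err = skcipher_walk_done(walk, 0);
	} else {
		sm4_ce_ccm_final(rkey_enc, ctr0, mac);
	}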

Signed-off-by: Tianjia Zhang <tianjia.zhang@xxxxxxxxxxxxxxxxx>
---
 arch/arm64/crypto/sm4-ce-ccm-glue.c | 44 ++++++++++++++++-------------
 1 file changed, 24 insertions(+), 20 deletions(-)

diff --git a/arch/arm64/crypto/sm4-ce-ccm-glue.c b/arch/arm64/crypto/sm4-ce-ccm-glue.c
index f2cec7b52efc..5e7e17bbec81 100644
--- a/arch/arm64/crypto/sm4-ce-ccm-glue.c
+++ b/arch/arm64/crypto/sm4-ce-ccm-glue.c
@@ -166,7 +166,7 @@ static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
 					unsigned int nbytes, u8 *mac))
 {
 	u8 __aligned(8) ctr0[SM4_BLOCK_SIZE];
-	int err;
+	int err = 0;
 
 	/* preserve the initial ctr0 for the TAG */
 	memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE);
@@ -177,33 +177,37 @@ static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
 	if (req->assoclen)
 		ccm_calculate_auth_mac(req, mac);
 
-	do {
+	while (walk->nbytes && walk->nbytes != walk->total) {
 		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
-		const u8 *src = walk->src.virt.addr;
-		u8 *dst = walk->dst.virt.addr;
 
-		if (walk->nbytes == walk->total)
-			tail = 0;
+		sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
+				 walk->src.virt.addr, walk->iv,
+				 walk->nbytes - tail, mac);
+
+		kernel_neon_end();
+
+		err = skcipher_walk_done(walk, tail);
+
+		kernel_neon_begin();
+	}
 
-		if (walk->nbytes - tail)
-			sm4_ce_ccm_crypt(rkey_enc, dst, src, walk->iv,
-					 walk->nbytes - tail, mac);
+	if (walk->nbytes) {
+		sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
+				 walk->src.virt.addr, walk->iv,
+				 walk->nbytes, mac);
 
-		if (walk->nbytes == walk->total)
-			sm4_ce_ccm_final(rkey_enc, ctr0, mac);
+		sm4_ce_ccm_final(rkey_enc, ctr0, mac);
 
 		kernel_neon_end();
 
-		if (walk->nbytes) {
-			err = skcipher_walk_done(walk, tail);
-			if (err)
-				return err;
-			if (walk->nbytes)
-				kernel_neon_begin();
-		}
-	} while (walk->nbytes > 0);
+		err = skcipher_walk_done(walk, 0);
+	} else {
+		sm4_ce_ccm_final(rkey_enc, ctr0, mac);
 
-	return 0;
+		kernel_neon_end();
+	}
+
+	return err;
 }
 
 static int ccm_encrypt(struct aead_request *req)
-- 
2.24.3 (Apple Git-128)



