[PATCH v3 2/2] crypto: inside-secure - Replace generic aes with libaes

Commit 363a90c2d517 ("crypto: safexcel/aes - switch to
library version of key expansion routine") removed the
CRYPTO_AES selection from the driver's Kconfig. However,
parts of the driver still rely on the generic AES cipher,
e.g. safexcel_aead_gcm_cra_init() and
safexcel_xcbcmac_cra_init(). If CRYPTO_AES is not enabled
manually, transform allocation fails for those algorithms.
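With CRYPTO_AES disabled, the failing path in those
->cra_init() callbacks looks roughly like this (simplified
sketch, not the exact driver code):

  ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
  if (IS_ERR(ctx->hkaes))
          /* no "aes" cipher registered -> -ENOENT, tfm allocation fails */
          return PTR_ERR(ctx->hkaes);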

To resolve this, replace all remaining uses of the generic
AES cipher with their AES library counterparts.
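The library-based pattern used throughout the patch is, in
sketch form (the helper below is illustrative only, not part
of the driver):

  #include <crypto/aes.h>
  #include <linux/string.h>

  /* encrypt one block with the AES library instead of a cipher tfm */
  static int example_encrypt_block(const u8 *key, unsigned int keylen,
                                   const u8 *in, u8 *out)
  {
          struct crypto_aes_ctx aes;
          int ret;

          /* replaces crypto_cipher_setkey() */
          ret = aes_expandkey(&aes, key, keylen);
          if (ret)
                  return ret;

          /* replaces crypto_cipher_encrypt_one() */
          aes_encrypt(&aes, out, in);

          /* wipe the expanded key when it is no longer needed */
          memzero_explicit(&aes, sizeof(aes));
          return 0;
  }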

Fixes: 363a90c2d517 ("crypto: safexcel/aes - switch to library version of key expansion routine")
Signed-off-by: Peter Harliman Liem <pliem@xxxxxxxxxxxxx>
---
v3:
 Rework patch to replace generic aes with libaes instead
v2:
 Add fixes tag

 .../crypto/inside-secure/safexcel_cipher.c    | 16 +----
 drivers/crypto/inside-secure/safexcel_hash.c  | 59 ++++++-------------
 2 files changed, 21 insertions(+), 54 deletions(-)

diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index 5a222c228c3b..32a37e3850c5 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -63,7 +63,6 @@ struct safexcel_cipher_ctx {
 	u32 hash_alg;
 	u32 state_sz;
 
-	struct crypto_cipher *hkaes;
 	struct crypto_aead *fback;
 };
 
@@ -2607,15 +2606,8 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
 	ctx->key_len = len;
 
 	/* Compute hash key by encrypting zeroes with cipher key */
-	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
-	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
-				CRYPTO_TFM_REQ_MASK);
-	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
-	if (ret)
-		return ret;
-
 	memset(hashkey, 0, AES_BLOCK_SIZE);
-	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
+	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
@@ -2644,15 +2636,11 @@ static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
 	ctx->xcm = EIP197_XCM_MODE_GCM;
 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
 
-	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
-	return PTR_ERR_OR_ZERO(ctx->hkaes);
+	return 0;
 }
 
 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
 {
-	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	crypto_free_cipher(ctx->hkaes);
 	safexcel_aead_cra_exit(tfm);
 }
 
diff --git a/drivers/crypto/inside-secure/safexcel_hash.c b/drivers/crypto/inside-secure/safexcel_hash.c
index 2124416742f8..103fc551d2af 100644
--- a/drivers/crypto/inside-secure/safexcel_hash.c
+++ b/drivers/crypto/inside-secure/safexcel_hash.c
@@ -30,7 +30,7 @@ struct safexcel_ahash_ctx {
 	bool fb_init_done;
 	bool fb_do_setkey;
 
-	struct crypto_cipher *kaes;
+	struct crypto_aes_ctx *aes;
 	struct crypto_ahash *fback;
 	struct crypto_shash *shpre;
 	struct shash_desc *shdesc;
@@ -824,7 +824,7 @@ static int safexcel_ahash_final(struct ahash_request *areq)
 			result[i] = swab32(ctx->base.ipad.word[i + 4]);
 		}
 		areq->result[0] ^= 0x80;			// 10- padding
-		crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
+		aes_encrypt(ctx->aes, areq->result, areq->result);
 		return 0;
 	} else if (unlikely(req->hmac &&
 			    (req->len == req->block_sz) &&
@@ -2083,37 +2083,26 @@ static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
 				 unsigned int len)
 {
 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
-	struct crypto_aes_ctx aes;
 	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
 	int ret, i;
 
-	ret = aes_expandkey(&aes, key, len);
+	ret = aes_expandkey(ctx->aes, key, len);
 	if (ret)
 		return ret;
 
 	/* precompute the XCBC key material */
-	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
-	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
-				CRYPTO_TFM_REQ_MASK);
-	ret = crypto_cipher_setkey(ctx->kaes, key, len);
-	if (ret)
-		return ret;
-
-	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
-		"\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
-	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
-		"\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
-	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
-		"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
+	aes_encrypt(ctx->aes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
+		    "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
+	aes_encrypt(ctx->aes, (u8 *)key_tmp,
+		    "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
+	aes_encrypt(ctx->aes, (u8 *)key_tmp + AES_BLOCK_SIZE,
+		    "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
 	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
 		ctx->base.ipad.word[i] = swab32(key_tmp[i]);
 
-	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
-	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
-				CRYPTO_TFM_REQ_MASK);
-	ret = crypto_cipher_setkey(ctx->kaes,
-				   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
-				   AES_MIN_KEY_SIZE);
+	ret = aes_expandkey(ctx->aes,
+			    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
+			    AES_MIN_KEY_SIZE);
 	if (ret)
 		return ret;
 
@@ -2121,7 +2110,6 @@ static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
 	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
 	ctx->cbcmac = false;
 
-	memzero_explicit(&aes, sizeof(aes));
 	return 0;
 }
 
@@ -2130,15 +2118,15 @@ static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	safexcel_ahash_cra_init(tfm);
-	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
-	return PTR_ERR_OR_ZERO(ctx->kaes);
+	ctx->aes = kmalloc(sizeof(*ctx->aes), GFP_KERNEL);
+	return ctx->aes == NULL ? -ENOMEM : 0;
 }
 
 static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
 {
 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	crypto_free_cipher(ctx->kaes);
+	kfree(ctx->aes);
 	safexcel_ahash_cra_exit(tfm);
 }
 
@@ -2178,31 +2166,23 @@ static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
 				unsigned int len)
 {
 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
-	struct crypto_aes_ctx aes;
 	__be64 consts[4];
 	u64 _const[2];
 	u8 msb_mask, gfmask;
 	int ret, i;
 
-	ret = aes_expandkey(&aes, key, len);
+	/* precompute the CMAC key material */
+	ret = aes_expandkey(ctx->aes, key, len);
 	if (ret)
 		return ret;
 
 	for (i = 0; i < len / sizeof(u32); i++)
-		ctx->base.ipad.word[i + 8] = swab32(aes.key_enc[i]);
-
-	/* precompute the CMAC key material */
-	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
-	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
-				CRYPTO_TFM_REQ_MASK);
-	ret = crypto_cipher_setkey(ctx->kaes, key, len);
-	if (ret)
-		return ret;
+		ctx->base.ipad.word[i + 8] = swab32(ctx->aes->key_enc[i]);
 
 	/* code below borrowed from crypto/cmac.c */
 	/* encrypt the zero block */
 	memset(consts, 0, AES_BLOCK_SIZE);
-	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
+	aes_encrypt(ctx->aes, (u8 *)consts, (u8 *)consts);
 
 	gfmask = 0x87;
 	_const[0] = be64_to_cpu(consts[1]);
@@ -2234,7 +2214,6 @@ static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
 	}
 	ctx->cbcmac = false;
 
-	memzero_explicit(&aes, sizeof(aes));
 	return 0;
 }
 
-- 
2.17.1




