[PATCH v2 2/4] crypto: arm64/gcm - add RFC4106 support

Add support for RFC4106 ESP encapsulation to the accelerated GCM
implementation. This results in a ~10% speedup for IPsec frames of
typical size (~1420 bytes) on Cortex-A53.

Signed-off-by: Ard Biesheuvel <ardb@xxxxxxxxxx>
---
 arch/arm64/crypto/ghash-ce-glue.c | 145 +++++++++++++++-----
 1 file changed, 107 insertions(+), 38 deletions(-)
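
Note for reviewers (not for the changelog): RFC 4106 forms the 12-byte
GCM IV by concatenating the 4-byte salt carried at the tail of the key
material with the 8-byte per-packet IV from the ESP header. A minimal
sketch of that construction, mirroring what rfc4106_encrypt() and
rfc4106_decrypt() below do (layout only, identifiers as in the patch):

	u8 iv[AES_BLOCK_SIZE];	/* 12-byte GCM IV + 32-bit block counter */

	/* salt, split off the key blob and saved by rfc4106_setkey() */
	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
	/* per-packet IV taken from the ESP payload */
	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);
	/* gcm_encrypt()/gcm_decrypt() then set the initial counter to 2 */
	put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);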

diff --git a/arch/arm64/crypto/ghash-ce-glue.c b/arch/arm64/crypto/ghash-ce-glue.c
index 15794fe21a0b2eca..4b45fad493b16239 100644
--- a/arch/arm64/crypto/ghash-ce-glue.c
+++ b/arch/arm64/crypto/ghash-ce-glue.c
@@ -9,6 +9,7 @@
 #include <asm/simd.h>
 #include <asm/unaligned.h>
 #include <crypto/aes.h>
+#include <crypto/gcm.h>
 #include <crypto/algapi.h>
 #include <crypto/b128ops.h>
 #include <crypto/gf128mul.h>
@@ -28,7 +29,8 @@ MODULE_ALIAS_CRYPTO("ghash");
 
 #define GHASH_BLOCK_SIZE	16
 #define GHASH_DIGEST_SIZE	16
-#define GCM_IV_SIZE		12
+
+#define RFC4106_NONCE_SIZE	4
 
 struct ghash_key {
 	be128			k;
@@ -43,6 +45,7 @@ struct ghash_desc_ctx {
 
 struct gcm_aes_ctx {
 	struct crypto_aes_ctx	aes_key;
+	u8			nonce[RFC4106_NONCE_SIZE];
 	struct ghash_key	ghash_key;
 };
 
@@ -226,8 +229,8 @@ static int num_rounds(struct crypto_aes_ctx *ctx)
 	return 6 + ctx->key_length / 4;
 }
 
-static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey,
-		      unsigned int keylen)
+static int gcm_aes_setkey(struct crypto_aead *tfm, const u8 *inkey,
+			  unsigned int keylen)
 {
 	struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
 	u8 key[GHASH_BLOCK_SIZE];
@@ -258,17 +261,9 @@ static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey,
 	return 0;
 }
 
-static int gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
+static int gcm_aes_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
 {
-	switch (authsize) {
-	case 4:
-	case 8:
-	case 12 ... 16:
-		break;
-	default:
-		return -EINVAL;
-	}
-	return 0;
+	return crypto_gcm_check_authsize(authsize);
 }
 
 static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
@@ -302,13 +297,12 @@ static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
 	}
 }
 
-static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
+static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[], u32 len)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
 	u8 buf[GHASH_BLOCK_SIZE];
 	struct scatter_walk walk;
-	u32 len = req->assoclen;
 	int buf_count = 0;
 
 	scatterwalk_start(&walk, req->src);
@@ -338,27 +332,25 @@ static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
 	}
 }
 
-static int gcm_encrypt(struct aead_request *req)
+static int gcm_encrypt(struct aead_request *req, char *iv, int assoclen)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
 	int nrounds = num_rounds(&ctx->aes_key);
 	struct skcipher_walk walk;
 	u8 buf[AES_BLOCK_SIZE];
-	u8 iv[AES_BLOCK_SIZE];
 	u64 dg[2] = {};
 	be128 lengths;
 	u8 *tag;
 	int err;
 
-	lengths.a = cpu_to_be64(req->assoclen * 8);
+	lengths.a = cpu_to_be64(assoclen * 8);
 	lengths.b = cpu_to_be64(req->cryptlen * 8);
 
-	if (req->assoclen)
-		gcm_calculate_auth_mac(req, dg);
+	if (assoclen)
+		gcm_calculate_auth_mac(req, dg, assoclen);
 
-	memcpy(iv, req->iv, GCM_IV_SIZE);
-	put_unaligned_be32(2, iv + GCM_IV_SIZE);
+	put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);
 
 	err = skcipher_walk_aead_encrypt(&walk, req, false);
 
@@ -403,7 +395,7 @@ static int gcm_encrypt(struct aead_request *req)
 	return 0;
 }
 
-static int gcm_decrypt(struct aead_request *req)
+static int gcm_decrypt(struct aead_request *req, char *iv, int assoclen)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
@@ -412,21 +404,19 @@ static int gcm_decrypt(struct aead_request *req)
 	struct skcipher_walk walk;
 	u8 otag[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
-	u8 iv[AES_BLOCK_SIZE];
 	u64 dg[2] = {};
 	be128 lengths;
 	u8 *tag;
 	int ret;
 	int err;
 
-	lengths.a = cpu_to_be64(req->assoclen * 8);
+	lengths.a = cpu_to_be64(assoclen * 8);
 	lengths.b = cpu_to_be64((req->cryptlen - authsize) * 8);
 
-	if (req->assoclen)
-		gcm_calculate_auth_mac(req, dg);
+	if (assoclen)
+		gcm_calculate_auth_mac(req, dg, assoclen);
 
-	memcpy(iv, req->iv, GCM_IV_SIZE);
-	put_unaligned_be32(2, iv + GCM_IV_SIZE);
+	put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);
 
 	scatterwalk_map_and_copy(otag, req->src,
 				 req->assoclen + req->cryptlen - authsize,
@@ -471,14 +461,76 @@ static int gcm_decrypt(struct aead_request *req)
 	return ret ? -EBADMSG : 0;
 }
 
-static struct aead_alg gcm_aes_alg = {
-	.ivsize			= GCM_IV_SIZE,
+static int gcm_aes_encrypt(struct aead_request *req)
+{
+	u8 iv[AES_BLOCK_SIZE];
+
+	memcpy(iv, req->iv, GCM_AES_IV_SIZE);
+	return gcm_encrypt(req, iv, req->assoclen);
+}
+
+static int gcm_aes_decrypt(struct aead_request *req)
+{
+	u8 iv[AES_BLOCK_SIZE];
+
+	memcpy(iv, req->iv, GCM_AES_IV_SIZE);
+	return gcm_decrypt(req, iv, req->assoclen);
+}
+
+static int rfc4106_setkey(struct crypto_aead *tfm, const u8 *inkey,
+			  unsigned int keylen)
+{
+	struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
+	int err;
+
+	keylen -= RFC4106_NONCE_SIZE;
+	err = gcm_aes_setkey(tfm, inkey, keylen);
+	if (err)
+		return err;
+
+	memcpy(ctx->nonce, inkey + keylen, RFC4106_NONCE_SIZE);
+	return 0;
+}
+
+static int rfc4106_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
+{
+	return crypto_rfc4106_check_authsize(authsize);
+}
+
+static int rfc4106_encrypt(struct aead_request *req)
+{
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
+	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
+	u8 iv[AES_BLOCK_SIZE];
+
+	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
+	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);
+
+	return crypto_ipsec_check_assoclen(req->assoclen) ?:
+	       gcm_encrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE);
+}
+
+static int rfc4106_decrypt(struct aead_request *req)
+{
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
+	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
+	u8 iv[AES_BLOCK_SIZE];
+
+	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
+	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);
+
+	return crypto_ipsec_check_assoclen(req->assoclen) ?:
+	       gcm_decrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE);
+}
+
+static struct aead_alg gcm_aes_algs[] = {{
+	.ivsize			= GCM_AES_IV_SIZE,
 	.chunksize		= AES_BLOCK_SIZE,
 	.maxauthsize		= AES_BLOCK_SIZE,
-	.setkey			= gcm_setkey,
-	.setauthsize		= gcm_setauthsize,
-	.encrypt		= gcm_encrypt,
-	.decrypt		= gcm_decrypt,
+	.setkey			= gcm_aes_setkey,
+	.setauthsize		= gcm_aes_setauthsize,
+	.encrypt		= gcm_aes_encrypt,
+	.decrypt		= gcm_aes_decrypt,
 
 	.base.cra_name		= "gcm(aes)",
 	.base.cra_driver_name	= "gcm-aes-ce",
@@ -487,7 +539,23 @@ static struct aead_alg gcm_aes_alg = {
 	.base.cra_ctxsize	= sizeof(struct gcm_aes_ctx) +
 				  4 * sizeof(u64[2]),
 	.base.cra_module	= THIS_MODULE,
-};
+}, {
+	.ivsize			= GCM_RFC4106_IV_SIZE,
+	.chunksize		= AES_BLOCK_SIZE,
+	.maxauthsize		= AES_BLOCK_SIZE,
+	.setkey			= rfc4106_setkey,
+	.setauthsize		= rfc4106_setauthsize,
+	.encrypt		= rfc4106_encrypt,
+	.decrypt		= rfc4106_decrypt,
+
+	.base.cra_name		= "rfc4106(gcm(aes))",
+	.base.cra_driver_name	= "rfc4106-gcm-aes-ce",
+	.base.cra_priority	= 300,
+	.base.cra_blocksize	= 1,
+	.base.cra_ctxsize	= sizeof(struct gcm_aes_ctx) +
+				  4 * sizeof(u64[2]),
+	.base.cra_module	= THIS_MODULE,
+}};
 
 static int __init ghash_ce_mod_init(void)
 {
@@ -495,7 +563,8 @@ static int __init ghash_ce_mod_init(void)
 		return -ENODEV;
 
 	if (cpu_have_named_feature(PMULL))
-		return crypto_register_aead(&gcm_aes_alg);
+		return crypto_register_aeads(gcm_aes_algs,
+					     ARRAY_SIZE(gcm_aes_algs));
 
 	return crypto_register_shash(&ghash_alg);
 }
@@ -503,7 +572,7 @@ static int __init ghash_ce_mod_init(void)
 static void __exit ghash_ce_mod_exit(void)
 {
 	if (cpu_have_named_feature(PMULL))
-		crypto_unregister_aead(&gcm_aes_alg);
+		crypto_unregister_aeads(gcm_aes_algs, ARRAY_SIZE(gcm_aes_algs));
 	else
 		crypto_unregister_shash(&ghash_alg);
 }
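
As a quick way to exercise the new transform from another kernel
module, here is a rough sketch against the in-kernel AEAD API
(illustrative only; error handling abbreviated, and the 20-byte key
blob assumes AES-128 plus the 4-byte salt):

	struct crypto_aead *tfm;
	u8 key[20];	/* 16-byte AES-128 key followed by the 4-byte salt */
	int err;

	tfm = crypto_alloc_aead("rfc4106(gcm(aes))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* keylen includes the trailing salt; rfc4106_setkey() splits it off */
	err = crypto_aead_setkey(tfm, key, sizeof(key));
	if (!err)
		/* RFC 4106 permits 8-, 12- or 16-byte ICVs */
		err = crypto_aead_setauthsize(tfm, 16);
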
-- 
2.35.1