The driver does not handle the case where cryptlen is zero and therefore
fails the crypto selftests. Let's add a fallback for this case.

Fixes: f63601fd616ab ("crypto: marvell/cesa - add a new driver for Marvell's CESA")
Signed-off-by: Corentin Labbe <clabbe@xxxxxxxxxxxx>
---
 drivers/crypto/marvell/Kconfig       |  4 ++
 drivers/crypto/marvell/cesa/cesa.h   | 12 +++--
 drivers/crypto/marvell/cesa/cipher.c | 69 ++++++++++++++++++++++++----
 3 files changed, 72 insertions(+), 13 deletions(-)

diff --git a/drivers/crypto/marvell/Kconfig b/drivers/crypto/marvell/Kconfig
index a48591af12d0..6d8a625e2208 100644
--- a/drivers/crypto/marvell/Kconfig
+++ b/drivers/crypto/marvell/Kconfig
@@ -13,6 +13,10 @@ config CRYPTO_DEV_MARVELL_CESA
 	select CRYPTO_SKCIPHER
 	select CRYPTO_HASH
 	select SRAM
+	select CRYPTO_ECB
+	select CRYPTO_CBC
+	select CRYPTO_DES
+	select CRYPTO_AES
 	select CRYPTO_DEV_MARVELL
 	help
 	  This driver allows you to utilize the Cryptographic Engines and
diff --git a/drivers/crypto/marvell/cesa/cesa.h b/drivers/crypto/marvell/cesa/cesa.h
index d215a6bed6bc..93736eed19f1 100644
--- a/drivers/crypto/marvell/cesa/cesa.h
+++ b/drivers/crypto/marvell/cesa/cesa.h
@@ -487,12 +487,14 @@ struct mv_cesa_req_ops {
 
 /**
  * struct mv_cesa_ctx - CESA operation context
- * @ops:	crypto operations
+ * @ops:		crypto operations
+ * @fallback_tfm:	pointer to the fallback TFM
  *
  * Base context structure inherited by operation specific ones.
*/ struct mv_cesa_ctx { const struct mv_cesa_req_ops *ops; + struct crypto_skcipher *fallback_tfm; }; /** @@ -563,15 +565,17 @@ struct mv_cesa_skcipher_std_req { /** * struct mv_cesa_skcipher_req - cipher request - * @req: type specific request information - * @src_nents: number of entries in the src sg list - * @dst_nents: number of entries in the dest sg list + * @req: type specific request information + * @src_nents: number of entries in the src sg list + * @dst_nents: number of entries in the dest sg list + * @fallback_req: request struct for invoking the fallback skcipher TFM */ struct mv_cesa_skcipher_req { struct mv_cesa_req base; struct mv_cesa_skcipher_std_req std; int src_nents; int dst_nents; + struct skcipher_request fallback_req; // keep at the end }; /** diff --git a/drivers/crypto/marvell/cesa/cipher.c b/drivers/crypto/marvell/cesa/cipher.c index c6f2fa753b7c..6da44651635f 100644 --- a/drivers/crypto/marvell/cesa/cipher.c +++ b/drivers/crypto/marvell/cesa/cipher.c @@ -37,6 +37,26 @@ struct mv_cesa_skcipher_dma_iter { struct mv_cesa_sg_dma_iter dst; }; +static int cesa_skcipher_fallback(struct skcipher_request *areq, + struct mv_cesa_op_ctx *tmpl) +{ + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq); + struct mv_cesa_ctx *op = crypto_skcipher_ctx(tfm); + struct mv_cesa_skcipher_req *rctx = skcipher_request_ctx(areq); + int err; + + skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm); + skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags, + areq->base.complete, areq->base.data); + skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst, + areq->cryptlen, areq->iv); + if (mv_cesa_get_op_cfg(tmpl) & CESA_SA_DESC_CFG_DIR_DEC) + err = crypto_skcipher_decrypt(&rctx->fallback_req); + else + err = crypto_skcipher_encrypt(&rctx->fallback_req); + return err; +} + static inline void mv_cesa_skcipher_req_iter_init(struct mv_cesa_skcipher_dma_iter *iter, struct skcipher_request *req) @@ -240,15 +260,25 @@ static 
const struct mv_cesa_req_ops mv_cesa_skcipher_req_ops = { static void mv_cesa_skcipher_cra_exit(struct crypto_tfm *tfm) { - void *ctx = crypto_tfm_ctx(tfm); + struct mv_cesa_ctx *ctx = crypto_tfm_ctx(tfm); memzero_explicit(ctx, tfm->__crt_alg->cra_ctxsize); + crypto_free_skcipher(ctx->fallback_tfm); } static int mv_cesa_skcipher_cra_init(struct crypto_tfm *tfm) { struct mv_cesa_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm); + const char *name = crypto_tfm_alg_name(tfm); + + ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK); + if (IS_ERR(ctx->fallback_tfm)) { + return PTR_ERR(ctx->fallback_tfm); + } + sktfm->reqsize = sizeof(struct mv_cesa_ctx) + + crypto_skcipher_reqsize(ctx->fallback_tfm); ctx->ops = &mv_cesa_skcipher_req_ops; crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm), @@ -276,7 +306,10 @@ static int mv_cesa_aes_setkey(struct crypto_skcipher *cipher, const u8 *key, for (i = 0; i < remaining; i++) ctx->aes.key_dec[4 + i] = ctx->aes.key_enc[offset + i]; - return 0; + crypto_skcipher_clear_flags(ctx->base.fallback_tfm, CRYPTO_TFM_REQ_MASK); + crypto_skcipher_set_flags(ctx->base.fallback_tfm, cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK); + + return crypto_skcipher_setkey(ctx->base.fallback_tfm, key, len); } static int mv_cesa_des_setkey(struct crypto_skcipher *cipher, const u8 *key, @@ -291,7 +324,10 @@ static int mv_cesa_des_setkey(struct crypto_skcipher *cipher, const u8 *key, memcpy(ctx->key, key, DES_KEY_SIZE); - return 0; + crypto_skcipher_clear_flags(ctx->base.fallback_tfm, CRYPTO_TFM_REQ_MASK); + crypto_skcipher_set_flags(ctx->base.fallback_tfm, cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK); + + return crypto_skcipher_setkey(ctx->base.fallback_tfm, key, len); } static int mv_cesa_des3_ede_setkey(struct crypto_skcipher *cipher, @@ -306,7 +342,10 @@ static int mv_cesa_des3_ede_setkey(struct crypto_skcipher *cipher, memcpy(ctx->key, key, DES3_EDE_KEY_SIZE); - return 0; + 
crypto_skcipher_clear_flags(ctx->base.fallback_tfm, CRYPTO_TFM_REQ_MASK); + crypto_skcipher_set_flags(ctx->base.fallback_tfm, cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK); + + return crypto_skcipher_setkey(ctx->base.fallback_tfm, key, len); } static int mv_cesa_skcipher_dma_req_init(struct skcipher_request *req, @@ -458,6 +497,13 @@ static int mv_cesa_skcipher_queue_req(struct skcipher_request *req, int ret; struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req); struct mv_cesa_engine *engine; + bool need_fallback = false; + + if (!req->cryptlen) + need_fallback = true; + + if (need_fallback) + return cesa_skcipher_fallback(req, tmpl); ret = mv_cesa_skcipher_req_init(req, tmpl); if (ret) @@ -520,7 +566,8 @@ struct skcipher_alg mv_cesa_ecb_des_alg = { .cra_driver_name = "mv-ecb-des", .cra_priority = 300, .cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC | - CRYPTO_ALG_ALLOCATES_MEMORY, + CRYPTO_ALG_ALLOCATES_MEMORY | + CRYPTO_ALG_NEED_FALLBACK, .cra_blocksize = DES_BLOCK_SIZE, .cra_ctxsize = sizeof(struct mv_cesa_des_ctx), .cra_alignmask = 0, @@ -571,7 +618,8 @@ struct skcipher_alg mv_cesa_cbc_des_alg = { .cra_driver_name = "mv-cbc-des", .cra_priority = 300, .cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC | - CRYPTO_ALG_ALLOCATES_MEMORY, + CRYPTO_ALG_ALLOCATES_MEMORY | + CRYPTO_ALG_NEED_FALLBACK, .cra_blocksize = DES_BLOCK_SIZE, .cra_ctxsize = sizeof(struct mv_cesa_des_ctx), .cra_alignmask = 0, @@ -629,7 +677,8 @@ struct skcipher_alg mv_cesa_ecb_des3_ede_alg = { .cra_driver_name = "mv-ecb-des3-ede", .cra_priority = 300, .cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC | - CRYPTO_ALG_ALLOCATES_MEMORY, + CRYPTO_ALG_ALLOCATES_MEMORY | + CRYPTO_ALG_NEED_FALLBACK, .cra_blocksize = DES3_EDE_BLOCK_SIZE, .cra_ctxsize = sizeof(struct mv_cesa_des3_ctx), .cra_alignmask = 0, @@ -683,7 +732,8 @@ struct skcipher_alg mv_cesa_cbc_des3_ede_alg = { .cra_driver_name = "mv-cbc-des3-ede", .cra_priority = 300, .cra_flags = 
CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC | - CRYPTO_ALG_ALLOCATES_MEMORY, + CRYPTO_ALG_ALLOCATES_MEMORY | + CRYPTO_ALG_NEED_FALLBACK, .cra_blocksize = DES3_EDE_BLOCK_SIZE, .cra_ctxsize = sizeof(struct mv_cesa_des3_ctx), .cra_alignmask = 0, @@ -756,7 +806,8 @@ struct skcipher_alg mv_cesa_ecb_aes_alg = { .cra_driver_name = "mv-ecb-aes", .cra_priority = 300, .cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC | - CRYPTO_ALG_ALLOCATES_MEMORY, + CRYPTO_ALG_ALLOCATES_MEMORY | + CRYPTO_ALG_NEED_FALLBACK, .cra_blocksize = AES_BLOCK_SIZE, .cra_ctxsize = sizeof(struct mv_cesa_aes_ctx), .cra_alignmask = 0, -- 2.35.1