The crypto engine could actually only enqueue hash and ablkcipher requests. This patch permits it to enqueue skcipher requests by adding all necessary functions. Signed-off-by: Corentin Labbe <clabbe.montjoie@xxxxxxxxx> --- crypto/crypto_engine.c | 114 ++++++++++++++++++++++++++++++++++++++++++++++++ include/crypto/engine.h | 14 ++++++ 2 files changed, 128 insertions(+) diff --git a/crypto/crypto_engine.c b/crypto/crypto_engine.c index 74b840749074..8567224d7609 100644 --- a/crypto/crypto_engine.c +++ b/crypto/crypto_engine.c @@ -36,6 +36,7 @@ static void crypto_pump_requests(struct crypto_engine *engine, struct crypto_async_request *async_req, *backlog; struct ahash_request *hreq; struct ablkcipher_request *breq; + struct skcipher_request *skreq; unsigned long flags; bool was_busy = false; int ret; @@ -139,6 +140,23 @@ static void crypto_pump_requests(struct crypto_engine *engine, goto req_err; } return; + } else if (cratype == &crypto_skcipher_type2) { + skreq = skcipher_request_cast(engine->cur_req); + if (engine->prepare_skcipher_request) { + ret = engine->prepare_skcipher_request(engine, skreq); + if (ret) { + dev_err(engine->dev, "failed to prepare request: %d\n", + ret); + goto req_err; + } + engine->cur_req_prepared = true; + } + ret = engine->skcipher_one_request(engine, skreq); + if (ret) { + dev_err(engine->dev, "failed to cipher one request from queue\n"); + goto req_err; + } + return; } else { dev_err(engine->dev, "failed to prepare request of unknown type\n"); return; @@ -151,6 +169,9 @@ static void crypto_pump_requests(struct crypto_engine *engine, } else if (cratype == &crypto_ablkcipher_type) { breq = ablkcipher_request_cast(engine->cur_req); crypto_finalize_cipher_request(engine, breq, ret); + } else if (cratype == &crypto_skcipher_type2) { + skreq = skcipher_request_cast(engine->cur_req); + crypto_finalize_skcipher_request(engine, skreq, ret); } return; @@ -210,6 +231,49 @@ int crypto_transfer_cipher_request_to_engine(struct crypto_engine *engine, 
EXPORT_SYMBOL_GPL(crypto_transfer_cipher_request_to_engine); /** + * crypto_transfer_skcipher_request - transfer the new request into the + * enginequeue + * @engine: the hardware engine + * @req: the request need to be listed into the engine queue + */ +int crypto_transfer_skcipher_request(struct crypto_engine *engine, + struct skcipher_request *req, + bool need_pump) +{ + unsigned long flags; + int ret; + + spin_lock_irqsave(&engine->queue_lock, flags); + + if (!engine->running) { + spin_unlock_irqrestore(&engine->queue_lock, flags); + return -ESHUTDOWN; + } + + ret = crypto_enqueue_request(&engine->queue, &req->base); + + if (!engine->busy && need_pump) + kthread_queue_work(engine->kworker, &engine->pump_requests); + + spin_unlock_irqrestore(&engine->queue_lock, flags); + return ret; +} +EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request); + +/** + * crypto_transfer_skcipher_request_to_engine - transfer one request to list + * into the engine queue + * @engine: the hardware engine + * @req: the request need to be listed into the engine queue + */ +int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine, + struct skcipher_request *req) +{ + return crypto_transfer_skcipher_request(engine, req, true); +} +EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request_to_engine); + +/** * crypto_transfer_hash_request - transfer the new request into the * enginequeue * @engine: the hardware engine @@ -289,6 +353,43 @@ void crypto_finalize_cipher_request(struct crypto_engine *engine, EXPORT_SYMBOL_GPL(crypto_finalize_cipher_request); /** + * crypto_finalize_skcipher_request - finalize one request if the request is done + * @engine: the hardware engine + * @req: the request need to be finalized + * @err: error number + */ +void crypto_finalize_skcipher_request(struct crypto_engine *engine, + struct skcipher_request *req, int err) +{ + unsigned long flags; + bool finalize_cur_req = false; + int ret; + + spin_lock_irqsave(&engine->queue_lock, flags); + if 
(engine->cur_req == &req->base) + finalize_cur_req = true; + spin_unlock_irqrestore(&engine->queue_lock, flags); + + if (finalize_cur_req) { + if (engine->cur_req_prepared && + engine->unprepare_skcipher_request) { + ret = engine->unprepare_skcipher_request(engine, req); + if (ret) + dev_err(engine->dev, "failed to unprepare request\n"); + } + spin_lock_irqsave(&engine->queue_lock, flags); + engine->cur_req = NULL; + engine->cur_req_prepared = false; + spin_unlock_irqrestore(&engine->queue_lock, flags); + } + + req->base.complete(&req->base, err); + + kthread_queue_work(engine->kworker, &engine->pump_requests); +} +EXPORT_SYMBOL_GPL(crypto_finalize_skcipher_request); + +/** * crypto_finalize_hash_request - finalize one request if the request is done * @engine: the hardware engine * @req: the request need to be finalized @@ -342,6 +443,19 @@ int crypto_engine_start(struct crypto_engine *engine) return -EBUSY; } + if (!engine->skcipher_one_request && !engine->cipher_one_request && + !engine->hash_one_request) { + spin_unlock_irqrestore(&engine->queue_lock, flags); + dev_err(engine->dev, "need at least one request type\n"); + return -EINVAL; + } + + if (engine->skcipher_one_request && engine->cipher_one_request) { + spin_unlock_irqrestore(&engine->queue_lock, flags); + dev_err(engine->dev, "Cannot use both skcipher and ablkcipher\n"); + return -EINVAL; + } + engine->running = true; spin_unlock_irqrestore(&engine->queue_lock, flags); diff --git a/include/crypto/engine.h b/include/crypto/engine.h index dd04c1699b51..a8f6e6ed377b 100644 --- a/include/crypto/engine.h +++ b/include/crypto/engine.h @@ -18,6 +18,7 @@ #include <linux/kthread.h> #include <crypto/algapi.h> #include <crypto/hash.h> +#include <crypto/skcipher.h> #define ENGINE_NAME_LEN 30 /* @@ -69,12 +70,18 @@ struct crypto_engine { struct ablkcipher_request *req); int (*unprepare_cipher_request)(struct crypto_engine *engine, struct ablkcipher_request *req); + int (*prepare_skcipher_request)(struct crypto_engine 
*engine, + struct skcipher_request *req); + int (*unprepare_skcipher_request)(struct crypto_engine *engine, + struct skcipher_request *req); int (*prepare_hash_request)(struct crypto_engine *engine, struct ahash_request *req); int (*unprepare_hash_request)(struct crypto_engine *engine, struct ahash_request *req); int (*cipher_one_request)(struct crypto_engine *engine, struct ablkcipher_request *req); + int (*skcipher_one_request)(struct crypto_engine *engine, + struct skcipher_request *req); int (*hash_one_request)(struct crypto_engine *engine, struct ahash_request *req); @@ -90,12 +97,19 @@ int crypto_transfer_cipher_request(struct crypto_engine *engine, bool need_pump); int crypto_transfer_cipher_request_to_engine(struct crypto_engine *engine, struct ablkcipher_request *req); +int crypto_transfer_skcipher_request(struct crypto_engine *engine, + struct skcipher_request *req, + bool need_pump); +int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine, + struct skcipher_request *req); int crypto_transfer_hash_request(struct crypto_engine *engine, struct ahash_request *req, bool need_pump); int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine, struct ahash_request *req); void crypto_finalize_cipher_request(struct crypto_engine *engine, struct ablkcipher_request *req, int err); +void crypto_finalize_skcipher_request(struct crypto_engine *engine, + struct skcipher_request *req, int err); void crypto_finalize_hash_request(struct crypto_engine *engine, struct ahash_request *req, int err); int crypto_engine_start(struct crypto_engine *engine); -- 2.13.0