This patch adds helpers for using sync skciphers as spawns from
crypto instances.  In doing so it also changes the error returned
when an algorithm exceeds the maximum sync skcipher request size
from EINVAL to E2BIG as the former is used for way too many things.

Also the CRYPTO_ALG_ASYNC bit is now masked off just in case
someone tries to be clever.

Signed-off-by: Herbert Xu <herbert@xxxxxxxxxxxxxxxxxxx>
---
 crypto/skcipher.c                  | 38 +++++++++++++++++++++++++++-----------
 include/crypto/internal/skcipher.h | 28 ++++++++++++++++++++++++++++
 2 files changed, 55 insertions(+), 11 deletions(-)

diff --git a/crypto/skcipher.c b/crypto/skcipher.c
index 467af525848a1..bc9fc0c07a659 100644
--- a/crypto/skcipher.c
+++ b/crypto/skcipher.c
@@ -14,7 +14,7 @@
 #include <crypto/scatterwalk.h>
 #include <linux/bug.h>
 #include <linux/cryptouser.h>
-#include <linux/compiler.h>
+#include <linux/kernel.h>
 #include <linux/list.h>
 #include <linux/module.h>
 #include <linux/rtnetlink.h>
@@ -762,16 +762,9 @@ struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);
 
-struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
-			      const char *alg_name, u32 type, u32 mask)
+static struct crypto_sync_skcipher *crypto_verify_sync_skcipher(
+	struct crypto_skcipher *tfm)
 {
-	struct crypto_skcipher *tfm;
-
-	/* Only sync algorithms allowed. */
-	mask |= CRYPTO_ALG_ASYNC;
-
-	tfm = crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
-
 	/*
 	 * Make sure we do not allocate something that might get used with
 	 * an on-stack request: check the request size.
@@ -779,13 +772,36 @@ struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
 	if (!IS_ERR(tfm) &&
 	    WARN_ON(crypto_skcipher_reqsize(tfm) >
 		    MAX_SYNC_SKCIPHER_REQSIZE)) {
 		crypto_free_skcipher(tfm);
-		return ERR_PTR(-EINVAL);
+		tfm = ERR_PTR(-E2BIG);
 	}
 	return (struct crypto_sync_skcipher *)tfm;
 }
+
+struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(
+			      const char *alg_name, u32 type, u32 mask)
+{
+	struct crypto_skcipher *tfm;
+
+	/* Only sync algorithms allowed. */
+	type &= ~CRYPTO_ALG_ASYNC;
+	mask |= CRYPTO_ALG_ASYNC;
+
+	tfm = crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
+	return crypto_verify_sync_skcipher(tfm);
+}
 EXPORT_SYMBOL_GPL(crypto_alloc_sync_skcipher);
 
+struct crypto_sync_skcipher *crypto_spawn_sync_skcipher(
+	struct crypto_sync_skcipher_spawn *spawn)
+{
+	struct crypto_skcipher *tfm;
+
+	tfm = crypto_spawn_skcipher(&spawn->base);
+	return crypto_verify_sync_skcipher(tfm);
+}
+EXPORT_SYMBOL_GPL(crypto_spawn_sync_skcipher);
+
 int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask)
 {
 	return crypto_type_has_alg(alg_name, &crypto_skcipher_type, type, mask);
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
index 10226c12c5df0..19f99643c0fe2 100644
--- a/include/crypto/internal/skcipher.h
+++ b/include/crypto/internal/skcipher.h
@@ -31,6 +31,10 @@ struct crypto_skcipher_spawn {
 	struct crypto_spawn base;
 };
 
+struct crypto_sync_skcipher_spawn {
+	struct crypto_skcipher_spawn base;
+};
+
 struct skcipher_walk {
 	union {
 		struct {
@@ -92,11 +96,26 @@ int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
 			 struct crypto_instance *inst,
 			 const char *name, u32 type, u32 mask);
 
+static inline int crypto_grab_sync_skcipher(
+	struct crypto_sync_skcipher_spawn *spawn,
+	struct crypto_instance *inst, const char *name, u32 type, u32 mask)
+{
+	return crypto_grab_skcipher(&spawn->base, inst, name,
+				    type & ~CRYPTO_ALG_ASYNC,
+				    mask | CRYPTO_ALG_ASYNC);
+}
+
 static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
 {
 	crypto_drop_spawn(&spawn->base);
 }
 
+static inline void crypto_drop_sync_skcipher(
+	struct crypto_sync_skcipher_spawn *spawn)
+{
+	crypto_drop_skcipher(&spawn->base);
+}
+
 static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
 	struct crypto_skcipher_spawn *spawn)
 {
@@ -109,12 +128,21 @@ static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
 	return crypto_skcipher_spawn_alg(spawn);
 }
 
+static inline struct skcipher_alg *crypto_sync_spawn_skcipher_alg(
+	struct crypto_sync_skcipher_spawn *spawn)
+{
+	return crypto_spawn_skcipher_alg(&spawn->base);
+}
+
 static inline struct crypto_skcipher *crypto_spawn_skcipher(
 	struct crypto_skcipher_spawn *spawn)
 {
 	return crypto_spawn_tfm2(&spawn->base);
 }
 
+struct crypto_sync_skcipher *crypto_spawn_sync_skcipher(
+	struct crypto_sync_skcipher_spawn *spawn);
+
 static inline void crypto_skcipher_set_reqsize(
 	struct crypto_skcipher *skcipher, unsigned int reqsize)
 {
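
For reference, here is a rough, untested sketch of how a template could
consume these helpers: the spawn lives in the instance context, the sync
tfm is picked up in the init_tfm path and dropped again when the instance
goes away.  All of the example_* names are invented for illustration and
are not part of this patch; only crypto_sync_skcipher_spawn,
crypto_grab_sync_skcipher, crypto_spawn_sync_skcipher and
crypto_drop_sync_skcipher come from the patch, the rest is the existing
skcipher instance API.

#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/slab.h>

struct example_instance_ctx {
	struct crypto_sync_skcipher_spawn spawn;
};

struct example_tfm_ctx {
	struct crypto_sync_skcipher *child;
};

/*
 * In the template's ->create() the spawn would be grabbed with
 * something like (error handling omitted):
 *
 *	err = crypto_grab_sync_skcipher(&ictx->spawn,
 *					skcipher_crypto_instance(inst),
 *					cipher_name, 0, 0);
 */

static int example_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct example_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct example_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child;

	/*
	 * Because the spawn was grabbed with crypto_grab_sync_skcipher()
	 * the underlying algorithm is synchronous; the spawn helper also
	 * verifies that its reqsize fits an on-stack
	 * SYNC_SKCIPHER_REQUEST_ON_STACK and returns -E2BIG otherwise.
	 */
	child = crypto_spawn_sync_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;
	return 0;
}

static void example_exit_tfm(struct crypto_skcipher *tfm)
{
	struct example_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void example_free_instance(struct skcipher_instance *inst)
{
	struct example_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_sync_skcipher(&ictx->spawn);
	kfree(inst);
}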