On Sat, 5 Oct 2019 at 14:22, Florian Bezdeka <florian@xxxxxxxxxx> wrote:
>
> Commit 79c65d179a40e145 ("crypto: cbc - Convert to skcipher") updated
> the generic CBC template wrapper from a blkcipher to a skcipher algo,
> to get away from the deprecated blkcipher interface. However, as a side
> effect, drivers that instantiate CBC transforms using the blkcipher as
> a fallback no longer work, since skciphers can wrap blkciphers but not
> the other way around. This broke the geode-aes driver.
>
> So let's fix it by moving to the sync skcipher interface when allocating
> the fallback. At the same time, align with the generic API for ECB and
> CBC by rejecting inputs that are not a multiple of the AES block size.
>
> Fixes: 79c65d179a40e145 ("crypto: cbc - Convert to skcipher")
> Cc: Ard Biesheuvel <ard.biesheuvel@xxxxxxxxxx>
> Signed-off-by: Florian Bezdeka <florian@xxxxxxxxxx>
> ---
>
> Ard, I just followed your instructions and created this patch
> for usage on a 4.19 kernel. The patch was successfully tested
> on two different Geode systems.
>
> Can you please review again and forward to the stable tree if the patch
> looks OK?
>

The patch looks fine to me, but we cannot send it to -stable before
the mainline version is merged.

>  drivers/crypto/geode-aes.c | 57 +++++++++++++++++++++++---------------
>  drivers/crypto/geode-aes.h |  2 +-
>  2 files changed, 35 insertions(+), 24 deletions(-)
>
> diff --git a/drivers/crypto/geode-aes.c b/drivers/crypto/geode-aes.c
> index eb2a0a73cbed..cc33354d13c1 100644
> --- a/drivers/crypto/geode-aes.c
> +++ b/drivers/crypto/geode-aes.c
> @@ -14,6 +14,7 @@
>  #include <linux/spinlock.h>
>  #include <crypto/algapi.h>
>  #include <crypto/aes.h>
> +#include <crypto/skcipher.h>
>
>  #include <linux/io.h>
>  #include <linux/delay.h>
> @@ -170,13 +171,15 @@ static int geode_setkey_blk(struct crypto_tfm *tfm, const u8 *key,
>  	/*
>  	 * The requested key size is not supported by HW, do a fallback
>  	 */
> -	op->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
> -	op->fallback.blk->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
> +	crypto_skcipher_clear_flags(op->fallback.blk, CRYPTO_TFM_REQ_MASK);
> +	crypto_skcipher_set_flags(op->fallback.blk,
> +				  tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
>
> -	ret = crypto_blkcipher_setkey(op->fallback.blk, key, len);
> +	ret = crypto_skcipher_setkey(op->fallback.blk, key, len);
>  	if (ret) {
>  		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
> -		tfm->crt_flags |= (op->fallback.blk->base.crt_flags & CRYPTO_TFM_RES_MASK);
> +		tfm->crt_flags |= crypto_skcipher_get_flags(op->fallback.blk) &
> +				  CRYPTO_TFM_RES_MASK;
>  	}
>  	return ret;
>  }
> @@ -185,33 +188,28 @@ static int fallback_blk_dec(struct blkcipher_desc *desc,
>  		struct scatterlist *dst, struct scatterlist *src,
>  		unsigned int nbytes)
>  {
> -	unsigned int ret;
> -	struct crypto_blkcipher *tfm;
>  	struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
> +	SKCIPHER_REQUEST_ON_STACK(req, op->fallback.blk);
>
> -	tfm = desc->tfm;
> -	desc->tfm = op->fallback.blk;
> -
> -	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
> +	skcipher_request_set_tfm(req, op->fallback.blk);
> +	skcipher_request_set_callback(req, 0, NULL, NULL);
> +	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
>
> -	desc->tfm = tfm;
> -	return ret;
> +	return crypto_skcipher_decrypt(req);
>  }
> +
>  static int fallback_blk_enc(struct blkcipher_desc *desc,
>  		struct scatterlist *dst, struct scatterlist *src,
>  		unsigned int nbytes)
>  {
> -	unsigned int ret;
> -	struct crypto_blkcipher *tfm;
>  	struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
> +	SKCIPHER_REQUEST_ON_STACK(req, op->fallback.blk);
>
> -	tfm = desc->tfm;
> -	desc->tfm = op->fallback.blk;
> -
> -	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
> +	skcipher_request_set_tfm(req, op->fallback.blk);
> +	skcipher_request_set_callback(req, 0, NULL, NULL);
> +	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
>
> -	desc->tfm = tfm;
> -	return ret;
> +	return crypto_skcipher_encrypt(req);
>  }
>
>  static void
> @@ -311,6 +309,9 @@ geode_cbc_decrypt(struct blkcipher_desc *desc,
>  	struct blkcipher_walk walk;
>  	int err, ret;
>
> +	if (nbytes % AES_BLOCK_SIZE)
> +		return -EINVAL;
> +
>  	if (unlikely(op->keylen != AES_KEYSIZE_128))
>  		return fallback_blk_dec(desc, dst, src, nbytes);
>
> @@ -343,6 +344,9 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
>  	struct blkcipher_walk walk;
>  	int err, ret;
>
> +	if (nbytes % AES_BLOCK_SIZE)
> +		return -EINVAL;
> +
>  	if (unlikely(op->keylen != AES_KEYSIZE_128))
>  		return fallback_blk_enc(desc, dst, src, nbytes);
>
> @@ -370,8 +374,9 @@ static int fallback_init_blk(struct crypto_tfm *tfm)
>  	const char *name = crypto_tfm_alg_name(tfm);
>  	struct geode_aes_op *op = crypto_tfm_ctx(tfm);
>
> -	op->fallback.blk = crypto_alloc_blkcipher(name, 0,
> -			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
> +	op->fallback.blk = crypto_alloc_skcipher(name, 0,
> +						 CRYPTO_ALG_ASYNC |
> +						 CRYPTO_ALG_NEED_FALLBACK);
>
>  	if (IS_ERR(op->fallback.blk)) {
>  		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
> @@ -385,7 +390,7 @@ static void fallback_exit_blk(struct crypto_tfm *tfm)
>  {
>  	struct geode_aes_op *op = crypto_tfm_ctx(tfm);
>
> -	crypto_free_blkcipher(op->fallback.blk);
> +	crypto_free_skcipher(op->fallback.blk);
>  	op->fallback.blk = NULL;
>  }
>
> @@ -424,6 +429,9 @@ geode_ecb_decrypt(struct blkcipher_desc *desc,
>  	struct blkcipher_walk walk;
>  	int err, ret;
>
> +	if (nbytes % AES_BLOCK_SIZE)
> +		return -EINVAL;
> +
>  	if (unlikely(op->keylen != AES_KEYSIZE_128))
>  		return fallback_blk_dec(desc, dst, src, nbytes);
>
> @@ -454,6 +462,9 @@ geode_ecb_encrypt(struct blkcipher_desc *desc,
>  	struct blkcipher_walk walk;
>  	int err, ret;
>
> +	if (nbytes % AES_BLOCK_SIZE)
> +		return -EINVAL;
> +
>  	if (unlikely(op->keylen != AES_KEYSIZE_128))
>  		return fallback_blk_enc(desc, dst, src, nbytes);
>
> diff --git a/drivers/crypto/geode-aes.h b/drivers/crypto/geode-aes.h
> index f442ca972e3c..c5763a041bb8 100644
> --- a/drivers/crypto/geode-aes.h
> +++ b/drivers/crypto/geode-aes.h
> @@ -64,7 +64,7 @@ struct geode_aes_op {
>  	u8 *iv;
>
>  	union {
> -		struct crypto_blkcipher *blk;
> +		struct crypto_skcipher *blk;
>  		struct crypto_cipher *cip;
>  	} fallback;
>  	u32 keylen;
> --
> 2.21.0
>
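
For anyone carrying a similar blkcipher-to-skcipher conversion, the fallback
pattern the diff switches to boils down to the sketch below. It is not part
of the patch, and the example_* names are made up for illustration, but the
calls (crypto_alloc_skcipher(), SKCIPHER_REQUEST_ON_STACK() and the
skcipher_request_set_*() helpers) are the same 4.19-era APIs the patch uses:

    #include <crypto/skcipher.h>
    #include <linux/err.h>

    struct example_ctx {
    	struct crypto_skcipher *fallback;
    };

    static int example_init_fallback(struct example_ctx *ctx, const char *name)
    {
    	/* Ask for a synchronous software implementation to fall back to. */
    	ctx->fallback = crypto_alloc_skcipher(name, 0,
    					      CRYPTO_ALG_ASYNC |
    					      CRYPTO_ALG_NEED_FALLBACK);
    	return PTR_ERR_OR_ZERO(ctx->fallback);
    }

    static int example_fallback_encrypt(struct example_ctx *ctx,
    				    struct scatterlist *dst,
    				    struct scatterlist *src,
    				    unsigned int nbytes, void *iv)
    {
    	/* On-stack request, no completion callback: runs synchronously. */
    	SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
    	int ret;

    	skcipher_request_set_tfm(req, ctx->fallback);
    	skcipher_request_set_callback(req, 0, NULL, NULL);
    	skcipher_request_set_crypt(req, src, dst, nbytes, iv);
    	ret = crypto_skcipher_encrypt(req);
    	skcipher_request_zero(req);
    	return ret;
    }

    static void example_exit_fallback(struct example_ctx *ctx)
    {
    	crypto_free_skcipher(ctx->fallback);
    	ctx->fallback = NULL;
    }

Passing CRYPTO_ALG_ASYNC in the mask (with type 0) restricts the allocation
to synchronous implementations, which is what makes the on-stack request and
the absent completion callback safe here.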