Re: [v3 PATCH 3/31] crypto: cts - Add support for chaining

On Tue, 28 Jul 2020 at 10:18, Herbert Xu <herbert@xxxxxxxxxxxxxxxxxxx> wrote:
>
> As it stands cts cannot do chaining.  That is, it always performs
> the cipher-text stealing at the end of a request.  This patch adds
> support for chaining when the CRYPTO_TFM_REQ_MORE flag is set.
>
> It also sets final_chunksize so that data can be withheld by the
> caller to enable correct processing at the true end of a request.
>
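
If I follow the intended calling convention, it is roughly the sketch
below. Note that this is not from the patch: the
crypto_skcipher_final_chunksize() accessor name is my guess based on
the new field, and I am assuming the total length is a multiple of the
block size - the non-aligned case is exactly what I am asking about
below.

    int err;
    unsigned int final = crypto_skcipher_final_chunksize(tfm); /* assumed name */
    unsigned int head = total - final;  /* withhold the last two blocks */

    /* Intermediate chunk: MORE set, so cts just runs the child cbc. */
    skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MORE, NULL, NULL);
    skcipher_request_set_crypt(req, src, dst, head, iv);
    err = crypto_skcipher_encrypt(req);

    /* Final chunk: MORE cleared, so the stealing happens here. */
    skcipher_request_set_callback(req, 0, NULL, NULL);
    skcipher_request_set_crypt(req, src_tail, dst_tail, final, iv);
    err = crypto_skcipher_encrypt(req);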

But isn't the final chunk size a function of cryptlen? What happens if
I try to use cts(cbc(aes)) to encrypt 16 bytes with the MORE flag set,
and then pass fewer than 16 additional bytes as the final chunk?
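
Concretely (hypothetical caller code, request setup omitted):

    /* First request: exactly one block, flagged as non-final. */
    skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MORE, NULL, NULL);
    skcipher_request_set_crypt(req, src1, dst1, 16, iv);
    crypto_skcipher_encrypt(req);  /* routed through plain cbc, succeeds */

    /* Final request: 10 bytes, i.e., less than one block. */
    skcipher_request_set_callback(req, 0, NULL, NULL);
    skcipher_request_set_crypt(req, src2, dst2, 10, iv);
    crypto_skcipher_encrypt(req);  /* nbytes < bsize -> -EINVAL */

A total of 26 bytes is a perfectly valid length for cts(cbc(aes)), but
split this way, the final request fails the nbytes < bsize check.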


> Signed-off-by: Herbert Xu <herbert@xxxxxxxxxxxxxxxxxxx>
> ---
>
>  crypto/cts.c |   19 ++++++++++---------
>  1 file changed, 10 insertions(+), 9 deletions(-)
>
> diff --git a/crypto/cts.c b/crypto/cts.c
> index 3766d47ebcc01..67990146c9b06 100644
> --- a/crypto/cts.c
> +++ b/crypto/cts.c
> @@ -100,7 +100,7 @@ static int cts_cbc_encrypt(struct skcipher_request *req)
>         struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
>         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
>         struct skcipher_request *subreq = &rctx->subreq;
> -       int bsize = crypto_skcipher_blocksize(tfm);
> +       int bsize = crypto_skcipher_chunksize(tfm);
>         u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
>         struct scatterlist *sg;
>         unsigned int offset;
> @@ -146,7 +146,7 @@ static int crypto_cts_encrypt(struct skcipher_request *req)
>         struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
>         struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
>         struct skcipher_request *subreq = &rctx->subreq;
> -       int bsize = crypto_skcipher_blocksize(tfm);
> +       int bsize = crypto_skcipher_chunksize(tfm);
>         unsigned int nbytes = req->cryptlen;
>         unsigned int offset;
>
> @@ -155,7 +155,7 @@ static int crypto_cts_encrypt(struct skcipher_request *req)
>         if (nbytes < bsize)
>                 return -EINVAL;
>
> -       if (nbytes == bsize) {
> +       if (nbytes == bsize || req->base.flags & CRYPTO_TFM_REQ_MORE) {
>                 skcipher_request_set_callback(subreq, req->base.flags,
>                                               req->base.complete,
>                                               req->base.data);
> @@ -181,7 +181,7 @@ static int cts_cbc_decrypt(struct skcipher_request *req)
>         struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
>         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
>         struct skcipher_request *subreq = &rctx->subreq;
> -       int bsize = crypto_skcipher_blocksize(tfm);
> +       int bsize = crypto_skcipher_chunksize(tfm);
>         u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
>         struct scatterlist *sg;
>         unsigned int offset;
> @@ -240,7 +240,7 @@ static int crypto_cts_decrypt(struct skcipher_request *req)
>         struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
>         struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
>         struct skcipher_request *subreq = &rctx->subreq;
> -       int bsize = crypto_skcipher_blocksize(tfm);
> +       int bsize = crypto_skcipher_chunksize(tfm);
>         unsigned int nbytes = req->cryptlen;
>         unsigned int offset;
>         u8 *space;
> @@ -250,7 +250,7 @@ static int crypto_cts_decrypt(struct skcipher_request *req)
>         if (nbytes < bsize)
>                 return -EINVAL;
>
> -       if (nbytes == bsize) {
> +       if (nbytes == bsize || req->base.flags & CRYPTO_TFM_REQ_MORE) {
>                 skcipher_request_set_callback(subreq, req->base.flags,
>                                               req->base.complete,
>                                               req->base.data);
> @@ -297,7 +297,7 @@ static int crypto_cts_init_tfm(struct crypto_skcipher *tfm)
>         ctx->child = cipher;
>
>         align = crypto_skcipher_alignmask(tfm);
> -       bsize = crypto_skcipher_blocksize(cipher);
> +       bsize = crypto_skcipher_chunksize(cipher);
>         reqsize = ALIGN(sizeof(struct crypto_cts_reqctx) +
>                         crypto_skcipher_reqsize(cipher),
>                         crypto_tfm_ctx_alignment()) +
> @@ -359,11 +359,12 @@ static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
>                 goto err_free_inst;
>
>         inst->alg.base.cra_priority = alg->base.cra_priority;
> -       inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
> +       inst->alg.base.cra_blocksize = 1;
>         inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
>
>         inst->alg.ivsize = alg->base.cra_blocksize;
> -       inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
> +       inst->alg.chunksize = alg->base.cra_blocksize;
> +       inst->alg.final_chunksize = inst->alg.chunksize * 2;
>         inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
>         inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);
>
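
For reference: with cbc(aes) as the child, this makes chunksize 16 and
final_chunksize 32, so a caller would have to withhold at least the
last 32 bytes of the stream, if I am reading the intent correctly.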


