Hi Herbert,
Thank you so much for this work!
On 11/11/22 18:59, Herbert Xu wrote:
> On Wed, Nov 09, 2022 at 10:16:58PM +0900, Taehee Yoo wrote:
>>
>> I have encountered a kernel panic (stack-out-of-bounds) while using the
>> reqctx instead of the tfm.
>>
>> cryptd is used when simd drivers are used.
>> cryptd_skcipher_encrypt() internally doesn't allocate a request ctx of a
>> child; instead, it uses stack memory with
>> SYNC_SKCIPHER_REQUEST_ON_STACK.
>> It retains only 384 bytes for the child request ctx even if a child sets
>> a large reqsize value with crypto_skcipher_set_reqsize().
>> aria-avx2 needs 512 bytes and aria-avx512 needs 1024 bytes.
>> So, stack-out-of-bounds occurs.
>
> OK this is not supposed to happen.
>
> ---8<---
> cryptd is buggy as it tries to use sync_skcipher without going
> through the proper sync_skcipher interface. In fact it doesn't
> even need sync_skcipher since it's already a proper skcipher and
> can easily access the request context instead of using something
> off the stack.
>
I have tested this patch with ctr(aria-avx), ctr(aria-avx2), and
ctr(aria-avx512), and it works well.
The stack-out-of-bounds issue has disappeared with this patch applied.
So, I will test more and then send a v4 patch.
> Fixes: 36b3875a97b8 ("crypto: cryptd - Remove VLA usage of skcipher")
> Signed-off-by: Herbert Xu <herbert@xxxxxxxxxxxxxxxxxxx>
>
> diff --git a/crypto/cryptd.c b/crypto/cryptd.c
> index 668095eca0fa..ca3a40fc7da9 100644
> --- a/crypto/cryptd.c
> +++ b/crypto/cryptd.c
> @@ -68,11 +68,12 @@ struct aead_instance_ctx {
>
> struct cryptd_skcipher_ctx {
> refcount_t refcnt;
> - struct crypto_sync_skcipher *child;
> + struct crypto_skcipher *child;
> };
>
> struct cryptd_skcipher_request_ctx {
> crypto_completion_t complete;
> + struct skcipher_request req;
> };
>
> struct cryptd_hash_ctx {
> @@ -227,13 +228,13 @@ static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
> const u8 *key, unsigned int keylen)
> {
> struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
> - struct crypto_sync_skcipher *child = ctx->child;
> + struct crypto_skcipher *child = ctx->child;
>
> - crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
> - crypto_sync_skcipher_set_flags(child,
> - crypto_skcipher_get_flags(parent) &
> - CRYPTO_TFM_REQ_MASK);
> - return crypto_sync_skcipher_setkey(child, key, keylen);
> + crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
> + crypto_skcipher_set_flags(child,
> + crypto_skcipher_get_flags(parent) &
> + CRYPTO_TFM_REQ_MASK);
> + return crypto_skcipher_setkey(child, key, keylen);
> }
>
> static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
> @@ -258,13 +259,13 @@ static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
> struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
> struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
> struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
> - struct crypto_sync_skcipher *child = ctx->child;
> - SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);
> + struct skcipher_request *subreq = &rctx->req;
> + struct crypto_skcipher *child = ctx->child;
>
> if (unlikely(err == -EINPROGRESS))
> goto out;
>
> - skcipher_request_set_sync_tfm(subreq, child);
> + skcipher_request_set_tfm(subreq, child);
> skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
> NULL, NULL);
> skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
> @@ -286,13 +287,13 @@ static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
> struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
> struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
> struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
> - struct crypto_sync_skcipher *child = ctx->child;
> - SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);
> + struct skcipher_request *subreq = &rctx->req;
> + struct crypto_skcipher *child = ctx->child;
>
> if (unlikely(err == -EINPROGRESS))
> goto out;
>
> - skcipher_request_set_sync_tfm(subreq, child);
> + skcipher_request_set_tfm(subreq, child);
> skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
> NULL, NULL);
> skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
> @@ -343,9 +344,10 @@ static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
> if (IS_ERR(cipher))
> return PTR_ERR(cipher);
>
> - ctx->child = (struct crypto_sync_skcipher *)cipher;
> + ctx->child = cipher;
> crypto_skcipher_set_reqsize(
> - tfm, sizeof(struct cryptd_skcipher_request_ctx));
> + tfm, sizeof(struct cryptd_skcipher_request_ctx) +
> + crypto_skcipher_reqsize(cipher));
> return 0;
> }
>
> @@ -353,7 +355,7 @@ static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
> {
> struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
>
> - crypto_free_sync_skcipher(ctx->child);
> + crypto_free_skcipher(ctx->child);
> }
>
> static void cryptd_skcipher_free(struct skcipher_instance *inst)
> @@ -931,7 +933,7 @@ struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
> {
> struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
>
> - return &ctx->child->base;
> + return ctx->child;
> }
> EXPORT_SYMBOL_GPL(cryptd_skcipher_child);
>
Thanks a lot,
Taehee Yoo