Flip the dec_and_test if, so that there is less churn when we add more
code later.

Signed-off-by: Pavel Begunkov <asml.silence@xxxxxxxxx>
---
 io_uring/notif.c | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/io_uring/notif.c b/io_uring/notif.c
index 7caaebf94312..5a8b2fdd67fd 100644
--- a/io_uring/notif.c
+++ b/io_uring/notif.c
@@ -37,10 +37,11 @@ static void io_tx_ubuf_complete(struct sk_buff *skb, struct ubuf_info *uarg,
 		WRITE_ONCE(nd->zc_copied, true);
 	}
 
-	if (refcount_dec_and_test(&uarg->refcnt)) {
-		notif->io_task_work.func = io_notif_tw_complete;
-		__io_req_task_work_add(notif, IOU_F_TWQ_LAZY_WAKE);
-	}
+	if (!refcount_dec_and_test(&uarg->refcnt))
+		return;
+
+	notif->io_task_work.func = io_notif_tw_complete;
+	__io_req_task_work_add(notif, IOU_F_TWQ_LAZY_WAKE);
 }
 
 static const struct ubuf_info_ops io_ubuf_ops = {
-- 
2.44.0
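
For reference, here is a minimal standalone userspace sketch of the
pattern the patch applies (this is not io_uring code; struct resource,
resource_put_nested() and resource_put_early_return() are hypothetical
names, and C11 atomics stand in for the kernel's refcount_t):

/*
 * Illustrates why the early-return form reduces diff churn: new
 * completion steps are appended at function scope instead of inside
 * an ever-growing if-block, so later patches touch fewer lines.
 */
#include <stdio.h>
#include <stdatomic.h>

struct resource {
	atomic_int refcnt;
};

/* Before: completion work nested inside the if-block. Adding a new
 * statement here grows the block and touches its closing brace. */
static void resource_put_nested(struct resource *res)
{
	/* atomic_fetch_sub() returns the previous value, so a result
	 * of 1 means this call dropped the last reference. */
	if (atomic_fetch_sub(&res->refcnt, 1) == 1) {
		printf("last reference dropped, completing\n");
		/* further completion steps would be added here */
	}
}

/* After: bail out early while references remain; completion steps
 * sit at function scope, so appending one is a pure addition. */
static void resource_put_early_return(struct resource *res)
{
	if (atomic_fetch_sub(&res->refcnt, 1) != 1)
		return;

	printf("last reference dropped, completing\n");
	/* further completion steps would be appended here */
}

int main(void)
{
	struct resource a, b;

	atomic_init(&a.refcnt, 2);
	atomic_init(&b.refcnt, 1);

	resource_put_nested(&a);        /* refcnt 2 -> 1: nothing printed */
	resource_put_nested(&a);        /* refcnt 1 -> 0: completes */
	resource_put_early_return(&b);  /* refcnt 1 -> 0: completes */
	return 0;
}

With the early-return variant, a follow-up patch that adds one more
completion step is a one-line `+` hunk; in the nested variant the same
change also drags the surrounding braces and indentation into the diff.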