retain_dentry() used to decrement refcount if and only if it returned
true.  Lift those decrements into the callers.

Signed-off-by: Al Viro <viro@xxxxxxxxxxxxxxxxxx>
---
 fs/dcache.c | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/fs/dcache.c b/fs/dcache.c
index 1899376d0189..1f61a5d03d5b 100644
--- a/fs/dcache.c
+++ b/fs/dcache.c
@@ -680,7 +680,6 @@ static inline bool retain_dentry(struct dentry *dentry)
 		return false;
 
 	/* retain; LRU fodder */
-	dentry->d_lockref.count--;
 	if (unlikely(!(dentry->d_flags & DCACHE_LRU_LIST)))
 		d_lru_add(dentry);
 	else if (unlikely(!(dentry->d_flags & DCACHE_REFERENCED)))
@@ -744,6 +743,8 @@ static struct dentry *dentry_kill(struct dentry *dentry)
 	} else if (likely(!retain_dentry(dentry))) {
 		__dentry_kill(dentry);
 		return parent;
+	} else {
+		dentry->d_lockref.count--;
 	}
 	/* we are keeping it, after all */
 	if (inode)
@@ -893,6 +894,7 @@ void dput(struct dentry *dentry)
 		rcu_read_unlock();
 
 		if (likely(retain_dentry(dentry))) {
+			dentry->d_lockref.count--;
 			spin_unlock(&dentry->d_lock);
 			return;
 		}
@@ -925,6 +927,8 @@ void dput_to_list(struct dentry *dentry, struct list_head *list)
 	if (!retain_dentry(dentry)) {
 		--dentry->d_lockref.count;
 		to_shrink_list(dentry, list);
+	} else {
+		--dentry->d_lockref.count;
 	}
 	spin_unlock(&dentry->d_lock);
 }
-- 
2.39.2
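
[Not part of the patch; an illustrative sketch only.] After this change the
refcount decrement lives in each caller rather than in retain_dentry(), so a
caller's "retain" path looks roughly like the fragment below. Locking context
and the kill/shrink-list paths are elided; this mirrors the dput() hunk above,
it is not a complete function.

	/* caller already holds dentry->d_lock */
	if (likely(retain_dentry(dentry))) {
		/* keeping the dentry: the caller drops the count now,
		 * retain_dentry() no longer does it internally */
		dentry->d_lockref.count--;
		spin_unlock(&dentry->d_lock);
		return;
	}
	/* not retained: fall through to __dentry_kill()/to_shrink_list()
	 * handling, where the caller also accounts for the reference */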