On 1 Dec 2022 15:39:22 -0700 Yu Zhao <yuzhao@xxxxxxxxxx>
> @@ -477,6 +477,16 @@ static void mem_cgroup_update_tree(struct mem_cgroup *memcg, int nid)
>  	struct mem_cgroup_per_node *mz;
>  	struct mem_cgroup_tree_per_node *mctz;
>  
> +	if (lru_gen_enabled()) {
> +		struct lruvec *lruvec = &memcg->nodeinfo[nid]->lruvec;
> +
> +		/* see the comment on MEMCG_NR_GENS */
> +		if (soft_limit_excess(memcg) && lru_gen_memcg_seg(lruvec) != MEMCG_LRU_HEAD)
> +			lru_gen_rotate_memcg(lruvec, MEMCG_LRU_HEAD);
> +
> +		return;

The rotation heuristic is such a weak signal amid the background noise
produced by prandom_u32_max(MEMCG_NR_BINS) that I wonder whether the memcg
LRU works just as well without it (a rough model of that noise is sketched
after the quoted hunks).

> +	}
> +
>  	mctz = soft_limit_tree.rb_tree_per_node[nid];
>  	if (!mctz)
>  		return;
> @@ -3526,6 +3536,9 @@ unsigned long mem_cgroup_soft_limit_reclaim(pg_data_t *pgdat, int order,
>  	struct mem_cgroup_tree_per_node *mctz;
>  	unsigned long excess;
>  
> +	if (lru_gen_enabled())
> +		return 0;
> +
>  	if (order > 0)
>  		return 0;
> 
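
To put a rough number on how much ordering signal survives the random bin
placement, here is a toy userspace model, not kernel code and not taken from
the patch. Everything in it is an assumption made for illustration: a single
generation, MEMCG_NR_BINS set to 8, bins walked in index order, memcgs
scattered into uniformly random bins, and "rotate to head" re-inserting the
target at the head of a freshly drawn random bin (rand() stands in for
prandom_u32_max()). The real walk order in the series is more involved, so
the output is only an intuition aid.

/*
 * NOT kernel code: a throwaway userspace model of how much ordering signal
 * survives the random bin placement in the memcg LRU.  Everything below is
 * a simplifying assumption, not taken from the patch: one generation,
 * MEMCG_NR_BINS = 8, bins walked in index order, and "rotate to head"
 * re-inserting the target at the head of a freshly drawn random bin.
 */
#include <stdio.h>
#include <stdlib.h>

#define MEMCG_NR_BINS	8	/* assumed value */
#define NR_MEMCGS	512	/* arbitrary population of online memcgs */
#define NR_TRIALS	100000

/* Index at which a walk over bins 0..MEMCG_NR_BINS-1 reaches the target. */
static int walk_position(int target_bin, int target_depth, const int *bin_size)
{
	int pos = target_depth;

	for (int b = 0; b < target_bin; b++)
		pos += bin_size[b];

	return pos;
}

int main(void)
{
	long long pos_plain = 0, pos_rotated = 0;

	srand(1);

	for (int t = 0; t < NR_TRIALS; t++) {
		int bin_size[MEMCG_NR_BINS] = { 0 };
		int bin, depth;

		/* Scatter the other memcgs across random bins. */
		for (int i = 0; i < NR_MEMCGS - 1; i++)
			bin_size[rand() % MEMCG_NR_BINS]++;

		/* No rotation: the target sits wherever it happened to land. */
		bin = rand() % MEMCG_NR_BINS;	/* models prandom_u32_max() */
		depth = rand() % (bin_size[bin] + 1);
		pos_plain += walk_position(bin, depth, bin_size);

		/* Rotation: head of a freshly drawn random bin (assumption). */
		bin = rand() % MEMCG_NR_BINS;
		pos_rotated += walk_position(bin, 0, bin_size);
	}

	printf("avg walk position without rotation: %.1f\n",
	       (double)pos_plain / NR_TRIALS);
	printf("avg walk position with rotation:    %.1f\n",
	       (double)pos_rotated / NR_TRIALS);
	return 0;
}

In this model the two averages come out roughly half a bin apart (about 224
vs 256 out of a 512-entry walk), i.e. the head placement buys very little
once the bin itself is chosen at random. Whether that carries over to the
real reclaim walk is exactly the question above.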