The patch titled
     Subject: maple_tree: re-introduce entry to mas_preallocate() arguments
has been added to the -mm mm-unstable branch.  Its filename is
     maple_tree-re-introduce-entry-to-mas_preallocate-arguments.patch

This patch will shortly appear at
     https://git.kernel.org/pub/scm/linux/kernel/git/akpm/25-new.git/tree/patches/maple_tree-re-introduce-entry-to-mas_preallocate-arguments.patch

This patch will later appear in the mm-unstable branch at
    git://git.kernel.org/pub/scm/linux/kernel/git/akpm/mm

Before you just go and hit "reply", please:
   a) Consider who else should be cc'ed
   b) Prefer to cc a suitable mailing list as well
   c) Ideally: find the original patch on the mailing list and do a
      reply-to-all to that, adding suitable additional cc's

*** Remember to use Documentation/process/submit-checklist.rst when testing your code ***

The -mm tree is included into linux-next via the mm-everything
branch at git://git.kernel.org/pub/scm/linux/kernel/git/akpm/mm
and is updated there every 2-3 working days

------------------------------------------------------
From: "Liam R. Howlett" <Liam.Howlett@xxxxxxxxxx>
Subject: maple_tree: re-introduce entry to mas_preallocate() arguments
Date: Mon, 12 Jun 2023 16:39:46 -0400

The current preallocation strategy is to preallocate the absolute
worst-case allocation for a tree modification.  The entry (or NULL) is
needed to know how many nodes are needed to write to the tree.  Start by
adding the argument to the mas_preallocate() definition.

Link: https://lkml.kernel.org/r/20230612203953.2093911-10-Liam.Howlett@xxxxxxxxxx
Signed-off-by: Liam R. Howlett <Liam.Howlett@xxxxxxxxxx>
Cc: Peng Zhang <zhangpeng.00@xxxxxxxxxxxxx>
Signed-off-by: Andrew Morton <akpm@xxxxxxxxxxxxxxxxxxxx>
---

 include/linux/maple_tree.h       |    2 -
 lib/maple_tree.c                 |    3 +-
 mm/internal.h                    |    2 -
 mm/mmap.c                        |    4 +--
 tools/testing/radix-tree/maple.c |   32 ++++++++++++++---------------
 5 files changed, 22 insertions(+), 21 deletions(-)

--- a/include/linux/maple_tree.h~maple_tree-re-introduce-entry-to-mas_preallocate-arguments
+++ a/include/linux/maple_tree.h
@@ -458,7 +458,7 @@ void *mas_find(struct ma_state *mas, uns
 void *mas_find_range(struct ma_state *mas, unsigned long max);
 void *mas_find_rev(struct ma_state *mas, unsigned long min);
 void *mas_find_range_rev(struct ma_state *mas, unsigned long max);
-int mas_preallocate(struct ma_state *mas, gfp_t gfp);
+int mas_preallocate(struct ma_state *mas, void *entry, gfp_t gfp);
 bool mas_is_err(struct ma_state *mas);
 
 bool mas_nomem(struct ma_state *mas, gfp_t gfp);
--- a/lib/maple_tree.c~maple_tree-re-introduce-entry-to-mas_preallocate-arguments
+++ a/lib/maple_tree.c
@@ -5535,11 +5535,12 @@ EXPORT_SYMBOL_GPL(mas_store_prealloc);
 /**
  * mas_preallocate() - Preallocate enough nodes for a store operation
  * @mas: The maple state
+ * @entry: The entry that will be stored
  * @gfp: The GFP_FLAGS to use for allocations.
  *
  * Return: 0 on success, -ENOMEM if memory could not be allocated.
  */
-int mas_preallocate(struct ma_state *mas, gfp_t gfp)
+int mas_preallocate(struct ma_state *mas, void *entry, gfp_t gfp)
 {
 	int ret;
 
--- a/mm/internal.h~maple_tree-re-introduce-entry-to-mas_preallocate-arguments
+++ a/mm/internal.h
@@ -1043,7 +1043,7 @@ static inline void vma_iter_config(struc
  */
 static inline int vma_iter_prealloc(struct vma_iterator *vmi)
 {
-	return mas_preallocate(&vmi->mas, GFP_KERNEL);
+	return mas_preallocate(&vmi->mas, NULL, GFP_KERNEL);
 }
 
 static inline void vma_iter_clear(struct vma_iterator *vmi,
--- a/mm/mmap.c~maple_tree-re-introduce-entry-to-mas_preallocate-arguments
+++ a/mm/mmap.c
@@ -1979,7 +1979,7 @@ int expand_upwards(struct vm_area_struct
 		/* Check that both stack segments have the same anon_vma? */
 	}
 
-	if (mas_preallocate(&mas, GFP_KERNEL))
+	if (mas_preallocate(&mas, vma, GFP_KERNEL))
 		return -ENOMEM;
 
 	/* We must make sure the anon_vma is allocated. */
@@ -2061,7 +2061,7 @@ int expand_downwards(struct vm_area_stru
 		return -ENOMEM;
 	}
 
-	if (mas_preallocate(&mas, GFP_KERNEL))
+	if (mas_preallocate(&mas, vma, GFP_KERNEL))
 		return -ENOMEM;
 
 	/* We must make sure the anon_vma is allocated. */
--- a/tools/testing/radix-tree/maple.c~maple_tree-re-introduce-entry-to-mas_preallocate-arguments
+++ a/tools/testing/radix-tree/maple.c
@@ -35383,7 +35383,7 @@ static noinline void __init check_preall
 	for (i = 0; i <= max; i++)
 		mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
 
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
@@ -35392,18 +35392,18 @@ static noinline void __init check_preall
 	allocated = mas_allocated(&mas);
 	MT_BUG_ON(mt, allocated != 0);
 
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
 	MT_BUG_ON(mt, allocated != 1 + height * 3);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	mas_destroy(&mas);
 	allocated = mas_allocated(&mas);
 	MT_BUG_ON(mt, allocated != 0);
 
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
@@ -35412,26 +35412,26 @@ static noinline void __init check_preall
 	MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
 	mn->parent = ma_parent_ptr(mn);
 	ma_free_rcu(mn);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	mas_destroy(&mas);
 	allocated = mas_allocated(&mas);
 	MT_BUG_ON(mt, allocated != 0);
 
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
 	MT_BUG_ON(mt, allocated != 1 + height * 3);
 	mn = mas_pop_node(&mas);
 	MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	mas_destroy(&mas);
 	allocated = mas_allocated(&mas);
 	MT_BUG_ON(mt, allocated != 0);
 	mn->parent = ma_parent_ptr(mn);
 	ma_free_rcu(mn);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
@@ -35440,12 +35440,12 @@ static noinline void __init check_preall
 	MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1);
 	mas_push_node(&mas, mn);
 	MT_BUG_ON(mt, mas_allocated(&mas) != allocated);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	mas_destroy(&mas);
 	allocated = mas_allocated(&mas);
 	MT_BUG_ON(mt, allocated != 0);
 
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
@@ -35453,21 +35453,21 @@ static noinline void __init check_preall
 	mas_store_prealloc(&mas, ptr);
 	MT_BUG_ON(mt, mas_allocated(&mas) != 0);
 
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
 	MT_BUG_ON(mt, allocated != 1 + height * 3);
 	mas_store_prealloc(&mas, ptr);
 	MT_BUG_ON(mt, mas_allocated(&mas) != 0);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
 	MT_BUG_ON(mt, allocated != 1 + height * 3);
 	mas_store_prealloc(&mas, ptr);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
@@ -35475,14 +35475,14 @@ static noinline void __init check_preall
 	mas_store_prealloc(&mas, ptr);
 	MT_BUG_ON(mt, mas_allocated(&mas) != 0);
 	mt_set_non_kernel(1);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL & GFP_NOWAIT) == 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated != 0);
 	mas_destroy(&mas);
 
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL) != 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated == 0);
@@ -35490,7 +35490,7 @@ static noinline void __init check_preall
 	mas_store_prealloc(&mas, ptr);
 	MT_BUG_ON(mt, mas_allocated(&mas) != 0);
 	mt_set_non_kernel(1);
-	MT_BUG_ON(mt, mas_preallocate(&mas, GFP_KERNEL & GFP_NOWAIT) == 0);
+	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0);
 	allocated = mas_allocated(&mas);
 	height = mas_mt_height(&mas);
 	MT_BUG_ON(mt, allocated != 0);
_

Patches currently in -mm which might be from Liam.Howlett@xxxxxxxxxx are

mm-mprotect-fix-do_mprotect_pkey-limit-check.patch
maple_tree-add-benchmarking-for-mas_for_each.patch
maple_tree-add-benchmarking-for-mas_prev.patch
mm-move-unmap_vmas-declaration-to-internal-header.patch
mm-change-do_vmi_align_munmap-side-tree-index.patch
mm-remove-prev-check-from-do_vmi_align_munmap.patch
maple_tree-introduce-__mas_set_range.patch
mm-remove-re-walk-from-mmap_region.patch
maple_tree-adjust-node-allocation-on-mas_rebalance.patch
maple_tree-re-introduce-entry-to-mas_preallocate-arguments.patch
mm-use-vma_iter_clear_gfp-in-nommu.patch
mm-set-up-vma-iterator-for-vma_iter_prealloc-calls.patch
maple_tree-move-mas_wr_end_piv-below-mas_wr_extend_null.patch
maple_tree-update-mas_preallocate-testing.patch
maple_tree-refine-mas_preallocate-node-calculations.patch
maple_tree-reduce-resets-during-store-setup.patch
mm-mmap-change-vma-iteration-order-in-do_vmi_align_munmap.patch
userfaultfd-fix-regression-in-userfaultfd_unmap_prep.patch
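
For reference, a minimal caller-side sketch of the updated mas_preallocate()
signature once this patch is applied.  The helper name store_entry_prealloc()
is hypothetical; the sketch assumes a maple tree using the internal spinlock
and a caller that serializes writers across the preallocation and the store,
the way mm/mmap.c does its preallocations under the mmap lock.

#include <linux/maple_tree.h>
#include <linux/gfp.h>

/* Hypothetical helper: preallocate worst-case nodes, then store. */
static int store_entry_prealloc(struct maple_tree *mt, unsigned long first,
				unsigned long last, void *entry)
{
	MA_STATE(mas, mt, first, last);
	int ret;

	/* Pass the entry so node preallocation can be sized for this write. */
	ret = mas_preallocate(&mas, entry, GFP_KERNEL);
	if (ret)
		return ret;	/* -ENOMEM */

	mas_lock(&mas);
	/* Consumes the preallocated nodes; cannot fail at this point. */
	mas_store_prealloc(&mas, entry);
	mas_unlock(&mas);

	return 0;
}

In this patch the entry argument is only plumbed through (callers pass the
entry, the VMA, or NULL); a later patch in the series uses it to refine the
node calculations.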