Add a benchmarking function in testing for mas_prev(). This is useful
to ensure there are no regressions added during modifications.

Signed-off-by: Liam R. Howlett <Liam.Howlett@xxxxxxxxxx>
---
 lib/test_maple_tree.c | 37 +++++++++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)

diff --git a/lib/test_maple_tree.c b/lib/test_maple_tree.c
index 9c4cf5fb2b7f..0674aebd4423 100644
--- a/lib/test_maple_tree.c
+++ b/lib/test_maple_tree.c
@@ -45,6 +45,7 @@ atomic_t maple_tree_tests_passed;
 /* #define BENCH_MT_FOR_EACH */
 /* #define BENCH_FORK */
 /* #define BENCH_MAS_FOR_EACH */
+/* #define BENCH_MAS_PREV */
 
 #ifdef __KERNEL__
 #define mt_set_non_kernel(x)		do {} while (0)
@@ -1801,7 +1802,36 @@ static noinline void __init bench_mas_for_each(struct maple_tree *mt)
 
 }
 #endif
 
+#if defined(BENCH_MAS_PREV)
+static noinline void __init bench_mas_prev(struct maple_tree *mt)
+{
+	int i, count = 1000000;
+	unsigned long max = 2500;
+	void *entry;
+	MA_STATE(mas, mt, 0, 0);
+
+	for (i = 0; i < max; i += 5) {
+		int gap = 4;
+
+		if (i % 30 == 0)
+			gap = 3;
+		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
+	}
+
+	rcu_read_lock();
+	for (i = 0; i < count; i++) {
+		unsigned long j = 2495;
+
+		mas_set(&mas, ULONG_MAX);
+		while ((entry = mas_prev(&mas, 0)) != NULL) {
+			MT_BUG_ON(mt, entry != xa_mk_value(j));
+			j -= 5;
+		}
+	}
+	rcu_read_unlock();
+}
+#endif
 /* check_forking - simulate the kernel forking sequence with the tree. */
 static noinline void __init check_forking(struct maple_tree *mt)
 {
@@ -3537,6 +3567,13 @@ static int __init maple_tree_seed(void)
 	mtree_destroy(&tree);
 	goto skip;
 #endif
+#if defined(BENCH_MAS_PREV)
+#define BENCH
+	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+	bench_mas_prev(&tree);
+	mtree_destroy(&tree);
+	goto skip;
+#endif
 
 	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
 	check_iteration(&tree);
-- 
2.39.2
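
[Usage note, not part of the patch]

A minimal sketch of how this benchmark would typically be exercised. It
assumes a kernel build of the test (CONFIG_TEST_MAPLE_TREE); the
ktime-based timing wrapper is my own illustration and is not something
the patch adds -- the elapsed time can just as well be measured
externally, for example by timing the module load.

/*
 * 1) In lib/test_maple_tree.c, enable the benchmark by uncommenting the
 *    switch this patch adds near the other BENCH_* options:
 */
#define BENCH_MAS_PREV

/*
 * 2) Illustrative only: a crude timing wrapper around the new branch in
 *    maple_tree_seed().  ktime_get(), ktime_sub() and ktime_to_us() are
 *    ordinary kernel APIs (not available in the userspace build of this
 *    test); the surrounding calls are exactly what the patch adds.
 */
#include <linux/ktime.h>	/* near the top of the file */

	ktime_t start = ktime_get();

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_prev(&tree);		/* one million reverse walks with mas_prev() */
	mtree_destroy(&tree);
	pr_info("bench_mas_prev: %lld us\n",
		ktime_to_us(ktime_sub(ktime_get(), start)));
	goto skip;

With something like that in place, loading the test module prints the
elapsed time for the reverse walks to the kernel log; since the branch
defines BENCH and jumps to skip, the regular unit tests are bypassed and
only the benchmark runs.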