// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>

#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;

#undef MT_BUG_ON
#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
			__func__, __LINE__, __x);			\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif
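
/*
 * Editor's usage sketch (not part of the original file): every
 * MT_BUG_ON() both runs and scores a check, so a test helper needs no
 * extra bookkeeping, e.g.:
 *
 *	MT_BUG_ON(mt, mtree_load(mt, 5) != NULL);
 *
 * In this fallback definition the macro only logs the failing
 * file/line and the pass/run counters; it does not abort, so the
 * remaining checks still execute.
 */
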
/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif
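
/*
 * Editor's note (not part of the original file): this file is also
 * compiled as a userspace test (see tools/testing/radix-tree), so the
 * kernel build stubs out the userspace-only instrumentation hooks
 * (mt_set_non_kernel(), mt_zero_nr_tallocated()), while the userspace
 * build stubs out cond_resched().
 */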

static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}
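
/*
 * Editor's usage sketch (not part of the original file): the wrappers
 * above exercise the basic round trip, roughly:
 *
 *	mtree_test_insert(mt, 5, xa_mk_value(5));
 *	MT_BUG_ON(mt, mtree_test_load(mt, 5) != xa_mk_value(5));
 *	mtree_test_erase(mt, 5);
 *	MT_BUG_ON(mt, mtree_test_load(mt, 5) != NULL);
 *
 * mtree_test_erase() returns the entry that was removed, which
 * mtree_erase_index() above uses to verify the victim.
 */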

#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif
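
/*
 * Editor's note (not part of the original file): mtree_alloc_range()
 * searches for a free area from the low end of [start, end] and
 * mtree_alloc_rrange() from the high end; both store ptr over the area
 * they find and report its first index through *result, which is why
 * the helpers above seed result with expected + 1 before the call.
 */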

static noinline void __init check_load(struct maple_tree *mt,
				       unsigned long index, void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert(struct maple_tree *mt,
					 unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void __init check_dup_insert(struct maple_tree *mt,
					     unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}

static noinline void __init check_index_load(struct maple_tree *mt,
					     unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline __init int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}

static noinline void __init check_rev_seq(struct maple_tree *mt,
					  unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024,
			mt_nr_allocated(), mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}
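
/*
 * Editor's note (not part of the original file): after check_seq(mt, n,
 * false) the tree holds the n + 1 singletons 0..n, each stored as
 * xa_mk_value(index), i.e. a tagged small integer that can never be
 * mistaken for a real kernel pointer.
 */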

static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;

	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j - 1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j + 1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		for (j = i; j >= huge; j *= 2) {
			check_load(mt, j - 1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j + 1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}
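
/*
 * Editor's note (not part of the original file): in check_rev_find()
 * below, entry i occupies [i * 10, i * 10 + 5], so a reverse find from
 * index 1000 lands inside entry 100's range, while a search from 999
 * down to 997 stays inside the empty gap between entries 99 and 100
 * and must return NULL.
 */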

static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}
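
/*
 * Editor's note (not part of the original file): check_find() below
 * stores 0 and the powers of four 1, 4, 16, 64, 256, ... (val <<= 2),
 * with index 64 deliberately stored as XA_ZERO_ENTRY so the iterators
 * must distinguish the zero entry from an empty slot.
 */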

static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);

	/* Range check.. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value by searching for ULONG_MAX and then using
	 * prev
	 */

	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);

	mas.node = MAS_NONE;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}

#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}' |
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *       end of hole is EXCLUSIVE
		 *       (opposite of the above table.)
		 * Start of hole, end of hole, size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF0 << 12,  /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D5109 << 12,  /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,  /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1] >> 12, holes[i+2] >> 12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
				i, req_range[i] >> 12,
				(req_range[i + 1] >> 12),
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}

static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}' |
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/* Start of hole, end of hole, size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data of size 2000. */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,            /* Size */
		34148798629 << 12, /* Expected location */
		0,                 /* Return value success. */

		/* Test failing. */
		34148798623 << 12, /* Min */
		34148798683 << 12, /* Max */
		0x15000,           /* Size */
		0,                 /* Expected location */
		-EBUSY,            /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12, /* Expected location */
		0,                 /* Return value success. */

		/* Test walking off the end of root. */
		0,                 /* Min */
		-1,                /* Max */
		-1,                /* Size */
		0,                 /* Expected location */
		-EBUSY,            /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                 /* Min */
		-1,                /* Max */
		4503599618982063UL << 12, /* Size */
		34359052178 << 12, /* Expected location */
		-EBUSY,            /* Return failure. */

		/* Test a single entry */
		34148798648 << 12, /* Min */
		34148798648 << 12, /* Max */
		4096,              /* Size of 1 */
		34148798648 << 12, /* Location is the same as min/max */
		0,                 /* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			 holes[i+1] >> 12, holes[i+2] >> 12,
			 min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
			 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
#endif
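
/*
 * Editor's worked example (not part of the original file): the
 * allocation tests above run on page numbers, not byte addresses. A
 * byte hole such as [0x565234afb000, 0x565234afc000) becomes the
 * single page 0x565234afb000 >> 12 = 0x565234afb, and a request of
 * size 0x1000 >> 12 = 1 page must land exactly on that page number.
 */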

static noinline void __init check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	static const unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
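	/*
	 * Editor's note (not part of the original file): 17-22 overlaps the
	 * existing 20-25 range, so mtree_insert_range() refuses it with
	 * -EEXIST; the check_store_range() calls below succeed because a
	 * store overwrites whatever it overlaps.
	 */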
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47, xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create tree of 1-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the
	 *    next node.
	 * 2. Split a single range.
	 * 3. Overwrite the start of a range.
	 * 4. Overwrite the end of a range.
	 * 5. Overwrite the entire range.
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 *    combined.
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 *    the root to be combined.
	 * 8. Overwrite the whole tree.
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot.
	 */

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
	mtree_destroy(mt);
	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3 child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) < 4);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				  xa_mk_value(8185 + i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) != 4);
}

static noinline void __init check_next_entry(struct maple_tree *mt)
{
	void *entry = NULL;
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);
	rcu_read_lock();

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}
	rcu_read_unlock();
	mtree_destroy(mt);
}

static noinline void __init check_prev_entry(struct maple_tree *mt)
{
	unsigned long index = 16;
	void *value;
	int i;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	rcu_read_lock();
	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
	rcu_read_unlock();
	mtree_destroy(mt);

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10 + 5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	mas_set(&mas, 20);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	mas_set(&mas, 80);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);

	mas_unlock(&mas);
}
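
/*
 * Editor's note (not part of the original file): with entries stored at
 * [i * 10, i * 10 + 5] above, mas_prev(&mas, 19) from index 20 returns
 * NULL because the previous entry ends at 15, below the minimum of 19;
 * the second argument bounds the walk rather than naming a target.
 */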

static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;

	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));

	mas_unlock(&mas);
}

static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3]
	 * and seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6]. Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}

static noinline void __init check_node_overwrite(struct maple_tree *mt)
{
	int i, max = 4000;

	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50,
				       xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
	/*mt_dump(mt, mt_dump_dec); */
	mt_validate(mt);
}
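
/*
 * Editor's note (not part of the original file): the NULL store in
 * check_node_overwrite() covers [319951, 367950], which starts just
 * past entry 3199's range (ending at 319950) and ends exactly on entry
 * 3679's last index, so whole nodes' worth of entries are wiped in a
 * single write.
 */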
2022-10-28 21:04:30 +03:00
# if defined(BENCH_SLOT_STORE)
2023-05-18 17:55:28 +03:00
static noinline void __init bench_slot_store ( struct maple_tree * mt )
2022-09-06 22:48:45 +03:00
{
2022-10-28 21:04:30 +03:00
int i , brk = 105 , max = 1040 , brk_start = 100 , count = 20000000 ;
2022-09-06 22:48:45 +03:00
2022-10-28 21:04:30 +03:00
for ( i = 0 ; i < max ; i + = 10 )
mtree_store_range ( mt , i , i + 5 , xa_mk_value ( i ) , GFP_KERNEL ) ;
2022-09-06 22:48:45 +03:00
2022-10-28 21:04:30 +03:00
for ( i = 0 ; i < count ; i + + ) {
mtree_store_range ( mt , brk , brk , NULL , GFP_KERNEL ) ;
mtree_store_range ( mt , brk_start , brk , xa_mk_value ( brk ) ,
GFP_KERNEL ) ;
2022-09-06 22:48:45 +03:00
}
}
2022-10-28 21:04:30 +03:00
# endif
2022-09-06 22:48:45 +03:00
2022-10-28 21:04:30 +03:00
# if defined(BENCH_NODE_STORE)
2023-05-18 17:55:28 +03:00
static noinline void __init bench_node_store ( struct maple_tree * mt )
2022-09-06 22:48:45 +03:00
{
2022-10-28 21:04:30 +03:00
int i , overwrite = 76 , max = 240 , count = 20000000 ;
2022-09-06 22:48:45 +03:00
2022-10-28 21:04:30 +03:00
for ( i = 0 ; i < max ; i + = 10 )
mtree_store_range ( mt , i , i + 5 , xa_mk_value ( i ) , GFP_KERNEL ) ;
2022-09-06 22:48:45 +03:00
2022-10-28 21:04:30 +03:00
for ( i = 0 ; i < count ; i + + ) {
mtree_store_range ( mt , overwrite , overwrite + 15 ,
xa_mk_value ( overwrite ) , GFP_KERNEL ) ;
2022-09-06 22:48:45 +03:00
2022-10-28 21:04:30 +03:00
overwrite + = 5 ;
if ( overwrite > = 135 )
overwrite = 76 ;
2022-09-06 22:48:45 +03:00
}
}
2022-10-28 21:04:30 +03:00
# endif

#if defined(BENCH_AWALK)
static noinline void __init bench_awalk(struct maple_tree *mt)
{
	int i, max = 2500, count = 50000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
}
#endif

#if defined(BENCH_WALK)
static noinline void __init bench_walk(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);
	}
}
#endif

#if defined(BENCH_MT_FOR_EACH)
static noinline void __init bench_mt_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;

	for (i = 0; i < max; i += 5)
		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}

		index = 0;
	}
}
#endif
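
/*
 * For reference, the mt_for_each() pattern benchmarked above reduces to the
 * sketch below.  count_entries() is a hypothetical helper, not part of the
 * test suite; it is static inline so an unused copy is harmless.
 */
static inline unsigned long count_entries(struct maple_tree *mt,
					  unsigned long max)
{
	unsigned long index = 0, n = 0;
	void *entry;

	/* Visit every non-NULL entry in [0, max]; @index tracks progress. */
	mt_for_each(mt, entry, index, max)
		n++;

	return n;
}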

/* check_forking - simulate the kernel forking sequence with the tree. */
static noinline void __init check_forking(struct maple_tree *mt)
{
	struct maple_tree newmt;
	int i, nr_entries = 134;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	mas_reset(&newmas);
	mas_reset(&mas);
	mas_lock(&newmas);
	mas.index = 0;
	mas.last = 0;
	if (mas_expected_entries(&newmas, nr_entries)) {
		pr_err("OOM!");
		BUG_ON(1);
	}
	rcu_read_lock();
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, val);
	}
	rcu_read_unlock();
	mas_destroy(&newmas);
	mas_unlock(&newmas);
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}
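
/*
 * The forking test above follows the bulk-duplication recipe: preallocate
 * with mas_expected_entries(), copy each range with mas_store(), then call
 * mas_destroy() to return unused nodes.  A reduced sketch, using the
 * hypothetical name dup_tree() (not used by the tests):
 */
static inline int dup_tree(struct maple_tree *src, struct maple_tree *dst,
			   unsigned long nr_entries)
{
	void *val;
	MA_STATE(mas, src, 0, 0);
	MA_STATE(newmas, dst, 0, 0);

	mas_lock(&newmas);
	if (mas_expected_entries(&newmas, nr_entries)) {
		mas_unlock(&newmas);
		return -ENOMEM;
	}

	rcu_read_lock();
	mas_for_each(&mas, val, ULONG_MAX) {
		/* Mirror the source range into the destination state. */
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, val);
	}
	rcu_read_unlock();

	mas_destroy(&newmas);	/* Return any unused preallocated nodes. */
	mas_unlock(&newmas);
	return 0;
}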
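
/*
 * check_iteration() - Store into ranges an active iterator has already
 * reached, then verify the iterator resumes at the correct next entry.
 */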
static noinline void __init check_iteration(struct maple_tree *mt)
{
	int i, nr_entries = 125;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i * 10, i * 10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);

	i = 0;
	mas_lock(&mas);
	mas_for_each(&mas, val, 925) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 92 */
		if (i == 92) {
			mas.index = 925;
			mas.last = 929;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Ensure mas_find() gets the next value */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 785) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite start of entry 78 */
		if (i == 78) {
			mas.index = 780;
			mas.last = 785;
			mas_store(&mas, val);
		} else {
			i++;
		}
	}
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 765) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 76 and advance to the end */
		if (i == 76) {
			mas.index = 760;
			mas.last = 765;
			mas_store(&mas, val);
			mas_next(&mas, ULONG_MAX);
		}
		i++;
	}
	/* Make sure the next find returns the one after 765, 766-769 */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(76));
	mas_unlock(&mas);
	mas_destroy(&mas);
	mt_set_non_kernel(0);
}
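
/*
 * check_mas_store_gfp() - Duplicate the tree again, this time letting
 * mas_store_gfp() allocate as it goes instead of preallocating.
 */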
static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
{
	struct maple_tree newmt;
	int i, nr_entries = 135;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	rcu_read_lock();
	mas_lock(&newmas);
	mas_reset(&newmas);
	mas_set(&mas, 0);
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store_gfp(&newmas, val, GFP_KERNEL);
	}
	mas_unlock(&newmas);
	rcu_read_unlock();
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}

#if defined(BENCH_FORK)
static noinline void __init bench_forking(struct maple_tree *mt)
{
	struct maple_tree newmt;
	int i, nr_entries = 134, nr_fork = 80000;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < nr_fork; i++) {
		mt_set_non_kernel(99999);
		mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
		newmas.tree = &newmt;
		mas_reset(&newmas);
		mas_reset(&mas);
		mas.index = 0;
		mas.last = 0;
		rcu_read_lock();
		mas_lock(&newmas);
		if (mas_expected_entries(&newmas, nr_entries)) {
			printk("OOM!");
			BUG_ON(1);
		}
		mas_for_each(&mas, val, ULONG_MAX) {
			newmas.index = mas.index;
			newmas.last = mas.last;
			mas_store(&newmas, val);
		}
		mas_destroy(&newmas);
		mas_unlock(&newmas);
		rcu_read_unlock();
		mt_validate(&newmt);
		mt_set_non_kernel(0);
		mtree_destroy(&newmt);
	}
}
#endif

static noinline void __init next_prev_test(struct maple_tree *mt)
{
	int i, nr_entries;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	struct maple_enode *mn;
	static const unsigned long *level2;
	static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
						   725};
	static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
						   1760, 1765};

	if (MAPLE_32BIT) {
		nr_entries = 500;
		level2 = level2_32;
	} else {
		nr_entries = 200;
		level2 = level2_64;
	}

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mas_lock(&mas);
	for (i = 0; i <= nr_entries / 2; i++) {
		mas_next(&mas, 1000);
		if (mas_is_none(&mas))
			break;
	}
	mas_reset(&mas);
	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 1000) {
		i++;
	}

	mas_reset(&mas);
	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 1000) {
		mas_pause(&mas);
		i++;
	}

	/*
	 * 680 - 685 = 0x61a00001930c
	 * 686 - 689 = NULL;
	 * 690 - 695 = 0x61a00001930c
	 * Check simple next/prev
	 */
	mas_set(&mas, 686);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != NULL);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
	MT_BUG_ON(mt, mas.index != 690);
	MT_BUG_ON(mt, mas.last != 695);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
	MT_BUG_ON(mt, mas.index != 680);
	MT_BUG_ON(mt, mas.last != 685);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
	MT_BUG_ON(mt, mas.index != 690);
	MT_BUG_ON(mt, mas.last != 695);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
	MT_BUG_ON(mt, mas.index != 700);
	MT_BUG_ON(mt, mas.last != 705);

	/* Check across node boundaries of the tree */
	mas_set(&mas, 70);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
	MT_BUG_ON(mt, mas.index != 70);
	MT_BUG_ON(mt, mas.last != 75);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
	MT_BUG_ON(mt, mas.index != 80);
	MT_BUG_ON(mt, mas.last != 85);

	val = mas_prev(&mas, 70);
	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
	MT_BUG_ON(mt, mas.index != 70);
	MT_BUG_ON(mt, mas.last != 75);

	/* Check across two levels of the tree */
	mas_reset(&mas);
	mas_set(&mas, level2[0]);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != NULL);
	val = mas_next(&mas, level2[1]);
	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
	MT_BUG_ON(mt, mas.index != level2[2]);
	MT_BUG_ON(mt, mas.last != level2[3]);
	mn = mas.node;

	val = mas_next(&mas, level2[1]);
	MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
	MT_BUG_ON(mt, mas.index != level2[4]);
	MT_BUG_ON(mt, mas.last != level2[5]);
	MT_BUG_ON(mt, mn == mas.node);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
	MT_BUG_ON(mt, mas.index != level2[2]);
	MT_BUG_ON(mt, mas.last != level2[3]);

	/* Check running off the end and back on */
	mas_set(&mas, nr_entries * 10);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));

	val = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != ULONG_MAX);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));

	/* Check running off the start and back on */
	mas_reset(&mas);
	mas_set(&mas, 10);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	MT_BUG_ON(mt, mas.index != 10);
	MT_BUG_ON(mt, mas.last != 15);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 5);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 0);

	mas.index = 0;
	mas.last = 5;
	mas_store(&mas, NULL);
	mas_reset(&mas);
	mas_set(&mas, 10);
	mas_walk(&mas);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init(mt);
	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
	mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
	rcu_read_lock();
	mas_set(&mas, 5);
	val = mas_prev(&mas, 4);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}
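
/*
 * The cursor movement exercised above reduces to the sketch below: position
 * the state with mas_walk(), then step with mas_next()/mas_prev() within a
 * limit.  find_neighbours() is a hypothetical helper, not used by the tests.
 */
static inline void find_neighbours(struct maple_tree *mt, unsigned long index,
				   void **next, void **prev)
{
	MA_STATE(mas, mt, index, index);

	rcu_read_lock();
	mas_walk(&mas);				/* Position the state at @index. */
	*next = mas_next(&mas, ULONG_MAX);	/* First entry after @index. */
	*prev = mas_prev(&mas, 0);		/* First entry below that one. */
	rcu_read_unlock();
}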

/* Test spanning writes that require balancing right sibling or right cousin */
static noinline void __init check_spanning_relatives(struct maple_tree *mt)
{
	unsigned long i, nr_entries = 1000;

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
}

static noinline void __init check_fuzzer(struct maple_tree *mt)
{
	/*
	 * 1. Causes a spanning rebalance of a single root node.
	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
	 * entire right side is consumed.
	 */
	mtree_test_insert(mt, 88, (void *)0xb1);
	mtree_test_insert(mt, 84, (void *)0xa9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 14, (void *)0x1d);
	mtree_test_insert(mt, 7, (void *)0xf);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_store_range(mt, 8, 18, (void *)0x11);
	mtree_destroy(mt);

	/*
	 * 2. Cause a spanning rebalance of two nodes in root.
	 * Fixed by setting mast->r->max correctly.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 87, (void *)0xaf);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 4);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_store(mt, 44, (void *)0x59);
	mtree_test_store(mt, 68, (void *)0x89);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 43, (void *)0x57);
	mtree_test_insert(mt, 24, (void *)0x31);
	mtree_test_insert(mt, 844, (void *)0x699);
	mtree_test_store(mt, 84, (void *)0xa9);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 4);
	mtree_test_load(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 3. Cause a node overflow on copy.
	 * Fixed by using the correct check for node size in mas_wr_modify().
	 * Also discovered an issue with metadata setting.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_store(mt, 444, (void *)0x379);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 4. Spanning store failure due to writing an incorrect pivot value
	 * at the last slot.
	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 261, (void *)0x20b);
	mtree_test_store(mt, 516, (void *)0x409);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1256, (void *)0x9d1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 1);
	mtree_test_store(mt, 56, (void *)0x71);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 24, (void *)0x31);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2263, (void *)0x11af);
	mtree_test_insert(mt, 446, (void *)0x37d);
	mtree_test_store_range(mt, 6, 45, (void *)0xd);
	mtree_test_store_range(mt, 3, 446, (void *)0x7);
	mtree_destroy(mt);

	/*
	 * 5. mas_wr_extend_null() may overflow slots.
	 * Fix by checking against wr_mas->node_end.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 48, (void *)0x61);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 88, (void *)0xb1);
	mtree_test_store(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2480, (void *)0x1361);
	mtree_test_insert(mt, ULONG_MAX,
			  (void *)0xffffffffffffffff);
	mtree_test_erase(mt, ULONG_MAX);
	mtree_destroy(mt);

	/*
	 * 6. When reusing a node with an implied pivot and the node is
	 * shrinking, old data would be left in the implied slot.
	 * Fixed by checking the last pivot for the mas->max and clearing
	 * accordingly.  This only affected the left-most node as that node is
	 * the only one allowed to end in NULL.
	 */
	mt_init_flags(mt, 0);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 15, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_insert(mt, 122, (void *)0xf5);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 0, 1, (void *)0x1);
	mtree_test_insert(mt, 85, (void *)0xab);
	mtree_test_insert(mt, 72, (void *)0x91);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 726, (void *)0x5ad);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 51, (void *)0x67);
	mtree_test_insert(mt, 611, (void *)0x4c7);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 621, (void *)0x4db);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_load(mt, 0);
	mtree_destroy(mt);

	/*
	 * 7. Previous fix was incomplete, fix mas_reuse_node() clearing of old
	 * data by overwriting it first - that way metadata is of no concern.
	 */
	mt_init_flags(mt, 0);
	mtree_test_load(mt, 1);
	mtree_test_insert(mt, 102, (void *)0xcd);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 0);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 110, (void *)0xdd);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 112, (void *)0xe1);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 210, (void *)0x1a5);
	mtree_test_store_range(mt, 0, 2, (void *)0x1);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 22);
	mtree_test_erase(mt, 1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 110);
	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
	mtree_test_load(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
	mtree_test_erase(mt, 12);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_destroy(mt);

	/*
	 * 8. When rebalancing or spanning_rebalance(), the max of the new node
	 * may be set incorrectly to the final pivot and not the right max.
	 * Fix by setting the left max to orig right max if the entire node is
	 * consumed.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 6716, (void *)0x3479);
	mtree_test_store(mt, 61, (void *)0x7b);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 667167);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 67167, (void *)0x20cbf);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 7);
	mtree_test_insert(mt, 16, (void *)0x21);
	mtree_test_insert(mt, 36, (void *)0x49);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 367, (void *)0x2df);
	mtree_test_insert(mt, 115, (void *)0xe7);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 1, 3, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
	mtree_test_load(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_destroy(mt);

	/*
	 * 9. Spanning store to the end of data caused an invalid metadata
	 * length which resulted in a crash eventually.
	 * Fix by checking if there is a value in pivot before incrementing the
	 * metadata end in mab_mas_cp().  To ensure this doesn't happen again,
	 * abstract the two locations this happens into a function called
	 * mas_leaf_set_meta().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 91, (void *)0xb7);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_store_range(mt, 0, 128, (void *)0x1);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 8);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
	mtree_test_store_range(mt, 0, 281, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 1211, (void *)0x977);
	mtree_test_insert(mt, 111, (void *)0xdf);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_insert(mt, 211, (void *)0x1a7);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1218, (void *)0x985);
	mtree_test_insert(mt, 61, (void *)0x7b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 121, (void *)0xf3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
}

/* duplicate the tree with a specific gap */
static noinline void __init check_dup_gaps(struct maple_tree *mt,
					   unsigned long nr_entries, bool zero_start,
					   unsigned long gap)
{
	unsigned long i = 0;
	struct maple_tree newmt;
	int ret;
	void *tmp;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, &newmt, 0, 0);

	if (!zero_start)
		i = 1;

	mt_zero_nr_tallocated();
	for (; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, (i + 1)*10 - gap,
				  xa_mk_value(i), GFP_KERNEL);

	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(99999);
	mas_lock(&newmas);
	ret = mas_expected_entries(&newmas, nr_entries);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, ret != 0);

	rcu_read_lock();
	mas_for_each(&mas, tmp, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, tmp);
	}
	rcu_read_unlock();
	mas_destroy(&newmas);
	mas_unlock(&newmas);

	mtree_destroy(&newmt);
}

/* Duplicate many sizes of trees.  Mainly to test expected entry values */
static noinline void __init check_dup(struct maple_tree *mt)
{
	int i;
	int big_start = 100010;

	/* Check with a value at zero */
	for (i = 10; i < 1000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
		rcu_barrier();
	}

	cond_resched();
	mt_cache_shrink();
	/* Check with a value at zero, no gap */
	for (i = 1000; i < 2000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 0);
		mtree_destroy(mt);
		rcu_barrier();
	}

	cond_resched();
	mt_cache_shrink();
	/* Check with a value at zero and unreasonably large */
	for (i = big_start; i < big_start + 10; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
		rcu_barrier();
	}

	cond_resched();
	mt_cache_shrink();
	/* Small to medium size not starting at zero */
	for (i = 200; i < 1000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
		rcu_barrier();
	}

	cond_resched();
	mt_cache_shrink();
	/* Unreasonably large not starting at zero */
	for (i = big_start; i < big_start + 10; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
		rcu_barrier();
		cond_resched();
		mt_cache_shrink();
	}

	/* Check non-allocation tree not starting at zero */
	for (i = 1500; i < 3000; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
		rcu_barrier();
		cond_resched();
		if (i % 2 == 0)
			mt_cache_shrink();
	}

	mt_cache_shrink();
	/* Check non-allocation tree starting at zero */
	for (i = 200; i < 1000; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
		rcu_barrier();
		cond_resched();
	}

	mt_cache_shrink();
	/* Unreasonably large */
	for (i = big_start + 5; i < big_start + 10; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
		rcu_barrier();
		mt_cache_shrink();
		cond_resched();
	}
}
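
/*
 * check_bnode_min_spanning() - Store 51 adjacent ranges, then wipe 240-509
 * with a single spanning NULL store so the write crosses several nodes.
 */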
static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
{
	int i = 50;
	MA_STATE(mas, mt, 0, 0);

	mt_set_non_kernel(9999);
	mas_lock(&mas);
	do {
		mas_set_range(&mas, i*10, i*10 + 9);
		mas_store(&mas, check_bnode_min_spanning);
	} while (i--);

	mas_set_range(&mas, 240, 509);
	mas_store(&mas, NULL);
	mas_unlock(&mas);
	mas_destroy(&mas);
	mt_set_non_kernel(0);
}

static noinline void __init check_empty_area_window(struct maple_tree *mt)
{
	unsigned long i, nr_entries = 20;
	MA_STATE(mas, mt, 0, 0);

	for (i = 1; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	/* Create another hole besides the one at 0 */
	mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);

	/* Check lower bounds that don't fit */
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);

	/* Check lower bound that does fit */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
	MT_BUG_ON(mt, mas.index != 5);
	MT_BUG_ON(mt, mas.last != 9);
	rcu_read_unlock();

	/* Check one gap that doesn't fit and one that does */
	rcu_read_lock();
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
	MT_BUG_ON(mt, mas.index != 161);
	MT_BUG_ON(mt, mas.last != 169);

	/* Check one gap that does fit above the min */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
	MT_BUG_ON(mt, mas.index != 216);
	MT_BUG_ON(mt, mas.last != 218);

	/* Check size that doesn't fit any gap */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);

	/*
	 * Check size that doesn't fit the lower end of the window but
	 * does fit the gap
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);

	/*
	 * Check size that doesn't fit the upper end of the window but
	 * does fit the gap
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);

	/* Check mas_empty_area forward */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 8);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 3);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EBUSY);

	mas_reset(&mas);
	mas_empty_area(&mas, 100, 165, 3);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
	rcu_read_unlock();
}
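
/*
 * The gap searches above reduce to the sketch below: on success the state's
 * index/last delimit a free span of the requested size inside [min, max],
 * and -EBUSY means no gap fits.  find_gap() is a hypothetical helper, not
 * used by the tests.
 */
static inline int find_gap(struct maple_tree *mt, unsigned long min,
			   unsigned long max, unsigned long size,
			   unsigned long *start)
{
	int ret;
	MA_STATE(mas, mt, 0, 0);

	rcu_read_lock();
	ret = mas_empty_area(&mas, min, max, size);
	rcu_read_unlock();

	if (!ret)
		*start = mas.index;	/* First index of the free span. */

	return ret;
}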

static noinline void __init check_empty_area_fill(struct maple_tree *mt)
{
	const unsigned long max = 0x25D78000;
	unsigned long size;
	int loop, shift;
	MA_STATE(mas, mt, 0, 0);

	mt_set_non_kernel(99999);
	for (shift = 12; shift <= 16; shift++) {
		loop = 5000;
		size = 1 << shift;
		while (loop--) {
			mas_set(&mas, 0);
			mas_lock(&mas);
			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
			mas_unlock(&mas);
			mas_reset(&mas);
		}
	}

	/* No space left. */
	size = 0x1000;
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
	rcu_read_unlock();

	/* Fill a depth 3 node to the maximum */
	for (unsigned long i = 629440511; i <= 629440800; i += 6)
		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
	/* Make space in the second-last depth 4 node */
	mtree_erase(mt, 631668735);
	/* Make space in the last depth 4 node */
	mtree_erase(mt, 629506047);
	mas_reset(&mas);
	/* Search from just after the gap in the second-last depth 4 */
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
	rcu_read_unlock();
	mt_set_non_kernel(0);
}

static DEFINE_MTREE(tree);
static int __init maple_tree_seed(void)
{
	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
				1001, 1002, 1003, 1005, 0,
				5003, 5002};
	void *ptr = &set;

	pr_info("\nTEST STARTING\n\n");

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

#if defined(BENCH_SLOT_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_NODE_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_forking(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MT_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_iteration(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_forking(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL);	/* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree);	/* Insert 0 */
	check_load(&tree, set[9], &tree);	/* See if 0 -> &tree */
	check_load(&tree, set[0], NULL);	/* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr);	/* Insert 5003 */
	check_load(&tree, set[9], &tree);	/* See if 0 -> &tree */
	check_load(&tree, set[11], NULL);	/* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr);	/* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree);	/* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree);	/* Insert 5015 again */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */

	/*
	 * Second set of tests try to load a value that doesn't exist, inserts
	 * a second value, then loads the value again
	 */
	check_load(&tree, set[1], NULL);	/* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);	/* insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * p[0]: 14 -> (nil) p[1]: 15 -> ptr p[2]: 16 -> &tree p[3]: 0 -> (nil)
	 */
	check_insert(&tree, set[6], ptr);	/* insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);	/* insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* 1003 = &tree ? */

	/* Clear out tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);	/* insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);	/* insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr);	/* insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);		/* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 *	set[] = {5015, 5014, 5017, 25, 1000,
	 *		 1001, 1002, 1003, 1005, 0,
	 *		 5003, 5002};
	 */
	check_insert(&tree, set[0], ptr);	/* 5015 */
	check_insert(&tree, set[1], &tree);	/* 5014 */
	check_insert(&tree, set[2], ptr);	/* 5017 */
	check_insert(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr);	/* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree);	/* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr);
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree);
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr);

	check_insert(&tree, set[9], &tree);

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

#if defined(BENCH)
skip:
#endif
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
		atomic_read(&maple_tree_tests_passed),
		atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}

static void __exit maple_tree_harvest(void)
{

}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_LICENSE("GPL");