#ifndef IOU_ALLOC_CACHE_H
#define IOU_ALLOC_CACHE_H

/*
 * Don't allow the cache to grow beyond this size.
 */
#define IO_ALLOC_CACHE_MAX	512

struct io_cache_entry {
	struct io_wq_work_node node;
};
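
/*
 * Park an object in the cache. Returns true if the entry was accepted,
 * false if the cache is already at max_cached and the caller must free
 * the object itself. Cached memory is poisoned for KASAN while it sits
 * in the cache.
 */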
static inline bool io_alloc_cache_put(struct io_alloc_cache *cache,
				      struct io_cache_entry *entry)
{
	if (cache->nr_cached < cache->max_cached) {
		cache->nr_cached++;
		wq_stack_add_head(&entry->node, &cache->list);
		/* KASAN poisons object */
		kasan_slab_free_mempool(entry);
		return true;
	}
	return false;
}
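
/*
 * Returns true if the cache currently holds no entries.
 */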
static inline bool io_alloc_cache_empty(struct io_alloc_cache *cache)
{
	return !cache->list.next;
}
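
/*
 * Pop the most recently cached entry, or return NULL if the cache is
 * empty. The entry is unpoisoned before being handed back to the caller.
 */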
static inline struct io_cache_entry *io_alloc_cache_get(struct io_alloc_cache *cache)
{
	if (cache->list.next) {
		struct io_cache_entry *entry;

		entry = container_of(cache->list.next, struct io_cache_entry, node);
		kasan_unpoison_range(entry, cache->elem_size);
		cache->list.next = cache->list.next->next;
		cache->nr_cached--;
		return entry;
	}
	return NULL;
}
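
/*
 * Initialise an empty cache that may hold up to @max_nr objects of
 * @size bytes each.
 */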
static inline void io_alloc_cache_init(struct io_alloc_cache *cache,
				       unsigned max_nr, size_t size)
{
	cache->list.next = NULL;
	cache->nr_cached = 0;
	cache->max_cached = max_nr;
	cache->elem_size = size;
}
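
/*
 * Drain the cache, calling @free on every cached entry.
 */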
static inline void io_alloc_cache_free(struct io_alloc_cache *cache,
				       void (*free)(struct io_cache_entry *))
{
	while (1) {
		struct io_cache_entry *entry = io_alloc_cache_get(cache);

		if (!entry)
			break;
		free(entry);
	}
	cache->nr_cached = 0;
}
#endif