/* 2005-04-16 15:20:36 -07:00 — stray VCS timestamp, kept as a comment */
# ifndef __NET_GEN_STATS_H
# define __NET_GEN_STATS_H
# include <linux/gen_stats.h>
# include <linux/socket.h>
# include <linux/rtnetlink.h>
# include <linux/pkt_sched.h>
/* 2014-09-28 11:52:56 -07:00 — stray VCS timestamp, kept as a comment */
/*
 * Per-CPU basic statistics block: each CPU keeps its own byte/packet
 * counters (@bstats) together with a u64_stats_sync sequence (@syncp).
 * NOTE(review): @syncp presumably lets readers take a consistent 64-bit
 * snapshot of @bstats on 32-bit hosts — confirm against the
 * u64_stats_sync API in <linux/u64_stats_sync.h>.
 */
struct gnet_stats_basic_cpu {
struct gnet_stats_basic_packed bstats ;
struct u64_stats_sync syncp ;
} ;
/* 2016-12-04 09:48:16 -08:00 — stray VCS timestamp, kept as a comment */
/* Opaque rate-estimator state; only handled by pointer in this header.
 * NOTE(review): full definition presumably lives in net/core/gen_estimator.c
 * — it is not visible here. */
struct net_rate_estimator ;
/* 2009-11-03 03:26:03 +00:00 — stray VCS timestamp, kept as a comment */
struct gnet_dump {
2005-04-16 15:20:36 -07:00
spinlock_t * lock ;
struct sk_buff * skb ;
2008-01-22 22:11:17 -08:00
struct nlattr * tail ;
2005-04-16 15:20:36 -07:00
2011-03-30 22:57:33 -03:00
/* Backward compatibility */
2005-04-16 15:20:36 -07:00
int compat_tc_stats ;
int compat_xstats ;
2016-04-26 10:06:18 +02:00
int padattr ;
2005-04-16 15:20:36 -07:00
void * xstats ;
int xstats_len ;
struct tc_stats tc_stats ;
} ;
/* 2013-09-20 11:23:26 -07:00 — stray VCS timestamp, kept as a comment */
/*
 * Begin dumping statistics into @skb under an attribute of @type and
 * initialize @d for the gnet_stats_copy_*() helpers; @padattr is the
 * attribute type used for alignment padding.
 * NOTE(review): exact locking contract for @lock and the return-value
 * convention are defined in net/core/gen_stats.c — confirm there.
 * (Fix: removed stray VCS timestamp lines splitting the prototypes.)
 */
int gnet_stats_start_copy(struct sk_buff *skb, int type, spinlock_t *lock,
			  struct gnet_dump *d, int padattr);

/*
 * Like gnet_stats_start_copy(), but also supports the older
 * compatibility layout: @tc_stats_type/@xstats_type name the attribute
 * types used for the legacy blobs (see the compat_* fields of
 * struct gnet_dump).
 */
int gnet_stats_start_copy_compat(struct sk_buff *skb, int type,
				 int tc_stats_type, int xstats_type,
				 spinlock_t *lock, struct gnet_dump *d,
				 int padattr);
/* 2005-04-16 15:20:36 -07:00 — stray VCS timestamp, kept as a comment */
/* 2016-06-06 09:37:16 -07:00 — stray VCS timestamp, kept as a comment */
int gnet_stats_copy_basic ( const seqcount_t * running ,
struct gnet_dump * d ,
2014-09-28 11:52:56 -07:00
struct gnet_stats_basic_cpu __percpu * cpu ,
2013-09-20 11:23:26 -07:00
struct gnet_stats_basic_packed * b ) ;
2016-06-06 09:37:16 -07:00
void __gnet_stats_copy_basic ( const seqcount_t * running ,
struct gnet_stats_basic_packed * bstats ,
2014-09-28 11:52:56 -07:00
struct gnet_stats_basic_cpu __percpu * cpu ,
struct gnet_stats_basic_packed * b ) ;
/* 2013-09-20 11:23:26 -07:00 — stray VCS timestamp, kept as a comment */
int gnet_stats_copy_rate_est ( struct gnet_dump * d ,
2016-12-04 09:48:16 -08:00
struct net_rate_estimator __rcu * * ptr ) ;
2014-09-28 11:53:57 -07:00
int gnet_stats_copy_queue ( struct gnet_dump * d ,
2014-09-28 11:54:24 -07:00
struct gnet_stats_queue __percpu * cpu_q ,
struct gnet_stats_queue * q , __u32 qlen ) ;
2013-09-20 11:23:26 -07:00
int gnet_stats_copy_app ( struct gnet_dump * d , void * st , int len ) ;
int gnet_stats_finish_copy ( struct gnet_dump * d ) ;
/*
 * Create a rate estimator fed from @bstats (or the per-CPU @cpu_bstats)
 * and publish it through the RCU-protected @rate_est pointer.  @opt
 * carries the netlink-supplied estimator parameters; @stats_lock and
 * @running guard the counter reads.
 * NOTE(review): exact locking/RCU contract is documented in
 * net/core/gen_estimator.c — confirm there.
 * (Fix: removed stray VCS timestamp lines splitting the prototypes.)
 */
int gen_new_estimator(struct gnet_stats_basic_packed *bstats,
		      struct gnet_stats_basic_cpu __percpu *cpu_bstats,
		      struct net_rate_estimator __rcu **rate_est,
		      spinlock_t *stats_lock,
		      seqcount_t *running, struct nlattr *opt);

/* Tear down the estimator published at @ptr, if any. */
void gen_kill_estimator(struct net_rate_estimator __rcu **ptr);

/* Replace the estimator at @ptr with a freshly configured one; same
 * arguments as gen_new_estimator(). */
int gen_replace_estimator(struct gnet_stats_basic_packed *bstats,
			  struct gnet_stats_basic_cpu __percpu *cpu_bstats,
			  struct net_rate_estimator __rcu **ptr,
			  spinlock_t *stats_lock,
			  seqcount_t *running, struct nlattr *opt);

/* True if an estimator is currently installed at @ptr. */
bool gen_estimator_active(struct net_rate_estimator __rcu **ptr);

/* Copy the current estimate into @sample; presumably returns false when
 * no estimator is installed — confirm in net/core/gen_estimator.c. */
bool gen_estimator_read(struct net_rate_estimator __rcu **ptr,
			struct gnet_stats_rate_est64 *sample);
/* 2005-04-16 15:20:36 -07:00 — stray VCS timestamp, kept as a comment */
# endif