Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
| 2 | #ifndef __NET_GEN_STATS_H |
| 3 | #define __NET_GEN_STATS_H |
| 4 | |
| 5 | #include <linux/gen_stats.h> |
| 6 | #include <linux/socket.h> |
| 7 | #include <linux/rtnetlink.h> |
| 8 | #include <linux/pkt_sched.h> |
| 9 | |
/**
 * struct gnet_stats_basic_cpu - per-CPU instance of the basic stats counters
 * @bstats: the basic (byte/packet) counters themselves
 * @syncp:  u64_stats sequence counter guarding consistent reads of @bstats
 *          (NOTE(review): presumably for tear-free 64-bit reads on 32-bit
 *          SMP hosts — confirm against u64_stats_sync.h semantics)
 *
 * Forced to 2 * sizeof(u64) alignment; NOTE(review): likely so the
 * counter pair does not straddle an alignment boundary — confirm intent
 * with the commit that added __aligned() here.
 */
struct gnet_stats_basic_cpu {
	struct gnet_stats_basic_packed bstats;
	struct u64_stats_sync syncp;
} __aligned(2 * sizeof(u64));
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 14 | |
| 15 | struct net_rate_estimator; |
| 16 | |
/**
 * struct gnet_dump - state carried across one statistics dump
 * @lock:  lock taken by gnet_stats_start_copy*() for the duration of the
 *         dump (see the prototypes below which accept it)
 * @skb:   netlink message buffer the statistics are appended to
 * @tail:  attribute under construction in @skb
 *
 * The "Backward compatibility" members accumulate values for the legacy
 * TCA_STATS/TCA_XSTATS layout alongside the nested TCA_STATS2 form:
 * @compat_tc_stats: attribute type for the legacy struct tc_stats copy
 *                   (0 when compat output is not requested)
 * @compat_xstats:   attribute type for the legacy xstats copy
 * @padattr:         attribute type used for padding the dump
 * @xstats:          pointer to app-specific stats to emit in compat form
 * @xstats_len:      length in bytes of @xstats
 * @tc_stats:        legacy counters being accumulated during the dump
 *
 * NOTE(review): field semantics inferred from the gnet_stats_* prototypes
 * below; confirm against net/core/gen_stats.c.
 */
struct gnet_dump {
	spinlock_t *      lock;
	struct sk_buff *  skb;
	struct nlattr *   tail;

	/* Backward compatibility */
	int               compat_tc_stats;
	int               compat_xstats;
	int               padattr;
	void *            xstats;
	int               xstats_len;
	struct tc_stats   tc_stats;
};
| 30 | |
/* Begin a statistics dump into @skb under attribute @type, initializing @d.
 * Returns 0 on success or a negative errno-style code (implementation not
 * visible here — see net/core/gen_stats.c).
 */
int gnet_stats_start_copy(struct sk_buff *skb, int type, spinlock_t *lock,
			  struct gnet_dump *d, int padattr);

/* As gnet_stats_start_copy(), but additionally arranges legacy-format
 * output under @tc_stats_type/@xstats_type for old userspace.
 */
int gnet_stats_start_copy_compat(struct sk_buff *skb, int type,
				 int tc_stats_type, int xstats_type,
				 spinlock_t *lock, struct gnet_dump *d,
				 int padattr);

/* Append basic (byte/packet) counters to the dump @d.  Counters are read
 * either from the per-CPU copies @cpu (when non-NULL, presumably) or from
 * the single copy @b; @running guards consistent reads of @b.
 * NOTE(review): cpu-vs-b selection inferred from the signature — confirm.
 */
int gnet_stats_copy_basic(const seqcount_t *running,
			  struct gnet_dump *d,
			  struct gnet_stats_basic_cpu __percpu *cpu,
			  struct gnet_stats_basic_packed *b);
/* Read/aggregate basic counters into @bstats without touching a dump. */
void __gnet_stats_copy_basic(const seqcount_t *running,
			     struct gnet_stats_basic_packed *bstats,
			     struct gnet_stats_basic_cpu __percpu *cpu,
			     struct gnet_stats_basic_packed *b);
/* As gnet_stats_copy_basic(), but for hardware-offloaded counters. */
int gnet_stats_copy_basic_hw(const seqcount_t *running,
			     struct gnet_dump *d,
			     struct gnet_stats_basic_cpu __percpu *cpu,
			     struct gnet_stats_basic_packed *b);
/* Append the current rate-estimator sample for @ptr to the dump @d. */
int gnet_stats_copy_rate_est(struct gnet_dump *d,
			     struct net_rate_estimator __rcu **ptr);
/* Append queue statistics (per-CPU @cpu_q or single @q) plus queue
 * length @qlen to the dump @d.
 */
int gnet_stats_copy_queue(struct gnet_dump *d,
			  struct gnet_stats_queue __percpu *cpu_q,
			  struct gnet_stats_queue *q, __u32 qlen);
/* Aggregate queue statistics into @qstats without touching a dump. */
void __gnet_stats_copy_queue(struct gnet_stats_queue *qstats,
			     const struct gnet_stats_queue __percpu *cpu_q,
			     const struct gnet_stats_queue *q, __u32 qlen);
/* Append @len bytes of application-specific statistics @st to the dump. */
int gnet_stats_copy_app(struct gnet_dump *d, void *st, int len);

/* Finalize the dump started by gnet_stats_start_copy*(); closes nesting
 * and emits any accumulated compat attributes.
 * NOTE(review): compat emission inferred from struct gnet_dump — confirm.
 */
int gnet_stats_finish_copy(struct gnet_dump *d);
| 62 | |
/* Create a rate estimator fed by @bstats/@cpu_bstats and store it in
 * @rate_est.  @opt carries the netlink-supplied estimator parameters;
 * @lock and @running guard reads of the source counters.
 * Returns 0 on success or a negative error code (implementation not
 * visible here).
 */
int gen_new_estimator(struct gnet_stats_basic_packed *bstats,
		      struct gnet_stats_basic_cpu __percpu *cpu_bstats,
		      struct net_rate_estimator __rcu **rate_est,
		      spinlock_t *lock,
		      seqcount_t *running, struct nlattr *opt);
/* Tear down the estimator referenced by @ptr (RCU-protected pointer). */
void gen_kill_estimator(struct net_rate_estimator __rcu **ptr);
/* Replace an existing estimator at @ptr with one built from the new
 * parameters in @opt; same argument semantics as gen_new_estimator().
 */
int gen_replace_estimator(struct gnet_stats_basic_packed *bstats,
			  struct gnet_stats_basic_cpu __percpu *cpu_bstats,
			  struct net_rate_estimator __rcu **ptr,
			  spinlock_t *lock,
			  seqcount_t *running, struct nlattr *opt);
/* Return true if an estimator is currently installed at @ptr. */
bool gen_estimator_active(struct net_rate_estimator __rcu **ptr);
/* Copy the current estimator reading into @sample; returns false when no
 * estimator is installed (presumably — confirm in gen_estimator.c).
 */
bool gen_estimator_read(struct net_rate_estimator __rcu **ptr,
			struct gnet_stats_rate_est64 *sample);
| 77 | #endif |