/* SPDX-License-Identifier: GPL-2.0 */
/* Interface for implementing AF_XDP zero-copy support in drivers.
 * Copyright(c) 2020 Intel Corporation.
 */

#ifndef _LINUX_XDP_SOCK_DRV_H
#define _LINUX_XDP_SOCK_DRV_H

#include <net/xdp_sock.h>
#include <net/xsk_buff_pool.h>

#ifdef CONFIG_XDP_SOCKETS

void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, struct xdp_desc *desc,
				   u32 max);
void xsk_tx_release(struct xsk_buff_pool *pool);
struct xsk_buff_pool *xsk_get_pool_from_qid(struct net_device *dev,
					    u16 queue_id);
void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);
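
/* Illustrative sketch only (not part of this interface): a driver's zero-copy
 * TX path typically peeks descriptors from the pool, resolves and syncs their
 * DMA addresses, posts them to hardware, and later reports completions back.
 * The mydrv_*() helpers and the budget/ring/completed_frames variables below
 * are hypothetical placeholders, not kernel symbols.
 *
 *	struct xdp_desc desc;
 *	dma_addr_t dma;
 *
 *	while (budget-- && xsk_tx_peek_desc(pool, &desc)) {
 *		dma = xsk_buff_raw_get_dma(pool, desc.addr);
 *		xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
 *		mydrv_post_tx_descriptor(ring, dma, desc.len);
 *	}
 *	xsk_tx_release(pool);
 *	...
 *	xsk_tx_completed(pool, completed_frames);
 */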

static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return XDP_PACKET_HEADROOM + pool->headroom;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return pool->chunk_size;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
	xp_set_rxq_info(pool, rxq);
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
	xp_dma_unmap(pool, attrs);
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	struct xdp_umem *umem = pool->umem;

	return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
}

static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_dma(xskb);
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_frame_dma(xskb);
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return xp_alloc(pool);
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return xp_can_alloc(pool, count);
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	xp_free(xskb);
}

static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return xp_raw_get_dma(pool, addr);
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return xp_raw_get_data(pool, addr);
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	if (!pool->dma_need_sync)
		return;

	xp_dma_sync_for_cpu(xskb);
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
	xp_dma_sync_for_device(pool, dma, size);
}
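
/* Illustrative sketch only (not part of this interface): a driver's zero-copy
 * RX path typically allocates a buffer from the pool, hands its DMA address to
 * hardware, and on completion syncs it for the CPU before running the XDP
 * program. The mydrv_*() helper and the ring/frame_len/prog variables below
 * are hypothetical placeholders, not kernel symbols; error handling is
 * omitted.
 *
 *	struct xdp_buff *xdp;
 *
 *	xdp = xsk_buff_alloc(pool);
 *	if (xdp)
 *		mydrv_post_rx_descriptor(ring, xsk_buff_xdp_get_dma(xdp));
 *	...
 *	xdp->data_end = xdp->data + frame_len;
 *	xsk_buff_dma_sync_for_cpu(xdp, pool);
 *	act = bpf_prog_run_xdp(prog, xdp);
 *	if (act == XDP_DROP)
 *		xsk_buff_free(xdp);
 */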

#else

static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
{
}

static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
				    struct xdp_desc *desc)
{
	return false;
}

static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, struct xdp_desc *desc,
						 u32 max)
{
	return 0;
}

static inline void xsk_tx_release(struct xsk_buff_pool *pool)
{
}

static inline struct xsk_buff_pool *
xsk_get_pool_from_qid(struct net_device *dev, u16 queue_id)
{
	return NULL;
}

static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
{
	return false;
}

static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return NULL;
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return false;
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
}

static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return 0;
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return NULL;
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
}

#endif /* CONFIG_XDP_SOCKETS */

#endif /* _LINUX_XDP_SOCK_DRV_H */