Searched refs:xsk (Results 1 - 25 of 41) sorted by last modified time


/linux-master/net/xdp/
xsk_buff_pool.c
9 #include "xsk.h"
149 bpf.xsk.pool = NULL;
150 bpf.xsk.queue_id = pool->queue_id;
215 bpf.xsk.pool = pool;
216 bpf.xsk.queue_id = queue_id;
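The xsk_buff_pool.c hits (bpf.xsk.pool and bpf.xsk.queue_id being written, with a NULL pool on the teardown path) are the core side of the XDP_SETUP_XSK_POOL handshake: the buffer-pool code fills a struct netdev_bpf request and hands it to the driver's ndo_bpf callback. A minimal sketch of that pattern, assuming the standard netdev_bpf layout; the function name below is illustrative, not the kernel's:

#include <linux/netdevice.h>
#include <net/xsk_buff_pool.h>

/* Illustrative helper, not a kernel function: ask a driver to attach
 * (pool != NULL) or detach (pool == NULL) an AF_XDP buffer pool on one
 * queue, mirroring what net/xdp/xsk_buff_pool.c does around the hits above.
 */
static int xsk_pool_notify_driver(struct net_device *netdev,
				  struct xsk_buff_pool *pool, u16 queue_id)
{
	struct netdev_bpf bpf = {};

	if (!netdev->netdev_ops->ndo_bpf)
		return -EOPNOTSUPP;

	bpf.command = XDP_SETUP_XSK_POOL;
	bpf.xsk.pool = pool;		/* NULL means "unbind this queue" */
	bpf.xsk.queue_id = queue_id;

	return netdev->netdev_ops->ndo_bpf(netdev, &bpf);
}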
/linux-master/drivers/net/ethernet/stmicro/stmmac/
stmmac_main.c
6733 return stmmac_xdp_setup_pool(priv, bpf->xsk.pool,
6734 bpf->xsk.queue_id);
/linux-master/drivers/net/ethernet/mellanox/mlx5/core/
en_rx.c
57 #include "en/xsk/rx.h"
2683 int mlx5e_rq_set_handlers(struct mlx5e_rq *rq, struct mlx5e_params *params, bool xsk) argument
2691 rq->mpwqe.skb_from_cqe_mpwrq = xsk ?
2715 rq->wqe.skb_from_cqe = xsk ?
en_stats.c
2435 (NUM_XSKRQ_STATS * max_nch * priv->xsk.ever_used) +
2436 (NUM_XSKSQ_STATS * max_nch * priv->xsk.ever_used);
2441 bool is_xsk = priv->xsk.ever_used;
2474 bool is_xsk = priv->xsk.ever_used;
en_main.c
63 #include "en/xsk/pool.h"
64 #include "en/xsk/setup.h"
65 #include "en/xsk/rx.h"
66 #include "en/xsk/tx.h"
798 struct mlx5e_xsk_param *xsk,
818 rq->buff.headroom = mlx5e_get_rq_headroom(mdev, params, xsk);
838 rq->mpwqe.page_shift = mlx5e_mpwrq_page_shift(mdev, xsk);
839 rq->mpwqe.umr_mode = mlx5e_mpwrq_umr_mode(mdev, xsk);
851 mlx5e_mpwqe_get_log_rq_size(mdev, params, xsk);
853 if (!mlx5e_rx_mpwqe_is_linear_skb(mdev, params, xsk)
797 mlx5e_alloc_rq(struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, struct mlx5e_rq_param *rqp, int node, struct mlx5e_rq *rq) argument
1270 mlx5e_open_rq(struct mlx5e_params *params, struct mlx5e_rq_param *param, struct mlx5e_xsk_param *xsk, int node, u16 q_counter, struct mlx5e_rq *rq) argument
2637 struct mlx5e_xsk_param xsk; local
4403 struct mlx5e_xsk_param xsk; local
5160 mlx5e_build_nic_params(struct mlx5e_priv *priv, struct mlx5e_xsk *xsk, u16 mtu) argument
[all...]
en.h
327 struct mlx5e_xsk *xsk; member in struct:mlx5e_params
352 MLX5E_RQ_STATE_XSK, /* set to indicate an xsk rq */
613 int mlx5e_rq_set_handlers(struct mlx5e_rq *rq, struct mlx5e_params *params, bool xsk);
949 struct mlx5e_xsk xsk; member in struct:mlx5e_priv
1041 struct mlx5e_xsk_param *xsk, int node, u16 q_counter,
1237 void mlx5e_build_nic_params(struct mlx5e_priv *priv, struct mlx5e_xsk *xsk, u16 mtu);
/linux-master/drivers/net/ethernet/intel/igc/
igc_main.c
6562 return igc_xdp_setup_pool(adapter, bpf->xsk.pool,
6563 bpf->xsk.queue_id);
/linux-master/drivers/net/ethernet/freescale/dpaa2/
dpaa2-xsk.c
50 xdp_buff = swa->xsk.xdp_buff;
365 swa->xsk.sgt_size = sgt_buf_size;
dpaa2-eth.c
300 xdp_buff = swa->xsk.xdp_buff;
1191 dma_unmap_single(dev, fd_addr, swa->xsk.sgt_size,
1728 swa->xsk.xdp_buff = xdp_buffs[i];
2799 return dpaa2_xsk_setup_pool(dev, xdp->xsk.pool, xdp->xsk.queue_id);
/linux-master/drivers/net/ethernet/engleder/
tsnep_main.c
1755 /* keep pages to prevent allocation failures when xsk is
2284 return tsnep_xdp_setup_pool(adapter, bpf->xsk.pool,
2285 bpf->xsk.queue_id);
/linux-master/tools/testing/selftests/bpf/
xskxceiver.c
46 * then remove xsk sockets from queue 0 on both veth interfaces and
102 #include "xsk.h"
165 static void gen_eth_hdr(struct xsk_socket_info *xsk, struct ethhdr *eth_hdr) argument
167 memcpy(eth_hdr->h_dest, xsk->dst_mac, ETH_ALEN);
168 memcpy(eth_hdr->h_source, xsk->src_mac, ETH_ALEN);
233 static void enable_busy_poll(struct xsk_socket_info *xsk) argument
238 if (setsockopt(xsk_socket__fd(xsk->xsk), SOL_SOCKET, SO_PREFER_BUSY_POLL,
243 if (setsockopt(xsk_socket__fd(xsk->xsk), SOL_SOCKE
253 __xsk_configure_socket(struct xsk_socket_info *xsk, struct xsk_umem_info *umem, struct ifobject *ifobject, bool shared) argument
278 struct xsk_socket_info *xsk; local
803 pkt_generate(struct xsk_socket_info *xsk, struct xsk_umem_info *umem, u64 addr, u32 len, u32 pkt_nb, u32 bytes_written) argument
1036 kick_tx(struct xsk_socket_info *xsk) argument
1050 kick_rx(struct xsk_socket_info *xsk) argument
1061 complete_pkts(struct xsk_socket_info *xsk, int batch_size) argument
1091 __receive_pkts(struct test_spec *test, struct xsk_socket_info *xsk) argument
1211 all_packets_received(struct test_spec *test, struct xsk_socket_info *xsk, u32 sock_num, unsigned long *bitmap) argument
1234 struct xsk_socket_info *xsk; local
1268 __send_pkts(struct ifobject *ifobject, struct xsk_socket_info *xsk, bool timeout) argument
1392 wait_for_tx_completion(struct xsk_socket_info *xsk) argument
1456 get_xsk_stats(struct xsk_socket *xsk, struct xdp_statistics *stats) argument
1481 struct xsk_socket *xsk = ifobject->xsk->xsk; local
1508 struct xsk_socket *xsk = ifobject->xsk->xsk; local
1529 struct xsk_socket *xsk = ifobject->xsk->xsk; local
1550 struct xsk_socket *xsk = ifobject->xsk->xsk; local
[all...]
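The enable_busy_poll() hits show the selftest opting an AF_XDP socket into preferred busy polling via setsockopt() on the socket fd returned by xsk_socket__fd(). A minimal sketch of that setup, assuming headers new enough to define SO_PREFER_BUSY_POLL and SO_BUSY_POLL_BUDGET; the timeout and budget values here are illustrative, not the ones xskxceiver uses:

#include <sys/socket.h>
#include "xsk.h"	/* selftests' AF_XDP helpers: xsk_socket__fd() */

static int setup_busy_poll(struct xsk_socket *xsk)
{
	int fd = xsk_socket__fd(xsk);
	int opt;

	opt = 1;	/* prefer busy polling over interrupt-driven napi */
	if (setsockopt(fd, SOL_SOCKET, SO_PREFER_BUSY_POLL, &opt, sizeof(opt)) < 0)
		return -1;

	opt = 20;	/* busy-poll for up to 20 us per syscall */
	if (setsockopt(fd, SOL_SOCKET, SO_BUSY_POLL, &opt, sizeof(opt)) < 0)
		return -1;

	opt = 64;	/* packet budget per busy-poll iteration */
	if (setsockopt(fd, SOL_SOCKET, SO_BUSY_POLL_BUDGET, &opt, sizeof(opt)) < 0)
		return -1;

	return 0;
}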
xskxceiver.h
90 struct xsk_socket *xsk; member in struct:xsk_socket_info
130 struct xsk_socket_info *xsk; member in struct:ifobject
xdp_hw_metadata.c
21 #include "xsk.h"
48 struct xsk { struct
60 struct xsk *rx_xsk;
70 static int open_xsk(int ifindex, struct xsk *xsk, __u32 queue_id) argument
90 xsk->umem_area = mmap(NULL, UMEM_SIZE, PROT_READ | PROT_WRITE, mmap_flags, -1, 0);
91 if (xsk->umem_area == MAP_FAILED)
94 ret = xsk_umem__create(&xsk->umem,
95 xsk->umem_area, UMEM_SIZE,
96 &xsk
132 close_xsk(struct xsk *xsk) argument
141 refill_rx(struct xsk *xsk, __u64 addr) argument
152 kick_tx(struct xsk *xsk) argument
157 kick_rx(struct xsk *xsk) argument
282 complete_tx(struct xsk *xsk, clockid_t clock_id) argument
324 ping_pong(struct xsk *xsk, void *rx_packet, clockid_t clock_id) argument
453 struct xsk *xsk = &rx_xsk[i]; local
[all...]
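The open_xsk() hits walk the usual AF_XDP bring-up: mmap an anonymous UMEM area, register it with xsk_umem__create(), then bind a socket to one queue with xsk_socket__create(). The sketch below shows that sequence using the classic libbpf/libxdp form of the helpers (interface name rather than ifindex); the selftests' private xsk.h copy and its exact signatures may differ, and struct xsk_ctx, open_xsk_sketch() and the sizes are illustrative:

#include <sys/mman.h>
#include <linux/if_xdp.h>
#include "xsk.h"	/* xsk_umem__create(), xsk_socket__create(), ring types */

#define NUM_FRAMES	4096
#define FRAME_SIZE	XSK_UMEM__DEFAULT_FRAME_SIZE
#define UMEM_SIZE	(NUM_FRAMES * FRAME_SIZE)

struct xsk_ctx {
	void *umem_area;
	struct xsk_umem *umem;
	struct xsk_ring_prod fill;
	struct xsk_ring_cons comp;
	struct xsk_ring_cons rx;
	struct xsk_ring_prod tx;
	struct xsk_socket *socket;
};

static int open_xsk_sketch(const char *ifname, __u32 queue_id, struct xsk_ctx *c)
{
	struct xsk_socket_config scfg = {
		.rx_size = XSK_RING_CONS__DEFAULT_NUM_DESCS,
		.tx_size = XSK_RING_PROD__DEFAULT_NUM_DESCS,
		.bind_flags = XDP_COPY,	/* copy mode works on any driver */
	};
	int ret;

	c->umem_area = mmap(NULL, UMEM_SIZE, PROT_READ | PROT_WRITE,
			    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	if (c->umem_area == MAP_FAILED)
		return -1;

	/* NULL config: default fill/completion ring sizes and frame size */
	ret = xsk_umem__create(&c->umem, c->umem_area, UMEM_SIZE,
			       &c->fill, &c->comp, NULL);
	if (ret)
		goto out_unmap;

	ret = xsk_socket__create(&c->socket, ifname, queue_id, c->umem,
				 &c->rx, &c->tx, &scfg);
	if (ret)
		goto out_umem;

	return 0;

out_umem:
	xsk_umem__delete(c->umem);
out_unmap:
	munmap(c->umem_area, UMEM_SIZE);
	return ret;
}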
Makefile
311 $(OUTPUT)/xsk.o: $(BPFOBJ)
637 xsk.c \
701 $(OUTPUT)/xskxceiver: $(EXTRA_SRC) xskxceiver.c xskxceiver.h $(OUTPUT)/network_helpers.o $(OUTPUT)/xsk.o $(OUTPUT)/xsk_xdp_progs.skel.h $(BPFOBJ) | $(OUTPUT)
705 $(OUTPUT)/xdp_hw_metadata: xdp_hw_metadata.c $(OUTPUT)/network_helpers.o $(OUTPUT)/xsk.o $(OUTPUT)/xdp_hw_metadata.skel.h | $(OUTPUT)
/linux-master/tools/testing/selftests/bpf/prog_tests/
xdp_metadata.c
7 #include "xsk.h"
49 struct xsk { struct
59 static int open_xsk(int ifindex, struct xsk *xsk) argument
79 xsk->umem_area = mmap(NULL, UMEM_SIZE, PROT_READ | PROT_WRITE, mmap_flags, -1, 0);
80 if (!ASSERT_NEQ(xsk->umem_area, MAP_FAILED, "mmap"))
83 ret = xsk_umem__create(&xsk->umem,
84 xsk->umem_area, UMEM_SIZE,
85 &xsk->fill,
86 &xsk
126 close_xsk(struct xsk *xsk) argument
152 generate_packet(struct xsk *xsk, __u16 dst_port) argument
243 complete_tx(struct xsk *xsk) argument
264 refill_rx(struct xsk *xsk, __u64 addr) argument
275 verify_xsk_metadata(struct xsk *xsk, bool sent_from_af_xdp) argument
[all...]
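refill_rx() in both metadata tests follows the standard fill-ring protocol: reserve a producer slot, write the UMEM address of the now-free frame into it, and submit so the kernel can reuse the frame for future RX. A single-slot sketch of that step, assuming the xsk_ring_prod__* accessors from the selftests' xsk.h; the function name is illustrative:

#include <linux/types.h>
#include "xsk.h"	/* xsk_ring_prod__reserve/fill_addr/submit */

static void refill_one(struct xsk_ring_prod *fill, __u64 addr)
{
	__u32 idx;

	/* reserve one fill-ring slot; give up silently if the ring is full */
	if (xsk_ring_prod__reserve(fill, 1, &idx) == 1) {
		*xsk_ring_prod__fill_addr(fill, idx) = addr;
		xsk_ring_prod__submit(fill, 1);
	}
}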
/linux-master/include/linux/
netdevice.h
965 } xsk; member in union:netdev_bpf::__anon2542
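This anonymous xsk member of the netdev_bpf union is what all of the driver hits in these results dereference: each driver's ndo_bpf callback switches on bpf->command and, for XDP_SETUP_XSK_POOL, forwards bpf->xsk.pool and bpf->xsk.queue_id to its own pool-setup helper (ice_xsk_pool_setup(), igc_xdp_setup_pool(), and so on). A generic sketch of that dispatch, with my_drv_* as placeholder names rather than any real driver:

#include <linux/netdevice.h>
#include <net/xsk_buff_pool.h>

/* Placeholder helpers standing in for a driver's real implementations. */
static int my_drv_setup_prog(struct net_device *dev, struct bpf_prog *prog)
{
	return 0;
}

static int my_drv_xsk_pool_setup(struct net_device *dev,
				 struct xsk_buff_pool *pool, u16 queue_id)
{
	return 0;	/* pool == NULL disables zero-copy on queue_id */
}

/* Sketch of the ndo_bpf dispatch shared by the drivers listed here. */
static int my_drv_bpf(struct net_device *dev, struct netdev_bpf *bpf)
{
	switch (bpf->command) {
	case XDP_SETUP_PROG:
		return my_drv_setup_prog(dev, bpf->prog);
	case XDP_SETUP_XSK_POOL:
		return my_drv_xsk_pool_setup(dev, bpf->xsk.pool,
					     bpf->xsk.queue_id);
	default:
		return -EINVAL;
	}
}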
/linux-master/drivers/net/ethernet/netronome/nfp/
nfp_net_common.c
2231 return nfp_net_xsk_setup_pool(netdev, xdp->xsk.pool,
2232 xdp->xsk.queue_id);
/linux-master/drivers/net/ethernet/mellanox/mlx5/core/en/
xdp.c
40 int mlx5e_xdp_max_mtu(struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
42 int hr = mlx5e_get_linear_rq_headroom(params, xsk);
params.c
20 u8 mlx5e_mpwrq_page_shift(struct mlx5_core_dev *mdev, struct mlx5e_xsk_param *xsk) argument
22 u8 req_page_shift = xsk ? order_base_2(xsk->chunk_size) : PAGE_SHIFT;
26 if (WARN_ON_ONCE(!xsk && req_page_shift < min_page_shift))
33 mlx5e_mpwrq_umr_mode(struct mlx5_core_dev *mdev, struct mlx5e_xsk_param *xsk) argument
44 u8 page_shift = mlx5e_mpwrq_page_shift(mdev, xsk);
45 bool unaligned = xsk ? xsk->unaligned : false;
48 if (xsk) {
49 oversized = xsk
219 mlx5e_get_linear_rq_headroom(struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
236 mlx5e_rx_get_linear_sz_xsk(struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
256 mlx5e_rx_get_linear_stride_sz(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, bool mpwqe) argument
284 mlx5e_mpwqe_log_pkts_per_wqe(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
296 mlx5e_rx_is_linear_skb(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
342 mlx5e_verify_params_rx_mpwqe_strides(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
356 mlx5e_rx_mpwqe_is_linear_skb(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
382 mlx5e_mpwqe_get_log_rq_size(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
431 mlx5e_mpwqe_get_log_stride_size(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
445 mlx5e_mpwqe_get_log_num_strides(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
467 mlx5e_get_rq_headroom(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
544 mlx5e_mpwrq_validate_xsk(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
679 mlx5e_build_rq_frags_info(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, struct mlx5e_rq_frags_info *info, u32 *xdp_frag_size) argument
826 mlx5e_shampo_get_log_cq_size(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
843 mlx5e_build_rx_cq_param(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, struct mlx5e_cq_param *param) argument
889 mlx5e_build_rq_param(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, struct mlx5e_rq_param *param) argument
1090 mlx5e_mpwrq_total_umr_wqebbs(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
1133 struct mlx5e_xsk_param xsk = { local
1218 mlx5e_build_xdpsq_param(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, struct mlx5e_sq_param *param) argument
[all...]
params.h
59 u8 mlx5e_mpwrq_page_shift(struct mlx5_core_dev *mdev, struct mlx5e_xsk_param *xsk);
61 mlx5e_mpwrq_umr_mode(struct mlx5_core_dev *mdev, struct mlx5e_xsk_param *xsk);
83 struct mlx5e_xsk_param *xsk);
89 struct mlx5e_xsk_param *xsk);
92 struct mlx5e_xsk_param *xsk);
95 struct mlx5e_xsk_param *xsk);
98 struct mlx5e_xsk_param *xsk);
113 struct mlx5e_xsk_param *xsk);
116 struct mlx5e_xsk_param *xsk);
120 struct mlx5e_xsk_param *xsk);
[all...]
/linux-master/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
setup.c
12 struct mlx5e_xsk_param *xsk)
14 if (!mlx5e_rx_is_linear_skb(mdev, params, xsk)) {
28 struct mlx5e_xsk_param *xsk,
32 * and xsk->chunk_size is limited to 65535 bytes.
34 if ((size_t)xsk->chunk_size > PAGE_SIZE || xsk->chunk_size < MLX5E_MIN_XSK_CHUNK_SIZE) {
35 mlx5_core_err(mdev, "XSK chunk size %u out of bounds [%u, %lu]\n", xsk->chunk_size,
45 return !mlx5e_mpwrq_validate_xsk(mdev, params, xsk);
47 return !mlx5e_legacy_rq_validate_xsk(mdev, params, xsk);
53 struct mlx5e_xsk_param *xsk,
10 mlx5e_legacy_rq_validate_xsk(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk) argument
27 mlx5e_validate_xsk_param(struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, struct mlx5_core_dev *mdev) argument
51 mlx5e_build_xsk_cparam(struct mlx5_core_dev *mdev, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, struct mlx5e_channel_param *cparam) argument
60 mlx5e_init_xsk_rq(struct mlx5e_channel *c, struct mlx5e_params *params, struct xsk_buff_pool *pool, struct mlx5e_xsk_param *xsk, struct mlx5e_rq *rq) argument
93 mlx5e_open_xsk_rq(struct mlx5e_channel *c, struct mlx5e_params *params, struct mlx5e_rq_param *rq_params, struct xsk_buff_pool *pool, struct mlx5e_xsk_param *xsk) argument
113 mlx5e_open_xsk(struct mlx5e_priv *priv, struct mlx5e_params *params, struct mlx5e_xsk_param *xsk, struct xsk_buff_pool *pool, struct mlx5e_channel *c) argument
[all...]
/linux-master/drivers/net/ethernet/intel/ixgbe/
ixgbe_main.c
10346 return ixgbe_xsk_pool_setup(adapter, xdp->xsk.pool,
10347 xdp->xsk.queue_id);
/linux-master/drivers/net/ethernet/intel/ice/
ice_main.c
3074 return ice_xsk_pool_setup(vsi, xdp->xsk.pool,
3075 xdp->xsk.queue_id);
/linux-master/drivers/net/ethernet/intel/i40e/
i40e_main.c
13623 return i40e_xsk_pool_setup(vsi, xdp->xsk.pool,
13624 xdp->xsk.queue_id);
/linux-master/drivers/net/ethernet/google/gve/
gve_main.c
1557 dev_err(&priv->pdev->dev, "xsk pool invalid qid %d", qid);
1562 dev_err(&priv->pdev->dev, "xsk pool frame_len too small");
1724 if (xdp->xsk.pool)
1725 return gve_xsk_pool_enable(dev, xdp->xsk.pool, xdp->xsk.queue_id);
1727 return gve_xsk_pool_disable(dev, xdp->xsk.queue_id);

Completed in 687 milliseconds
