Searched refs:xdpf (Results 1 - 25 of 80) sorted by relevance

/linux-master/net/core/
xdp.c
411 void xdp_return_frame(struct xdp_frame *xdpf) argument
416 if (likely(!xdp_frame_has_frags(xdpf)))
419 sinfo = xdp_get_shared_info_from_frame(xdpf);
423 __xdp_return(page_address(page), &xdpf->mem, false, NULL);
426 __xdp_return(xdpf->data, &xdpf->mem, false, NULL);
430 void xdp_return_frame_rx_napi(struct xdp_frame *xdpf) argument
435 if (likely(!xdp_frame_has_frags(xdpf)))
438 sinfo = xdp_get_shared_info_from_frame(xdpf);
442 __xdp_return(page_address(page), &xdpf
473 xdp_return_frame_bulk(struct xdp_frame *xdpf, struct xdp_frame_bulk *bq) argument
549 struct xdp_frame *xdpf; local
601 __xdp_build_skb_from_frame(struct xdp_frame *xdpf, struct sk_buff *skb, struct net_device *dev) argument
657 xdp_build_skb_from_frame(struct xdp_frame *xdpf, struct net_device *dev) argument
672 xdpf_clone(struct xdp_frame *xdpf) argument
[all...]
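
A minimal sketch of how a driver's TX-completion path typically hands frames back through the bulk variant listed above (xdp_return_frame_bulk()/xdp_flush_frame_bulk()); struct my_tx_ring and struct my_tx_buf are hypothetical driver types, only the xdp_* and rcu_* calls are real kernel API:

#include <net/xdp.h>

static void my_clean_xdp_tx(struct my_tx_ring *ring, unsigned int done)
{
	struct xdp_frame_bulk bq;
	unsigned int i;

	xdp_frame_bulk_init(&bq);
	rcu_read_lock();	/* the bulk path looks up the memory allocator under RCU */

	for (i = 0; i < done; i++) {
		struct my_tx_buf *buf = &ring->buf[(ring->next_to_clean + i) % ring->size];

		if (!buf->xdpf)	/* slot held an skb, not an XDP frame */
			continue;

		/* Frames backed by the same page_pool are batched and
		 * released together instead of one at a time.
		 */
		xdp_return_frame_bulk(buf->xdpf, &bq);
		buf->xdpf = NULL;
	}

	xdp_flush_frame_bulk(&bq);
	rcu_read_unlock();
}
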
/linux-master/drivers/net/ethernet/amazon/ena/
ena_xdp.h
40 struct xdp_frame *xdpf,
87 struct xdp_frame *xdpf; local
96 xdpf = xdp_convert_buff_to_frame(xdp);
97 if (unlikely(!xdpf)) {
110 if (ena_xdp_xmit_frame(xdp_ring, rx_ring->adapter, xdpf,
112 xdp_return_frame(xdpf);
ena_xdp.c
13 if (likely(tx_info->xdpf))
21 struct xdp_frame *xdpf,
31 tx_info->xdpf = xdpf;
32 data = tx_info->xdpf->data;
33 size = tx_info->xdpf->len;
77 struct xdp_frame *xdpf,
90 rc = ena_xdp_tx_map_frame(tx_ring, tx_info, xdpf, &ena_tx_ctx);
101 xdpf->len);
116 tx_info->xdpf
19 ena_xdp_tx_map_frame(struct ena_ring *tx_ring, struct ena_tx_buffer *tx_info, struct xdp_frame *xdpf, struct ena_com_tx_ctx *ena_tx_ctx) argument
75 ena_xdp_xmit_frame(struct ena_ring *tx_ring, struct ena_adapter *adapter, struct xdp_frame *xdpf, int flags) argument
381 struct xdp_frame *xdpf; local
[all...]
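
The ena hits above show the canonical XDP_TX pattern: detach the xdp_buff into an xdp_frame, hand it to the driver's transmit helper, and return the frame if that fails. A hedged sketch of the same flow, with my_xmit_xdp_frame() standing in for the driver-specific part (ena_xdp_xmit_frame() here):

static int my_xdp_tx(struct my_ring *tx_ring, struct xdp_buff *xdp)
{
	struct xdp_frame *xdpf;

	/* From here on the packet is owned as an xdp_frame, detached
	 * from the RX descriptor that produced the xdp_buff.
	 */
	xdpf = xdp_convert_buff_to_frame(xdp);
	if (unlikely(!xdpf))
		return -EOVERFLOW;	/* e.g. not enough headroom for the frame metadata */

	if (my_xmit_xdp_frame(tx_ring, xdpf)) {
		/* Ring full or mapping failed: give the buffer back to
		 * its memory model (page_pool, page frag, ...).
		 */
		xdp_return_frame(xdpf);
		return -ENOSPC;
	}

	return 0;
}
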
/linux-master/kernel/bpf/
cpumap.c
190 struct xdp_frame *xdpf = frames[i]; local
194 rxq.dev = xdpf->dev_rx;
195 rxq.mem = xdpf->mem;
198 xdp_convert_frame_to_buff(xdpf, &xdp);
203 err = xdp_update_frame_from_buff(&xdp, xdpf);
205 xdp_return_frame(xdpf);
208 frames[nframes++] = xdpf;
213 err = xdp_do_redirect(xdpf->dev_rx, &xdp,
216 xdp_return_frame(xdpf);
226 xdp_return_frame(xdpf);
344 struct xdp_frame *xdpf = frames[i]; local
685 struct xdp_frame *xdpf = bq->q[i]; local
707 bq_enqueue(struct bpf_cpu_map_entry *rcpu, struct xdp_frame *xdpf) argument
730 cpu_map_enqueue(struct bpf_cpu_map_entry *rcpu, struct xdp_frame *xdpf, struct net_device *dev_rx) argument
[all...]
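
cpumap stores redirected frames and later runs a BPF program on them in kthread context, which requires the frame -> buff -> frame round trip seen in the hits above. A simplified sketch (the real code also handles XDP_REDIRECT; here anything but XDP_PASS is dropped):

static struct xdp_frame *my_run_prog_on_frame(struct bpf_prog *prog,
					      struct xdp_frame *xdpf,
					      struct xdp_rxq_info *rxq)
{
	struct xdp_buff xdp;
	u32 act;

	xdp.rxq = rxq;				/* the buff needs an rxq context */
	xdp_convert_frame_to_buff(xdpf, &xdp);	/* rebuild data/data_end/data_meta */

	act = bpf_prog_run_xdp(prog, &xdp);
	if (act != XDP_PASS) {
		xdp_return_frame(xdpf);
		return NULL;
	}

	/* The program may have moved data/data_end (head/tail adjust);
	 * write the new layout back into the stored frame.
	 */
	if (xdp_update_frame_from_buff(&xdp, xdpf)) {
		xdp_return_frame(xdpf);
		return NULL;
	}

	return xdpf;
}
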
devmap.c
332 struct xdp_frame *xdpf = frames[i]; local
336 xdp_convert_frame_to_buff(xdpf, &xdp);
342 err = xdp_update_frame_from_buff(&xdp, xdpf);
344 xdp_return_frame_rx_napi(xdpf);
346 frames[nframes++] = xdpf;
355 xdp_return_frame_rx_napi(xdpf);
374 struct xdp_frame *xdpf = bq->q[i]; local
376 prefetch(xdpf);
453 static void bq_enqueue(struct net_device *dev, struct xdp_frame *xdpf, argument
475 bq->q[bq->count++] = xdpf;
478 __xdp_enqueue(struct net_device *dev, struct xdp_frame *xdpf, struct net_device *dev_rx, struct bpf_prog *xdp_prog) argument
530 dev_xdp_enqueue(struct net_device *dev, struct xdp_frame *xdpf, struct net_device *dev_rx) argument
536 dev_map_enqueue(struct bpf_dtab_netdev *dst, struct xdp_frame *xdpf, struct net_device *dev_rx) argument
544 is_valid_dst(struct bpf_dtab_netdev *obj, struct xdp_frame *xdpf) argument
562 dev_map_enqueue_clone(struct bpf_dtab_netdev *obj, struct net_device *dev_rx, struct xdp_frame *xdpf) argument
602 dev_map_enqueue_multi(struct xdp_frame *xdpf, struct net_device *dev_rx, struct bpf_map *map, bool exclude_ingress) argument
[all...]
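
The bq_enqueue()/bq->q[bq->count++] hits above are the per-destination bulk queue: frames are collected and pushed to the target device's ndo_xdp_xmit() in batches, and whatever the driver did not accept is returned. A sketch of that idea with illustrative names (MY_BULK_SIZE, struct my_bulk_queue):

#define MY_BULK_SIZE 16

struct my_bulk_queue {
	struct xdp_frame *q[MY_BULK_SIZE];
	unsigned int count;
};

static void my_bq_flush(struct net_device *dev, struct my_bulk_queue *bq)
{
	int sent;

	if (!bq->count)
		return;

	/* ndo_xdp_xmit() returns how many frames it accepted; the
	 * remainder still belongs to us and must be returned.
	 */
	sent = dev->netdev_ops->ndo_xdp_xmit(dev, bq->count, bq->q,
					     XDP_XMIT_FLUSH);
	if (sent < 0)
		sent = 0;
	while (sent < (int)bq->count)
		xdp_return_frame_rx_napi(bq->q[sent++]);

	bq->count = 0;
}

static void my_bq_enqueue(struct net_device *dev, struct my_bulk_queue *bq,
			  struct xdp_frame *xdpf)
{
	if (bq->count == MY_BULK_SIZE)
		my_bq_flush(dev, bq);
	bq->q[bq->count++] = xdpf;
}
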
/linux-master/drivers/net/vmxnet3/
vmxnet3_xdp.c
116 struct xdp_frame *xdpf,
128 dw2 |= xdpf->len;
132 buf_size = xdpf->len;
143 xdpf->data, buf_size,
149 page = virt_to_page(xdpf->data);
156 tbi->xdpf = xdpf;
202 struct xdp_frame *xdpf)
215 err = vmxnet3_xdp_xmit_frame(adapter, xdpf, tq, false);
254 struct xdp_frame *xdpf; local
115 vmxnet3_xdp_xmit_frame(struct vmxnet3_adapter *adapter, struct xdp_frame *xdpf, struct vmxnet3_tx_queue *tq, bool dma_map) argument
201 vmxnet3_xdp_xmit_back(struct vmxnet3_adapter *adapter, struct xdp_frame *xdpf) argument
[all...]
/linux-master/drivers/net/ethernet/mellanox/mlx5/core/en/
xdp.c
67 struct xdp_frame *xdpf; local
71 xdpf = xdp_convert_buff_to_frame(xdp);
72 if (unlikely(!xdpf))
76 xdptxd->data = xdpf->data;
77 xdptxd->len = xdpf->len;
78 xdptxd->has_frags = xdp_frame_has_frags(xdpf);
99 xdp_return_frame(xdpf);
113 (union mlx5e_xdp_info) { .frame.xdpf = xdpf });
125 dma_addr = page_pool_get_dma_addr(page) + (xdpf
675 struct xdp_frame *xdpf; local
872 struct xdp_frame *xdpf = frames[i]; local
[all...]
/linux-master/include/net/
xdp.h
243 struct sk_buff *__xdp_build_skb_from_frame(struct xdp_frame *xdpf,
246 struct sk_buff *xdp_build_skb_from_frame(struct xdp_frame *xdpf,
249 struct xdp_frame *xdpf_clone(struct xdp_frame *xdpf);
313 void xdp_return_frame(struct xdp_frame *xdpf);
314 void xdp_return_frame_rx_napi(struct xdp_frame *xdpf);
317 void xdp_return_frame_bulk(struct xdp_frame *xdpf,
320 static __always_inline unsigned int xdp_get_frame_len(struct xdp_frame *xdpf) argument
323 unsigned int len = xdpf->len;
325 if (likely(!xdp_frame_has_frags(xdpf)))
328 sinfo = xdp_get_shared_info_from_frame(xdpf);
[all...]
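
The xdp_get_frame_len() hit above is cut off mid-body; for context, a sketch of the whole inline consistent with the lines shown (the fragment bytes are accounted in the shared info's xdp_frags_size, as elsewhere in net/xdp.h):

static __always_inline unsigned int xdp_get_frame_len(struct xdp_frame *xdpf)
{
	struct skb_shared_info *sinfo;
	unsigned int len = xdpf->len;		/* linear part only */

	if (likely(!xdp_frame_has_frags(xdpf)))
		goto out;

	/* Multi-buffer frame: add the bytes held in the fragments. */
	sinfo = xdp_get_shared_info_from_frame(xdpf);
	len += sinfo->xdp_frags_size;
out:
	return len;
}
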
/linux-master/drivers/net/ethernet/sfc/siena/
tx.c
213 struct xdp_frame *xdpf; local
255 xdpf = xdpfs[i];
263 len = xdpf->len;
267 xdpf->data, len,
274 tx_buffer->xdpf = xdpf;
/linux-master/drivers/net/ethernet/microchip/lan966x/
lan966x_xdp.c
62 struct xdp_frame *xdpf = frames[i]; local
65 err = lan966x_fdma_xmit_xdpf(port, xdpf, 0);
lan966x_fdma.c
435 xdp_return_frame_bulk(dcb_buf->data.xdpf, &bq);
710 struct xdp_frame *xdpf; local
732 xdpf = ptr;
734 if (xdpf->headroom < IFH_LEN_BYTES) {
739 ifh = xdpf->data - IFH_LEN_BYTES;
745 xdpf->data - IFH_LEN_BYTES,
746 xdpf->len + IFH_LEN_BYTES,
753 next_dcb_buf->data.xdpf = xdpf;
754 next_dcb_buf->len = xdpf
[all...]
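
lan966x_fdma.c above sends a frame only if its headroom can hold the hardware injection header (IFH), which is then written directly in front of xdpf->data and DMA-mapped together with the payload. A generic sketch of that pattern (MY_HDR_LEN stands in for IFH_LEN_BYTES; the header encoding itself is elided):

#include <linux/dma-mapping.h>
#include <net/xdp.h>

#define MY_HDR_LEN 28

static int my_xmit_with_inline_header(struct device *dev,
				      struct xdp_frame *xdpf,
				      dma_addr_t *dma)
{
	void *hdr;

	/* The headroom in front of xdpf->data must be able to hold the
	 * hardware header; otherwise this frame cannot be sent this way.
	 */
	if (xdpf->headroom < MY_HDR_LEN)
		return -ENOSPC;

	hdr = xdpf->data - MY_HDR_LEN;
	memset(hdr, 0, MY_HDR_LEN);	/* encode port, queue, timestamp, ... here */

	*dma = dma_map_single(dev, xdpf->data - MY_HDR_LEN,
			      xdpf->len + MY_HDR_LEN, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *dma))
		return -ENOMEM;

	return 0;
}
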
/linux-master/drivers/net/ethernet/intel/ixgbe/
ixgbe_txrx_common.h
17 struct xdp_frame *xdpf);
ixgbe_xsk.c
104 struct xdp_frame *xdpf; local
125 xdpf = xdp_convert_buff_to_frame(xdp);
126 if (unlikely(!xdpf))
131 result = ixgbe_xmit_xdp_ring(ring, xdpf);
419 tx_bi->xdpf = NULL;
450 xdp_return_frame(tx_bi->xdpf);
477 if (tx_bi->xdpf)
482 tx_bi->xdpf = NULL;
551 if (tx_bi->xdpf)
556 tx_bi->xdpf
[all...]
/linux-master/drivers/net/ethernet/aquantia/atlantic/
aq_ring.c
320 } else if (buff->xdpf) {
323 self->stats.tx.bytes += xdp_get_frame_len(buff->xdpf);
325 xdp_return_frame_rx_napi(buff->xdpf);
330 buff->xdpf = NULL;
377 struct xdp_frame *xdpf = frames[i]; local
379 if (aq_nic_xmit_xdpf(aq_nic, ring, xdpf) == NETDEV_TX_BUSY)
390 struct xdp_frame *xdpf; local
393 xdpf = xdp_convert_buff_to_frame(xdp);
394 if (unlikely(!xdpf))
397 skb = xdp_build_skb_from_frame(xdpf, de
412 struct xdp_frame *xdpf; local
[all...]
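
The aq_ring.c hits show the other direction: a frame that should go up the stack is wrapped into an skb with xdp_build_skb_from_frame(). A hedged sketch of that path; if skb allocation fails, the frame is still owned by the caller and can be returned:

static struct sk_buff *my_xdp_pass_to_stack(struct net_device *ndev,
					    struct xdp_buff *xdp)
{
	struct xdp_frame *xdpf;
	struct sk_buff *skb;

	xdpf = xdp_convert_buff_to_frame(xdp);
	if (unlikely(!xdpf))
		return NULL;

	/* Builds an skb around the frame's memory (headroom, data, frags). */
	skb = xdp_build_skb_from_frame(xdpf, ndev);
	if (!skb)
		xdp_return_frame(xdpf);	/* skb alloc failed, frame untouched */

	return skb;
}
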
/linux-master/drivers/net/ethernet/sfc/
tx.c
426 struct xdp_frame *xdpf; local
468 xdpf = xdpfs[i];
476 len = xdpf->len;
480 xdpf->data, len,
487 tx_buffer->xdpf = xdpf;
/linux-master/drivers/net/ethernet/fungible/funeth/
funeth_tx.c
500 xdp_return_frame(q->info[reclaim_idx].xdpf);
514 bool fun_xdp_tx(struct funeth_txq *q, struct xdp_frame *xdpf) argument
516 unsigned int idx, nfrags = 1, ndesc = 1, tot_len = xdpf->len;
525 if (unlikely(xdp_frame_has_frags(xdpf))) {
526 si = xdp_get_shared_info_from_frame(xdpf);
527 tot_len = xdp_get_frame_len(xdpf);
539 if (unlikely(fun_map_pkt(q->dma_dev, si, xdpf->data, xdpf->len, dma,
558 q->info[idx].xdpf = xdpf;
[all...]
funeth_txrx.h
101 struct xdp_frame *xdpf; /* associated XDP frame (XDP path) */ member in union:funeth_tx_info::__anon1069
250 bool fun_xdp_tx(struct funeth_txq *q, struct xdp_frame *xdpf);
/linux-master/drivers/net/ethernet/ti/
cpsw_priv.c
55 struct xdp_frame *xdpf; local
62 xdpf = cpsw_handle_to_xdpf(token);
63 xmeta = (void *)xdpf + CPSW_XMETA_OFFSET;
66 xdp_return_frame(xdpf);
1291 int cpsw_xdp_tx_frame(struct cpsw_priv *priv, struct xdp_frame *xdpf, argument
1300 xmeta = (void *)xdpf + CPSW_XMETA_OFFSET;
1307 dma += xdpf->headroom + sizeof(struct xdp_frame);
1308 ret = cpdma_chan_submit_mapped(txch, cpsw_xdpf_to_handle(xdpf),
1309 dma, xdpf->len, port);
1311 if (sizeof(*xmeta) > xdpf
1330 struct xdp_frame *xdpf; local
[all...]
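
cpsw stashes its own per-packet metadata (xmeta) in the page right behind the xdp_frame and computes the payload DMA address as page base + sizeof(struct xdp_frame) + headroom, which assumes the frame sits at the very start of its page_pool page (as cpsw's RX path arranges). A sketch with illustrative names (struct my_xmeta, MY_XMETA_OFFSET):

#include <net/page_pool/helpers.h>
#include <net/xdp.h>

struct my_xmeta {
	struct net_device *ndev;
	int ch;
};

#define MY_XMETA_OFFSET ALIGN(sizeof(struct xdp_frame), sizeof(long))

static int my_stash_xmeta_and_map(struct page *page, struct xdp_frame *xdpf,
				  struct net_device *ndev, int ch,
				  dma_addr_t *dma)
{
	struct my_xmeta *xmeta = (void *)xdpf + MY_XMETA_OFFSET;

	/* The metadata is written into the frame's headroom, so the
	 * headroom must be large enough to hold it.
	 */
	if (sizeof(*xmeta) > xdpf->headroom)
		return -ENOSPC;

	xmeta->ndev = ndev;	/* remembered for the TX-completion handler */
	xmeta->ch = ch;

	/* Payload starts sizeof(*xdpf) + headroom bytes after the page base. */
	*dma = page_pool_get_dma_addr(page) + sizeof(*xdpf) + xdpf->headroom;
	return 0;
}
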
/linux-master/drivers/net/ethernet/freescale/dpaa/
dpaa_eth.c
1717 xdp_return_frame(swbp->xdpf);
2198 struct xdp_frame *new_xdpf, *xdpf = *init_xdpf; local
2205 * enough to store the xdpf backpointer. Use an aligned headroom
2212 if (PTR_IS_ALIGNED(xdpf->data, DPAA_FD_DATA_ALIGNMENT) &&
2213 xdpf->headroom >= priv->tx_headroom) {
2214 xdpf->headroom = priv->tx_headroom;
2219 * store the xdpf backpointer. If the available headroom isn't large
2222 aligned_data = PTR_ALIGN_DOWN(xdpf->data, DPAA_FD_DATA_ALIGNMENT);
2223 data_shift = xdpf->data - aligned_data;
2226 * shifting the data as well as storing the xdpf backpointe
2494 dpaa_xdp_xmit_frame(struct net_device *net_dev, struct xdp_frame *xdpf) argument
2572 struct xdp_frame *xdpf; local
3048 struct xdp_frame *xdpf; local
[all...]
dpaa_eth.h
131 struct xdp_frame *xdpf; member in struct:dpaa_eth_swbp
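
dpaa_eth keeps a software backpointer (struct dpaa_eth_swbp) to the xdp_frame inside the buffer's headroom so that the TX-confirmation handler, which only sees the hardware buffer, can find and return the frame. A simplified sketch of that idea (struct my_swbp and the placement right in front of xdpf->data are illustrative; dpaa itself also realigns the data pointer first):

struct my_swbp {
	struct xdp_frame *xdpf;
};

static int my_prepare_xdp_tx(struct xdp_frame *xdpf, void **buf_start)
{
	struct my_swbp *swbp;

	/* Need room in front of the data for the software backpointer. */
	if (xdpf->headroom < sizeof(*swbp))
		return -ENOSPC;

	*buf_start = xdpf->data - sizeof(*swbp);
	swbp = *buf_start;
	swbp->xdpf = xdpf;
	return 0;
}

static void my_xdp_tx_confirm(void *buf_start)
{
	struct my_swbp *swbp = buf_start;

	/* Transmission is complete; hand the frame's memory back. */
	xdp_return_frame(swbp->xdpf);
}
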
/linux-master/drivers/net/ethernet/qlogic/qede/
qede_fp.c
305 u16 len, struct page *page, struct xdp_frame *xdpf)
332 xdp->xdpf = xdpf;
345 struct xdp_frame *xdpf; local
362 xdpf = frames[i];
364 mapping = dma_map_single(dmadev, xdpf->data, xdpf->len,
369 if (unlikely(qede_xdp_xmit(xdp_tx, mapping, 0, xdpf->len,
370 NULL, xdpf)))
404 struct xdp_frame *xdpf; local
304 qede_xdp_xmit(struct qede_tx_queue *txq, dma_addr_t dma, u16 pad, u16 len, struct page *page, struct xdp_frame *xdpf) argument
[all...]
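
qede_fp.c above is a typical .ndo_xdp_xmit implementation: each redirected frame is DMA-mapped with dma_map_single() and queued, and the function returns the number of frames it accepted (the core frees the rest). A skeleton of that handler, with my_queue_tx()/my_kick_doorbell() as hypothetical stand-ins for the device-specific descriptor write and doorbell:

#include <linux/dma-mapping.h>
#include <net/xdp.h>

static int my_ndo_xdp_xmit(struct net_device *dev, int n,
			   struct xdp_frame **frames, u32 flags)
{
	struct my_priv *priv = netdev_priv(dev);
	struct device *dmadev = priv->dmadev;
	int i, nxmit = 0;

	if (unlikely(flags & ~XDP_XMIT_FLAGS_MASK))
		return -EINVAL;

	for (i = 0; i < n; i++) {
		struct xdp_frame *xdpf = frames[i];
		dma_addr_t mapping;

		/* Redirected frames do not come from our own pool, so
		 * the payload needs a streaming DMA mapping.
		 */
		mapping = dma_map_single(dmadev, xdpf->data, xdpf->len,
					 DMA_TO_DEVICE);
		if (unlikely(dma_mapping_error(dmadev, mapping)))
			break;

		if (unlikely(my_queue_tx(priv, mapping, xdpf))) {
			dma_unmap_single(dmadev, mapping, xdpf->len,
					 DMA_TO_DEVICE);
			break;
		}
		nxmit++;
	}

	if (flags & XDP_XMIT_FLUSH)
		my_kick_doorbell(priv);

	/* Frames [nxmit, n) were not sent; the caller returns them. */
	return nxmit;
}
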
/linux-master/drivers/net/ethernet/socionext/
netsec.c
272 struct xdp_frame *xdpf; member in union:netsec_desc::__anon1624
675 bytes += desc->xdpf->len;
677 xdp_return_frame_rx_napi(desc->xdpf);
679 xdp_return_frame_bulk(desc->xdpf, &bq);
821 dring->desc[idx].xdpf = buf;
829 struct xdp_frame *xdpf, bool is_ndo)
833 struct page *page = virt_to_page(xdpf->data);
851 dma_handle = dma_map_single(priv->dev, xdpf->data, xdpf->len,
865 dma_handle = page_pool_get_dma_addr(page) + xdpf
828 netsec_xdp_queue_one(struct netsec_priv *priv, struct xdp_frame *xdpf, bool is_ndo) argument
885 struct xdp_frame *xdpf = xdp_convert_buff_to_frame(xdp); local
1773 struct xdp_frame *xdpf = frames[i]; local
[all...]
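
netsec_xdp_queue_one() above makes the mapping decision explicit via is_ndo: a frame that arrived through ndo_xdp_xmit lives in foreign memory and needs its own streaming mapping (and an unmap on completion), while an XDP_TX frame is backed by the driver's page_pool and only needs a dma_sync at the right offset. A sketch of that split; everything except the dma_*, page_pool_* calls and the xdpf fields is illustrative, and the offset assumes the frame sits at the start of its page, as in netsec's RX layout:

#include <linux/dma-mapping.h>
#include <net/page_pool/helpers.h>
#include <net/xdp.h>

static int my_map_xdp_frame(struct device *dev, struct xdp_frame *xdpf,
			    bool is_ndo, dma_addr_t *dma,
			    bool *unmap_on_completion)
{
	if (is_ndo) {
		/* Redirected frame: the memory is not ours, map it. */
		*dma = dma_map_single(dev, xdpf->data, xdpf->len,
				      DMA_TO_DEVICE);
		if (dma_mapping_error(dev, *dma))
			return -ENOMEM;
		*unmap_on_completion = true;
	} else {
		/* XDP_TX frame: the page is already mapped by the
		 * page_pool; only sync the payload for the device.
		 */
		struct page *page = virt_to_page(xdpf->data);

		*dma = page_pool_get_dma_addr(page) +
		       sizeof(*xdpf) + xdpf->headroom;
		dma_sync_single_for_device(dev, *dma, xdpf->len,
					   DMA_BIDIRECTIONAL);
		*unmap_on_completion = false;
	}

	return 0;
}
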
/linux-master/drivers/net/ethernet/intel/i40e/
i40e_xsk.c
638 xdp_return_frame(tx_bi->xdpf);
678 if (tx_bi->xdpf) {
680 tx_bi->xdpf = NULL;
779 if (tx_bi->xdpf)
784 tx_bi->xdpf = NULL;
/linux-master/drivers/net/ethernet/broadcom/bnxt/
bnxt_xdp.c
110 struct xdp_frame *xdpf)
116 tx_buf->xdpf = xdpf;
145 xdp_return_frame(tx_buf->xdpf);
147 tx_buf->xdpf = NULL;
107 __bnxt_xmit_xdp_redirect(struct bnxt *bp, struct bnxt_tx_ring_info *txr, dma_addr_t mapping, u32 len, struct xdp_frame *xdpf) argument
/linux-master/drivers/net/ethernet/engleder/
tsnep_main.c
386 /* xdpf and zc are union with skb */
618 static int tsnep_xdp_tx_map(struct xdp_frame *xdpf, struct tsnep_tx *tx, argument
632 len = xdpf->len;
637 xdpf->data;
645 virt_to_page(xdpf->data);
650 dma += sizeof(*xdpf) + xdpf->headroom;
674 static bool tsnep_xdp_xmit_frame_ring(struct xdp_frame *xdpf, argument
677 struct skb_shared_info *shinfo = xdp_get_shared_info_from_frame(xdpf);
682 if (unlikely(xdp_frame_has_frags(xdpf)))
727 struct xdp_frame *xdpf = xdp_convert_buff_to_frame(xdp); local
[all...]

Completed in 487 milliseconds
