Lines Matching defs:xdp

9 #include <net/xdp.h>
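
The matches that follow are all from the i40e driver's Rx/XDP datapath (i40e_txrx.c); the xdp_buff helpers they lean on come from net/xdp.h, included above at line 9. For orientation, here is a minimal, hedged sketch of the one-time setup such a ring performs: rx_ring->xdp is the per-ring xdp_buff used below (line 2446), while the xdp_rxq field name and the half-page frame size are assumptions made for illustration only.

/* Sketch only: bind the per-ring xdp_buff to the ring's xdp_rxq_info once,
 * at ring-configuration time; field names other than rx_ring->xdp are assumed.
 */
static void example_init_ring_xdp(struct i40e_ring *rx_ring)
{
	xdp_init_buff(&rx_ring->xdp, PAGE_SIZE / 2, &rx_ring->xdp_rxq);
}
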
2040 * @xdp: xdp_buff pointing to the data
2043 struct xdp_buff *xdp)
2045 u32 nr_frags = xdp_get_shared_info_from_buff(xdp)->nr_frags;
2049 xdp->flags = 0;
2060 i40e_rx_buffer_flip(rx_buffer, xdp->frame_sz);
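
The fragment around line 2040 is i40e_process_rx_buffs(): it walks every hardware Rx buffer backing the frame, the head plus any fragments recorded in the buffer's skb_shared_info, and releases or recycles each one; when the page stays in use (XDP_PASS/XDP_TX), i40e_rx_buffer_flip() advances the page offset by frame_sz so the other half of the page can serve the next packet. A small sketch of the generic part, walking the frags of a (possibly multi-buffer) xdp_buff; the function name is hypothetical:

/* Sketch: visit each fragment attached to a multi-buffer xdp_buff */
static u32 example_count_xdp_frags(struct xdp_buff *xdp)
{
	struct skb_shared_info *sinfo;
	u32 i, nr_frags;

	if (!xdp_buff_has_frags(xdp))
		return 0;

	sinfo = xdp_get_shared_info_from_buff(xdp);
	nr_frags = sinfo->nr_frags;
	for (i = 0; i < nr_frags; i++) {
		skb_frag_t *frag = &sinfo->frags[i];

		/* each frag maps back to one extra Rx buffer/descriptor */
		(void)skb_frag_size(frag);
	}
	return nr_frags;
}
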
2075 * @xdp: xdp_buff pointing to the data
2082 struct xdp_buff *xdp)
2084 unsigned int size = xdp->data_end - xdp->data;
2092 net_prefetch(xdp->data);
2104 * xdp->data_meta will always point to xdp->data, since
2107 * add xdp->data_meta handling here.
2118 headlen = eth_get_headlen(skb->dev, xdp->data,
2122 memcpy(__skb_put(skb, headlen), xdp->data,
2125 if (unlikely(xdp_buff_has_frags(xdp))) {
2126 sinfo = xdp_get_shared_info_from_buff(xdp);
2139 size, xdp->frame_sz);
2141 i40e_rx_buffer_flip(rx_buffer, xdp->frame_sz);
2147 if (unlikely(xdp_buff_has_frags(xdp))) {
2155 nr_frags * xdp->frame_sz,
2156 xdp_buff_is_frag_pfmemalloc(xdp));
2162 i40e_process_rx_buffs(rx_ring, I40E_XDP_PASS, xdp);
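
Lines 2075-2162 belong to i40e_construct_skb(): it allocates a fresh skb, lets eth_get_headlen() decide how much of xdp->data is protocol header, copies only that much into the linear area, attaches the rest of the head buffer as a page frag, and for multi-buffer frames transplants the XDP frags with xdp_update_skb_shared_info() before handing the buffers back via i40e_process_rx_buffs(). A hedged sketch of the copy-header part; the 256-byte header buffer and the function name are assumptions:

/* Sketch: copy only the packet headers into a freshly allocated skb */
static struct sk_buff *example_construct_skb(struct net_device *dev,
					     struct napi_struct *napi,
					     struct xdp_buff *xdp)
{
	unsigned int size = xdp->data_end - xdp->data;
	unsigned int headlen;
	struct sk_buff *skb;

	net_prefetch(xdp->data);

	skb = napi_alloc_skb(napi, 256);	/* 256 is an assumed header size */
	if (unlikely(!skb))
		return NULL;

	headlen = eth_get_headlen(dev, xdp->data, min(256U, size));
	memcpy(__skb_put(skb, headlen), xdp->data, headlen);

	/* the remaining (size - headlen) bytes, and any xdp frags, would be
	 * attached as page fragments, as the driver does above
	 */
	return skb;
}
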
2171 * @xdp: xdp_buff pointing to the data
2177 struct xdp_buff *xdp)
2179 unsigned int metasize = xdp->data - xdp->data_meta;
2184 /* Prefetch first cache line of first page. If xdp->data_meta
2185 * is unused, this points exactly as xdp->data, otherwise we
2189 net_prefetch(xdp->data_meta);
2191 if (unlikely(xdp_buff_has_frags(xdp))) {
2192 sinfo = xdp_get_shared_info_from_buff(xdp);
2197 skb = napi_build_skb(xdp->data_hard_start, xdp->frame_sz);
2202 skb_reserve(skb, xdp->data - xdp->data_hard_start);
2203 __skb_put(skb, xdp->data_end - xdp->data);
2207 if (unlikely(xdp_buff_has_frags(xdp))) {
2210 nr_frags * xdp->frame_sz,
2211 xdp_buff_is_frag_pfmemalloc(xdp));
2213 i40e_process_rx_buffs(rx_ring, I40E_XDP_PASS, xdp);
2219 i40e_rx_buffer_flip(rx_buffer, xdp->frame_sz);
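
Lines 2171-2219 are i40e_build_skb(), the zero-copy counterpart: napi_build_skb() wraps the existing buffer starting at xdp->data_hard_start, the XDP headroom is reserved, the payload length is set from data_end - data, and any metadata the BPF program wrote in front of xdp->data is preserved with skb_metadata_set(); multi-buffer frames again go through xdp_update_skb_shared_info(). A minimal sketch of that pattern (single-buffer case only):

/* Sketch: turn an xdp_buff into an skb without copying the payload */
static struct sk_buff *example_build_skb(struct xdp_buff *xdp)
{
	unsigned int metasize = xdp->data - xdp->data_meta;
	struct sk_buff *skb;

	net_prefetch(xdp->data_meta);

	skb = napi_build_skb(xdp->data_hard_start, xdp->frame_sz);
	if (unlikely(!skb))
		return NULL;

	skb_reserve(skb, xdp->data - xdp->data_hard_start);
	__skb_put(skb, xdp->data_end - xdp->data);
	if (metasize)
		skb_metadata_set(skb, metasize);

	return skb;
}
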
2249 int i40e_xmit_xdp_tx_ring(struct xdp_buff *xdp, struct i40e_ring *xdp_ring)
2251 struct xdp_frame *xdpf = xdp_convert_buff_to_frame(xdp);
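
i40e_xmit_xdp_tx_ring() (line 2249) first turns the live xdp_buff into a standalone xdp_frame with xdp_convert_buff_to_frame(); the conversion can fail (for example when the headroom cannot hold the xdp_frame metadata), and the driver then reports the buffer as consumed. A short, hedged sketch of that guard; the error value is an assumption, the real function returns its own I40E_XDP_CONSUMED code:

/* Sketch: convert before transmit, treat a failed conversion as a drop */
static int example_xmit_xdp(struct xdp_buff *xdp)
{
	struct xdp_frame *xdpf = xdp_convert_buff_to_frame(xdp);

	if (unlikely(!xdpf))
		return -EOVERFLOW;	/* assumed error code for the sketch */

	/* xdpf would now be queued on the XDP Tx ring */
	return 0;
}
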
2262 * @xdp: XDP buffer containing the frame
2265 static int i40e_run_xdp(struct i40e_ring *rx_ring, struct xdp_buff *xdp, struct bpf_prog *xdp_prog)
2274 prefetchw(xdp->data_hard_start); /* xdp_frame write */
2276 act = bpf_prog_run_xdp(xdp_prog, xdp);
2282 result = i40e_xmit_xdp_tx_ring(xdp, xdp_ring);
2287 err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog);
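
i40e_run_xdp() (line 2265) executes the attached BPF program and dispatches on its verdict: XDP_PASS lets the frame continue to skb construction, XDP_TX hands it to i40e_xmit_xdp_tx_ring(), XDP_REDIRECT goes through xdp_do_redirect(), and anything else ends up dropped. A minimal sketch of that standard verdict switch, with the driver-specific Tx step reduced to a comment:

/* Sketch: standard XDP verdict dispatch in a driver Rx path */
static u32 example_run_xdp(struct net_device *dev, struct bpf_prog *prog,
			   struct xdp_buff *xdp)
{
	u32 act;

	prefetchw(xdp->data_hard_start);	/* the program may rewrite the frame */
	act = bpf_prog_run_xdp(prog, xdp);

	switch (act) {
	case XDP_PASS:
		return act;			/* hand the frame to the stack */
	case XDP_TX:
		/* driver-specific: queue the frame on the XDP Tx ring */
		return act;
	case XDP_REDIRECT:
		if (xdp_do_redirect(dev, xdp, prog))
			return XDP_DROP;	/* redirect failed, drop */
		return act;
	case XDP_ABORTED:
		trace_xdp_exception(dev, prog, act);
		fallthrough;
	default:
		return XDP_DROP;
	}
}
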
2379 * @xdp: xdp_buff pointing to the data
2384 static int i40e_add_xdp_frag(struct xdp_buff *xdp, u32 *nr_frags,
2387 struct skb_shared_info *sinfo = xdp_get_shared_info_from_buff(xdp);
2389 if (!xdp_buff_has_frags(xdp)) {
2392 xdp_buff_set_frags_flag(xdp);
2404 xdp_buff_set_frag_pfmemalloc(xdp);
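
i40e_add_xdp_frag() (line 2384) grows a multi-buffer frame: the first continuation buffer resets the fragment bookkeeping and sets the frags flag on the xdp_buff, then every further Rx buffer is appended to the shared info, with pfmemalloc propagated so the stack can react to memory pressure. A hedged sketch of that append using the generic shared-info helpers; the bounds check matters because the frag array holds at most MAX_SKB_FRAGS entries:

/* Sketch: append one Rx page to an xdp_buff as a fragment */
static int example_add_xdp_frag(struct xdp_buff *xdp, struct page *page,
				unsigned int off, unsigned int size)
{
	struct skb_shared_info *sinfo = xdp_get_shared_info_from_buff(xdp);

	if (!xdp_buff_has_frags(xdp)) {
		sinfo->nr_frags = 0;
		sinfo->xdp_frags_size = 0;
		xdp_buff_set_frags_flag(xdp);
	}

	if (unlikely(sinfo->nr_frags == MAX_SKB_FRAGS))
		return -ENOMEM;

	__skb_fill_page_desc_noacc(sinfo, sinfo->nr_frags++, page, off, size);
	sinfo->xdp_frags_size += size;

	if (page_is_pfmemalloc(page))
		xdp_buff_set_frag_pfmemalloc(xdp);

	return 0;
}
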
2413 * @xdp: xdp_buff pointing to the data
2417 struct xdp_buff *xdp,
2420 i40e_process_rx_buffs(rx_ring, I40E_XDP_CONSUMED, xdp);
2423 xdp->data = NULL;
2446 struct xdp_buff *xdp = &rx_ring->xdp;
2508 i40e_trace(clean_rx_irq, rx_ring, rx_desc, xdp);
2515 if (!xdp->data) {
2520 xdp_prepare_buff(xdp, hard_start, offset, size, true);
2523 xdp->frame_sz = i40e_rx_frame_truesize(rx_ring, size);
2525 } else if (i40e_add_xdp_frag(xdp, &nfrags, rx_buffer, size) &&
2528 i40e_consume_xdp_buff(rx_ring, xdp, rx_buffer);
2535 xdp_res = i40e_run_xdp(rx_ring, xdp, xdp_prog);
2540 if (unlikely(xdp_buff_has_frags(xdp))) {
2541 i40e_process_rx_buffs(rx_ring, xdp_res, xdp);
2542 size = xdp_get_buff_len(xdp);
2544 i40e_rx_buffer_flip(rx_buffer, xdp->frame_sz);
2551 skb = i40e_build_skb(rx_ring, xdp);
2553 skb = i40e_construct_skb(rx_ring, xdp);
2558 i40e_consume_xdp_buff(rx_ring, xdp, rx_buffer);
2571 i40e_trace(clean_rx_irq_rx, rx_ring, rx_desc, xdp);
2582 xdp->data = NULL;
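
The remaining matches (lines 2446-2582) come from i40e_clean_rx_irq(), the NAPI poll loop: the first descriptor of a frame points the per-ring xdp_buff at its buffer with xdp_prepare_buff(), continuation descriptors are folded in via i40e_add_xdp_frag(), the BPF program runs once per completed frame in i40e_run_xdp(), and on XDP_PASS an skb is produced by i40e_build_skb() or i40e_construct_skb(); resetting xdp->data to NULL afterwards marks the xdp_buff as idle so the next descriptor starts a new frame. A condensed, hedged sketch of that per-descriptor flow, reusing example_add_xdp_frag() from the sketch above and leaving out all ring/descriptor plumbing:

/* Sketch: per-descriptor handling inside the Rx poll loop */
static void example_handle_rx_desc(struct xdp_buff *xdp, struct page *page,
				   unsigned int offset, unsigned int size)
{
	unsigned char *hard_start;

	if (!xdp->data) {
		/* first buffer of a frame: point the xdp_buff at it */
		hard_start = page_address(page) + offset - XDP_PACKET_HEADROOM;
		xdp_prepare_buff(xdp, hard_start, XDP_PACKET_HEADROOM, size, true);
	} else if (example_add_xdp_frag(xdp, page, offset, size)) {
		/* frag table full: the driver drops the whole frame here and
		 * marks the xdp_buff idle again
		 */
		xdp->data = NULL;
		return;
	}

	/* once the descriptor marks end-of-frame, the driver runs the XDP
	 * program and, on XDP_PASS, builds an skb covering
	 * xdp_get_buff_len(xdp) bytes, then resets xdp->data to NULL
	 */
}
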