Lines Matching defs:tx_ring

8 static int validate_xdp_req_id(struct ena_ring *tx_ring, u16 req_id)
12 tx_info = &tx_ring->tx_buffer_info[req_id];
16 return handle_invalid_req_id(tx_ring, req_id, tx_info, true);
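
The matches at 8-16 cover nearly all of validate_xdp_req_id(); the filter only hides the ownership test in between. A minimal reconstruction, assuming the check is on tx_info->xdpf (the field this XDP path stores its frame in):

static int validate_xdp_req_id(struct ena_ring *tx_ring, u16 req_id)
{
        struct ena_tx_buffer *tx_info;

        tx_info = &tx_ring->tx_buffer_info[req_id];

        /* A slot handed back by the completion queue must still own an
         * xdp_frame; anything else means the req_id is stale or corrupt.
         */
        if (likely(tx_info->xdpf))
                return 0;

        return handle_invalid_req_id(tx_ring, req_id, tx_info, true);
}
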
19 static int ena_xdp_tx_map_frame(struct ena_ring *tx_ring,
24 struct ena_adapter *adapter = tx_ring->adapter;
35 if (tx_ring->tx_mem_queue_type == ENA_ADMIN_PLACEMENT_POLICY_DEV) {
37 push_len = min_t(u32, size, tx_ring->tx_max_header_size);
48 dma = dma_map_single(tx_ring->dev,
52 if (unlikely(dma_mapping_error(tx_ring->dev, dma)))
68 ena_increase_stat(&tx_ring->tx_stats.dma_mapping_err, 1,
69 &tx_ring->syncp);
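
Lines 19-69 outline the two placement modes in ena_xdp_tx_map_frame(): with ENA_ADMIN_PLACEMENT_POLICY_DEV (LLQ) the head of the frame is pushed inline, capped at tx_max_header_size, and any remainder is DMA-mapped; a mapping failure lands in the dma_mapping_err counter. A hedged sketch of that flow; the ena_tx_ctx field names, the error label, and the netif_warn() (presumably why adapter is fetched at line 24) are assumptions:

static int ena_xdp_tx_map_frame(struct ena_ring *tx_ring,
                                struct ena_tx_buffer *tx_info,
                                struct xdp_frame *xdpf,
                                struct ena_com_tx_ctx *ena_tx_ctx)
{
        struct ena_adapter *adapter = tx_ring->adapter;
        struct ena_com_buf *ena_buf;
        int push_len = 0;
        dma_addr_t dma;
        void *data;
        u32 size;

        tx_info->xdpf = xdpf;
        data = xdpf->data;
        size = xdpf->len;

        if (tx_ring->tx_mem_queue_type == ENA_ADMIN_PLACEMENT_POLICY_DEV) {
                /* LLQ mode: push the head of the frame inline to device
                 * memory, capped by the ring's max header size.
                 */
                push_len = min_t(u32, size, tx_ring->tx_max_header_size);
                ena_tx_ctx->push_header = data;
                ena_tx_ctx->header_len = push_len;

                if (size == push_len)
                        return 0;
        }

        /* Whatever was not pushed inline goes through a normal DMA map. */
        dma = dma_map_single(tx_ring->dev,
                             data + push_len,
                             size - push_len,
                             DMA_TO_DEVICE);
        if (unlikely(dma_mapping_error(tx_ring->dev, dma)))
                goto error_report_dma_error;

        ena_buf = tx_info->bufs;
        ena_buf->paddr = dma;
        ena_buf->len = size - push_len;

        ena_tx_ctx->ena_bufs = ena_buf;
        ena_tx_ctx->num_bufs = 1;

        return 0;

error_report_dma_error:
        ena_increase_stat(&tx_ring->tx_stats.dma_mapping_err, 1,
                          &tx_ring->syncp);
        netif_warn(adapter, tx_queued, adapter->netdev,
                   "Failed to map xdp buff\n");
        return -EINVAL;
}
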
75 int ena_xdp_xmit_frame(struct ena_ring *tx_ring,
85 next_to_use = tx_ring->next_to_use;
86 req_id = tx_ring->free_ids[next_to_use];
87 tx_info = &tx_ring->tx_buffer_info[req_id];
90 rc = ena_xdp_tx_map_frame(tx_ring, tx_info, xdpf, &ena_tx_ctx);
97 tx_ring,
109 ena_ring_tx_doorbell(tx_ring);
114 ena_unmap_tx_buff(tx_ring, tx_info);
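
From the matches at 75-114, ena_xdp_xmit_frame() follows the driver's usual submit pattern: reserve the req_id at next_to_use, map the frame, post the descriptor, and ring the doorbell only when the caller asks for a flush, unwinding the DMA mapping if posting fails. A sketch assuming the multi-line call around line 97 is ena_xmit_common() and that its signature matches the arguments shown:

int ena_xdp_xmit_frame(struct ena_ring *tx_ring,
                       struct ena_adapter *adapter,
                       struct xdp_frame *xdpf,
                       int flags)
{
        struct ena_com_tx_ctx ena_tx_ctx = {};
        struct ena_tx_buffer *tx_info;
        u16 next_to_use, req_id;
        int rc;

        next_to_use = tx_ring->next_to_use;
        req_id = tx_ring->free_ids[next_to_use];
        tx_info = &tx_ring->tx_buffer_info[req_id];

        rc = ena_xdp_tx_map_frame(tx_ring, tx_info, xdpf, &ena_tx_ctx);
        if (unlikely(rc))
                return rc;

        ena_tx_ctx.req_id = req_id;

        rc = ena_xmit_common(adapter,
                             tx_ring,
                             tx_info,
                             &ena_tx_ctx,
                             next_to_use,
                             xdpf->len);
        if (rc)
                goto error_unmap_dma;

        /* Only hit the doorbell when the caller requests a flush, so a
         * burst of frames costs a single MMIO write.
         */
        if (flags & XDP_XMIT_FLUSH)
                ena_ring_tx_doorbell(tx_ring);

        return rc;

error_unmap_dma:
        ena_unmap_tx_buff(tx_ring, tx_info);
        tx_info->xdpf = NULL;
        return rc;
}

Deferring the doorbell behind XDP_XMIT_FLUSH is what lets the batch path below (lines 125-155) pass 0 per frame and ring once at the end.
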
125 struct ena_ring *tx_ring;
140 tx_ring = &adapter->tx_ring[qid];
143 spin_lock(&tx_ring->xdp_tx_lock);
146 if (ena_xdp_xmit_frame(tx_ring, adapter, frames[i], 0))
153 ena_ring_tx_doorbell(tx_ring);
155 spin_unlock(&tx_ring->xdp_tx_lock);
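
Lines 125-155 look like the .ndo_xdp_xmit handler: pick an XDP TX ring, take its xdp_tx_lock so concurrent redirects from other CPUs serialize, submit each frame without flushing, then ring the doorbell once for the whole batch. A sketch under those assumptions; the qid computation and the xdp_num_queues/xdp_first_ring fields are guesses at driver internals not shown in the matches:

static int ena_xdp_xmit(struct net_device *dev, int n,
                        struct xdp_frame **frames, u32 flags)
{
        struct ena_adapter *adapter = netdev_priv(dev);
        struct ena_ring *tx_ring;
        int qid, i, nxmit = 0;

        if (unlikely(flags & ~XDP_XMIT_FLAGS_MASK))
                return -EINVAL;

        if (!test_bit(ENA_FLAG_DEV_UP, &adapter->flags))
                return -ENETDOWN;

        /* Spread redirected traffic across the dedicated XDP TX rings
         * (assumed mapping; not visible in the matched lines).
         */
        qid = smp_processor_id() % adapter->xdp_num_queues;
        qid += adapter->xdp_first_ring;
        tx_ring = &adapter->tx_ring[qid];

        /* Serialize against other CPUs redirecting into the same ring. */
        spin_lock(&tx_ring->xdp_tx_lock);

        for (i = 0; i < n; i++) {
                if (ena_xdp_xmit_frame(tx_ring, adapter, frames[i], 0))
                        break;
                nxmit++;
        }

        /* One doorbell for the whole batch. */
        if (flags & XDP_XMIT_FLUSH)
                ena_ring_tx_doorbell(tx_ring);

        spin_unlock(&tx_ring->xdp_tx_lock);

        return nxmit;
}
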
367 static int ena_clean_xdp_irq(struct ena_ring *tx_ring, u32 budget)
375 if (unlikely(!tx_ring))
377 next_to_clean = tx_ring->next_to_clean;
383 rc = ena_com_tx_comp_req_id_get(tx_ring->ena_com_io_cq,
387 handle_invalid_req_id(tx_ring, req_id, NULL, true);
392 rc = validate_xdp_req_id(tx_ring, req_id);
396 tx_info = &tx_ring->tx_buffer_info[req_id];
402 ena_unmap_tx_buff(tx_ring, tx_info);
407 tx_ring->free_ids[next_to_clean] = req_id;
409 tx_ring->ring_size);
411 netif_dbg(tx_ring->adapter, tx_done, tx_ring->netdev,
412 "tx_poll: q %d pkt #%d req_id %d\n", tx_ring->qid, tx_pkts, req_id);
415 tx_ring->next_to_clean = next_to_clean;
416 ena_com_comp_ack(tx_ring->ena_com_io_sq, total_done);
418 netif_dbg(tx_ring->adapter, tx_done, tx_ring->netdev,
420 tx_ring->qid, tx_pkts);
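
ena_clean_xdp_irq() (lines 367-420) is the completion side: pull req_ids off the IO completion queue until the budget runs out, validate each, unmap and free the frame, recycle the req_id into free_ids, then acknowledge the total descriptor count back to the submission queue. A reconstruction that fills in the unmatched lines; tx_info->tx_descs, the xdp_return_frame() call, and the ENA_TX_RING_IDX_NEXT macro at 408-409 are assumptions:

static int ena_clean_xdp_irq(struct ena_ring *tx_ring, u32 budget)
{
        u32 total_done = 0;
        u16 next_to_clean;
        int tx_pkts = 0;
        u16 req_id;
        int rc;

        if (unlikely(!tx_ring))
                return 0;
        next_to_clean = tx_ring->next_to_clean;

        while (tx_pkts < budget) {
                struct ena_tx_buffer *tx_info;
                struct xdp_frame *xdpf;

                rc = ena_com_tx_comp_req_id_get(tx_ring->ena_com_io_cq,
                                                &req_id);
                if (rc) {
                        if (unlikely(rc == -EINVAL))
                                handle_invalid_req_id(tx_ring, req_id,
                                                      NULL, true);
                        break;
                }

                /* Reject completions whose slot no longer owns a frame. */
                rc = validate_xdp_req_id(tx_ring, req_id);
                if (rc)
                        break;

                tx_info = &tx_ring->tx_buffer_info[req_id];

                xdpf = tx_info->xdpf;
                tx_info->xdpf = NULL;
                ena_unmap_tx_buff(tx_ring, tx_info);
                xdp_return_frame(xdpf);

                tx_pkts++;
                total_done += tx_info->tx_descs;

                /* Recycle the req_id and advance the clean pointer. */
                tx_ring->free_ids[next_to_clean] = req_id;
                next_to_clean = ENA_TX_RING_IDX_NEXT(next_to_clean,
                                                     tx_ring->ring_size);

                netif_dbg(tx_ring->adapter, tx_done, tx_ring->netdev,
                          "tx_poll: q %d pkt #%d req_id %d\n",
                          tx_ring->qid, tx_pkts, req_id);
        }

        tx_ring->next_to_clean = next_to_clean;
        ena_com_comp_ack(tx_ring->ena_com_io_sq, total_done);

        return tx_pkts;
}

The ena_com_comp_ack() at line 416 is presumably what returns the cleaned descriptors to the submission queue's accounting; skipping it would leave the ring looking permanently full.
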
431 struct ena_ring *tx_ring;
435 tx_ring = ena_napi->tx_ring;
437 if (!test_bit(ENA_FLAG_DEV_UP, &tx_ring->adapter->flags) ||
438 test_bit(ENA_FLAG_TRIGGER_RESET, &tx_ring->adapter->flags)) {
443 work_done = ena_clean_xdp_irq(tx_ring, budget);
448 if (unlikely(!test_bit(ENA_FLAG_DEV_UP, &tx_ring->adapter->flags))) {
452 ena_increase_stat(&tx_ring->tx_stats.napi_comp, 1,
453 &tx_ring->syncp);
455 ena_unmask_interrupt(tx_ring, NULL);
457 ena_update_ring_numa_node(tx_ring, NULL);
463 u64_stats_update_begin(&tx_ring->syncp);
464 tx_ring->tx_stats.tx_poll++;
465 u64_stats_update_end(&tx_ring->syncp);
466 tx_ring->tx_stats.last_napi_jiffies = jiffies;
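
The final block (lines 431-466) is the NAPI poll routine for XDP TX rings: bail out if the device is down or resetting, clean up to budget completions, and only when under budget complete NAPI, count napi_comp, re-arm the interrupt, and refresh the ring's NUMA hint; tx_poll and last_napi_jiffies are updated on every pass. A sketch consistent with those matches (the napi_complete_done() placement is an assumption):

static int ena_xdp_io_poll(struct napi_struct *napi, int budget)
{
        struct ena_napi *ena_napi = container_of(napi, struct ena_napi, napi);
        struct ena_ring *tx_ring;
        u32 work_done;
        int ret;

        tx_ring = ena_napi->tx_ring;

        if (!test_bit(ENA_FLAG_DEV_UP, &tx_ring->adapter->flags) ||
            test_bit(ENA_FLAG_TRIGGER_RESET, &tx_ring->adapter->flags)) {
                napi_complete_done(napi, 0);
                return 0;
        }

        work_done = ena_clean_xdp_irq(tx_ring, budget);

        /* If the device went down while cleaning, return 0 without
         * unmasking so NAPI is not rescheduled.
         */
        if (unlikely(!test_bit(ENA_FLAG_DEV_UP, &tx_ring->adapter->flags))) {
                napi_complete_done(napi, 0);
                ret = 0;
        } else if (budget > work_done) {
                ena_increase_stat(&tx_ring->tx_stats.napi_comp, 1,
                                  &tx_ring->syncp);
                if (napi_complete_done(napi, work_done))
                        ena_unmask_interrupt(tx_ring, NULL);

                ena_update_ring_numa_node(tx_ring, NULL);
                ret = work_done;
        } else {
                ret = budget;
        }

        u64_stats_update_begin(&tx_ring->syncp);
        tx_ring->tx_stats.tx_poll++;
        u64_stats_update_end(&tx_ring->syncp);
        tx_ring->tx_stats.last_napi_jiffies = jiffies;

        return ret;
}
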