Searched refs:txr_idx (Results 1 - 8 of 8) sorted by relevance

/freebsd-10.0-release/sys/dev/qlxge/
qls_glbl.h
68 extern int qls_hw_tx_done(qla_host_t *ha, uint32_t txr_idx);
71 uint32_t tx_idx, struct mbuf *mp, uint32_t txr_idx);
qls_isr.c
47 qls_tx_comp(qla_host_t *ha, uint32_t txr_idx, q81_tx_mac_comp_t *tx_comp) argument
57 txb = &ha->tx_ring[txr_idx].tx_buf[tx_idx];
69 ha->tx_ring[txr_idx].txr_done++;
71 if (ha->tx_ring[txr_idx].txr_done == NUM_TX_DESCRIPTORS)
72 ha->tx_ring[txr_idx].txr_done = 0;
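
The qls_isr.c hits show the transmit-completion path locating the completed buffer by slot index and then bumping a per-ring txr_done counter, wrapping it back to zero at NUM_TX_DESCRIPTORS. A minimal sketch of that wrap, using a simplified stand-in structure in place of the driver's real qla_host_t/tx_ring types (only the fields visible in the hits are modeled; the ring size and everything else here are illustrative assumptions):

#include <stdint.h>

#define NUM_TX_DESCRIPTORS 1024                 /* stand-in; the driver defines its own value */

struct tx_ring_sketch {                         /* simplified stand-in for one tx ring's state */
        uint32_t txr_done;                      /* consumer index: slots reclaimed so far */
        uint32_t txr_next;                      /* producer index: next slot to fill */
        uint32_t txr_free;                      /* cached count of free descriptors */
};

/* Advance the consumer index for one completed transmit, wrapping at the ring size. */
static void
tx_comp_advance(struct tx_ring_sketch *txr)
{
        txr->txr_done++;
        if (txr->txr_done == NUM_TX_DESCRIPTORS)
                txr->txr_done = 0;
}
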
qls_hw.c
641 qls_hw_tx_done(qla_host_t *ha, uint32_t txr_idx) argument
645 txr_done = ha->tx_ring[txr_idx].txr_done;
646 txr_next = ha->tx_ring[txr_idx].txr_next;
649 ha->tx_ring[txr_idx].txr_free = NUM_TX_DESCRIPTORS;
651 ha->tx_ring[txr_idx].txr_free = txr_done - txr_next;
653 ha->tx_ring[txr_idx].txr_free = NUM_TX_DESCRIPTORS +
657 if (ha->tx_ring[txr_idx].txr_free <= QLA_TX_MIN_FREE)
672 uint32_t txr_next, struct mbuf *mp, uint32_t txr_idx)
691 if (ha->tx_ring[txr_idx].txr_free <= (NUM_TX_DESCRIPTORS >> 2)) {
692 if (qls_hw_tx_done(ha, txr_idx)) {
671 qls_hw_send(qla_host_t *ha, bus_dma_segment_t *segs, int nsegs, uint32_t txr_next, struct mbuf *mp, uint32_t txr_idx) argument
[all...]
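
qls_hw_tx_done (lines 645-657) recomputes the ring's free-descriptor count from the done/next indices, covering three cases: indices equal (whole ring free), done ahead of next (plain difference), and done behind next (difference plus the ring size, which unsigned arithmetic yields automatically). qls_hw_send (lines 691-692) only calls it once the cached free count drops to a quarter of the ring, and treats a nonzero return as "too full". A hedged reconstruction of that arithmetic, reusing the tx_ring_sketch stand-in and NUM_TX_DESCRIPTORS from the previous sketch (the exact return convention and the QLA_TX_MIN_FREE value are assumptions):

#define QLA_TX_MIN_FREE 8                       /* stand-in low-water mark */

/* Recompute txr_free from the done/next indices; nonzero return means "nearly full". */
static int
tx_ring_free_slots(struct tx_ring_sketch *txr)
{
        uint32_t txr_done = txr->txr_done;
        uint32_t txr_next = txr->txr_next;

        if (txr_done == txr_next)
                txr->txr_free = NUM_TX_DESCRIPTORS;         /* nothing outstanding */
        else if (txr_done > txr_next)
                txr->txr_free = txr_done - txr_next;        /* indices have not wrapped */
        else
                /*
                 * txr_next is numerically ahead of txr_done; unsigned wrap
                 * yields NUM_TX_DESCRIPTORS - (txr_next - txr_done).
                 */
                txr->txr_free = NUM_TX_DESCRIPTORS + (txr_done - txr_next);

        if (txr->txr_free <= QLA_TX_MIN_FREE)
                return (-1);                                /* too few slots to accept a frame */

        return (0);
}
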
qls_os.c
1140 uint32_t txr_idx = 0; local
1145 txr_idx = m_head->m_pkthdr.flowid & (ha->num_tx_rings - 1);
1147 tx_idx = ha->tx_ring[txr_idx].txr_next;
1149 map = ha->tx_ring[txr_idx].tx_buf[tx_idx].map;
1209 if (!(ret = qls_hw_send(ha, segs, nsegs, tx_idx, m_head, txr_idx))) {
1211 ha->tx_ring[txr_idx].count++;
1212 ha->tx_ring[txr_idx].tx_buf[tx_idx].m_head = m_head;
1213 ha->tx_ring[txr_idx].tx_buf[tx_idx].map = map;
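
The qls_os.c hits (lines 1140-1149) show how a ring is chosen for an outgoing mbuf: txr_idx defaults to 0, and when the packet carries a usable flow ID it becomes the low bits of m_pkthdr.flowid, with num_tx_rings assumed to be a power of two so the mask acts as a modulo. A small stand-alone sketch of that selection (the flow-ID validity test is hedged; the hit at line 1145 does not show which check the driver actually applies):

#include <stdbool.h>
#include <stdint.h>

/*
 * Pick a transmit ring from a packet's flow hash.  num_tx_rings must be a
 * power of two for the mask to act as a modulo; the driver sizes its ring
 * count so that this holds.
 */
static uint32_t
pick_tx_ring(uint32_t flowid, bool flowid_valid, uint32_t num_tx_rings)
{
        uint32_t txr_idx = 0;                   /* default ring when no usable flow hash */

        if (flowid_valid)
                txr_idx = flowid & (num_tx_rings - 1);

        return (txr_idx);
}
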
/freebsd-10.0-release/sys/dev/qlxgbe/
ql_glbl.h
57 uint32_t tx_idx, struct mbuf *mp, uint32_t txr_idx);
ql_hw.c
53 static void qla_hw_tx_done_locked(qla_host_t *ha, uint32_t txr_idx);
185 OID_AUTO, "tx_ring_index", CTLFLAG_RW, &ha->txr_idx,
186 ha->txr_idx, "Tx Ring Used");
1318 cmd |= (ha->hw.tx_cntxt[ha->txr_idx].tx_cntxt_id << 16);
1555 uint32_t tx_idx, struct mbuf *mp, uint32_t txr_idx)
1622 if (hw->tx_cntxt[txr_idx].txr_free <= (num_tx_cmds + QLA_TX_MIN_FREE)) {
1623 qla_hw_tx_done_locked(ha, txr_idx);
1624 if (hw->tx_cntxt[txr_idx].txr_free <=
1633 tx_cmd = &hw->tx_cntxt[txr_idx].tx_ring_base[tx_idx];
1706 txr_next = hw->tx_cntxt[txr_idx]
1554 ql_hw_send(qla_host_t *ha, bus_dma_segment_t *segs, int nsegs, uint32_t tx_idx, struct mbuf *mp, uint32_t txr_idx) argument
2271 qla_init_xmt_cntxt_i(qla_host_t *ha, uint32_t txr_idx) argument
2339 qla_del_xmt_cntxt_i(qla_host_t *ha, uint32_t txr_idx) argument
2546 qla_hw_tx_done_locked(qla_host_t *ha, uint32_t txr_idx) argument
[all...]
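
The qlxgbe side applies the same reclaim-before-fail idea in ql_hw_send (lines 1622-1624): if the cached free count cannot cover this frame's command descriptors plus a QLA_TX_MIN_FREE reserve, completed slots are first reclaimed via qla_hw_tx_done_locked, and the check is repeated before the send is refused. A hedged sketch of that admit/reclaim/fail pattern with stand-in types; the error value and the reclaim internals are assumptions, not the driver's code:

#include <errno.h>
#include <stdint.h>

#define QLA_TX_MIN_FREE 8                       /* stand-in reserve of descriptors */

struct tx_cntxt_sketch {                        /* simplified stand-in for hw->tx_cntxt[txr_idx] */
        uint32_t txr_free;                      /* cached count of free command descriptors */
};

/*
 * Stand-in for qla_hw_tx_done_locked(): in the driver this walks completed
 * entries and returns their slots to txr_free; the details are omitted here.
 */
static void
reclaim_completed(struct tx_cntxt_sketch *txc)
{
        (void)txc;
}

/* Admit a frame needing num_tx_cmds descriptors, reclaiming first if space is tight. */
static int
tx_admit(struct tx_cntxt_sketch *txc, uint32_t num_tx_cmds)
{
        if (txc->txr_free <= (num_tx_cmds + QLA_TX_MIN_FREE)) {
                reclaim_completed(txc);
                if (txc->txr_free <= (num_tx_cmds + QLA_TX_MIN_FREE))
                        return (ENOBUFS);       /* still no room; caller must hold the mbuf */
        }
        return (0);
}
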
ql_def.h
177 uint32_t txr_idx; /* index of the current tx ring */ member in struct:qla_host
ql_os.c
1144 uint32_t txr_idx = ha->txr_idx; local
1149 txr_idx = m_head->m_pkthdr.flowid & (ha->hw.num_tx_rings - 1);
1151 tx_idx = ha->hw.tx_cntxt[txr_idx].txr_next;
1152 map = ha->tx_ring[txr_idx].tx_buf[tx_idx].map;
1212 if (!(ret = ql_hw_send(ha, segs, nsegs, tx_idx, m_head, txr_idx))) {
1214 ha->tx_ring[txr_idx].count++;
1215 ha->tx_ring[txr_idx].tx_buf[tx_idx].m_head = m_head;
