Lines matching "orig": definition (355) and uses

355 struct ring_desc *orig;
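
The single definition hit at 355 is the legacy-descriptor arm of the ring pointer union; every other match below is a use. The identifiers (np->..., NV_RX_AVAIL, NV_TX_VALID, the get_rx/put_rx/last_rx cursors) place this in the forcedeth NIC driver. As a reading aid, the surrounding declarations look roughly like the sketch below; buf and flaglen are taken from the listed lines, while the ring_desc_ex layout is recalled from the driver rather than shown here.

struct ring_desc {                      /* legacy descriptor: 32-bit buffer address + flags/length word */
        __le32 buf;
        __le32 flaglen;
};

struct ring_desc_ex {                   /* extended descriptor for newer chips (layout recalled, not listed) */
        __le32 bufhigh;
        __le32 buflow;
        __le32 txvlan;
        __le32 flaglen;
};

union ring_type {                       /* rx_ring, tx_ring and the get/put/last cursors all use this union */
        struct ring_desc *orig;
        struct ring_desc_ex *ex;
};
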
1023 if (np->rx_ring.orig)
1028 np->rx_ring.orig, np->ring_addr);
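
1023-1028 are the teardown half of the ring management: the legacy RX and TX rings share one coherent DMA block, so a single dma_free_coherent() call releases both. A sketch of that call, with the size argument inferred from the matching allocation at 5850-5858 (the middle lines there do not contain "orig" and so are not listed):

if (np->rx_ring.orig)
        dma_free_coherent(&np->pci_dev->dev,          /* device pointer spelling assumed */
                          sizeof(struct ring_desc) *
                          (np->rx_ring_size + np->tx_ring_size),
                          np->rx_ring.orig, np->ring_addr);
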
1827 less_rx = np->get_rx.orig;
1828 if (less_rx-- == np->rx_ring.orig)
1829 less_rx = np->last_rx.orig;
1831 while (np->put_rx.orig != less_rx) {
1845 np->put_rx.orig->buf = cpu_to_le32(np->put_rx_ctx->dma);
1847 np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL);
1848 if (unlikely(np->put_rx.orig++ == np->last_rx.orig))
1849 np->put_rx.orig = np->rx_ring.orig;
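
1827-1849 are the RX refill path for the legacy descriptor format: put_rx.orig chases get_rx.orig but stops one slot short, so a completely full ring never makes producer and consumer pointers equal, and the cursor wraps back to rx_ring.orig after the last descriptor. A minimal sketch of that loop, with skb allocation and DMA mapping collapsed into a hypothetical fill_one_rx_buffer() helper (fe_priv is the driver's private struct name, and the sketch function name is invented):

static int nv_alloc_rx_sketch(struct fe_priv *np)
{
        struct ring_desc *less_rx = np->get_rx.orig;

        /* stop one slot before get_rx, so put_rx == get_rx always means "empty" */
        if (less_rx-- == np->rx_ring.orig)
                less_rx = np->last_rx.orig;

        while (np->put_rx.orig != less_rx) {
                if (fill_one_rx_buffer(np))           /* hypothetical: alloc skb + dma_map_single */
                        return 1;                     /* out of memory, retry later */

                np->put_rx.orig->buf = cpu_to_le32(np->put_rx_ctx->dma);
                wmb();                                /* buffer address visible before NV_RX_AVAIL */
                np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL);

                /* wrap the producer cursor after the last descriptor */
                if (unlikely(np->put_rx.orig++ == np->last_rx.orig))
                        np->put_rx.orig = np->rx_ring.orig;
                /* (np->put_rx_ctx wraps in step in the real code) */
        }
        return 0;
}
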
1923 np->last_rx.orig = &np->rx_ring.orig[np->rx_ring_size-1];
1932 np->rx_ring.orig[i].flaglen = 0;
1933 np->rx_ring.orig[i].buf = 0;
1954 np->last_tx.orig = &np->tx_ring.orig[np->tx_ring_size-1];
1968 np->tx_ring.orig[i].flaglen = 0;
1969 np->tx_ring.orig[i].buf = 0;
2031 np->tx_ring.orig[i].flaglen = 0;
2032 np->tx_ring.orig[i].buf = 0;
2062 np->rx_ring.orig[i].flaglen = 0;
2063 np->rx_ring.orig[i].buf = 0;
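
1923-1969 initialise the rings and 2031-2063 clear them again when the device is drained: last_rx/last_tx are pointed at the final descriptor so the wrap tests elsewhere in this listing work, and every flaglen is zeroed so neither NV_RX_AVAIL nor NV_TX_VALID is set. RX-side sketch (the cursor reset is implied by the wrap logic rather than listed, the function name is made up, and the TX side is symmetric):

static void nv_init_rx_sketch(struct fe_priv *np)
{
        int i;

        np->get_rx.orig = np->put_rx.orig = np->rx_ring.orig;
        np->last_rx.orig = &np->rx_ring.orig[np->rx_ring_size - 1];

        for (i = 0; i < np->rx_ring_size; i++) {
                np->rx_ring.orig[i].flaglen = 0;      /* nothing is available to the NIC yet */
                np->rx_ring.orig[i].buf = 0;
        }
}
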
2248 start_tx = put_tx = np->put_tx.orig;
2276 if (unlikely(put_tx++ == np->last_tx.orig))
2277 put_tx = np->tx_ring.orig;
2325 if (unlikely(put_tx++ == np->last_tx.orig))
2326 put_tx = np->tx_ring.orig;
2332 if (unlikely(put_tx == np->tx_ring.orig))
2333 prev_tx = np->last_tx.orig;
2363 np->put_tx.orig = put_tx;
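
2248-2363 are the legacy-descriptor transmit path: a descriptor is filled for each chunk of the skb, the put_tx cursor wraps past last_tx, and 2332-2333 pick the descriptor that carries the end-of-packet flag, which is last_tx if the cursor has just wrapped. The sketch below keeps only that ring bookkeeping; DMA mapping, fragment walking and the exact flag choreography (tx_flags, start_tx finalisation) are simplified, and the function name and parameters are invented for illustration:

static void nv_tx_fill_sketch(struct fe_priv *np, const dma_addr_t *dma,
                              const unsigned int *len, int nr_chunks,
                              u32 tx_flags, u32 tx_flags_extra)
{
        struct ring_desc *put_tx, *prev_tx;
        int i;

        put_tx = np->put_tx.orig;                     /* 2248 also keeps start_tx = put_tx */

        for (i = 0; i < nr_chunks; i++) {
                put_tx->buf = cpu_to_le32(dma[i]);
                put_tx->flaglen = cpu_to_le32((len[i] - 1) | tx_flags);

                /* 2276/2325: wrap the producer cursor after the last descriptor */
                if (unlikely(put_tx++ == np->last_tx.orig))
                        put_tx = np->tx_ring.orig;
        }

        /* 2332-2333: the end-of-packet flag goes on the descriptor before the
         * final increment, which is last_tx if the cursor has just wrapped */
        if (unlikely(put_tx == np->tx_ring.orig))
                prev_tx = np->last_tx.orig;
        else
                prev_tx = put_tx - 1;
        prev_tx->flaglen |= cpu_to_le32(tx_flags_extra);

        /* 2363: publish the new producer position (the real code also finalises
         * the first descriptor's flags via start_tx before this point) */
        np->put_tx.orig = put_tx;
}
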
2611 struct ring_desc *orig_get_tx = np->get_tx.orig;
2614 while ((np->get_tx.orig != np->put_tx.orig) &&
2615 !((flags = le32_to_cpu(np->get_tx.orig->flaglen)) & NV_TX_VALID) &&
2661 if (unlikely(np->get_tx.orig++ == np->last_tx.orig))
2662 np->get_tx.orig = np->tx_ring.orig;
2669 if (unlikely((np->tx_stop == 1) && (np->get_tx.orig != orig_get_tx))) {
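
2611-2669 are TX completion: descriptors the NIC has handed back, i.e. with NV_TX_VALID cleared, are reclaimed until the consumer catches up with the producer or the budget runs out, and 2669 restarts a stopped queue once progress has been made. Sketch, with the per-descriptor cleanup behind a hypothetical reclaim_one_tx_buffer():

static int nv_tx_done_sketch(struct fe_priv *np, int limit)
{
        struct ring_desc *orig_get_tx = np->get_tx.orig;
        int tx_work = 0;
        u32 flags;

        while ((np->get_tx.orig != np->put_tx.orig) &&
               !((flags = le32_to_cpu(np->get_tx.orig->flaglen)) & NV_TX_VALID) &&
               (tx_work < limit)) {

                reclaim_one_tx_buffer(np, flags);     /* hypothetical: unmap, count errors, free skb */

                /* wrap the consumer cursor after the last descriptor */
                if (unlikely(np->get_tx.orig++ == np->last_tx.orig))
                        np->get_tx.orig = np->tx_ring.orig;
                tx_work++;
        }

        /* 2669: restart a queue that was stopped for lack of descriptors,
         * but only if this pass actually reclaimed something */
        if (unlikely((np->tx_stop == 1) && (np->get_tx.orig != orig_get_tx))) {
                np->tx_stop = 0;
                /* netif_wake_queue() on the net_device in the real code */
        }
        return tx_work;
}
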
2774 le32_to_cpu(np->tx_ring.orig[i].buf),
2775 le32_to_cpu(np->tx_ring.orig[i].flaglen),
2776 le32_to_cpu(np->tx_ring.orig[i+1].buf),
2777 le32_to_cpu(np->tx_ring.orig[i+1].flaglen),
2778 le32_to_cpu(np->tx_ring.orig[i+2].buf),
2779 le32_to_cpu(np->tx_ring.orig[i+2].flaglen),
2780 le32_to_cpu(np->tx_ring.orig[i+3].buf),
2781 le32_to_cpu(np->tx_ring.orig[i+3].flaglen));
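
2774-2781 belong to the tx-timeout debug dump, which prints the legacy TX ring four descriptors per output line; that is why buf/flaglen appear at i, i+1, i+2 and i+3. Approximately (the exact logging helper and format string may differ):

for (i = 0; i < np->tx_ring_size; i += 4) {
        netdev_info(dev, "%03x: %08x %08x // %08x %08x // %08x %08x // %08x %08x\n",
                    i,
                    le32_to_cpu(np->tx_ring.orig[i].buf),
                    le32_to_cpu(np->tx_ring.orig[i].flaglen),
                    le32_to_cpu(np->tx_ring.orig[i+1].buf),
                    le32_to_cpu(np->tx_ring.orig[i+1].flaglen),
                    le32_to_cpu(np->tx_ring.orig[i+2].buf),
                    le32_to_cpu(np->tx_ring.orig[i+2].flaglen),
                    le32_to_cpu(np->tx_ring.orig[i+3].buf),
                    le32_to_cpu(np->tx_ring.orig[i+3].flaglen));
}
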
2899 while ((np->get_rx.orig != np->put_rx.orig) &&
2900 !((flags = le32_to_cpu(np->get_rx.orig->flaglen)) & NV_RX_AVAIL) &&
2981 if (unlikely(np->get_rx.orig++ == np->last_rx.orig))
2982 np->get_rx.orig = np->rx_ring.orig;
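
2899-2982 are the RX completion loop: it consumes descriptors until it meets the producer cursor, hits one the hardware still owns (NV_RX_AVAIL set), or exhausts its budget, wrapping get_rx.orig at the end of the ring. Sketch, with packet handling behind a hypothetical handle_one_rx_packet():

static int nv_rx_process_sketch(struct fe_priv *np, int budget)
{
        int rx_work = 0;
        u32 flags;

        while ((np->get_rx.orig != np->put_rx.orig) &&
               !((flags = le32_to_cpu(np->get_rx.orig->flaglen)) & NV_RX_AVAIL) &&
               (rx_work < budget)) {

                handle_one_rx_packet(np, flags);      /* hypothetical: unmap, build skb, hand to the stack */

                /* wrap the consumer cursor after the last descriptor */
                if (unlikely(np->get_rx.orig++ == np->last_rx.orig))
                        np->get_rx.orig = np->rx_ring.orig;
                /* (np->get_rx_ctx wraps in step in the real code) */

                rx_work++;
        }
        return rx_work;
}
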
4752 np->rx_ring.orig = (struct ring_desc *)rxtx_ring;
4753 np->tx_ring.orig = &np->rx_ring.orig[np->rx_ring_size];
5197 np->tx_ring.orig[0].buf = cpu_to_le32(test_dma_addr);
5198 np->tx_ring.orig[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra);
5211 flags = le32_to_cpu(np->rx_ring.orig[0].flaglen);
5212 len = nv_descr_getlength(&np->rx_ring.orig[0], np->desc_ver);
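
5197-5212 are the internal loopback self-test: one test frame is written straight into descriptor 0 of the TX ring, and after the NIC loops it back the result is read from descriptor 0 of the RX ring. Condensed sketch, with DMA setup, loopback configuration and payload comparison elided (the pass/fail reading of NV_RX_AVAIL is inferred from how the refill and receive paths above use that bit):

np->tx_ring.orig[0].buf = cpu_to_le32(test_dma_addr);
np->tx_ring.orig[0].flaglen = cpu_to_le32((pkt_len - 1) | np->tx_flags | tx_flags_extra);

/* ... kick the transmitter and wait for the frame to come back ... */

flags = le32_to_cpu(np->rx_ring.orig[0].flaglen);
if (flags & NV_RX_AVAIL) {
        /* descriptor untouched by the NIC: nothing was received, test fails */
} else {
        len = nv_descr_getlength(&np->rx_ring.orig[0], np->desc_ver);
        /* compare len and the returned payload against the test pattern */
}
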
5850 np->rx_ring.orig = dma_alloc_coherent(&pci_dev->dev,
5856 if (!np->rx_ring.orig)
5858 np->tx_ring.orig = &np->rx_ring.orig[np->rx_ring_size];
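
5850-5858 are the probe-time allocation that 1023-1028 undo, and 4752-4753 perform the same split after an ethtool ring-size change: one coherent DMA block holds rx_ring_size + tx_ring_size legacy descriptors, with the TX ring starting immediately after the RX ring. Sketch, error unwinding elided and the size argument inferred from the free path:

np->rx_ring.orig = dma_alloc_coherent(&pci_dev->dev,
                                      sizeof(struct ring_desc) *
                                      (np->rx_ring_size + np->tx_ring_size),
                                      &np->ring_addr, GFP_KERNEL);
if (!np->rx_ring.orig)
        goto fail;                                    /* hypothetical label; the probe has its own unwind path */
np->tx_ring.orig = &np->rx_ring.orig[np->rx_ring_size];
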
5909 * Set orig mac address back to the reversed version.