Searched refs:TX_RING_SIZE (Results 1 - 25 of 57) sorted by relevance

/netgear-R7000-V1.0.7.12_1.2.5/components/opensource/linux/linux-2.6.36/drivers/net/
pasemi_mac.h   (power-of-two ring with mask-based descriptor indexing; sketch below)
31 #define TX_RING_SIZE 4096    [macro definition]
32 #define CS_RING_SIZE (TX_RING_SIZE*2)
111 #define TX_DESC(tx, num) ((tx)->chan.ring_virt[(num) & (TX_RING_SIZE-1)])
112 #define TX_DESC_INFO(tx, num) ((tx)->ring_info[(num) & (TX_RING_SIZE-1)])
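
Most of the hits in this list keep TX_RING_SIZE a power of two so that a descriptor index can be wrapped with a bitwise AND instead of a division, which is exactly what the TX_DESC()/TX_DESC_INFO() macros above do. A minimal stand-alone sketch of that pattern (the TX_IDX name and the demo values are mine, not the driver's):

    #include <stdio.h>

    #define TX_RING_SIZE 4096                          /* must be a power of two */
    #define TX_IDX(num)  ((num) & (TX_RING_SIZE - 1))  /* same wrap as TX_DESC() above */

    int main(void)
    {
        /* A free-running counter is never wrapped explicitly; the mask folds it
         * into the ring each time it is used as an index. */
        unsigned long cur_tx = TX_RING_SIZE + 5;       /* counter has lapped the ring once */
        printf("counter %lu -> ring slot %lu\n", cur_tx, (unsigned long)TX_IDX(cur_tx));
        return 0;
    }

The AND only behaves like a modulus because the size is a power of two; ariadne.c further down (5 entries) has to use % instead.
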
sungem.h   (compile-time #if ladder over the ring size; sketch below)
882 #define TX_RING_SIZE 128    [macro definition]
885 #if TX_RING_SIZE == 32
887 #elif TX_RING_SIZE == 64
889 #elif TX_RING_SIZE == 128
891 #elif TX_RING_SIZE == 256
893 #elif TX_RING_SIZE == 512
895 #elif TX_RING_SIZE == 1024
897 #elif TX_RING_SIZE == 2048
899 #elif TX_RING_SIZE == 4096
901 #elif TX_RING_SIZE
[all...]
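
sungem.h validates TX_RING_SIZE against the handful of sizes the hardware can be programmed with, using the #if/#elif ladder above (the constants it selects fall outside the excerpt). A hedged sketch of that compile-time dispatch; the TXDMA_CFG_RINGSZ name and the field values are invented for illustration:

    #include <stdio.h>

    #define TX_RING_SIZE 128

    /* Map the ring size to a hypothetical 2-bit size field; the real driver
     * derives a TXDMA configuration value the same way. */
    #if TX_RING_SIZE == 32
    #define TXDMA_CFG_RINGSZ 0
    #elif TX_RING_SIZE == 64
    #define TXDMA_CFG_RINGSZ 1
    #elif TX_RING_SIZE == 128
    #define TXDMA_CFG_RINGSZ 2
    #elif TX_RING_SIZE == 256
    #define TXDMA_CFG_RINGSZ 3
    #else
    #error TX_RING_SIZE is not a size this (hypothetical) hardware can encode
    #endif

    int main(void)
    {
        printf("ring size %d -> size field %d\n", TX_RING_SIZE, TXDMA_CFG_RINGSZ);
        return 0;
    }
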
7990.h
38 #define TX_RING_SIZE (1<<LANCE_LOG_TX_BUFFERS)    [macro definition]
40 #define TX_RING_MOD_MASK (TX_RING_SIZE - 1)
87 volatile struct lance_tx_desc btx_ring[TX_RING_SIZE];
90 volatile char tx_buf [TX_RING_SIZE][TX_BUFF_SIZE];
sunhme.h   (size sanity check plus wrap-around index helpers; sketch below)
330 #define TX_RING_SIZE 32 /* Must be >16 and <255, multiple of 16 */    [macro definition]
333 #if (TX_RING_SIZE < 16 || TX_RING_SIZE > 256 || (TX_RING_SIZE % 16) != 0)
334 #error TX_RING_SIZE holds illegal value
360 #define NEXT_TX(num) (((num) + 1) & (TX_RING_SIZE - 1))
362 #define PREV_TX(num) (((num) - 1) & (TX_RING_SIZE - 1))
366 (hp)->tx_old + (TX_RING_SIZE - 1) - (hp)->tx_new : \
417 struct sk_buff *tx_skbs[TX_RING_SIZE];
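
sunhme.h pairs a compile-time sanity check (#error on an illegal size) with masked NEXT_TX()/PREV_TX() helpers and a free-descriptor count built from tx_old/tx_new; line 366 above is the middle of that conditional. A self-contained sketch of the idea, with the struct and the tx_buffs_avail() function invented here (the driver keeps the equivalent as a macro):

    #include <stdio.h>

    #define TX_RING_SIZE 32                              /* power of two, as in sunhme.h */
    #define NEXT_TX(num) (((num) + 1) & (TX_RING_SIZE - 1))
    #define PREV_TX(num) (((num) - 1) & (TX_RING_SIZE - 1))

    /* tx_new is where the next packet will be queued, tx_old is the oldest
     * descriptor not yet reclaimed.  One slot stays unused so that
     * tx_old == tx_new can only mean "ring empty". */
    struct tx_state { unsigned int tx_old, tx_new; };

    static unsigned int tx_buffs_avail(const struct tx_state *hp)
    {
        return (hp->tx_old <= hp->tx_new)
            ? hp->tx_old + (TX_RING_SIZE - 1) - hp->tx_new
            : hp->tx_old - hp->tx_new - 1;
    }

    int main(void)
    {
        struct tx_state hp = { .tx_old = 0, .tx_new = 0 };
        printf("empty ring: %u free\n", tx_buffs_avail(&hp));    /* 31, one slot reserved */
        hp.tx_new = NEXT_TX(hp.tx_new);                          /* queue one packet */
        printf("after one tx: %u free\n", tx_buffs_avail(&hp));  /* 30 */
        return 0;
    }
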
myri_sbus.h
182 #define TX_RING_SIZE 16    [macro definition]
200 #define NEXT_TX(num) (((num) + 1) & (TX_RING_SIZE - 1))
201 #define PREV_TX(num) (((num) - 1) & (TX_RING_SIZE - 1))
205 (head) + (TX_RING_SIZE - 1) - (tail) : \
281 struct sk_buff *tx_skbs[TX_RING_SIZE]; /* TX skb's */
ariadne.c   (5-entry ring, wrapped with % rather than a mask; counter resync sketch below)
83 #define TX_RING_SIZE 5    [macro definition]
94 volatile struct TDRE *tx_ring[TX_RING_SIZE];
96 volatile u_short *tx_buff[TX_RING_SIZE];
109 struct TDRE tx_ring[TX_RING_SIZE];
111 u_short tx_buff[TX_RING_SIZE][PKT_BUF_SIZE/sizeof(u_short)];
305 lance->RDP = swapw(((u_short)-TX_RING_SIZE));
345 for (i = 0; i < TX_RING_SIZE; i++) {
444 int entry = dirty_tx % TX_RING_SIZE;
480 if (priv->cur_tx - dirty_tx >= TX_RING_SIZE) {
483 dirty_tx += TX_RING_SIZE;
[all...]
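
ariadne.c is one of the few entries whose ring is not a power of two (5 slots), so indices are reduced with % and cur_tx/dirty_tx run as free counters that the driver resynchronizes if they ever drift a full ring apart (lines 480 and 483 above). A compact sketch of such a reclaim loop; the struct layout, the ring_owned flag and the reclaim_tx() name are illustrative, not taken from the driver:

    #include <stdio.h>

    #define TX_RING_SIZE 5            /* not a power of two, so no mask trick */

    struct priv {
        unsigned int cur_tx;          /* free-running count of queued packets      */
        unsigned int dirty_tx;        /* free-running count of reclaimed packets   */
        int ring_owned[TX_RING_SIZE]; /* 1 while the descriptor belongs to the NIC */
    };

    /* Reclaim completed descriptors, mirroring the loop around line 444. */
    static void reclaim_tx(struct priv *p)
    {
        unsigned int dirty_tx = p->dirty_tx;

        while (p->cur_tx - dirty_tx > 0) {
            int entry = dirty_tx % TX_RING_SIZE;
            if (p->ring_owned[entry])
                break;                /* hardware still owns this one */
            dirty_tx++;
        }

        if (p->cur_tx - dirty_tx >= TX_RING_SIZE) {
            /* Should never happen; resync the lagging counter like the driver does. */
            fprintf(stderr, "out-of-sync dirty pointer\n");
            dirty_tx += TX_RING_SIZE;
        }
        p->dirty_tx = dirty_tx;
    }

    int main(void)
    {
        struct priv p = { .cur_tx = 3, .dirty_tx = 0,
                          .ring_owned = { 0, 0, 1, 0, 0 } };  /* slots 0-1 done, 2 pending */
        reclaim_tx(&p);
        printf("dirty_tx advanced to %u of cur_tx %u\n", p.dirty_tx, p.cur_tx);
        return 0;
    }
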
sunbmac.h
267 #define TX_RING_SIZE 256    [macro definition]
271 #define NEXT_TX(num) (((num) + 1) & (TX_RING_SIZE - 1))
273 #define PREV_TX(num) (((num) - 1) & (TX_RING_SIZE - 1))
277 (bp)->tx_old + (TX_RING_SIZE - 1) - (bp)->tx_new : \
316 struct sk_buff *tx_skbs[TX_RING_SIZE];
atarilance.c
111 #define TX_RING_SIZE (1 << TX_LOG_RING_SIZE)    [macro definition]
113 #define TX_RING_MOD_MASK (TX_RING_SIZE - 1)
157 struct lance_tx_head tx_head[TX_RING_SIZE];
707 for( i = 0; i < TX_RING_SIZE; i++ ) {
758 for( i = 0 ; i < TX_RING_SIZE; i++ )
830 while( lp->cur_tx >= TX_RING_SIZE && lp->dirty_tx >= TX_RING_SIZE ) {
831 lp->cur_tx -= TX_RING_SIZE;
832 lp->dirty_tx -= TX_RING_SIZE;
920 if (lp->cur_tx - dirty_tx >= TX_RING_SIZE) {
[all...]
yellowfin.c   (descriptors chained via ((i+1) % TX_RING_SIZE); sketch below)
72 #define TX_RING_SIZE 16    [macro definition]
73 #define TX_QUEUE_SIZE 12 /* Must be > 4 && <= TX_RING_SIZE */
75 #define STATUS_TOTAL_SIZE TX_RING_SIZE*sizeof(struct tx_status_words)
76 #define TX_TOTAL_SIZE 2*TX_RING_SIZE*sizeof(struct yellowfin_desc)
156 the list. The ring sizes are set at compile time by RX/TX_RING_SIZE.
311 struct sk_buff* tx_skbuff[TX_RING_SIZE];
706 for (i = 0; i < TX_RING_SIZE; i++)
766 for (i = 0; i < TX_RING_SIZE; i++) {
770 ((i+1)%TX_RING_SIZE)*sizeof(struct yellowfin_desc));
777 for (i = 0; i < TX_RING_SIZE;
[all...]
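
yellowfin.c caps in-flight packets with TX_QUEUE_SIZE and chains every descriptor to the bus address of its successor, wrapping at the end via ((i+1) % TX_RING_SIZE) (line 770). A sketch of that ring set-up; the tx_desc layout and the pretend DMA base address are assumptions, not the driver's real structures:

    #include <stdint.h>
    #include <stdio.h>

    #define TX_RING_SIZE 16

    /* Illustrative descriptor: the real yellowfin_desc carries more fields. */
    struct tx_desc {
        uint32_t next_addr;           /* bus address of the next descriptor */
        uint32_t status;
    };

    int main(void)
    {
        struct tx_desc ring[TX_RING_SIZE];
        uint32_t ring_dma = 0x10000000u;  /* pretend DMA base address of the ring */
        int i;

        /* Chain each descriptor to its successor; the last one points back to
         * slot 0, the same ((i + 1) % TX_RING_SIZE) step as line 770 above. */
        for (i = 0; i < TX_RING_SIZE; i++) {
            ring[i].status = 0;
            ring[i].next_addr = ring_dma +
                    ((i + 1) % TX_RING_SIZE) * (uint32_t)sizeof(struct tx_desc);
        }

        printf("last descriptor points back to 0x%08x (ring base)\n",
               (unsigned int)ring[TX_RING_SIZE - 1].next_addr);
        return 0;
    }
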
hamachi.c
119 #define TX_RING_SIZE 64    [macro definition]
121 #define TX_TOTAL_SIZE TX_RING_SIZE*sizeof(struct hamachi_desc)
237 the list. The ring sizes are set at compile time by RX/TX_RING_SIZE.
492 struct sk_buff* tx_skbuff[TX_RING_SIZE];
1020 int entry = hmp->dirty_tx % TX_RING_SIZE;
1035 if (entry >= TX_RING_SIZE-1)
1036 hmp->tx_ring[TX_RING_SIZE-1].status_n_length |=
1085 for (i = 0; i < TX_RING_SIZE; i++)
1108 for (i = 0; i < TX_RING_SIZE; i++){
1111 if (i >= TX_RING_SIZE
[all...]
sundance.c   (one slot held back via TX_QUEUE_LEN; sketch below)
68 #define TX_RING_SIZE 32    [macro definition]
69 #define TX_QUEUE_LEN (TX_RING_SIZE - 1) /* Limit ring entries actually used. */
72 #define TX_TOTAL_SIZE TX_RING_SIZE*sizeof(struct netdev_desc)
142 the list. The ring sizes are set at compile time by RX/TX_RING_SIZE.
367 struct sk_buff* tx_skbuff[TX_RING_SIZE];
947 for (i=0; i<TX_RING_SIZE; i++) {
960 np->cur_tx, np->cur_tx % TX_RING_SIZE,
961 np->dirty_tx, np->dirty_tx % TX_RING_SIZE);
1019 for (i = 0; i < TX_RING_SIZE; i++) {
1029 unsigned head = np->cur_task % TX_RING_SIZE;
[all...]
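
sundance.c (like dl2k.h above) limits usable entries to TX_QUEUE_LEN = TX_RING_SIZE - 1; holding one slot back means equal producer and consumer counts can only mean an empty ring, never a full one. A small sketch of that admission check, with invented names:

    #include <stdio.h>

    #define TX_RING_SIZE 32
    #define TX_QUEUE_LEN (TX_RING_SIZE - 1)   /* limit ring entries actually used */

    struct netdev_state {
        unsigned int cur_tx;     /* packets queued so far (free-running)    */
        unsigned int dirty_tx;   /* packets reclaimed so far (free-running) */
    };

    /* Would queueing one more packet overrun the ring? */
    static int tx_ring_full(const struct netdev_state *np)
    {
        return (np->cur_tx - np->dirty_tx) >= TX_QUEUE_LEN;
    }

    int main(void)
    {
        struct netdev_state np = { .cur_tx = 31, .dirty_tx = 0 };
        printf("31 in flight, full=%d\n", tx_ring_full(&np));  /* 1: stop the queue */
        np.dirty_tx = 1;
        printf("30 in flight, full=%d\n", tx_ring_full(&np));  /* 0: room again */
        return 0;
    }
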
lance.c
193 #define TX_RING_SIZE (1 << (LANCE_LOG_TX_BUFFERS))    [macro definition]
194 #define TX_RING_MOD_MASK (TX_RING_SIZE - 1)
238 struct lance_tx_head tx_ring[TX_RING_SIZE];
242 struct sk_buff* tx_skbuff[TX_RING_SIZE];
560 lp->tx_bounce_buffs = kmalloc(PKT_BUF_SZ*TX_RING_SIZE,
851 for (i = 0; i < TX_RING_SIZE; i++) {
889 for (i = 0; i < TX_RING_SIZE; i++) {
938 for (i = 0; i < TX_RING_SIZE; i++)
1013 if ((lp->cur_tx - lp->dirty_tx) >= TX_RING_SIZE)
1096 if (lp->cur_tx - dirty_tx >= TX_RING_SIZE) {
[all...]
sunqe.h
290 #define TX_RING_SIZE 16    [macro definition]
300 (qp)->tx_old + (TX_RING_SIZE - 1) - (qp)->tx_new : \
325 u8 tx_buf[TX_RING_SIZE][PKT_BUF_SZ];
dl2k.h
38 #define TX_RING_SIZE 256    [macro definition]
39 #define TX_QUEUE_LEN (TX_RING_SIZE - 1) /* Limit ring entries actually used.*/
41 #define TX_TOTAL_SIZE TX_RING_SIZE*sizeof(struct netdev_desc)
477 struct sk_buff *tx_skbuff[TX_RING_SIZE];
pasemi_mac_ethtool.c
130 ering->tx_max_pending = TX_RING_SIZE/2;
via-rhine.c
71 #define TX_RING_SIZE 16    [macro definition]
157 the list. The ring sizes are set at compile time by RX/TX_RING_SIZE.
376 struct sk_buff *tx_skbuff[TX_RING_SIZE];
377 dma_addr_t tx_skbuff_dma[TX_RING_SIZE];
380 unsigned char *tx_buf[TX_RING_SIZE];
857 TX_RING_SIZE * sizeof(struct tx_desc),
865 PKT_BUF_SZ * TX_RING_SIZE,
870 TX_RING_SIZE * sizeof(struct tx_desc),
890 TX_RING_SIZE * sizeof(struct tx_desc),
895 pci_free_consistent(rp->pdev, PKT_BUF_SZ * TX_RING_SIZE,
[all...]
macb.c   (queue wake-up threshold at TX_RING_SIZE/4; sketch below)
36 #define TX_RING_SIZE 128    [macro definition]
37 #define DEF_TX_RING_PENDING (TX_RING_SIZE - 1)
38 #define TX_RING_BYTES (sizeof(struct dma_desc) * TX_RING_SIZE)
41 (TX_RING_SIZE - (bp)->tx_pending)
46 #define NEXT_TX(n) (((n) + 1) & (TX_RING_SIZE - 1))
51 #define MACB_TX_WAKEUP_THRESH (TX_RING_SIZE / 4)
321 for (i = 0; i < TX_RING_SIZE; i++)
652 if (entry == (TX_RING_SIZE - 1))
700 size = TX_RING_SIZE * sizeof(struct ring_info);
752 for (i = 0; i < TX_RING_SIZE;
[all...]
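
macb.c sets its wake-up threshold to a quarter of the ring (MACB_TX_WAKEUP_THRESH), so once the transmit queue has been stopped it is only restarted after a sizeable chunk of descriptors has been freed. The following sketch models that hysteresis in plain C with invented names; it is not the driver's netif_stop_queue()/netif_wake_queue() path:

    #include <stdio.h>

    #define TX_RING_SIZE 128
    #define TX_WAKEUP_THRESH (TX_RING_SIZE / 4)   /* wake only after 1/4 of the ring drains */

    struct queue_model {
        unsigned int in_flight;   /* descriptors currently owned by the NIC  */
        int stopped;              /* 1 while the stack is told not to submit */
    };

    /* Stop as soon as the ring fills, but wake only once a block of space is
     * free again, so the queue is not toggled on every single completion. */
    static void on_submit(struct queue_model *q)
    {
        if (++q->in_flight >= TX_RING_SIZE)
            q->stopped = 1;
    }

    static void on_complete(struct queue_model *q)
    {
        q->in_flight--;
        if (q->stopped && TX_RING_SIZE - q->in_flight >= TX_WAKEUP_THRESH)
            q->stopped = 0;
    }

    int main(void)
    {
        struct queue_model q = { 0, 0 };
        unsigned int i;

        for (i = 0; i < TX_RING_SIZE; i++)
            on_submit(&q);
        printf("ring filled: stopped=%d\n", q.stopped);                  /* 1 */

        for (i = 0; i < TX_WAKEUP_THRESH - 1; i++)
            on_complete(&q);
        printf("after %u completions: stopped=%d\n", i, q.stopped);      /* still 1 */

        on_complete(&q);
        printf("after %u completions: stopped=%d\n", i + 1, q.stopped);  /* 0 */
        return 0;
    }
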
3c515.c
56 #define TX_RING_SIZE 16    [macro definition]
306 struct boom_tx_desc tx_ring[TX_RING_SIZE];
309 struct sk_buff *tx_skbuff[TX_RING_SIZE];
844 for (i = 0; i < TX_RING_SIZE; i++)
982 for (i = 0; i < TX_RING_SIZE; i++) {
1012 int entry = vp->cur_tx % TX_RING_SIZE;
1020 prev_entry = &vp->tx_ring[(vp->cur_tx - 1) % TX_RING_SIZE];
1050 if (vp->cur_tx - vp->dirty_tx > TX_RING_SIZE - 1)
1175 int entry = dirty_tx % TX_RING_SIZE;
1186 if (lp->tx_full && (lp->cur_tx - dirty_tx <= TX_RING_SIZE
[all...]
dl2k.c   (indices kept pre-wrapped, occupancy via modular arithmetic; sketch below)
210 else if (tx_coalesce > TX_RING_SIZE-1)
211 tx_coalesce = TX_RING_SIZE - 1;
554 for (i = 0; i < TX_RING_SIZE; i++) {
558 ((i+1)%TX_RING_SIZE) *
612 entry = np->cur_tx % TX_RING_SIZE;
640 np->cur_tx = (np->cur_tx + 1) % TX_RING_SIZE;
641 if ((np->cur_tx - np->old_tx + TX_RING_SIZE) % TX_RING_SIZE
709 int entry = np->old_tx % TX_RING_SIZE;
734 entry = (entry + 1) % TX_RING_SIZE;
[all...]
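
Unlike the drivers that let cur_tx/dirty_tx run freely, dl2k.c wraps the indices themselves on every advance (line 640), so counting the in-flight entries has to add TX_RING_SIZE back in before taking the modulus (line 641). A sketch of that arithmetic with an invented helper name:

    #include <stdio.h>

    #define TX_RING_SIZE 256

    /* With indices kept in [0, TX_RING_SIZE), the consumer index can be
     * numerically ahead of the producer after a wrap; adding the ring size
     * before the modulus makes the difference come out right. */
    static unsigned int tx_in_flight(unsigned int cur_tx, unsigned int old_tx)
    {
        return (cur_tx - old_tx + TX_RING_SIZE) % TX_RING_SIZE;
    }

    int main(void)
    {
        printf("%u\n", tx_in_flight(10, 4));    /* 6: no wrap involved        */
        printf("%u\n", tx_in_flight(3, 250));   /* 9: producer wrapped past 0 */
        return 0;
    }
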
declance.c
156 #define TX_RING_SIZE (1 << (LANCE_LOG_TX_BUFFERS))    [macro definition]
157 #define TX_RING_MOD_MASK (TX_RING_SIZE - 1)
223 struct lance_tx_desc btx_ring[TX_RING_SIZE];
267 char *tx_buf_ptr_cpu[TX_RING_SIZE];
271 uint tx_buf_ptr_lnc[TX_RING_SIZE];
485 for (i = 0; i < TX_RING_SIZE; i++) {
1090 for (i = 0; i < TX_RING_SIZE; i++) {
1135 for (i = 0; i < TX_RING_SIZE; i++) {
1166 for (i = 0; i < TX_RING_SIZE; i++) {
sh_eth.c
462 for (i = 0; i < TX_RING_SIZE; i++) {
480 int tx_ringsize = sizeof(*txdesc) * TX_RING_SIZE;
524 for (i = 0; i < TX_RING_SIZE; i++) {
567 mdp->tx_skbuff = kmalloc(sizeof(*mdp->tx_skbuff) * TX_RING_SIZE,
590 tx_ringsize = sizeof(struct sh_eth_txdesc) * TX_RING_SIZE;
706 entry = mdp->dirty_tx % TX_RING_SIZE;
717 if (entry >= TX_RING_SIZE - 1)
1144 for (i = 0; i < TX_RING_SIZE; i++) {
1167 if ((mdp->cur_tx - mdp->dirty_tx) >= (TX_RING_SIZE - 4)) {
1176 entry = mdp->cur_tx % TX_RING_SIZE;
[all...]
smsc9420.c   (head/tail ring with two slots of slack; sketch below)
561 for (i = 0; i < TX_RING_SIZE; i++) {
981 pd->tx_ring_tail = (pd->tx_ring_tail + 1) % TX_RING_SIZE;
993 (((pd->tx_ring_head + 2) % TX_RING_SIZE) == pd->tx_ring_tail);
1019 if (unlikely(index == (TX_RING_SIZE - 1)))
1027 pd->tx_ring_head = (pd->tx_ring_head + 1) % TX_RING_SIZE;
1248 TX_RING_SIZE), GFP_KERNEL);
1255 for (i = 0; i < TX_RING_SIZE; i++) {
1263 pd->tx_ring[TX_RING_SIZE - 1].length = TDES1_TER_;
1635 sizeof(struct smsc9420_dma_desc) * TX_RING_SIZE,
1693 (RX_RING_SIZE + TX_RING_SIZE), p
[all...]
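
smsc9420.c drives its ring with explicit head/tail indices, treats the ring as full while only two slots of slack remain (the ((head + 2) % TX_RING_SIZE) == tail test around line 993), and flags the physically last descriptor with an end-of-ring bit (line 1263) so the DMA engine wraps on its own. A sketch of the head/tail occupancy test only; struct and function names are mine:

    #include <stdio.h>

    #define TX_RING_SIZE 32

    struct tx_ring_state {
        unsigned int head;   /* next slot the driver will fill */
        unsigned int tail;   /* next slot to be reclaimed      */
    };

    /* Full while two slots of slack remain between head and tail. */
    static int ring_full(const struct tx_ring_state *r)
    {
        return ((r->head + 2) % TX_RING_SIZE) == r->tail;
    }

    static void submit(struct tx_ring_state *r)
    {
        r->head = (r->head + 1) % TX_RING_SIZE;
    }

    int main(void)
    {
        struct tx_ring_state r = { .head = 0, .tail = 0 };
        unsigned int queued = 0;

        while (!ring_full(&r)) {
            submit(&r);
            queued++;
        }
        printf("queued %u of %d slots before stopping\n", queued, TX_RING_SIZE);
        return 0;
    }
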
/netgear-R7000-V1.0.7.12_1.2.5/components/opensource/linux/linux-2.6.36/drivers/net/tulip/
interrupt.c
517 int maxtx = TX_RING_SIZE;
518 int maxoi = TX_RING_SIZE;
581 int entry = dirty_tx % TX_RING_SIZE;
631 if (tp->cur_tx - dirty_tx > TX_RING_SIZE) {
635 dirty_tx += TX_RING_SIZE;
639 if (tp->cur_tx - dirty_tx < TX_RING_SIZE - 2)
tulip_core.c   (Tx-done interrupt mitigation keyed to ring occupancy; sketch below)
602 for (i = 0; i < TX_RING_SIZE; i++)
659 for (i = 0; i < TX_RING_SIZE; i++) {
680 entry = tp->cur_tx % TX_RING_SIZE;
688 if (tp->cur_tx - tp->dirty_tx < TX_RING_SIZE/2) {/* Typical path */
690 } else if (tp->cur_tx - tp->dirty_tx == TX_RING_SIZE/2) {
692 } else if (tp->cur_tx - tp->dirty_tx < TX_RING_SIZE - 2) {
698 if (entry == TX_RING_SIZE-1)
723 int entry = dirty_tx % TX_RING_SIZE;
822 for (i = 0; i < TX_RING_SIZE; i++) {
1149 if (tp->cur_tx - tp->dirty_tx > TX_RING_SIZE
[all...]
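
tulip_core.c throttles Tx-done interrupts by looking at ring occupancy before filling each descriptor: the common case asks for no completion interrupt, one is requested at the half-full mark and again when the ring is nearly exhausted, and the last slot also carries the ring-wrap bit (lines 688-698 above). A rough sketch of that decision ladder; the flag bit values below are placeholders, not the tulip descriptor bits:

    #include <stdio.h>

    #define TX_RING_SIZE 16

    /* Illustrative flag bits; the real driver uses its own descriptor encoding. */
    #define FLAG_TX_INTR   0x1u   /* request a Tx-done interrupt for this packet */
    #define FLAG_RING_WRAP 0x2u   /* tell the DMA engine this is the last slot   */

    static unsigned int tx_flags(unsigned int cur_tx, unsigned int dirty_tx,
                                 unsigned int entry)
    {
        unsigned int in_flight = cur_tx - dirty_tx;
        unsigned int flag;

        if (in_flight < TX_RING_SIZE / 2)
            flag = 0;                      /* typical path: no interrupt        */
        else if (in_flight == TX_RING_SIZE / 2)
            flag = FLAG_TX_INTR;           /* one interrupt at the halfway mark */
        else if (in_flight < TX_RING_SIZE - 2)
            flag = 0;
        else
            flag = FLAG_TX_INTR;           /* nearly full: make sure we reclaim */

        if (entry == TX_RING_SIZE - 1)
            flag |= FLAG_RING_WRAP;        /* last descriptor wraps to slot 0   */
        return flag;
    }

    int main(void)
    {
        printf("lightly loaded: 0x%x\n", tx_flags(3, 0, 3 % TX_RING_SIZE));
        printf("half full:      0x%x\n", tx_flags(8, 0, 8 % TX_RING_SIZE));
        printf("last ring slot: 0x%x\n", tx_flags(16, 1, TX_RING_SIZE - 1));
        return 0;
    }
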
/netgear-R7000-V1.0.7.12_1.2.5/components/opensource/linux/linux-2.6.36/drivers/net/wan/
dscc4.c
162 #define TX_RING_SIZE 32    [macro definition]
164 #define TX_TOTAL_SIZE TX_RING_SIZE*sizeof(struct TxFD)
199 struct sk_buff *tx_skbuff[TX_RING_SIZE];
428 ((dpriv->tx_current-1)%TX_RING_SIZE)*sizeof(struct TxFD);
493 for (i = 0; i < TX_RING_SIZE; i++) {
1114 next = dpriv->tx_current%TX_RING_SIZE;
1133 if (!((++dpriv->tx_current - dpriv->tx_dirty)%TX_RING_SIZE))
1466 if ((dpriv->tx_current - dpriv->tx_dirty)%TX_RING_SIZE)
1492 cur = dpriv->tx_dirty%TX_RING_SIZE;
1567 (dpriv->tx_dirty%TX_RING_SIZE)*
[all...]

Completed in 248 milliseconds
