Searched refs:rx_buf_sz (Results 1 - 22 of 22) sorted by relevance

/netgear-WNDR4500v2-V1.0.0.60_1.0.38/src/linux/linux-2.6/drivers/net/
sis190.c
286 u32 rx_buf_sz; member in struct:sis190_private
469 static inline void sis190_give_to_asic(struct RxDesc *desc, u32 rx_buf_sz) argument
474 desc->size = cpu_to_le32((rx_buf_sz & RX_BUF_MASK) | eor);
480 u32 rx_buf_sz)
483 sis190_give_to_asic(desc, rx_buf_sz);
496 struct RxDesc *desc, u32 rx_buf_sz)
502 skb = dev_alloc_skb(rx_buf_sz);
508 mapping = pci_map_single(pdev, skb->data, rx_buf_sz,
511 sis190_map_to_asic(desc, mapping, rx_buf_sz);
533 tp->RxDescRing + i, tp->rx_buf_sz);
479 sis190_map_to_asic(struct RxDesc *desc, dma_addr_t mapping, u32 rx_buf_sz) argument
495 sis190_alloc_rx_skb(struct pci_dev *pdev, struct sk_buff **sk_buff, struct RxDesc *desc, u32 rx_buf_sz) argument
540 sis190_try_rx_copy(struct sk_buff **sk_buff, int pkt_size, struct RxDesc *desc, int rx_buf_sz) argument
[all...]
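The sis190 hits above show RX refill split into small helpers: allocate an skb of rx_buf_sz bytes, DMA-map it, record the mapping in the descriptor, then hand the descriptor back to the NIC with the buffer size and an end-of-ring bit. Below is a minimal userspace sketch of that descriptor-fill pattern; the struct, flag values and helper names are mock stand-ins for the kernel's RxDesc, dev_alloc_skb() and pci_map_single(), not the driver's own definitions.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define RX_BUF_MASK 0x0000fff8u   /* illustrative size mask, not sis190's value */
#define DESC_EOR    0x40000000u   /* "end of ring" marker (illustrative) */
#define DESC_OWN    0x80000000u   /* "owned by NIC" marker (illustrative) */

struct mock_rx_desc {             /* stand-in for the driver's struct RxDesc */
    uint32_t size;
    uint64_t addr;
};

/* Give a descriptor to the hardware: buffer size, end-of-ring bit, ownership. */
static void give_to_asic(struct mock_rx_desc *desc, uint32_t rx_buf_sz, int last)
{
    desc->size = (rx_buf_sz & RX_BUF_MASK) | (last ? DESC_EOR : 0) | DESC_OWN;
}

/* Record the address of a freshly mapped buffer, then mark it for the NIC. */
static void map_to_asic(struct mock_rx_desc *desc, uint64_t mapping,
                        uint32_t rx_buf_sz, int last)
{
    desc->addr = mapping;
    give_to_asic(desc, rx_buf_sz, last);
}

int main(void)
{
    enum { NUM_RX_DESC = 4, RX_BUF_SZ = 1536 };
    struct mock_rx_desc ring[NUM_RX_DESC] = {{0}};
    void *bufs[NUM_RX_DESC];

    for (int i = 0; i < NUM_RX_DESC; i++) {
        bufs[i] = malloc(RX_BUF_SZ);   /* stands in for dev_alloc_skb(rx_buf_sz) */
        map_to_asic(&ring[i], (uint64_t)(uintptr_t)bufs[i],
                    RX_BUF_SZ, i == NUM_RX_DESC - 1);
        printf("desc %d: size=0x%08x\n", i, (unsigned)ring[i].size);
    }
    for (int i = 0; i < NUM_RX_DESC; i++)
        free(bufs[i]);
    return 0;
}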
hamachi.c
501 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:hamachi_private
1136 hmp->rx_buf_sz, PCI_DMA_FROMDEVICE);
1143 struct sk_buff *skb = dev_alloc_skb(hmp->rx_buf_sz);
1150 skb->data, hmp->rx_buf_sz, PCI_DMA_FROMDEVICE));
1152 DescEndPacket | DescIntr | (hmp->rx_buf_sz - 2));
1187 hmp->rx_buf_sz = (dev->mtu <= 1492 ? PKT_BUF_SZ :
1197 struct sk_buff *skb = dev_alloc_skb(hmp->rx_buf_sz);
1204 skb->data, hmp->rx_buf_sz, PCI_DMA_FROMDEVICE));
1207 DescEndPacket | DescIntr | (hmp->rx_buf_sz -2));
1501 hmp->rx_buf_sz,
[all...]
r8169.c
446 unsigned rx_buf_sz; member in struct:rtl8169_private
1737 tp->rx_buf_sz = (mtu > RX_BUF_SIZE) ? mtu + ETH_HLEN + 8 : RX_BUF_SIZE;
1977 pci_unmap_single(pdev, le64_to_cpu(desc->addr), tp->rx_buf_sz,
1984 static inline void rtl8169_mark_to_asic(struct RxDesc *desc, u32 rx_buf_sz) argument
1988 desc->opts1 = cpu_to_le32(DescOwn | eor | rx_buf_sz);
1992 u32 rx_buf_sz)
1996 rtl8169_mark_to_asic(desc, rx_buf_sz);
2000 struct RxDesc *desc, int rx_buf_sz,
2007 skb = dev_alloc_skb(rx_buf_sz + align);
2014 mapping = pci_map_single(pdev, skb->data, rx_buf_sz,
1991 rtl8169_map_to_asic(struct RxDesc *desc, dma_addr_t mapping, u32 rx_buf_sz) argument
1999 rtl8169_alloc_rx_skb(struct pci_dev *pdev, struct sk_buff **sk_buff, struct RxDesc *desc, int rx_buf_sz, unsigned int align) argument
2482 rtl8169_try_rx_copy(struct sk_buff **sk_buff, int pkt_size, struct RxDesc *desc, int rx_buf_sz, unsigned int align) argument
[all...]
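Most of these drivers derive rx_buf_sz from the MTU plus a little slack, falling back to a fixed default for standard-sized frames: hamachi tests against 1492, r8169 uses mtu + ETH_HLEN + 8 above RX_BUF_SIZE, and several entries below use mtu + 32 above 1500. A standalone sketch of that sizing policy follows; the threshold, the slack and PKT_BUF_SZ differ per driver, so the values here are purely illustrative.

#include <stdio.h>

#define ETH_HLEN   14      /* Ethernet header length, as in <linux/if_ether.h> */
#define PKT_BUF_SZ 1536    /* illustrative default; each driver defines its own */

/* A standard MTU fits the fixed default buffer; larger MTUs get header + slack. */
static unsigned int rx_buf_size(unsigned int mtu)
{
    return mtu <= 1500 ? PKT_BUF_SZ : mtu + ETH_HLEN + 8;
}

int main(void)
{
    const unsigned int mtus[] = { 1500, 4000, 9000 };

    for (unsigned int i = 0; i < sizeof(mtus) / sizeof(mtus[0]); i++)
        printf("mtu %u -> rx_buf_sz %u\n", mtus[i], rx_buf_size(mtus[i]));
    return 0;
}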
dl2k.c
495 skb = dev_alloc_skb (np->rx_buf_sz);
508 (np->pdev, skb->data, np->rx_buf_sz,
512 cpu_to_le64 (np->rx_buf_sz) << 48;
542 np->rx_buf_sz = (dev->mtu <= 1500 ? PACKET_SIZE : dev->mtu + 32);
566 struct sk_buff *skb = dev_alloc_skb (np->rx_buf_sz);
578 np->pdev, skb->data, np->rx_buf_sz,
580 np->rx_ring[i].fraginfo |= cpu_to_le64 (np->rx_buf_sz) << 48;
845 np->rx_buf_sz,
853 np->rx_buf_sz,
864 np->rx_buf_sz,
[all...]
yellowfin.c
325 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:yellowfin_private
733 yp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);
737 cpu_to_le32(CMD_RX_BUF | INTR_ALWAYS | yp->rx_buf_sz);
743 struct sk_buff *skb = dev_alloc_skb(yp->rx_buf_sz);
750 skb->data, yp->rx_buf_sz, PCI_DMA_FROMDEVICE));
1064 yp->rx_buf_sz, PCI_DMA_FROMDEVICE);
1131 yp->rx_buf_sz,
1142 yp->rx_buf_sz,
1158 struct sk_buff *skb = dev_alloc_skb(yp->rx_buf_sz);
1165 skb->data, yp->rx_buf_sz, PCI_DMA_FROMDEVIC
[all...]
sundance.c
378 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:netdev_private
968 np->rx_buf_sz = (dev->mtu <= 1520 ? PKT_BUF_SZ : dev->mtu + 16);
981 struct sk_buff *skb = dev_alloc_skb(np->rx_buf_sz);
988 pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz,
990 np->rx_ring[i].frag[0].length = cpu_to_le32(np->rx_buf_sz | LastFrag);
1312 np->rx_buf_sz,
1318 np->rx_buf_sz,
1324 np->rx_buf_sz,
1367 skb = dev_alloc_skb(np->rx_buf_sz);
1375 np->rx_buf_sz, PCI_DMA_FROMDEVIC
[all...]
via-rhine.c
392 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:rhine_private
908 rp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);
915 rp->rx_ring[i].desc_length = cpu_to_le32(rp->rx_buf_sz);
925 struct sk_buff *skb = dev_alloc_skb(rp->rx_buf_sz);
932 pci_map_single(rp->pdev, skb->data, rp->rx_buf_sz,
953 rp->rx_buf_sz, PCI_DMA_FROMDEVICE);
1490 rp->rx_buf_sz,
1499 rp->rx_buf_sz,
1513 rp->rx_buf_sz,
1535 skb = dev_alloc_skb(rp->rx_buf_sz);
[all...]
8139cp.c
360 unsigned rx_buf_sz; member in struct:cp_private
454 cp->rx_buf_sz = mtu + ETH_HLEN + 8;
456 cp->rx_buf_sz = PKT_BUF_SZ;
560 buflen = cp->rx_buf_sz + RX_OFFSET;
592 cp->rx_buf_sz);
594 desc->opts1 = cpu_to_le32(DescOwn | cp->rx_buf_sz);
1072 skb = dev_alloc_skb(cp->rx_buf_sz + RX_OFFSET);
1078 mapping = pci_map_single(cp->pdev, skb->data, cp->rx_buf_sz,
1086 cpu_to_le32(DescOwn | RingEnd | cp->rx_buf_sz);
1089 cpu_to_le32(DescOwn | cp->rx_buf_sz);
[all...]
epic100.c
270 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:epic_private
912 ep->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);
917 ep->rx_ring[i].buflength = cpu_to_le32(ep->rx_buf_sz);
927 struct sk_buff *skb = dev_alloc_skb(ep->rx_buf_sz);
933 skb->data, ep->rx_buf_sz, PCI_DMA_FROMDEVICE);
1198 ep->rx_buf_sz,
1204 ep->rx_buf_sz,
1209 ep->rx_buf_sz, PCI_DMA_FROMDEVICE);
1228 skb = ep->rx_skbuff[entry] = dev_alloc_skb(ep->rx_buf_sz);
1233 skb->data, ep->rx_buf_sz, PCI_DMA_FROMDEVIC
[all...]
fealnx.c
409 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:netdev_private
1076 skb = dev_alloc_skb(np->rx_buf_sz);
1086 np->rx_buf_sz, PCI_DMA_FROMDEVICE);
1247 np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);
1255 np->rx_ring[i].control = np->rx_buf_sz << RBSShift;
1268 struct sk_buff *skb = dev_alloc_skb(np->rx_buf_sz);
1279 np->rx_buf_sz, PCI_DMA_FROMDEVICE);
1707 np->rx_buf_sz,
1721 np->rx_buf_sz,
1726 np->rx_buf_sz,
[all...]
via-velocity.c
1115 pci_unmap_single(vptr->pdev, rd_info->skb_dma, vptr->rx_buf_sz,
1396 vptr->rx_buf_sz, PCI_DMA_FROMDEVICE);
1419 pci_action(vptr->pdev, rd_info->skb_dma, vptr->rx_buf_sz,
1447 rd_info->skb = dev_alloc_skb(vptr->rx_buf_sz + 64);
1457 rd_info->skb_dma = pci_map_single(vptr->pdev, rd_info->skb->data, vptr->rx_buf_sz, PCI_DMA_FROMDEVICE);
1464 rd->len = cpu_to_le32(vptr->rx_buf_sz);
1726 vptr->rx_buf_sz = (dev->mtu <= 1504 ? PKT_BUF_SZ : dev->mtu + 32);
1802 vptr->rx_buf_sz = 9 * 1024;
1804 vptr->rx_buf_sz = 8192;
1806 vptr->rx_buf_sz
[all...]
starfire.c
613 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:netdev_private
992 writel((np->rx_buf_sz << RxBufferLenShift) |
1176 np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);
1180 struct sk_buff *skb = dev_alloc_skb(np->rx_buf_sz);
1184 np->rx_info[i].mapping = pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz, PCI_DMA_FROMDEVICE);
1463 pci_unmap_single(np->pci_dev, np->rx_info[entry].mapping, np->rx_buf_sz, PCI_DMA_FROMDEVICE);
1576 skb = dev_alloc_skb(np->rx_buf_sz);
1581 pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz, PCI_DMA_FROMDEVICE);
1944 pci_unmap_single(np->pci_dev, np->rx_info[i].mapping, np->rx_buf_sz, PCI_DMA_FROMDEVICE);
sungem.h
938 #define RX_BUF_ALLOC_SIZE(gp) ((gp)->rx_buf_sz + 28 + RX_OFFSET + 64)
1002 int rx_buf_sz; member in struct:gem
natsemi.c
573 unsigned int rx_buf_sz; member in struct:netdev_private
1752 if (np->rx_buf_sz > NATSEMI_LONGPKT)
1935 unsigned int buflen = np->rx_buf_sz+NATSEMI_PADDING;
1945 np->rx_ring[entry].cmd_status = cpu_to_le32(np->rx_buf_sz);
1958 np->rx_buf_sz = ETH_DATA_LEN + NATSEMI_HEADERS;
1960 np->rx_buf_sz = dev->mtu + NATSEMI_HEADERS;
2022 unsigned int buflen = np->rx_buf_sz;
2281 unsigned int buflen = np->rx_buf_sz;
2341 } else if (pkt_len > np->rx_buf_sz) {
forcedeth.c
758 unsigned int rx_buf_sz; member in struct:fe_priv
1378 struct sk_buff *skb = dev_alloc_skb(np->rx_buf_sz + NV_RX_ALLOC_PAD);
1388 np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL);
1410 struct sk_buff *skb = dev_alloc_skb(np->rx_buf_sz + NV_RX_ALLOC_PAD);
1421 np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL);
2371 np->rx_buf_sz = ETH_DATA_LEN + NV_RX_HEADERS;
2373 np->rx_buf_sz = dev->mtu + NV_RX_HEADERS;
2423 writel(np->rx_buf_sz, base + NvRegOffloadConfig);
3512 writel(np->rx_buf_sz, base + NvRegOffloadConfig);
4030 writel(np->rx_buf_sz, bas
[all...]
dl2k.h
656 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:netdev_private
sungem.c
828 skb_put(new_skb, (gp->rx_buf_sz + RX_OFFSET));
1612 gp->rx_buf_sz = max(dev->mtu + ETH_HLEN + VLAN_HLEN,
1628 skb_put(skb, (gp->rx_buf_sz + RX_OFFSET));
1872 writel(0x20000000 | (gp->rx_buf_sz + 4), gp->regs + MAC_MAXFSZ);
1954 int max_frame = (gp->rx_buf_sz + 4 + 64) & ~63;
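sungem sizes rx_buf_sz from the MTU plus Ethernet and VLAN headers (line 1612), over-allocates by a fixed amount (the RX_BUF_ALLOC_SIZE macro in sungem.h above), and rounds the MAC's maximum frame size to a 64-byte boundary (line 1954). A small sketch of that rounding arithmetic; the 4-byte FCS and 64-byte slack follow the excerpt, the sample sizes are just for illustration.

#include <stdio.h>

/* Mirror of the excerpt: max_frame = (rx_buf_sz + 4 + 64) & ~63, i.e. FCS plus
 * slack, pushed up to a 64-byte boundary. */
static unsigned int gem_max_frame(unsigned int rx_buf_sz)
{
    return (rx_buf_sz + 4 + 64) & ~63u;
}

int main(void)
{
    printf("rx_buf_sz 1522 -> max_frame %u\n", gem_max_frame(1522)); /* 1536 */
    printf("rx_buf_sz 9000 -> max_frame %u\n", gem_max_frame(9000)); /* 9024 */
    return 0;
}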
via-velocity.h
1763 int rx_buf_sz; member in struct:velocity_info
/netgear-WNDR4500v2-V1.0.0.60_1.0.38/src/linux/linux-2.6/drivers/net/tulip/
winbond-840.c
310 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member in struct:netdev_private
803 np->rx_ring[i].length = np->rx_buf_sz;
812 struct sk_buff *skb = dev_alloc_skb(np->rx_buf_sz);
817 np->rx_buf_sz,PCI_DMA_FROMDEVICE);
975 np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);
1273 skb = dev_alloc_skb(np->rx_buf_sz);
1279 np->rx_buf_sz, PCI_DMA_FROMDEVICE);
de2104x.c
297 unsigned rx_buf_sz; member in struct:de_private
429 buflen = copying_skb ? (len + RX_OFFSET) : de->rx_buf_sz;
472 cpu_to_le32(RingEnd | de->rx_buf_sz);
474 de->rx_ring[rx_tail].opts2 = cpu_to_le32(de->rx_buf_sz);
1257 skb = dev_alloc_skb(de->rx_buf_sz);
1264 skb->data, de->rx_buf_sz, PCI_DMA_FROMDEVICE);
1270 cpu_to_le32(RingEnd | de->rx_buf_sz);
1272 de->rx_ring[i].opts2 = cpu_to_le32(de->rx_buf_sz);
1318 de->rx_buf_sz, PCI_DMA_FROMDEVICE);
1361 de->rx_buf_sz
[all...]
/netgear-WNDR4500v2-V1.0.0.60_1.0.38/src/linux/linux-2.6/drivers/atm/
iphase.c
683 iadev->rx_buf_sz = IA_RX_BUF_SZ;
694 iadev->rx_buf_sz = IA_RX_BUF_SZ;
706 iadev->rx_buf_sz = IA_RX_BUF_SZ;
711 iadev->rx_buf_sz, iadev->rx_pkt_ram);)
1027 if (len > iadev->rx_buf_sz) {
1028 printk("Over %d bytes sdu received, dropped!!!\n", iadev->rx_buf_sz);
1188 if ((length > iadev->rx_buf_sz) || (length >
1335 writew(iadev->rx_buf_sz, iadev->reass_reg+BUF_SIZE);
1349 rx_pkt_start += iadev->rx_buf_sz;
2988 iadev->num_rx_desc, iadev->rx_buf_sz,
[all...]
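The iphase (ATM) hits use rx_buf_sz as a hard upper bound on the reassembled SDU: anything longer than one RX buffer is dropped, as at lines 1027-1028 above. A tiny sketch of that length check, with an illustrative buffer size standing in for IA_RX_BUF_SZ.

#include <stdio.h>
#include <string.h>

#define RX_BUF_SZ 10240   /* illustrative; the driver uses IA_RX_BUF_SZ */

/* Copy one received SDU into the RX buffer, refusing anything that cannot fit. */
static int rx_one_sdu(const unsigned char *data, size_t len, unsigned char *buf)
{
    if (len > RX_BUF_SZ) {
        fprintf(stderr, "Over %d bytes sdu received, dropped!!!\n", RX_BUF_SZ);
        return -1;
    }
    memcpy(buf, data, len);
    return (int)len;
}

int main(void)
{
    static unsigned char buf[RX_BUF_SZ];
    unsigned char small[64] = { 0 };

    printf("64-byte sdu: %d\n", rx_one_sdu(small, sizeof(small), buf));
    /* Oversize length: the check fires before any copy is attempted. */
    printf("oversize sdu: %d\n", rx_one_sdu(small, RX_BUF_SZ + 1, buf));
    return 0;
}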
iphase.h
1021 u16 num_rx_desc, rx_buf_sz, rxing; member in struct:iadev_t

Completed in 307 milliseconds