Lines matching refs:rxr in /netgear-R7000-V1.0.7.12_1.2.5/components/opensource/linux/linux-2.6.36/drivers/net/

713 		struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
717 if (rxr->rx_desc_ring[j])
719 rxr->rx_desc_ring[j],
720 rxr->rx_desc_mapping[j]);
721 rxr->rx_desc_ring[j] = NULL;
723 vfree(rxr->rx_buf_ring);
724 rxr->rx_buf_ring = NULL;
727 if (rxr->rx_pg_desc_ring[j])
729 rxr->rx_pg_desc_ring[j],
730 rxr->rx_pg_desc_mapping[j]);
731 rxr->rx_pg_desc_ring[j] = NULL;
733 vfree(rxr->rx_pg_ring);
734 rxr->rx_pg_ring = NULL;
767 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
770 rxr->rx_buf_ring =
772 if (rxr->rx_buf_ring == NULL)
775 memset(rxr->rx_buf_ring, 0,
779 rxr->rx_desc_ring[j] =
782 &rxr->rx_desc_mapping[j],
784 if (rxr->rx_desc_ring[j] == NULL)
790 rxr->rx_pg_ring = vmalloc(SW_RXPG_RING_SIZE *
792 if (rxr->rx_pg_ring == NULL)
795 memset(rxr->rx_pg_ring, 0, SW_RXPG_RING_SIZE *
800 rxr->rx_pg_desc_ring[j] =
803 &rxr->rx_pg_desc_mapping[j],
805 if (rxr->rx_pg_desc_ring[j] == NULL)
2673 bnx2_alloc_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp)
2676 struct sw_pg *rx_pg = &rxr->rx_pg_ring[index];
2678 &rxr->rx_pg_desc_ring[RX_RING(index)][RX_IDX(index)];
2698 bnx2_free_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index)
2700 struct sw_pg *rx_pg = &rxr->rx_pg_ring[index];
2714 bnx2_alloc_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp)
2717 struct sw_bd *rx_buf = &rxr->rx_buf_ring[index];
2719 struct rx_bd *rxbd = &rxr->rx_desc_ring[RX_RING(index)][RX_IDX(index)];
2744 rxr->rx_prod_bseq += bp->rx_buf_use_size;
2889 bnx2_reuse_rx_skb_pages(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
2896 u16 cons = rxr->rx_pg_cons;
2898 cons_rx_pg = &rxr->rx_pg_ring[cons];
2917 hw_prod = rxr->rx_pg_prod;
2922 prod_rx_pg = &rxr->rx_pg_ring[prod];
2923 cons_rx_pg = &rxr->rx_pg_ring[cons];
2924 cons_bd = &rxr->rx_pg_desc_ring[RX_RING(cons)][RX_IDX(cons)];
2925 prod_bd = &rxr->rx_pg_desc_ring[RX_RING(prod)][RX_IDX(prod)];
2940 rxr->rx_pg_prod = hw_prod;
2941 rxr->rx_pg_cons = cons;
2945 bnx2_reuse_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
2951 cons_rx_buf = &rxr->rx_buf_ring[cons];
2952 prod_rx_buf = &rxr->rx_buf_ring[prod];
2958 rxr->rx_prod_bseq += bp->rx_buf_use_size;
2969 cons_bd = &rxr->rx_desc_ring[RX_RING(cons)][RX_IDX(cons)];
2970 prod_bd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)];
2976 bnx2_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, struct sk_buff *skb,
2983 err = bnx2_alloc_rx_skb(bp, rxr, prod, GFP_ATOMIC);
2985 bnx2_reuse_rx_skb(bp, rxr, skb, (u16) (ring_idx >> 16), prod);
2990 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages);
3005 u16 pg_cons = rxr->rx_pg_cons;
3006 u16 pg_prod = rxr->rx_pg_prod;
3019 rxr->rx_pg_cons = pg_cons;
3020 rxr->rx_pg_prod = pg_prod;
3021 bnx2_reuse_rx_skb_pages(bp, rxr, NULL,
3035 rx_pg = &rxr->rx_pg_ring[pg_cons];
3047 err = bnx2_alloc_rx_page(bp, rxr,
3051 rxr->rx_pg_cons = pg_cons;
3052 rxr->rx_pg_prod = pg_prod;
3053 bnx2_reuse_rx_skb_pages(bp, rxr, skb,
3069 rxr->rx_pg_prod = pg_prod;
3070 rxr->rx_pg_cons = pg_cons;
3092 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
3098 sw_cons = rxr->rx_cons;
3099 sw_prod = rxr->rx_prod;
3117 rx_buf = &rxr->rx_buf_ring[sw_ring_cons];
3122 &rxr->rx_buf_ring[RX_RING_IDX(NEXT_RX_BD(sw_cons))];
3152 bnx2_reuse_rx_skb(bp, rxr, skb, sw_ring_cons,
3159 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages);
3171 bnx2_reuse_rx_skb(bp, rxr, skb, sw_ring_cons,
3183 bnx2_reuse_rx_skb(bp, rxr, skb,
3187 } else if (unlikely(bnx2_rx_skb(bp, rxr, skb, len, hdr_len,
3258 rxr->rx_cons = sw_cons;
3259 rxr->rx_prod = sw_prod;
3262 REG_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod);
3264 REG_WR16(bp, rxr->rx_bidx_addr, sw_prod);
3266 REG_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq);
3357 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
3359 if ((bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) ||
3451 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
3456 if (bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons)
5031 struct bnx2_rx_ring_info *rxr;
5037 rxr = &bnapi->rx_ring;
5041 rxr->rx_prod_bseq = 0;
5042 rxr->rx_prod = 0;
5043 rxr->rx_cons = 0;
5044 rxr->rx_pg_prod = 0;
5045 rxr->rx_pg_cons = 0;
5142 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
5151 bnx2_init_rxbd_rings(rxr->rx_desc_ring, rxr->rx_desc_mapping,
5163 bnx2_init_rxbd_rings(rxr->rx_pg_desc_ring,
5164 rxr->rx_pg_desc_mapping,
5171 val = (u64) rxr->rx_pg_desc_mapping[0] >> 32;
5174 val = (u64) rxr->rx_pg_desc_mapping[0] & 0xffffffff;
5181 val = (u64) rxr->rx_desc_mapping[0] >> 32;
5184 val = (u64) rxr->rx_desc_mapping[0] & 0xffffffff;
5187 ring_prod = prod = rxr->rx_pg_prod;
5189 if (bnx2_alloc_rx_page(bp, rxr, ring_prod, GFP_KERNEL) < 0) {
5197 rxr->rx_pg_prod = prod;
5199 ring_prod = prod = rxr->rx_prod;
5201 if (bnx2_alloc_rx_skb(bp, rxr, ring_prod, GFP_KERNEL) < 0) {
5209 rxr->rx_prod = prod;
5211 rxr->rx_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BDIDX;
5212 rxr->rx_bseq_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BSEQ;
5213 rxr->rx_pg_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_PG_BDIDX;
5215 REG_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod);
5216 REG_WR16(bp, rxr->rx_bidx_addr, prod);
5218 REG_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq);
5375 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
5378 if (rxr->rx_buf_ring == NULL)
5382 struct sw_bd *rx_buf = &rxr->rx_buf_ring[j];
5398 bnx2_free_rx_page(bp, rxr, j);
5711 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
5716 rxr = &bnapi->rx_ring;
5792 rx_buf = &rxr->rx_buf_ring[rx_start_idx];
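
For orientation, every match above goes through the per-NAPI rx-ring bookkeeping structure (bnapi->rx_ring). The sketch below is reconstructed only from the fields these matches reference; the authoritative definition, including the real MAX_RX_RINGS / MAX_RX_PG_RINGS bounds and the sw_bd / sw_pg / rx_bd types, lives in bnx2.h in this same source tree, so the placeholder values and comments here are assumptions, not the driver's definition.

	#include <linux/types.h>	/* u16, u32, dma_addr_t */

	/* Placeholder bounds -- the real values come from bnx2.h. */
	#define MAX_RX_RINGS		8
	#define MAX_RX_PG_RINGS		32

	struct sw_bd;	/* per-SKB shadow entry, defined in bnx2.h */
	struct sw_pg;	/* per-page shadow entry for split/jumbo frames, bnx2.h */
	struct rx_bd;	/* hardware receive buffer descriptor, bnx2.h */

	/* Reconstructed view of struct bnx2_rx_ring_info, limited to the
	 * fields referenced by the matches above. */
	struct bnx2_rx_ring_info {
		u32		rx_prod_bseq;		/* running byte count, written to rx_bseq_addr */
		u16		rx_prod;		/* SW producer index, SKB ring */
		u16		rx_cons;		/* SW consumer index, SKB ring */

		u32		rx_bidx_addr;		/* chip mailbox: SKB-ring producer index */
		u32		rx_bseq_addr;		/* chip mailbox: byte sequence */
		u32		rx_pg_bidx_addr;	/* chip mailbox: page-ring producer index */

		u16		rx_pg_prod;		/* SW producer index, page ring */
		u16		rx_pg_cons;		/* SW consumer index, page ring */

		struct sw_bd	*rx_buf_ring;			/* vmalloc'ed shadow of posted SKBs */
		struct rx_bd	*rx_desc_ring[MAX_RX_RINGS];	/* DMA descriptor ring pages */
		struct sw_pg	*rx_pg_ring;			/* vmalloc'ed shadow of posted pages */
		struct rx_bd	*rx_pg_desc_ring[MAX_RX_PG_RINGS];

		dma_addr_t	rx_desc_mapping[MAX_RX_RINGS];		/* DMA handles for rx_desc_ring */
		dma_addr_t	rx_pg_desc_mapping[MAX_RX_PG_RINGS];	/* DMA handles for rx_pg_desc_ring */
	};

The matches around lines 5187-5218 and 3258-3266 also show the producer/refill pattern: advance a software producer index, post buffers, then publish the index and byte sequence to the chip through the per-ring mailbox addresses. The following is a minimal sketch of that pattern, assuming the macros behave as their uses above suggest; the helper name bnx2_post_rx_buffers_sketch is hypothetical and does not exist in the driver.

	/* Illustrative only -- simplified from the refill loop in the driver;
	 * error handling and page-ring refill are elided. */
	static void
	bnx2_post_rx_buffers_sketch(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, int count)
	{
		u16 prod = rxr->rx_prod;

		while (count--) {
			/* RX_RING_IDX() wraps the raw 16-bit counter into a ring slot. */
			if (bnx2_alloc_rx_skb(bp, rxr, RX_RING_IDX(prod), GFP_KERNEL) < 0)
				break;
			prod = NEXT_RX_BD(prod);
		}
		rxr->rx_prod = prod;

		/* Publish the new producer index and byte sequence to the chip. */
		REG_WR16(bp, rxr->rx_bidx_addr, prod);
		REG_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq);
	}

Read this way, the matches fall roughly into ring-memory allocation and free (lines 713-805), per-buffer allocation and reuse helpers (2673-2990), the receive completion path (3005-3266), work-pending checks (3357-3456), ring and context initialization (5031-5218), and buffer teardown plus the loopback self-test (5375-5792).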