Lines matching refs: desc

135 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
137 if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) != LTQ_DMA_C)
140 desc->ctl = LTQ_DMA_OWN | LTQ_DMA_RX_OFFSET(NET_IP_ALIGN) |
142 ch->dma.desc++;
143 ch->dma.desc %= LTQ_DESC_NUM;
189 void *buf = ch->rx_buff[ch->dma.desc];
194 ch->rx_buff[ch->dma.desc] = alloc(priv->rx_skb_size);
195 if (!ch->rx_buff[ch->dma.desc]) {
196 ch->rx_buff[ch->dma.desc] = buf;
201 mapping = dma_map_single(priv->dev, ch->rx_buff[ch->dma.desc],
204 skb_free_frag(ch->rx_buff[ch->dma.desc]);
205 ch->rx_buff[ch->dma.desc] = buf;
210 ch->dma.desc_base[ch->dma.desc].addr = mapping + NET_SKB_PAD + NET_IP_ALIGN;
214 ch->dma.desc_base[ch->dma.desc].ctl =
223 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
224 void *buf = ch->rx_buff[ch->dma.desc];
225 u32 ctl = desc->ctl;
233 ch->dma.desc++;
234 ch->dma.desc %= LTQ_DESC_NUM;
291 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
293 if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) == LTQ_DMA_C) {
323 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->tx_free];
325 if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) == LTQ_DMA_C) {
362 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
375 if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) || ch->skb[ch->dma.desc]) {
381 ch->skb[ch->dma.desc] = skb;
390 desc->addr = mapping - byte_offset;
393 desc->ctl = LTQ_DMA_OWN | LTQ_DMA_SOP | LTQ_DMA_EOP |
395 ch->dma.desc++;
396 ch->dma.desc %= LTQ_DESC_NUM;
397 if (ch->dma.desc == ch->tx_free)
436 curr_desc = ch_rx->dma.desc;
438 for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM;
439 ch_rx->dma.desc++) {
440 buff = ch_rx->rx_buff[ch_rx->dma.desc];
451 ch_rx->dma.desc = curr_desc;
499 for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM;
500 ch_rx->dma.desc++) {
505 ch_rx->dma.desc = 0;
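
Every match above is part of the same ring bookkeeping: the driver tests the OWN and C bits in desc->ctl to decide whether the DMA engine has handed a descriptor back, then advances ch->dma.desc modulo LTQ_DESC_NUM. The following is a minimal user-space sketch of that pattern only; the struct layout, bit positions and ring size are stand-ins for illustration, not the driver's real definitions.

/*
 * Sketch of the descriptor-ring pattern the matched lines repeat:
 * poll OWN/C to see whether DMA has completed a descriptor, then
 * advance the ring index with a modulo wrap.  All constants and the
 * descriptor layout below are illustrative stand-ins.
 */
#include <stdint.h>
#include <stdio.h>

#define LTQ_DESC_NUM	8		/* stand-in ring size */
#define LTQ_DMA_OWN	(1u << 31)	/* stand-in: descriptor owned by DMA */
#define LTQ_DMA_C	(1u << 30)	/* stand-in: DMA completed descriptor */

struct ltq_dma_desc {			/* simplified descriptor layout */
	uint32_t ctl;
	uint32_t addr;
};

struct ring {
	struct ltq_dma_desc desc_base[LTQ_DESC_NUM];
	unsigned int desc;		/* current ring index */
};

/* Return the next completed descriptor, or NULL if hardware still owns it. */
static struct ltq_dma_desc *ring_next_complete(struct ring *r)
{
	struct ltq_dma_desc *desc = &r->desc_base[r->desc];

	/* Completed only when C is set and OWN is clear. */
	if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) != LTQ_DMA_C)
		return NULL;

	/* Advance and wrap the ring index, as lines 142-143 and 233-234 do. */
	r->desc++;
	r->desc %= LTQ_DESC_NUM;
	return desc;
}

int main(void)
{
	struct ring r = { .desc = 0 };

	/* Pretend hardware finished descriptor 0. */
	r.desc_base[0].ctl = LTQ_DMA_C;

	if (ring_next_complete(&r))
		printf("descriptor 0 complete, ring index now %u\n", r.desc);
	return 0;
}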