Results limited to /netgear-R7000-V1.0.7.12_1.2.5/components/opensource/linux/linux-2.6.36/drivers/dma/

Lines Matching defs:desc
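All of the matches below evidently come from the Synopsys DesignWare AHB DMA controller driver (dw_dmac.c in this tree). Throughout, desc is a struct dw_desc: the driver's software wrapper around one hardware linked-list item (struct dw_lli), carrying the async_tx descriptor (txd), a list node (desc_node), and a tx_list of chained child descriptors. The sketches interleaved below reconstruct the surrounding functions from these matches and the stock 2.6.36 sources; they are illustrative, not verbatim quotes of this firmware's copy.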

94 struct dw_desc *desc, *_desc;
99 list_for_each_entry_safe(desc, _desc, &dwc->free_list, desc_node) {
100 if (async_tx_test_ack(&desc->txd)) {
101 list_del(&desc->desc_node);
102 ret = desc;
105 dev_dbg(chan2dev(&dwc->chan), "desc %p not ACKed\n", desc);
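
Lines 94-105 are dwc_desc_get(), the free-list allocator. A condensed sketch of the pattern, assuming the stock 2.6.36 dw_dmac internals:

    static struct dw_desc *dwc_desc_get(struct dw_dma_chan *dwc)
    {
            struct dw_desc *desc, *_desc;
            struct dw_desc *ret = NULL;

            spin_lock_bh(&dwc->lock);
            list_for_each_entry_safe(desc, _desc, &dwc->free_list, desc_node) {
                    if (async_tx_test_ack(&desc->txd)) {
                            /* the client has ACKed it, so it is safe to reuse */
                            list_del(&desc->desc_node);
                            ret = desc;
                            break;
                    }
                    dev_dbg(chan2dev(&dwc->chan), "desc %p not ACKed\n", desc);
            }
            spin_unlock_bh(&dwc->lock);

            return ret;
    }

Descriptors still awaiting an ACK stay on free_list and are simply skipped.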
115 static void dwc_sync_desc_for_cpu(struct dw_dma_chan *dwc, struct dw_desc *desc)
119 list_for_each_entry(child, &desc->tx_list, desc_node)
124 desc->txd.phys, sizeof(desc->lli),
130 * `desc' must not be on any lists.
132 static void dwc_desc_put(struct dw_dma_chan *dwc, struct dw_desc *desc)
134 if (desc) {
137 dwc_sync_desc_for_cpu(dwc, desc);
140 list_for_each_entry(child, &desc->tx_list, desc_node)
142 "moving child desc %p to freelist\n",
144 list_splice_init(&desc->tx_list, &dwc->free_list);
145 dev_vdbg(chan2dev(&dwc->chan), "moving desc %p to freelist\n", desc);
146 list_add(&desc->desc_node, &dwc->free_list);
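
Lines 115-124 (dwc_sync_desc_for_cpu) sync the descriptor's in-memory LLI back for CPU access, and lines 132-146 (dwc_desc_put) return a descriptor, including any children chained on its tx_list, to the channel's free list. A sketch of the latter:

    static void dwc_desc_put(struct dw_dma_chan *dwc, struct dw_desc *desc)
    {
            if (desc) {
                    struct dw_desc *child;

                    dwc_sync_desc_for_cpu(dwc, desc);

                    spin_lock_bh(&dwc->lock);
                    list_for_each_entry(child, &desc->tx_list, desc_node)
                            dev_vdbg(chan2dev(&dwc->chan),
                                     "moving child desc %p to freelist\n",
                                     child);
                    /* children first, then the parent itself */
                    list_splice_init(&desc->tx_list, &dwc->free_list);
                    dev_vdbg(chan2dev(&dwc->chan), "moving desc %p to freelist\n", desc);
                    list_add(&desc->desc_node, &dwc->free_list);
                    spin_unlock_bh(&dwc->lock);
            }
    }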
153 dwc_assign_cookie(struct dw_dma_chan *dwc, struct dw_desc *desc)
161 desc->txd.cookie = cookie;
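
dwc_assign_cookie (lines 153-161) is the era's standard open-coded cookie bookkeeping, before the dmaengine core grew a shared helper for it; a sketch:

    static dma_cookie_t
    dwc_assign_cookie(struct dw_dma_chan *dwc, struct dw_desc *desc)
    {
            dma_cookie_t cookie = dwc->chan.cookie + 1;

            if (cookie < 0)
                    cookie = 1;     /* cookies stay positive across wraparound */

            dwc->chan.cookie = cookie;
            desc->txd.cookie = cookie;

            return cookie;
    }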
199 dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc)
203 struct dma_async_tx_descriptor *txd = &desc->txd;
211 dwc_sync_desc_for_cpu(dwc, desc);
212 list_splice_init(&desc->tx_list, &dwc->free_list);
213 list_move(&desc->desc_node, &dwc->free_list);
219 dma_unmap_single(parent, desc->lli.dar,
220 desc->len, DMA_FROM_DEVICE);
222 dma_unmap_page(parent, desc->lli.dar,
223 desc->len, DMA_FROM_DEVICE);
227 dma_unmap_single(parent, desc->lli.sar,
228 desc->len, DMA_TO_DEVICE);
230 dma_unmap_page(parent, desc->lli.sar,
231 desc->len, DMA_TO_DEVICE);
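
Lines 219-231 are the completion-side unmapping in dwc_descriptor_complete(): the destination (desc->lli.dar) is unmapped DMA_FROM_DEVICE and the source (desc->lli.sar) DMA_TO_DEVICE, choosing dma_unmap_single() vs. dma_unmap_page() from the descriptor's control flags. A sketch, assuming the 2.6.36 dma_ctrl_flags names:

    if (!(txd->flags & DMA_COMPL_SKIP_DEST_UNMAP)) {
            if (txd->flags & DMA_COMPL_DEST_UNMAP_SINGLE)
                    dma_unmap_single(parent, desc->lli.dar,
                                     desc->len, DMA_FROM_DEVICE);
            else
                    dma_unmap_page(parent, desc->lli.dar,
                                   desc->len, DMA_FROM_DEVICE);
    }
    if (!(txd->flags & DMA_COMPL_SKIP_SRC_UNMAP)) {
            if (txd->flags & DMA_COMPL_SRC_UNMAP_SINGLE)
                    dma_unmap_single(parent, desc->lli.sar,
                                     desc->len, DMA_TO_DEVICE);
            else
                    dma_unmap_page(parent, desc->lli.sar,
                                   desc->len, DMA_TO_DEVICE);
    }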
245 struct dw_desc *desc, *_desc;
267 list_for_each_entry_safe(desc, _desc, &list, desc_node)
268 dwc_descriptor_complete(dwc, desc);
274 struct dw_desc *desc, *_desc;
296 list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) {
297 if (desc->lli.llp == llp)
301 list_for_each_entry(child, &desc->tx_list, desc_node)
310 dwc_descriptor_complete(dwc, desc);
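
Lines 274-310 are dwc_scan_descriptors(): it reads the channel's hardware LLP register and walks active_list, completing every descriptor that precedes the LLI the controller is currently fetching. A sketch of the core loop:

    dma_addr_t llp = channel_readl(dwc, LLP);

    list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) {
            if (desc->lli.llp == llp)
                    return;         /* hardware is still working on this one */

            list_for_each_entry(child, &desc->tx_list, desc_node)
                    if (child->lli.llp == llp)
                            return; /* ...or on one of its children */

            /* everything up to the current LLP has finished */
            dwc_descriptor_complete(dwc, desc);
    }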
330 " desc: s0x%x d0x%x l0x%x c0x%x:%x\n",
445 dwc_dump_lli(dwc, &dwc->cdesc->desc[i]->lli);
529 struct dw_desc *desc = txd_to_dw_desc(tx);
534 cookie = dwc_assign_cookie(dwc, desc);
543 desc->txd.cookie);
544 dwc_dostart(dwc, desc);
545 list_add_tail(&desc->desc_node, &dwc->active_list);
548 desc->txd.cookie);
550 list_add_tail(&desc->desc_node, &dwc->queue);
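
dwc_tx_submit() (lines 529-550) either starts the transfer at once or queues it, depending on whether the channel is idle; a sketch:

    static dma_cookie_t dwc_tx_submit(struct dma_async_tx_descriptor *tx)
    {
            struct dw_desc *desc = txd_to_dw_desc(tx);
            struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan);
            dma_cookie_t cookie;

            spin_lock_bh(&dwc->lock);
            cookie = dwc_assign_cookie(dwc, desc);

            if (list_empty(&dwc->active_list)) {
                    /* idle channel: kick the hardware immediately */
                    dwc_dostart(dwc, desc);
                    list_add_tail(&desc->desc_node, &dwc->active_list);
            } else {
                    /* busy: park it on the software queue */
                    list_add_tail(&desc->desc_node, &dwc->queue);
            }

            spin_unlock_bh(&dwc->lock);
            return cookie;
    }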
563 struct dw_desc *desc;
603 desc = dwc_desc_get(dwc);
604 if (!desc)
607 desc->lli.sar = src + offset;
608 desc->lli.dar = dest + offset;
609 desc->lli.ctllo = ctllo;
610 desc->lli.ctlhi = xfer_count;
613 first = desc;
615 prev->lli.llp = desc->txd.phys;
619 list_add_tail(&desc->desc_node,
622 prev = desc;
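
Lines 563-622 are the chunking loop in dwc_prep_dma_memcpy(): one dw_desc per hardware-sized chunk, each LLI pointing at the next via llp, with every descriptor after the first parked on the first one's tx_list. A sketch (DWC_MAX_COUNT is the stock driver's per-block transfer limit):

    for (offset = 0; offset < len; offset += xfer_count << src_width) {
            xfer_count = min_t(size_t, (len - offset) >> src_width,
                               DWC_MAX_COUNT);

            desc = dwc_desc_get(dwc);
            if (!desc)
                    goto err_desc_get;

            desc->lli.sar = src + offset;
            desc->lli.dar = dest + offset;
            desc->lli.ctllo = ctllo;
            desc->lli.ctlhi = xfer_count;   /* block size in transfer units */

            if (!first) {
                    first = desc;
            } else {
                    /* chain the previous LLI to this one... */
                    prev->lli.llp = desc->txd.phys;
                    /* ...and keep the child reachable from the first desc */
                    list_add_tail(&desc->desc_node, &first->tx_list);
            }
            prev = desc;
    }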
679 struct dw_desc *desc;
683 desc = dwc_desc_get(dwc);
684 if (!desc) {
696 desc->lli.sar = mem;
697 desc->lli.dar = reg;
698 desc->lli.ctllo = ctllo | DWC_CTLL_SRC_WIDTH(mem_width);
699 desc->lli.ctlhi = len >> mem_width;
702 first = desc;
704 prev->lli.llp = desc->txd.phys;
709 list_add_tail(&desc->desc_node,
712 prev = desc;
725 struct dw_desc *desc;
729 desc = dwc_desc_get(dwc);
730 if (!desc) {
742 desc->lli.sar = reg;
743 desc->lli.dar = mem;
744 desc->lli.ctllo = ctllo | DWC_CTLL_DST_WIDTH(mem_width);
745 desc->lli.ctlhi = len >> reg_width;
748 first = desc;
750 prev->lli.llp = desc->txd.phys;
755 list_add_tail(&desc->desc_node,
758 prev = desc;
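
The two dwc_prep_slave_sg() clusters mirror each other: for DMA_TO_DEVICE (lines 696-712) memory is the source and the peripheral FIFO register the destination; for DMA_FROM_DEVICE (lines 742-758) the roles flip, and the width field programmed into ctllo flips with them. Condensed:

    switch (direction) {
    case DMA_TO_DEVICE:             /* memory -> peripheral FIFO */
            desc->lli.sar = mem;
            desc->lli.dar = reg;
            desc->lli.ctllo = ctllo | DWC_CTLL_SRC_WIDTH(mem_width);
            desc->lli.ctlhi = len >> mem_width;
            break;
    case DMA_FROM_DEVICE:           /* peripheral FIFO -> memory */
            desc->lli.sar = reg;
            desc->lli.dar = mem;
            desc->lli.ctllo = ctllo | DWC_CTLL_DST_WIDTH(mem_width);
            desc->lli.ctlhi = len >> reg_width;
            break;
    }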
789 struct dw_desc *desc, *_desc;
816 list_for_each_entry_safe(desc, _desc, &list, desc_node)
817 dwc_descriptor_complete(dwc, desc);
864 struct dw_desc *desc;
908 desc = kzalloc(sizeof(struct dw_desc), GFP_KERNEL);
909 if (!desc) {
916 INIT_LIST_HEAD(&desc->tx_list);
917 dma_async_tx_descriptor_init(&desc->txd, chan);
918 desc->txd.tx_submit = dwc_tx_submit;
919 desc->txd.flags = DMA_CTRL_ACK;
920 desc->txd.phys = dma_map_single(chan2parent(chan), &desc->lli,
921 sizeof(desc->lli), DMA_TO_DEVICE);
922 dwc_desc_put(dwc, desc);
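
Lines 908-922, in dwc_alloc_chan_resources(), show the start of every descriptor's lifecycle: allocate it, initialize the async_tx fields, map its embedded LLI so the controller can fetch it by physical address, then park it on the free list via dwc_desc_put(). A sketch of the per-descriptor body:

    desc = kzalloc(sizeof(struct dw_desc), GFP_KERNEL);
    if (!desc)
            break;                  /* keep whatever was already allocated */

    INIT_LIST_HEAD(&desc->tx_list);
    dma_async_tx_descriptor_init(&desc->txd, chan);
    desc->txd.tx_submit = dwc_tx_submit;
    desc->txd.flags = DMA_CTRL_ACK; /* immediately reusable */
    /* map the in-memory LLI so the controller can fetch it */
    desc->txd.phys = dma_map_single(chan2parent(chan), &desc->lli,
                                    sizeof(desc->lli), DMA_TO_DEVICE);
    dwc_desc_put(dwc, desc);

The matching teardown in dwc_free_chan_resources() (lines 967-971 below) unmaps that same LLI mapping and kfree()s each descriptor.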
945 struct dw_desc *desc, *_desc;
967 list_for_each_entry_safe(desc, _desc, &list, desc_node) {
968 dev_vdbg(chan2dev(chan), " freeing descriptor %p\n", desc);
969 dma_unmap_single(chan2parent(chan), desc->txd.phys,
970 sizeof(desc->lli), DMA_TO_DEVICE);
971 kfree(desc);
1018 channel_writel(dwc, LLP, dwc->cdesc->desc[0]->txd.phys);
1069 struct dw_desc *desc;
1116 cdesc->desc = kzalloc(sizeof(struct dw_desc *) * periods, GFP_KERNEL);
1117 if (!cdesc->desc)
1121 desc = dwc_desc_get(dwc);
1122 if (!desc)
1127 desc->lli.dar = dws->tx_reg;
1128 desc->lli.sar = buf_addr + (period_len * i);
1129 desc->lli.ctllo = (DWC_DEFAULT_CTLLO
1138 desc->lli.dar = buf_addr + (period_len * i);
1139 desc->lli.sar = dws->rx_reg;
1140 desc->lli.ctllo = (DWC_DEFAULT_CTLLO
1152 desc->lli.ctlhi = (period_len >> reg_width);
1153 cdesc->desc[i] = desc;
1156 last->lli.llp = desc->txd.phys;
1162 last = desc;
1166 last->lli.llp = cdesc->desc[0]->txd.phys;
1181 dwc_desc_put(dwc, cdesc->desc[i]);
1219 dwc_desc_put(dwc, cdesc->desc[i]);
1221 kfree(cdesc->desc);
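
The remaining matches (lines 1018-1221) belong to the cyclic API: dw_dma_cyclic_prep() allocates an array of period descriptors, links each period's LLI to the next, and closes the ring by pointing the last LLI back at the first, whose physical address is also what dw_dma_cyclic_start() writes into the LLP register (line 1018). A sketch of the ring construction for the transmit direction:

    cdesc->desc = kzalloc(sizeof(struct dw_desc *) * periods, GFP_KERNEL);
    if (!cdesc->desc)
            goto out_err;

    for (i = 0; i < periods; i++) {
            desc = dwc_desc_get(dwc);
            if (!desc)
                    goto out_err_desc_get;

            /* each period is one LLI covering period_len bytes */
            desc->lli.sar = buf_addr + (period_len * i);
            desc->lli.dar = dws->tx_reg;
            desc->lli.ctlhi = period_len >> reg_width;
            cdesc->desc[i] = desc;

            if (last)
                    last->lli.llp = desc->txd.phys; /* period i-1 -> i */
            last = desc;
    }

    /* close the ring: the last period points back at the first */
    last->lli.llp = cdesc->desc[0]->txd.phys;

On error or teardown (lines 1181 and 1219-1221), every period descriptor goes back through dwc_desc_put() and the pointer array itself is kfree()d.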