Lines matching references to vc (the embedded struct virt_dma_chan) in the Spreadtrum sprd-dma driver

195 	struct virt_dma_chan	vc;
226 return container_of(c, struct sprd_dma_chn, vc.chan);
292 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
305 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
342 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
401 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
436 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
491 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
536 struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);
603 spin_lock(&schan->vc.lock);
607 spin_unlock(&schan->vc.lock);
628 spin_unlock(&schan->vc.lock);
645 spin_lock_irqsave(&schan->vc.lock, flags);
650 spin_unlock_irqrestore(&schan->vc.lock, flags);
655 vchan_free_chan_resources(&schan->vc);
673 spin_lock_irqsave(&schan->vc.lock, flags);
674 vd = vchan_find_desc(&schan->vc, cookie);
697 spin_unlock_irqrestore(&schan->vc.lock, flags);
708 spin_lock_irqsave(&schan->vc.lock, flags);
709 if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
711 spin_unlock_irqrestore(&schan->vc.lock, flags);
946 return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
1034 return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
1052 spin_lock_irqsave(&schan->vc.lock, flags);
1054 spin_unlock_irqrestore(&schan->vc.lock, flags);
1064 spin_lock_irqsave(&schan->vc.lock, flags);
1066 spin_unlock_irqrestore(&schan->vc.lock, flags);
1078 spin_lock_irqsave(&schan->vc.lock, flags);
1084 vchan_get_all_descriptors(&schan->vc, &head);
1085 spin_unlock_irqrestore(&schan->vc.lock, flags);
1090 vchan_dma_desc_free_list(&schan->vc, &head);
1201 dma_chn->vc.desc_free = sprd_dma_free_desc;
1202 vchan_init(&dma_chn->vc, &sdev->dma_dev);
1254 vc.chan.device_node) {
1255 list_del(&c->vc.chan.device_node);
1256 tasklet_kill(&c->vc.task);
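
The matches above all trace the standard virt-dma embedding pattern: the driver wraps struct virt_dma_chan inside its own channel structure (line 195), recovers it with container_of() (line 226), registers it through vchan_init() with a desc_free callback (lines 1201-1202), and performs every descriptor-list operation under vc.lock. The sketch below is a minimal illustration of that pattern, not the driver's actual code; it reuses names that appear in the listing (sprd_dma_chn, to_sprd_dma_chan, sprd_dma_free_desc), while sprd_dma_start and the cur_desc handling are simplified assumptions added only to make the sketch complete.

#include <linux/dmaengine.h>
#include <linux/slab.h>
#include "virt-dma.h"

struct sprd_dma_desc {
	struct virt_dma_desc	vd;	/* virt-dma descriptor, embedded the same way */
	/* hardware descriptor fields would follow here */
};

struct sprd_dma_chn {
	struct virt_dma_chan	vc;		/* embedded, never a pointer (line 195) */
	struct sprd_dma_desc	*cur_desc;	/* descriptor currently on the hardware (assumed) */
};

/* Recover the driver channel from the generic dmaengine handle (cf. line 226). */
static inline struct sprd_dma_chn *to_sprd_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct sprd_dma_chn, vc.chan);
}

/* desc_free callback installed at channel init (cf. line 1201); simplified stand-in. */
static void sprd_dma_free_desc(struct virt_dma_desc *vd)
{
	kfree(container_of(vd, struct sprd_dma_desc, vd));
}

/* Assumed hardware-start helper; must be called with vc.lock held (cf. line 536). */
static void sprd_dma_start(struct sprd_dma_chn *schan)
{
	struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);

	if (!vd)
		return;
	schan->cur_desc = container_of(vd, struct sprd_dma_desc, vd);
	/* program the controller registers from schan->cur_desc here */
}

/* Channel setup: install the free callback, then register with virt-dma
 * (cf. lines 1201-1202). */
static void sprd_dma_chan_init(struct sprd_dma_chn *schan,
			       struct dma_device *dma_dev)
{
	schan->vc.desc_free = sprd_dma_free_desc;
	vchan_init(&schan->vc, dma_dev);
}

/* Issue pending: all descriptor-list work runs under vc.lock (cf. lines 708-711). */
static void sprd_dma_issue_pending(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
		sprd_dma_start(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);
}

Keeping vc as an embedded member rather than a pointer is what makes the container_of() conversion valid, and it is why the teardown path (lines 1254-1256) can walk dma_dev.channels via vc.chan.device_node and kill vc.task directly.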