Lines Matching refs:vc

186 struct virt_dma_chan	vc;
227 return container_of(chan, struct sun6i_vchan, vc.chan);
399 dev_dbg(chan2dev(&vchan->vc.chan),
436 struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vchan->vc.chan.device);
437 struct virt_dma_desc *desc = vchan_next_desc(&vchan->vc);
485 list_for_each_entry(vchan, &sdev->slave.channels, vc.chan.device_node) {
486 spin_lock_irq(&vchan->vc.lock);
503 spin_unlock_irq(&vchan->vc.lock);
524 pchan->idx, &vchan->vc);
535 spin_lock_irq(&vchan->vc.lock);
537 spin_unlock_irq(&vchan->vc.lock);
567 spin_lock(&vchan->vc.lock);
570 spin_unlock(&vchan->vc.lock);
661 __func__, vchan->vc.chan.chan_id, &dest, &src, len, flags);
693 return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
746 __func__, vchan->vc.chan.chan_id,
760 __func__, vchan->vc.chan.chan_id,
773 return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
844 return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
870 dev_dbg(chan2dev(chan), "vchan %p: pause\n", &vchan->vc);
891 dev_dbg(chan2dev(chan), "vchan %p: resume\n", &vchan->vc);
893 spin_lock_irqsave(&vchan->vc.lock, flags);
898 } else if (!list_empty(&vchan->vc.desc_issued)) {
904 spin_unlock_irqrestore(&vchan->vc.lock, flags);
921 spin_lock_irqsave(&vchan->vc.lock, flags);
927 struct virt_dma_chan *vc = &vchan->vc;
929 list_add_tail(&vd->node, &vc->desc_completed);
933 vchan_get_all_descriptors(&vchan->vc, &head);
945 spin_unlock_irqrestore(&vchan->vc.lock, flags);
947 vchan_dma_desc_free_list(&vchan->vc, &head);
969 spin_lock_irqsave(&vchan->vc.lock, flags);
971 vd = vchan_find_desc(&vchan->vc, cookie);
983 spin_unlock_irqrestore(&vchan->vc.lock, flags);
996 spin_lock_irqsave(&vchan->vc.lock, flags);
998 if (vchan_issue_pending(&vchan->vc)) {
1005 &vchan->vc);
1011 &vchan->vc);
1014 spin_unlock_irqrestore(&vchan->vc.lock, flags);
1027 vchan_free_chan_resources(&vchan->vc);
1074 list_del(&vchan->vc.chan.device_node);
1075 tasklet_kill(&vchan->vc.task);
1409 vchan->vc.desc_free = sun6i_dma_free_desc;
1410 vchan_init(&vchan->vc, &sdc->slave);
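
The hits above (the sun6i_vchan and sun6i_dma_dev identifiers suggest drivers/dma/sun6i-dma.c) all follow the standard Linux virt-dma pattern: the driver embeds a struct virt_dma_chan in its own channel structure, recovers it from the generic struct dma_chan with container_of(), guards the descriptor lists with vc.lock, and hands descriptors to the core with vchan_tx_prep()/vchan_issue_pending(). A minimal sketch of that pattern, assuming only the in-tree virt-dma helpers and hypothetical my_vchan/my_desc names, looks like this:

/*
 * Sketch only: relies on <linux/dmaengine.h>, <linux/slab.h> and the in-tree
 * virt-dma helpers ("virt-dma.h" in drivers/dma/); my_vchan, my_desc and the
 * my_* functions are hypothetical names, not part of the sun6i driver.
 */
#include <linux/dmaengine.h>
#include <linux/slab.h>
#include "virt-dma.h"

struct my_desc {
	struct virt_dma_desc	vd;	/* embedded virtual descriptor */
	/* hardware link-list pointers, lengths, ... */
};

struct my_vchan {
	struct virt_dma_chan	vc;	/* embedded, as on line 186 above */
	/* driver-private channel state */
};

static inline struct my_vchan *to_my_vchan(struct dma_chan *chan)
{
	/* mirrors line 227: recover the wrapper from the generic dma_chan */
	return container_of(chan, struct my_vchan, vc.chan);
}

static struct dma_async_tx_descriptor *
my_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
		   size_t len, unsigned long flags)
{
	struct my_vchan *vchan = to_my_vchan(chan);
	struct my_desc *txd = kzalloc(sizeof(*txd), GFP_NOWAIT);

	if (!txd)
		return NULL;

	/* ... fill in the hardware descriptor for dest/src/len here ... */

	/* queue on vc.desc_allocated, as lines 693/773/844 do */
	return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
}

static void my_issue_pending(struct dma_chan *chan)
{
	struct my_vchan *vchan = to_my_vchan(chan);
	unsigned long flags;

	spin_lock_irqsave(&vchan->vc.lock, flags);
	if (vchan_issue_pending(&vchan->vc)) {
		/* start the hardware; vchan_next_desc(&vchan->vc) yields work */
	}
	spin_unlock_irqrestore(&vchan->vc.lock, flags);
}

static void my_free_desc(struct virt_dma_desc *vd)
{
	kfree(container_of(vd, struct my_desc, vd));
}

/* probe-time wiring, as on lines 1409-1410 */
static void my_init_channel(struct my_vchan *vchan, struct dma_device *dmadev)
{
	vchan->vc.desc_free = my_free_desc;
	vchan_init(&vchan->vc, dmadev);
}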