Lines matching refs:xdma_chan (a cross-reference listing; the fragments appear to come from the Linux XDMA DMA-engine driver, drivers/dma/xilinx/xdma.c)

55  * struct xdma_chan - Driver specific DMA channel structure
65 struct xdma_chan {
96 struct xdma_chan *chan;
130 struct xdma_chan *h2c_chans;
131 struct xdma_chan *c2h_chans;
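
The matches at lines 55-131 cover the type's definition and its owners: each software descriptor keeps a back-pointer to its channel (line 96), and the device holds one channel array per direction (lines 130-131). Pulling together the fields referenced elsewhere in this listing suggests roughly the following layout; field order and exact types are inferred from usage, not copied from the source:

struct xdma_chan {
        struct virt_dma_chan            vchan;          /* embedded virt-dma channel; .chan/.lock used throughout */
        void                            *xdev_hdl;      /* cast to struct xdma_device * at lines 545, 671, 817 */
        u32                             base;           /* per-channel register offset into the regmap (line 549) */
        struct dma_pool                 *desc_pool;     /* hardware descriptor pool (lines 806-807, 827-829) */
        bool                            busy;           /* line 522 */
        bool                            stop_requested; /* line 523 */
        struct completion               last_interrupt; /* waited on at line 551 */
        struct dma_slave_config         cfg;            /* set at line 792, read at 627/631 and 704/708 */
        enum dma_transfer_direction     dir;            /* matched against chan_info->dir at line 1111 */
};
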
208 static inline struct xdma_chan *to_xdma_chan(struct dma_chan *chan)
210 return container_of(chan, struct xdma_chan, vchan.chan);
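
Lines 208-210 already form the complete helper. Because struct xdma_chan embeds its struct virt_dma_chan, which in turn embeds the generic struct dma_chan handed out by the framework, container_of() can walk back from the generic handle:

static inline struct xdma_chan *to_xdma_chan(struct dma_chan *chan)
{
        /* chan is &xdma_chan->vchan.chan, so step back to the outer struct */
        return container_of(chan, struct xdma_chan, vchan.chan);
}
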
222 static int xdma_channel_init(struct xdma_chan *chan)
268 xdma_alloc_desc(struct xdma_chan *chan, u32 desc_num, bool cyclic)
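```
xdma_alloc_desc() (line 268) is where the desc_pool created at line 827 gets used. A sketch of the shape, with the block bookkeeping omitted; everything here beyond the signature and the chan back-pointer (line 96) is inferred, and the cyclic field name is assumed:

static struct xdma_desc *
xdma_alloc_desc(struct xdma_chan *chan, u32 desc_num, bool cyclic)
{
        struct xdma_desc *sw_desc;
        dma_addr_t dma_addr;
        void *addr;

        sw_desc = kzalloc(sizeof(*sw_desc), GFP_NOWAIT);
        if (!sw_desc)
                return NULL;

        sw_desc->chan = chan;           /* the back-pointer seen at line 96 */
        sw_desc->cyclic = cyclic;       /* field name assumed */

        /* assumed: carve hardware descriptor blocks out of the per-channel pool */
        addr = dma_pool_alloc(chan->desc_pool, GFP_NOWAIT, &dma_addr);
        if (!addr) {
                kfree(sw_desc);
                return NULL;
        }
        /* ... link blocks until desc_num descriptors are covered ... */

        return sw_desc;
}
```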
325 static int xdma_xfer_start(struct xdma_chan *xchan)
391 static int xdma_xfer_stop(struct xdma_chan *xchan)
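
The bodies of xdma_xfer_start() and xdma_xfer_stop() barely appear in this match list because few of their lines name xdma_chan directly. Given the regmap usage visible at line 549, a plausible shape for the start path is the sketch below; the register macros are hypothetical placeholders, not the driver's real names:

static int xdma_xfer_start(struct xdma_chan *xchan)
{
        /* called with vchan.lock held, see lines 501-504 */
        struct virt_dma_desc *vd = vchan_next_desc(&xchan->vchan);
        struct xdma_device *xdev = xchan->xdev_hdl;
        int ret;

        if (!vd)
                return -ENOENT;

        /* Hypothetical: point the engine at the first descriptor block and
         * set the run bit, through the same regmap seen at line 549. */
        ret = regmap_write(xdev->rmap, xchan->base + XDMA_CHAN_CONTROL,
                           XDMA_CHAN_CONTROL_RUN);
        if (ret)
                return ret;

        xchan->busy = true;
        return 0;
}
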
413 struct xdma_chan **chans, *xchan;
498 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
501 spin_lock_irqsave(&xdma_chan->vchan.lock, flags);
502 if (vchan_issue_pending(&xdma_chan->vchan))
503 xdma_xfer_start(xdma_chan);
504 spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);
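
Lines 498-504 are nearly the whole callback, presumably registered as device_issue_pending. Restoring only the declarations gives the canonical virt-dma pattern: take the channel lock, move submitted descriptors to the issued list, and kick the hardware only if that produced work:

static void xdma_issue_pending(struct dma_chan *chan)
{
        struct xdma_chan *xdma_chan = to_xdma_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&xdma_chan->vchan.lock, flags);
        if (vchan_issue_pending(&xdma_chan->vchan))
                xdma_xfer_start(xdma_chan);
        spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);
}
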
513 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
518 xdma_xfer_stop(xdma_chan);
520 spin_lock_irqsave(&xdma_chan->vchan.lock, flags);
522 xdma_chan->busy = false;
523 xdma_chan->stop_requested = true;
524 vd = vchan_next_desc(&xdma_chan->vchan);
530 vchan_get_all_descriptors(&xdma_chan->vchan, &head);
531 list_splice_tail(&head, &xdma_chan->vchan.desc_terminated);
533 spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);
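
The terminate path at lines 513-533 reads as follows once the declarations are restored. Lines 525-529 do not mention xdma_chan and are therefore missing from the match list, so the in-flight descriptor handling in the middle is an assumption:

static int xdma_terminate_all(struct dma_chan *chan)
{
        struct xdma_chan *xdma_chan = to_xdma_chan(chan);
        struct virt_dma_desc *vd;
        unsigned long flags;
        LIST_HEAD(head);

        xdma_xfer_stop(xdma_chan);

        spin_lock_irqsave(&xdma_chan->vchan.lock, flags);

        xdma_chan->busy = false;
        xdma_chan->stop_requested = true;
        vd = vchan_next_desc(&xdma_chan->vchan);
        if (vd) {
                /* assumed: detach and retire the in-flight descriptor */
                list_del(&vd->node);
                vchan_terminate_vdesc(vd);
        }
        vchan_get_all_descriptors(&xdma_chan->vchan, &head);
        list_splice_tail(&head, &xdma_chan->vchan.desc_terminated);

        spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);

        return 0;
}
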
544 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
545 struct xdma_device *xdev = xdma_chan->xdev_hdl;
549 regmap_read(xdev->rmap, xdma_chan->base + XDMA_CHAN_STATUS, &st);
551 wait_for_completion_timeout(&xdma_chan->last_interrupt, msecs_to_jiffies(1000));
553 vchan_synchronize(&xdma_chan->vchan);
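
Lines 544-553 show the synchronize callback: poll the channel status register and, if the engine still looks active, wait up to a second for the final interrupt before letting vchan_synchronize() flush the completion tasklet. The busy-bit test between lines 549 and 551 is inferred and its macro name is a placeholder:

static void xdma_synchronize(struct dma_chan *chan)
{
        struct xdma_chan *xdma_chan = to_xdma_chan(chan);
        struct xdma_device *xdev = xdma_chan->xdev_hdl;
        u32 st = 0;

        regmap_read(xdev->rmap, xdma_chan->base + XDMA_CHAN_STATUS, &st);
        if (st & XDMA_CHAN_STATUS_BUSY)         /* inferred check, macro name assumed */
                wait_for_completion_timeout(&xdma_chan->last_interrupt,
                                            msecs_to_jiffies(1000));

        vchan_synchronize(&xdma_chan->vchan);
}
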
609 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
619 sw_desc = xdma_alloc_desc(xdma_chan, desc_num, false);
627 dev_addr = xdma_chan->cfg.dst_addr;
631 dev_addr = xdma_chan->cfg.src_addr;
643 tx_desc = vchan_tx_prep(&xdma_chan->vchan, &sw_desc->vdesc, flags);
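
In the slave_sg preparation (lines 609-643), the channel's cached dma_slave_config supplies the device-side address depending on transfer direction, and the finished software descriptor is handed to virt-dma. A sketch of that fragment, with the surrounding scatterlist walk omitted; the direction test itself is inferred:

        sw_desc = xdma_alloc_desc(xdma_chan, desc_num, false); /* one-shot */
        if (!sw_desc)
                return NULL;

        if (dir == DMA_MEM_TO_DEV)
                dev_addr = xdma_chan->cfg.dst_addr;     /* writing toward the device */
        else
                dev_addr = xdma_chan->cfg.src_addr;     /* reading from the device */

        /* ... fill hardware descriptors from the scatterlist ... */

        tx_desc = vchan_tx_prep(&xdma_chan->vchan, &sw_desc->vdesc, flags);
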
670 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
671 struct xdma_device *xdev = xdma_chan->xdev_hdl;
693 sw_desc = xdma_alloc_desc(xdma_chan, periods, true);
704 dev_addr = xdma_chan->cfg.dst_addr;
708 dev_addr = xdma_chan->cfg.src_addr;
719 tx_desc = vchan_tx_prep(&xdma_chan->vchan, &sw_desc->vdesc, flags);
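
The cyclic preparation at lines 670-719 mirrors the slave_sg path above nearly line for line: the same cfg-based address selection at 704/708, the same vchan_tx_prep() at 719. The visible difference is the third argument to xdma_alloc_desc(), which turns the descriptor set into a ring:

        sw_desc = xdma_alloc_desc(xdma_chan, desc_num, false); /* line 619: one-shot */
        sw_desc = xdma_alloc_desc(xdma_chan, periods, true);   /* line 693: cyclic ring */
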
745 struct xdma_chan *xchan = to_xdma_chan(chan);
790 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
792 memcpy(&xdma_chan->cfg, cfg, sizeof(*cfg));
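
The device_config callback (lines 790-792) has no hardware interaction at all; it just caches the peer's configuration for the next prep call. Callback name assumed:

static int xdma_device_config(struct dma_chan *chan,
                              struct dma_slave_config *cfg)
{
        struct xdma_chan *xdma_chan = to_xdma_chan(chan);

        /* stashed here, consumed at prep time (lines 627/631 and 704/708) */
        memcpy(&xdma_chan->cfg, cfg, sizeof(*cfg));

        return 0;
}
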
803 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
805 vchan_free_chan_resources(&xdma_chan->vchan);
806 dma_pool_destroy(xdma_chan->desc_pool);
807 xdma_chan->desc_pool = NULL;
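
Teardown at lines 803-807 is the usual mirror image of allocation: release the virt-dma bookkeeping, then the descriptor pool. Callback name assumed:

static void xdma_free_chan_resources(struct dma_chan *chan)
{
        struct xdma_chan *xdma_chan = to_xdma_chan(chan);

        vchan_free_chan_resources(&xdma_chan->vchan);
        dma_pool_destroy(xdma_chan->desc_pool);         /* NULL-safe */
        xdma_chan->desc_pool = NULL;
}
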
816 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
817 struct xdma_device *xdev = xdma_chan->xdev_hdl;
827 xdma_chan->desc_pool = dma_pool_create(dma_chan_name(chan), dev, XDMA_DESC_BLOCK_SIZE,
829 if (!xdma_chan->desc_pool) {
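
Allocation at lines 816-829 builds a per-channel dma_pool named after the channel. The alignment/boundary arguments and the device lookup are inferred from the truncated match at line 827:

static int xdma_alloc_chan_resources(struct dma_chan *chan)
{
        struct xdma_chan *xdma_chan = to_xdma_chan(chan);
        struct xdma_device *xdev = xdma_chan->xdev_hdl;
        struct device *dev = xdev->dma_dev.dev;         /* field name assumed */

        xdma_chan->desc_pool = dma_pool_create(dma_chan_name(chan), dev,
                                               XDMA_DESC_BLOCK_SIZE,
                                               XDMA_DESC_BLOCK_ALIGN,     /* assumed */
                                               XDMA_DESC_BLOCK_BOUNDARY); /* assumed */
        if (!xdma_chan->desc_pool)
                return -ENOMEM;

        return 0;
}
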
840 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
852 spin_lock_irqsave(&xdma_chan->vchan.lock, flags);
854 vd = vchan_find_desc(&xdma_chan->vchan, cookie);
867 spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);
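
The tx_status callback (lines 840-867, name assumed) only takes the channel lock to look up the cookie's descriptor; everything between the vchan_find_desc() hit at line 854 and the unlock at 867 (residue and error accounting) is absent from the match list, so the middle of this skeleton is an assumption:

static enum dma_status xdma_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
                                      struct dma_tx_state *state)
{
        struct xdma_chan *xdma_chan = to_xdma_chan(chan);
        struct virt_dma_desc *vd;
        enum dma_status ret;
        unsigned long flags;

        ret = dma_cookie_status(chan, cookie, state);
        if (ret == DMA_COMPLETE)
                return ret;

        spin_lock_irqsave(&xdma_chan->vchan.lock, flags);
        vd = vchan_find_desc(&xdma_chan->vchan, cookie);
        if (vd) {
                /* assumed: derive residue/error state from the descriptor */
        }
        spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);

        return ret;
}
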
879 struct xdma_chan *xchan = dev_id;
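
Line 879 is the interrupt handler entry: the struct xdma_chan pointer arrives as the dev_id cookie passed to request_irq() at setup, so each channel's IRQ lands directly on its channel. Handler name and body assumed:

static irqreturn_t xdma_channel_isr(int irq, void *dev_id)
{
        struct xdma_chan *xchan = dev_id;

        /* ... complete finished descriptors; presumably also signals the
         * last_interrupt completion waited on at line 551 ... */
        return IRQ_HANDLED;
}
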
1108 struct xdma_chan *xdma_chan = to_xdma_chan(chan);
1111 return chan_info->dir == xdma_chan->dir;
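
Finally, lines 1108-1111 are the dma_filter_fn used by clients when requesting a channel: a channel matches only if its fixed hardware direction agrees with the requested one. Restoring the declarations, with the param type name assumed:

static bool xdma_filter_fn(struct dma_chan *chan, void *param)
{
        struct xdma_chan *xdma_chan = to_xdma_chan(chan);
        struct xdma_chan_info *chan_info = param;       /* type name assumed */

        return chan_info->dir == xdma_chan->dir;
}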