Lines Matching defs:atdma

36  * at_dma_ / atdma	: Atmel DMA controller entity related
209 * struct atdma_sg - atdma scatter-gather entry
269 * @atdma: pointer to the driver data.
285 struct at_dma *atdma;
361 #define dma_readl(atdma, name) \
362 __raw_readl((atdma)->regs + AT_DMA_##name)
363 #define dma_writel(atdma, name, val) \
364 __raw_writel((val), (atdma)->regs + AT_DMA_##name)
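
Lines 361-364 define the global register accessors: they add the AT_DMA_<name> offset to the controller's base mapping and issue raw MMIO reads/writes. For example, dma_readl(atdma, EBCISR) expands to __raw_readl(atdma->regs + AT_DMA_EBCISR); the status-drain loops at lines 2015 and 2230 use exactly that expansion:

    /* sketch: reading EBCISR returns (and clears) pending interrupt status */
    while (dma_readl(atdma, EBCISR))
            cpu_relax();    /* assumed loop body */
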
392 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
397 dma_readl(atdma, EBCIMR),
398 dma_readl(atdma, CHSR));
422 static void atc_setup_irq(struct at_dma *atdma, int chan_id, int on)
430 dma_writel(atdma, EBCIER, ebci);
432 dma_writel(atdma, EBCIDR, ebci);
435 static void atc_enable_chan_irq(struct at_dma *atdma, int chan_id)
437 atc_setup_irq(atdma, chan_id, 1);
440 static void atc_disable_chan_irq(struct at_dma *atdma, int chan_id)
442 atc_setup_irq(atdma, chan_id, 0);
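
Lines 422-442 form the interrupt (un)masking path: atc_setup_irq() writes a per-channel bit set to either the enable (EBCIER) or disable (EBCIDR) register, and the two one-line wrappers pick the direction. A plausible reconstruction, assuming AT_DMA_BTC()/AT_DMA_ERR() are the driver's per-channel buffer-transfer-complete and error bit helpers:

    static void atc_setup_irq(struct at_dma *atdma, int chan_id, int on)
    {
            u32 ebci;

            /* completion and error interrupts for this channel (assumed mask) */
            ebci = AT_DMA_BTC(chan_id) | AT_DMA_ERR(chan_id);
            if (on)
                    dma_writel(atdma, EBCIER, ebci);    /* unmask */
            else
                    dma_writel(atdma, EBCIDR, ebci);    /* mask */
    }
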
452 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
454 return !!(dma_readl(atdma, CHSR) & atchan->mask);
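
Lines 452-454 show that channel liveness is just a bit test against the channel handler status register; a minimal sketch of the enclosing helper:

    /* sketch: a channel is enabled iff its bit is set in CHSR */
    static inline int atc_chan_is_enabled(struct at_dma_chan *atchan)
    {
            struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);

            return !!(dma_readl(atdma, CHSR) & atchan->mask);
    }
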
587 dma_writel(atchan->atdma, CHER, atchan->mask);
594 struct at_dma *atdma = to_at_dma(vd->tx.chan->device);
600 dma_pool_free(atdma->lli_pool, desc->sg[i].lli,
606 dma_pool_free(atdma->memset_pool, desc->memset_vaddr,
784 dma_writel(atchan->atdma, CHDR, AT_DMA_RES(i) | atchan->mask);
828 struct at_dma *atdma = dev_id;
835 imr = dma_readl(atdma, EBCIMR);
836 status = dma_readl(atdma, EBCISR);
842 dev_vdbg(atdma->dma_device.dev,
846 for (i = 0; i < atdma->dma_device.chancnt; i++) {
847 atchan = &atdma->chan[i];
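
Lines 828-847 sketch the interrupt handler's shape: dev_id carries the controller, the masked status (EBCISR & EBCIMR) decides whether anything is pending, and a loop over chancnt dispatches per-channel work. A hedged outline (the per-channel servicing and any retry loop are omitted; AT_DMA_BTC()/AT_DMA_ERR() are assumed bit helpers):

    static irqreturn_t at_dma_interrupt(int irq, void *dev_id)
    {
            struct at_dma *atdma = dev_id;
            struct at_dma_chan *atchan;
            u32 imr, status, pending;
            int i, ret = IRQ_NONE;

            imr = dma_readl(atdma, EBCIMR);
            status = dma_readl(atdma, EBCISR);
            pending = status & imr;
            if (!pending)
                    return ret;

            dev_vdbg(atdma->dma_device.dev,
                     "interrupt: status = 0x%08x, mask = 0x%08x\n",
                     status, imr);

            for (i = 0; i < atdma->dma_device.chancnt; i++) {
                    atchan = &atdma->chan[i];
                    if (!(pending & (AT_DMA_BTC(i) | AT_DMA_ERR(i))))
                            continue;
                    /* completion/error handling for atchan goes here */
                    ret = IRQ_HANDLED;
            }

            return ret;
    }
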
871 struct at_dma *atdma = to_at_dma(chan->device);
938 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
974 struct at_dma *atdma = to_at_dma(chan->device);
1019 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
1054 struct at_dma *atdma = to_at_dma(chan->device);
1069 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
1098 struct at_dma *atdma = to_at_dma(chan->device);
1119 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
1157 dma_pool_free(atdma->memset_pool, vaddr, paddr);
1168 struct at_dma *atdma = to_at_dma(chan->device);
1186 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
1235 dma_pool_free(atdma->memset_pool, vaddr, paddr);
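
The two memset preparation routines (lines 1098-1235) share one allocation pattern: grab a single 32-bit word from memset_pool, fill it with the replicated byte value, use it as the DMA source, and return it to the pool on any failure. A hedged sketch of that pattern (the value replication is an assumption, suggested by the 4-byte fill_align at line 2052):

    dma_addr_t paddr;
    int *vaddr;

    vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
    if (!vaddr)
            return NULL;

    /* replicate the memset byte across the 32-bit source word (assumed) */
    *vaddr = (value << 24) | (value << 16) | (value << 8) | value;

    /* on a later descriptor-setup failure, hand the word back to the pool */
    dma_pool_free(atdma->memset_pool, vaddr, paddr);
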
1253 struct at_dma *atdma = to_at_dma(chan->device);
1303 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool,
1352 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool,
1431 struct at_dma *atdma = to_at_dma(chan->device);
1437 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_ATOMIC,
1581 struct at_dma *atdma = to_at_dma(chan->device);
1589 dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));
1600 struct at_dma *atdma = to_at_dma(chan->device);
1611 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
1622 struct at_dma *atdma = to_at_dma(chan->device);
1639 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
1642 while (dma_readl(atdma, CHSR) & atchan->mask)
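
Lines 1581-1642 spell out the channel control protocol: pause sets the channel's suspend bit through the channel handler enable register, resume clears it through the channel handler disable register, and terminate disables the channel outright and then spins on CHSR until the hardware confirms it has stopped. Condensed from the lines above (the busy-wait body is an assumption):

    /* pause: request suspension of this channel */
    dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));

    /* resume: withdraw the suspend request */
    dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));

    /* terminate: disable the channel, then wait until it is really off */
    dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
    while (dma_readl(atdma, CHSR) & atchan->mask)
            cpu_relax();
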
1728 struct at_dma *atdma = to_at_dma(chan->device);
1748 BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_device.dev);
1924 * @atdma: the Atmel HDMAC device
1926 static void at_dma_off(struct at_dma *atdma)
1928 dma_writel(atdma, EN, 0);
1931 dma_writel(atdma, EBCIDR, -1L);
1934 while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
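
Lines 1926-1934 give nearly the whole shutdown helper; filling the gaps (the wait-loop body is an assumption):

    static void at_dma_off(struct at_dma *atdma)
    {
            /* disable the whole controller */
            dma_writel(atdma, EN, 0);

            /* mask every buffer-complete and error interrupt */
            dma_writel(atdma, EBCIDR, -1L);

            /* wait until no channel still shows as active */
            while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
                    cpu_relax();
    }
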
1940 struct at_dma *atdma;
1960 atdma = devm_kzalloc(&pdev->dev,
1961 struct_size(atdma, chan, plat_dat->nr_channels),
1963 if (!atdma)
1966 atdma->regs = devm_platform_ioremap_resource(pdev, 0);
1967 if (IS_ERR(atdma->regs))
1968 return PTR_ERR(atdma->regs);
1975 atdma->dma_device.cap_mask = plat_dat->cap_mask;
1976 atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1;
1978 atdma->clk = devm_clk_get(&pdev->dev, "dma_clk");
1979 if (IS_ERR(atdma->clk))
1980 return PTR_ERR(atdma->clk);
1982 err = clk_prepare_enable(atdma->clk);
1987 at_dma_off(atdma);
1989 err = request_irq(irq, at_dma_interrupt, 0, "at_hdmac", atdma);
1993 platform_set_drvdata(pdev, atdma);
1996 atdma->lli_pool = dma_pool_create("at_hdmac_lli_pool",
1999 if (!atdma->lli_pool) {
2006 atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool",
2008 if (!atdma->memset_pool) {
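
Lines 1996-2008 create the two DMA-coherent pools used throughout the prep routines: one sized for hardware linked-list items (lli) and one for the single-word memset source buffers. A hedged sketch of those calls (struct at_lli, the sizeof(int) buffer size, the word alignment, and the error labels are assumptions):

    atdma->lli_pool = dma_pool_create("at_hdmac_lli_pool",
                                      &pdev->dev, sizeof(struct at_lli),
                                      4 /* word alignment */, 0);
    if (!atdma->lli_pool)
            goto err_lli_pool_create;     /* hypothetical label */

    atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool",
                                         &pdev->dev, sizeof(int), 4, 0);
    if (!atdma->memset_pool)
            goto err_memset_pool_create;  /* hypothetical label */
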
2015 while (dma_readl(atdma, EBCISR))
2019 INIT_LIST_HEAD(&atdma->dma_device.channels);
2021 struct at_dma_chan *atchan = &atdma->chan[i];
2026 atchan->ch_regs = atdma->regs + ch_regs(i);
2029 atchan->atdma = atdma;
2031 vchan_init(&atchan->vc, &atdma->dma_device);
2032 atc_enable_chan_irq(atdma, i);
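
Lines 2019-2032 initialize the per-channel state: each channel gets its register window at a fixed stride via ch_regs(i), a back-pointer to the controller, a virt-dma channel registration, and its interrupts unmasked. A sketch of the loop (the mask initialization is an assumption implied by the atchan->mask tests above):

    for (i = 0; i < plat_dat->nr_channels; i++) {
            struct at_dma_chan *atchan = &atdma->chan[i];

            atchan->mask = BIT(i);                      /* assumed */
            atchan->ch_regs = atdma->regs + ch_regs(i); /* per-channel window */
            atchan->atdma = atdma;
            vchan_init(&atchan->vc, &atdma->dma_device);
            atc_enable_chan_irq(atdma, i);
    }
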
2036 atdma->dma_device.device_alloc_chan_resources = atc_alloc_chan_resources;
2037 atdma->dma_device.device_free_chan_resources = atc_free_chan_resources;
2038 atdma->dma_device.device_tx_status = atc_tx_status;
2039 atdma->dma_device.device_issue_pending = atc_issue_pending;
2040 atdma->dma_device.dev = &pdev->dev;
2043 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_device.cap_mask))
2044 atdma->dma_device.device_prep_interleaved_dma = atc_prep_dma_interleaved;
2046 if (dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask))
2047 atdma->dma_device.device_prep_dma_memcpy = atc_prep_dma_memcpy;
2049 if (dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask)) {
2050 atdma->dma_device.device_prep_dma_memset = atc_prep_dma_memset;
2051 atdma->dma_device.device_prep_dma_memset_sg = atc_prep_dma_memset_sg;
2052 atdma->dma_device.fill_align = DMAENGINE_ALIGN_4_BYTES;
2055 if (dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask)) {
2056 atdma->dma_device.device_prep_slave_sg = atc_prep_slave_sg;
2058 dma_cap_set(DMA_CYCLIC, atdma->dma_device.cap_mask);
2059 atdma->dma_device.device_prep_dma_cyclic = atc_prep_dma_cyclic;
2060 atdma->dma_device.device_config = atc_config;
2061 atdma->dma_device.device_pause = atc_pause;
2062 atdma->dma_device.device_resume = atc_resume;
2063 atdma->dma_device.device_terminate_all = atc_terminate_all;
2064 atdma->dma_device.src_addr_widths = ATC_DMA_BUSWIDTHS;
2065 atdma->dma_device.dst_addr_widths = ATC_DMA_BUSWIDTHS;
2066 atdma->dma_device.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
2067 atdma->dma_device.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
2070 dma_writel(atdma, EN, AT_DMA_ENABLE);
2073 dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask) ? "cpy " : "",
2074 dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask) ? "set " : "",
2075 dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask) ? "slave " : "",
2078 err = dma_async_device_register(&atdma->dma_device);
2091 at_dma_xlate, atdma);
2101 dma_async_device_unregister(&atdma->dma_device);
2103 dma_pool_destroy(atdma->memset_pool);
2105 dma_pool_destroy(atdma->lli_pool);
2107 free_irq(platform_get_irq(pdev, 0), atdma);
2109 clk_disable_unprepare(atdma->clk);
2115 struct at_dma *atdma = platform_get_drvdata(pdev);
2118 at_dma_off(atdma);
2121 dma_async_device_unregister(&atdma->dma_device);
2123 dma_pool_destroy(atdma->memset_pool);
2124 dma_pool_destroy(atdma->lli_pool);
2125 free_irq(platform_get_irq(pdev, 0), atdma);
2127 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2130 atc_disable_chan_irq(atdma, chan->chan_id);
2134 clk_disable_unprepare(atdma->clk);
2139 struct at_dma *atdma = platform_get_drvdata(pdev);
2142 clk_disable_unprepare(atdma->clk);
2147 struct at_dma *atdma = dev_get_drvdata(dev);
2150 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2181 struct at_dma *atdma = dev_get_drvdata(dev);
2185 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
2193 atdma->save_imr = dma_readl(atdma, EBCIMR);
2196 at_dma_off(atdma);
2197 clk_disable_unprepare(atdma->clk);
2203 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
2212 dma_writel(atdma, CHER, atchan->mask);
2222 struct at_dma *atdma = dev_get_drvdata(dev);
2226 clk_prepare_enable(atdma->clk);
2227 dma_writel(atdma, EN, AT_DMA_ENABLE);
2230 while (dma_readl(atdma, EBCISR))
2234 dma_writel(atdma, EBCIER, atdma->save_imr);
2235 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
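
Lines 2222-2235 mirror the suspend path: re-enable the clock, switch the controller back on, drain stale status by reading EBCISR, then restore the interrupt mask captured into save_imr at line 2193 before walking the channels. A sketch of the controller-level part (the loop body is an assumption; the per-channel restore inside the list walk is omitted):

    clk_prepare_enable(atdma->clk);
    dma_writel(atdma, EN, AT_DMA_ENABLE);

    /* reading EBCISR clears anything that latched while suspended */
    while (dma_readl(atdma, EBCISR))
            cpu_relax();

    dma_writel(atdma, EBCIER, atdma->save_imr);
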