Lines matching refs: ddev (drivers/dma/bcm2835-dma.c)

42  * @ddev: DMA device
48 struct dma_device ddev;
190 return container_of(d, struct bcm2835_dmadev, ddev);
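
The hits at lines 42, 48 and 190 come from the driver's private device structure and the helper that walks back from the embedded struct dma_device (declared in linux/dmaengine.h) to its wrapper. A minimal sketch of that pattern; only ddev and zero_page appear in the listing above, so the remaining member and the helper name follow the usual driver convention and are assumptions:

struct bcm2835_dmadev {
	struct dma_device ddev;   /* embedded dmaengine device */
	void __iomem *base;       /* assumed: MMIO base of the DMA block */
	dma_addr_t zero_page;     /* bus address of the mapped ZERO_PAGE(0) */
};

static inline struct bcm2835_dmadev *to_bcm2835_dma_dev(struct dma_device *d)
{
	/* recover the wrapper from the dma_device the core hands back */
	return container_of(d, struct bcm2835_dmadev, ddev);
}
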
822 c = devm_kzalloc(d->ddev.dev, sizeof(*c), GFP_KERNEL);
827 vchan_init(&c->vc, &d->ddev);
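
Lines 822 and 827 sit in per-channel setup: each channel is allocated against ddev.dev, so devm ties its lifetime to the DMA device, and vchan_init() links the channel into ddev.channels. A hedged sketch; the function name, the bcm2835_chan layout with a virt_dma_chan vc member, and the desc_free hook are assumptions based on the usual virt-dma arrangement:

static int bcm2835_dma_chan_init(struct bcm2835_dmadev *d, int chan_id, int irq)
{
	struct bcm2835_chan *c;

	c = devm_kzalloc(d->ddev.dev, sizeof(*c), GFP_KERNEL);
	if (!c)
		return -ENOMEM;

	c->vc.desc_free = bcm2835_dma_desc_free;  /* assumed descriptor-free hook */
	vchan_init(&c->vc, &d->ddev);             /* adds c->vc.chan to ddev.channels */

	/* per-channel register base and IRQ wiring elided */
	return 0;
}
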
846 list_for_each_entry_safe(c, next, &od->ddev.channels,
852 dma_unmap_page_attrs(od->ddev.dev, od->zero_page, PAGE_SIZE,
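
Lines 846 and 852 are the matching teardown: every channel hanging off ddev.channels is unlinked and its vchan tasklet killed, then the zero-page mapping taken out on ddev.dev is released. A sketch, assuming the loop body and the unmap direction/attrs mirror the map call at line 935:

static void bcm2835_dma_free(struct bcm2835_dmadev *od)
{
	struct bcm2835_chan *c, *next;

	list_for_each_entry_safe(c, next, &od->ddev.channels,
				 vc.chan.device_node) {
		list_del(&c->vc.chan.device_node);
		tasklet_kill(&c->vc.task);
	}

	/* undo the probe-time dma_map_page_attrs() of ZERO_PAGE(0) */
	dma_unmap_page_attrs(od->ddev.dev, od->zero_page, PAGE_SIZE,
			     DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
}
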
868 chan = dma_get_any_slave_channel(&d->ddev);
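
Line 868 is the OF translate callback: when a consumer looks up a DMA channel from the device tree, the driver grabs any free slave channel belonging to this ddev and programs the peripheral's DREQ number from the phandle arguments. A sketch under those assumptions (the xlate name, the to_bcm2835_dma_chan() helper and the meaning of the first DT cell are not shown in the listing):

static struct dma_chan *bcm2835_dma_xlate(struct of_phandle_args *spec,
					  struct of_dma *ofdma)
{
	struct bcm2835_dmadev *d = ofdma->of_dma_data;
	struct dma_chan *chan;

	chan = dma_get_any_slave_channel(&d->ddev);
	if (!chan)
		return NULL;

	/* assumed: the first DT cell selects the peripheral DREQ line */
	to_bcm2835_dma_chan(chan)->dreq = spec->args[0];

	return chan;
}
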
910 dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);
911 dma_cap_set(DMA_PRIVATE, od->ddev.cap_mask);
912 dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);
913 dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask);
914 od->ddev.device_alloc_chan_resources = bcm2835_dma_alloc_chan_resources;
915 od->ddev.device_free_chan_resources = bcm2835_dma_free_chan_resources;
916 od->ddev.device_tx_status = bcm2835_dma_tx_status;
917 od->ddev.device_issue_pending = bcm2835_dma_issue_pending;
918 od->ddev.device_prep_dma_cyclic = bcm2835_dma_prep_dma_cyclic;
919 od->ddev.device_prep_slave_sg = bcm2835_dma_prep_slave_sg;
920 od->ddev.device_prep_dma_memcpy = bcm2835_dma_prep_dma_memcpy;
921 od->ddev.device_config = bcm2835_dma_slave_config;
922 od->ddev.device_terminate_all = bcm2835_dma_terminate_all;
923 od->ddev.device_synchronize = bcm2835_dma_synchronize;
924 od->ddev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
925 od->ddev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
926 od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
928 od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
929 od->ddev.descriptor_reuse = true;
930 od->ddev.dev = &pdev->dev;
931 INIT_LIST_HEAD(&od->ddev.channels);
935 od->zero_page = dma_map_page_attrs(od->ddev.dev, ZERO_PAGE(0), 0,
938 if (dma_mapping_error(od->ddev.dev, od->zero_page)) {
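
Two of the probe-time hits are truncated because only the line containing ddev is shown: the directions mask at line 926 continues onto 927, and the zero-page mapping at 935 carries its size, direction and attributes on the following lines. A hedged completion of those two statements (the continuations are assumptions; the mapped zero page is used as an always-zero source, e.g. for cyclic transfers requested without a caller-supplied buffer):

	od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
			      BIT(DMA_MEM_TO_MEM);

	od->zero_page = dma_map_page_attrs(od->ddev.dev, ZERO_PAGE(0), 0,
					   PAGE_SIZE, DMA_TO_DEVICE,
					   DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(od->ddev.dev, od->zero_page)) {
		dev_err(&pdev->dev, "Failed to map zero page\n");
		return -ENOMEM;
	}
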
1006 rc = dma_async_device_register(&od->ddev);
1026 dma_async_device_unregister(&od->ddev);
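
Lines 1006 and 1026 pair registration with unregistration: probe publishes the fully populated ddev to the dmaengine core, and remove takes it back out before the channels and the zero page go away. A sketch of that lifecycle with the surrounding error handling assumed:

	/* probe tail */
	rc = dma_async_device_register(&od->ddev);
	if (rc) {
		dev_err(&pdev->dev,
			"Failed to register slave DMA engine device: %d\n", rc);
		return rc;
	}

	/* remove path: unregister first, then free channels and the zero page */
	dma_async_device_unregister(&od->ddev);
	bcm2835_dma_free(od);
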