Searched refs:dma_dev (Results 76 - 100 of 177) sorted by relevance

/linux-master/drivers/iommu/
exynos-iommu.c 173 static struct device *dma_dev; variable in typeref:struct:device
808 if (!dma_dev)
809 dma_dev = &pdev->dev;
883 dma_sync_single_for_cpu(dma_dev, virt_to_phys(ent), sizeof(*ent),
886 dma_sync_single_for_device(dma_dev, virt_to_phys(ent), sizeof(*ent),
897 BUG_ON(PG_ENT_SHIFT < 0 || !dma_dev);
915 handle = dma_map_single(dma_dev, domain->pgtable, LV1TABLE_SIZE,
919 if (dma_mapping_error(dma_dev, handle))
963 dma_unmap_single(dma_dev, virt_to_phys(domain->pgtable), LV1TABLE_SIZE,
970 dma_unmap_single(dma_dev, bas
[all...]
rockchip-iommu.c 123 static struct device *dma_dev; variable in typeref:struct:device
132 dma_sync_single_for_device(dma_dev, dma, size, DMA_TO_DEVICE);
734 pt_dma = dma_map_single(dma_dev, page_table, SPAGE_SIZE, DMA_TO_DEVICE);
735 if (dma_mapping_error(dma_dev, pt_dma)) {
736 dev_err(dma_dev, "DMA mapping error while allocating page table\n");
1052 if (!dma_dev)
1068 rk_domain->dt_dma = dma_map_single(dma_dev, rk_domain->dt,
1070 if (dma_mapping_error(dma_dev, rk_domain->dt_dma)) {
1071 dev_err(dma_dev, "DMA map error for DT\n");
1105 dma_unmap_single(dma_dev, pt_phy
[all...]
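
Both IOMMU drivers above follow the same pattern: a single module-level dma_dev is captured at probe time, the CPU-built page table is handed to the hardware with dma_map_single(), and individual entry updates are flushed with dma_sync_single_for_device(). Below is a minimal sketch of that pattern; the names (my_dma_dev, struct my_domain) are invented, and the sync offset is computed from the stored DMA handle rather than from virt_to_phys() as exynos-iommu does.

#include <linux/dma-mapping.h>

static struct device *my_dma_dev;       /* illustrative: captured once at probe time */

struct my_domain {
        u32 *pgtable;                   /* CPU-built first-level table */
        dma_addr_t pgtable_dma;         /* device-visible address of it */
};

static int my_domain_map_table(struct my_domain *dom, size_t size)
{
        /* The IOMMU only reads the table, so map it TO_DEVICE. */
        dom->pgtable_dma = dma_map_single(my_dma_dev, dom->pgtable,
                                          size, DMA_TO_DEVICE);
        if (dma_mapping_error(my_dma_dev, dom->pgtable_dma))
                return -ENOMEM;
        return 0;
}

static void my_domain_set_entry(struct my_domain *dom, unsigned int idx, u32 val)
{
        dom->pgtable[idx] = val;
        /* Make the CPU write visible before the IOMMU walks the table. */
        dma_sync_single_for_device(my_dma_dev,
                                   dom->pgtable_dma + idx * sizeof(u32),
                                   sizeof(u32), DMA_TO_DEVICE);
}
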
/linux-master/arch/mips/alchemy/common/
dma.c 77 static const struct dma_dev { struct
127 static const struct dma_dev dma_dev_table_bank2[DMA_NUM_DEV_BANK2] = {
168 const struct dma_dev *dev;
/linux-master/drivers/net/thunderbolt/
main.c 339 struct device *dma_dev = tb_ring_dma_device(ring->ring); local
361 dma_unmap_page(dma_dev, tf->frame.buffer_phy, size,
505 struct device *dma_dev = tb_ring_dma_device(ring->ring); local
523 dma_addr = dma_map_page(dma_dev, tf->page, 0,
525 if (dma_mapping_error(dma_dev, dma_addr)) {
551 struct device *dma_dev = tb_ring_dma_device(ring->ring); local
563 dma_sync_single_for_cpu(dma_dev, tf->frame.buffer_phy,
585 struct device *dma_dev = tb_ring_dma_device(ring->ring); local
598 dma_addr = dma_map_page(dma_dev, tf->page, 0, TBNET_FRAME_SIZE,
600 if (dma_mapping_error(dma_dev, dma_add
806 struct device *dma_dev = tb_ring_dma_device(net->rx_ring.ring); local
993 struct device *dma_dev = tb_ring_dma_device(net->tx_ring.ring); local
[all...]
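
The thunderbolt-net hits above show frames being mapped against the device returned by tb_ring_dma_device(), i.e. the NHI that actually performs the DMA, rather than the network device itself. A sketch of that RX-side handling follows; my_map_rx_page, my_complete_rx and MY_FRAME_SIZE are placeholders.

#include <linux/dma-mapping.h>
#include <linux/thunderbolt.h>

#define MY_FRAME_SIZE   4096            /* illustrative frame size */

static int my_map_rx_page(struct tb_ring *ring, struct page *page,
                          dma_addr_t *dma)
{
        struct device *dma_dev = tb_ring_dma_device(ring);

        *dma = dma_map_page(dma_dev, page, 0, MY_FRAME_SIZE, DMA_FROM_DEVICE);
        if (dma_mapping_error(dma_dev, *dma))
                return -ENOMEM;
        return 0;
}

static void my_complete_rx(struct tb_ring *ring, dma_addr_t dma)
{
        /* Give ownership of the buffer back to the CPU before reading it. */
        dma_sync_single_for_cpu(tb_ring_dma_device(ring), dma,
                                MY_FRAME_SIZE, DMA_FROM_DEVICE);
}
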
/linux-master/drivers/scsi/
hosts.c 210 * @dma_dev: dma device for the host
220 struct device *dma_dev)
244 if (!dma_dev)
245 dma_dev = shost->shost_gendev.parent;
247 shost->dma_dev = dma_dev;
249 if (dma_dev->dma_mask) {
251 dma_max_mapping_size(dma_dev) >> SECTOR_SHIFT);
219 scsi_add_host_with_dma(struct Scsi_Host *shost, struct device *dev, struct device *dma_dev) argument
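
As the hosts.c excerpt shows, the midlayer falls back to the host's parent when no dma_dev is given, and clamps max_sectors using dma_max_mapping_size() of whichever device will do the transfers. A hedged sketch of a low-level driver whose DMA is done by a separate bridge device (names are illustrative):

#include <scsi/scsi_host.h>

static int my_attach_host(struct device *dev, struct device *bridge_dma_dev,
                          const struct scsi_host_template *tmpl)
{
        struct Scsi_Host *shost = scsi_host_alloc(tmpl, 0);

        if (!shost)
                return -ENOMEM;

        /*
         * Passing NULL as the third argument would make the core fall back
         * to the host's parent device, as hosts.c does above.
         */
        return scsi_add_host_with_dma(shost, dev, bridge_dma_dev);
}
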
/linux-master/drivers/net/wireless/broadcom/b43legacy/
dma.c 234 dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
238 dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
252 dma_unmap_single(ring->dev->dev->dma_dev,
256 dma_unmap_single(ring->dev->dev->dma_dev,
268 dma_sync_single_for_cpu(ring->dev->dev->dma_dev,
279 dma_sync_single_for_device(ring->dev->dev->dma_dev,
300 ring->descbase = dma_alloc_coherent(ring->dev->dev->dma_dev,
311 dma_free_coherent(ring->dev->dev->dma_dev, B43legacy_DMA_RINGMEMSIZE,
395 if (unlikely(dma_mapping_error(ring->dev->dev->dma_dev, addr)))
635 dma_test = dma_map_single(dev->dev->dma_dev, rin
[all...]
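
The b43legacy DMA code above keeps its descriptor ring in coherent memory and streaming-maps the individual buffers per descriptor. A minimal sketch of the ring side of that split, with made-up names (struct my_ring, MY_RINGMEM_SIZE):

#include <linux/dma-mapping.h>

#define MY_RINGMEM_SIZE 4096            /* illustrative ring size */

struct my_ring {
        struct device *dma_dev;
        void *descbase;                 /* CPU address of the descriptors */
        dma_addr_t dmabase;             /* device address of the descriptors */
};

static int my_ring_alloc(struct my_ring *ring)
{
        ring->descbase = dma_alloc_coherent(ring->dma_dev, MY_RINGMEM_SIZE,
                                            &ring->dmabase, GFP_KERNEL);
        return ring->descbase ? 0 : -ENOMEM;
}

static void my_ring_free(struct my_ring *ring)
{
        dma_free_coherent(ring->dma_dev, MY_RINGMEM_SIZE,
                          ring->descbase, ring->dmabase);
}
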
/linux-master/drivers/dma/
dmatest.c 582 struct device *dma_dev; local
616 dma_dev = dmaengine_get_dma_device(chan);
749 um = dmaengine_get_unmap_data(dma_dev, src->cnt + dst->cnt,
764 um->addr[i] = dma_map_page(dma_dev, pg, pg_off,
767 ret = dma_mapping_error(dma_dev, um->addr[i]);
782 dsts[i] = dma_map_page(dma_dev, pg, pg_off, um->len,
784 ret = dma_mapping_error(dma_dev, dsts[i]);
1024 struct dma_device *dma_dev = chan->device; local
1037 if (dma_has_cap(DMA_COMPLETION_NO_ORDER, dma_dev->cap_mask) &&
1043 if (dma_has_cap(DMA_MEMCPY, dma_dev
[all...]
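
dmatest is a dmaengine client: it maps its buffers against the device returned by dmaengine_get_dma_device(chan) and checks the channel's capability mask before using it. A sketch of that client-side pattern, with an invented helper name:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

static int my_map_for_chan(struct dma_chan *chan, struct page *pg,
                           size_t len, dma_addr_t *addr)
{
        /* Map against the device that backs the channel, not our own dev. */
        struct device *dma_dev = dmaengine_get_dma_device(chan);

        if (!dma_has_cap(DMA_MEMCPY, chan->device->cap_mask))
                return -EOPNOTSUPP;

        *addr = dma_map_page(dma_dev, pg, 0, len, DMA_TO_DEVICE);
        return dma_mapping_error(dma_dev, *addr);
}
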
/linux-master/drivers/net/ethernet/sun/
sunhme.c 1169 dma_unmap_single(hp->dma_dev, dma_addr,
1189 dma_unmap_single(hp->dma_dev, dma_addr,
1194 dma_unmap_page(hp->dma_dev, dma_addr,
1235 mapping = dma_map_single(hp->dma_dev, skb->data, RX_BUF_ALLOC_SIZE,
1237 if (dma_mapping_error(hp->dma_dev, mapping)) {
1733 dma_unmap_single(hp->dma_dev, dma_addr, dma_len, DMA_TO_DEVICE);
1735 dma_unmap_page(hp->dma_dev, dma_addr, dma_len, DMA_TO_DEVICE);
1805 mapping = dma_map_single(hp->dma_dev, new_skb->data,
1808 if (unlikely(dma_mapping_error(hp->dma_dev, mapping))) {
1814 dma_unmap_single(hp->dma_dev, dma_add
[all...]
/linux-master/drivers/net/ethernet/sfc/
tx_tso.c 173 struct device *dma_dev = &efx->pci_dev->dev; local
198 dma_addr = dma_map_single(dma_dev, skb->data,
205 return unlikely(dma_mapping_error(dma_dev, dma_addr)) ? -ENOMEM : 0;
/linux-master/drivers/dma/fsl-dpaa2-qdma/
dpaa2-qdma.h 100 struct dma_device dma_dev; member in struct:dpaa2_qdma_engine
/linux-master/include/sound/
dmaengine_pcm.h 124 * @dma_dev: If set, request DMA channel on this device rather than the DAI
147 struct device *dma_dev; member in struct:snd_dmaengine_pcm_config
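
The @dma_dev field above lets a platform whose DMA controller hangs off a different device than the DAI point the generic dmaengine PCM at that controller. A rough sketch under assumed names (my_pcm_config, my_register_pcm); the flags value and exact registration details will vary per platform:

#include <sound/dmaengine_pcm.h>

static struct snd_dmaengine_pcm_config my_pcm_config = {
        .prepare_slave_config = snd_dmaengine_pcm_prepare_slave_config,
};

static int my_register_pcm(struct device *dai_dev, struct device *dma_ctrl_dev)
{
        /* Request DMA channels on the controller, not on the DAI device. */
        my_pcm_config.dma_dev = dma_ctrl_dev;
        return snd_dmaengine_pcm_register(dai_dev, &my_pcm_config, 0);
}
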
/linux-master/include/linux/
mcb.h 75 struct device *dma_dev; member in struct:mcb_device
/linux-master/drivers/dma/ioat/
init.c 304 struct dma_device *dma = &ioat_dma->dma_dev;
350 tx = ioat_dma->dma_dev.device_prep_dma_memcpy(dma_chan, dma_dest,
495 struct dma_device *dma = &ioat_dma->dma_dev;
539 int err = dma_async_device_register(&ioat_dma->dma_dev);
551 struct dma_device *dma = &ioat_dma->dma_dev;
568 struct dma_device *dma = &ioat_dma->dma_dev;
769 struct dma_device *dma = &ioat_dma->dma_dev;
801 struct dma_device *dma = &ioat_dma->dma_dev;
1068 dma = &ioat_dma->dma_dev;
1097 dma = &ioat_dma->dma_dev;
[all...]
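
ioat embeds its struct dma_device inside a driver-private structure and reaches it as &ioat_dma->dma_dev everywhere. A sketch of that embedding pattern, with invented names (struct my_engine); a real provider must also fill in the mandatory channel and descriptor callbacks before registering.

#include <linux/dmaengine.h>
#include <linux/container_of.h>

struct my_engine {
        struct dma_device dma_dev;      /* embedded, registered with the core */
        void __iomem *regs;
};

static inline struct my_engine *to_my_engine(struct dma_device *dma)
{
        /* Callbacks get a dma_device; recover the container from it. */
        return container_of(dma, struct my_engine, dma_dev);
}

static int my_engine_register(struct my_engine *eng, struct device *dev)
{
        struct dma_device *dma = &eng->dma_dev;

        dma->dev = dev;
        dma_cap_set(DMA_MEMCPY, dma->cap_mask);
        /* ...set device_prep_dma_memcpy and the other required callbacks... */
        return dma_async_device_register(dma);
}
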
/linux-master/drivers/net/ethernet/fungible/funcore/
fun_queue.h 142 void *fun_alloc_ring_mem(struct device *dma_dev, size_t depth,
146 void fun_free_ring_mem(struct device *dma_dev, size_t depth, size_t hw_desc_sz,
/linux-master/drivers/net/ethernet/fungible/funeth/
funeth_tx.c 165 if (unlikely(fun_map_pkt(q->dma_dev, shinfo, skb->data,
401 dma_unmap_single(q->dma_dev, be64_to_cpu(gle->sgl_data),
405 dma_unmap_page(q->dma_dev, be64_to_cpu(gle->sgl_data),
410 dma_unmap_page(q->dma_dev, be64_to_cpu(gle->sgl_data),
539 if (unlikely(fun_map_pkt(q->dma_dev, si, xdpf->data, xdpf->len, dma,
641 q->dma_dev = &fp->pdev->dev;
642 q->desc = fun_alloc_ring_mem(q->dma_dev, ndesc, FUNETH_SQE_SIZE,
669 fun_free_ring_mem(q->dma_dev, q->mask + 1, FUNETH_SQE_SIZE, true,
funeth_txrx.h 111 struct device *dma_dev; /* device for DMA mappings */ member in struct:funeth_txq
166 struct device *dma_dev; /* device for DMA mappings */ member in struct:funeth_rxq
/linux-master/drivers/ata/
pata_octeon_cf.c 846 struct platform_device *dma_dev; local
847 dma_dev = of_find_device_by_node(dma_node);
848 if (dma_dev) {
851 res_dma = platform_get_resource(dma_dev, IORESOURCE_MEM, 0);
853 put_device(&dma_dev->dev);
860 put_device(&dma_dev->dev);
865 i = platform_get_irq(dma_dev, 0);
870 put_device(&dma_dev->dev);
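
pata_octeon_cf resolves its DMA engine's platform device from a device-tree node, pulls the MMIO resource and IRQ from it, and drops the reference with put_device(). A sketch of that lookup with placeholder names:

#include <linux/of_platform.h>
#include <linux/platform_device.h>
#include <linux/ioport.h>

static int my_get_dma_resources(struct device_node *dma_node,
                                struct resource **res, int *irq)
{
        struct platform_device *dma_dev;

        dma_dev = of_find_device_by_node(dma_node);
        if (!dma_dev)
                return -ENODEV;

        *res = platform_get_resource(dma_dev, IORESOURCE_MEM, 0);
        *irq = platform_get_irq(dma_dev, 0);

        put_device(&dma_dev->dev);      /* of_find_device_by_node() took a ref */

        return (*res && *irq > 0) ? 0 : -ENODEV;
}
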
/linux-master/drivers/ufs/core/
ufs_bsg.c 111 sg_cnt = dma_map_sg(hba->host->dma_dev, payload->sg_list, payload->sg_cnt, dir);
123 dma_unmap_sg(hba->host->dma_dev, payload->sg_list, payload->sg_cnt, dir);
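
ufs_bsg maps the whole bsg payload scatterlist against the SCSI host's dma_dev. A sketch of that scatter-gather handling (names are illustrative): the nents count returned by dma_map_sg(), which may be smaller than sg_cnt after coalescing, is what gets programmed into the hardware, while dma_unmap_sg() takes the original sg_cnt.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_map_payload(struct device *dma_dev, struct scatterlist *sgl,
                          int sg_cnt, enum dma_data_direction dir)
{
        int nents = dma_map_sg(dma_dev, sgl, sg_cnt, dir);

        if (!nents)
                return -ENOMEM;
        /* ...program nents entries into the transfer descriptor, run I/O... */
        dma_unmap_sg(dma_dev, sgl, sg_cnt, dir);
        return 0;
}
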
/linux-master/drivers/net/ethernet/ti/icssg/
icssg_prueth.h 100 struct device *dma_dev; member in struct:prueth_tx_chn
113 struct device *dma_dev; member in struct:prueth_rx_chn
/linux-master/drivers/pci/endpoint/functions/
pci-epf-test.c 424 struct device *dma_dev = epf->epc->dev.parent; local
449 dst_phys_addr = dma_map_single(dma_dev, buf, reg->size,
451 if (dma_mapping_error(dma_dev, dst_phys_addr)) {
465 dma_unmap_single(dma_dev, dst_phys_addr, reg->size,
508 struct device *dma_dev = epf->epc->dev.parent; local
536 src_phys_addr = dma_map_single(dma_dev, buf, reg->size,
538 if (dma_mapping_error(dma_dev, src_phys_addr)) {
554 dma_unmap_single(dma_dev, src_phys_addr, reg->size,
/linux-master/drivers/gpu/drm/mediatek/
mtk_drm_drv.c 446 struct device *dma_dev = NULL; local
526 dma_dev = mtk_drm_crtc_dma_dev_get(crtc);
527 if (!dma_dev) {
534 private->all_drm_private[i]->dma_dev = dma_dev;
540 ret = dma_set_max_seg_size(dma_dev, UINT_MAX);
542 dev_err(dma_dev, "Failed to set DMA segment size\n");
584 return drm_gem_prime_import_dev(dev, dma_buf, private->dma_dev);
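
The mediatek DRM driver above picks the CRTC component that actually performs scanout DMA, raises that device's segment-size limit, and routes PRIME imports through it with drm_gem_prime_import_dev(). A sketch of that split, with an invented private struct:

#include <drm/drm_device.h>
#include <drm/drm_prime.h>
#include <linux/dma-mapping.h>
#include <linux/limits.h>

struct my_private {
        struct device *dma_dev;         /* the device doing the scanout DMA */
};

static struct drm_gem_object *my_gem_prime_import(struct drm_device *drm,
                                                  struct dma_buf *dma_buf)
{
        struct my_private *priv = drm->dev_private;

        /* Attach the dma-buf to the DMA device, not to the DRM device. */
        return drm_gem_prime_import_dev(drm, dma_buf, priv->dma_dev);
}

static int my_init_dma(struct my_private *priv)
{
        /* Allow single, arbitrarily large contiguous mappings. */
        return dma_set_max_seg_size(priv->dma_dev, UINT_MAX);
}
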
/linux-master/drivers/net/wireless/broadcom/b43/
dma.c 353 dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
356 dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
368 dma_unmap_single(ring->dev->dev->dma_dev,
371 dma_unmap_single(ring->dev->dev->dma_dev,
381 dma_sync_single_for_cpu(ring->dev->dev->dma_dev,
390 dma_sync_single_for_device(ring->dev->dev->dma_dev,
421 ring->descbase = dma_alloc_coherent(ring->dev->dev->dma_dev,
434 dma_free_coherent(ring->dev->dev->dma_dev, ring_mem_size,
542 if (unlikely(dma_mapping_error(ring->dev->dev->dma_dev, addr)))
898 dma_test = dma_map_single(dev->dev->dma_dev,
[all...]
/linux-master/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c 470 struct device *dma_dev = sdma->sw->dev->dev; local
473 dma = dma_map_single(dma_dev, skb->data, skb->len, DMA_TO_DEVICE);
474 if (dma_mapping_error(dma_dev, dma))
486 struct device *dma_dev = sdma->sw->dev->dev; local
488 dma_unmap_single(dma_dev, buf->buf_dma, buf->skb->len, DMA_TO_DEVICE);
716 struct device *dma_dev = sdma->sw->dev->dev; local
741 dma_sync_single_for_device(dma_dev, buf->buf_dma, skb->len,
/linux-master/drivers/spi/
spi-topcliff-pch.c 106 struct pci_dev *dma_dev; member in struct:pch_spi_dma_ctrl
811 (param->dma_dev == chan->device->dev)) {
823 struct pci_dev *dma_dev; local
838 dma_dev = pci_get_slot(data->board_dat->pdev->bus,
843 param->dma_dev = &dma_dev->dev;
857 param->dma_dev = &dma_dev->dev;
871 dma->dma_dev = dma_dev;
[all...]
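
spi-topcliff-pch requests its channels with a filter that only accepts channels whose provider matches the DMA controller device it already located (param->dma_dev == chan->device->dev). A sketch of that filter pattern, with a made-up carrier struct:

#include <linux/dmaengine.h>

struct my_filter_param {
        struct device *dma_dev;         /* the DMA controller we want */
        unsigned int chan_id;
};

static bool my_chan_filter(struct dma_chan *chan, void *arg)
{
        struct my_filter_param *param = arg;

        return chan->chan_id == param->chan_id &&
               param->dma_dev == chan->device->dev;
}

static struct dma_chan *my_request_chan(struct my_filter_param *param)
{
        dma_cap_mask_t mask;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);
        return dma_request_channel(mask, my_chan_filter, param);
}
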
/linux-master/drivers/net/ethernet/mediatek/
mtk_eth_soc.c 1141 eth->scratch_ring = dma_alloc_coherent(eth->dma_dev,
1152 dma_addr = dma_map_single(eth->dma_dev,
1155 if (unlikely(dma_mapping_error(eth->dma_dev, dma_addr)))
1219 dma_unmap_single(eth->dma_dev,
1224 dma_unmap_page(eth->dma_dev,
1231 dma_unmap_page(eth->dma_dev,
1238 dma_unmap_page(eth->dma_dev,
1422 txd_info.addr = dma_map_single(eth->dma_dev, skb->data, txd_info.size,
1424 if (unlikely(dma_mapping_error(eth->dma_dev, txd_info.addr)))
1464 txd_info.addr = skb_frag_dma_map(eth->dma_dev, fra
4667 mtk_eth_set_dma_device(struct mtk_eth *eth, struct device *dma_dev) argument
[all...]
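
mtk_eth_soc maps the skb's linear data with dma_map_single() and each page fragment with skb_frag_dma_map(), always against eth->dma_dev (which can be swapped at runtime via mtk_eth_set_dma_device). A sketch of that TX mapping loop with an invented helper name; real code would also unmap what it already mapped on failure.

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

static int my_tx_map(struct device *dma_dev, struct sk_buff *skb)
{
        const skb_frag_t *frag;
        dma_addr_t addr;
        int i;

        addr = dma_map_single(dma_dev, skb->data, skb_headlen(skb),
                              DMA_TO_DEVICE);
        if (dma_mapping_error(dma_dev, addr))
                return -ENOMEM;
        /* ...write addr into the first TX descriptor... */

        for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
                frag = &skb_shinfo(skb)->frags[i];
                addr = skb_frag_dma_map(dma_dev, frag, 0,
                                        skb_frag_size(frag), DMA_TO_DEVICE);
                if (dma_mapping_error(dma_dev, addr))
                        return -ENOMEM; /* sketch only: no rollback here */
                /* ...write addr into the next descriptor... */
        }
        return 0;
}
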

