Lines Matching defs:dir

605 enum dma_data_direction dir)
609 dma_kmalloc_needs_bounce(dev, size, dir));
613 int nents, enum dma_data_direction dir)
629 if (!dma_kmalloc_safe(dev, dir)) {
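
These matches appear to come from the Linux IOMMU DMA glue, drivers/iommu/dma-iommu.c, judging by the iommu_dma_* and swiotlb helpers; the leading number on each match is the source line, and dir is the enum dma_data_direction parameter threaded through the map, sync and unmap paths. The first cluster (source lines 605-629) is the SWIOTLB bounce decision: kmalloc()ed buffers only need bouncing when the device is non-coherent and the transfer can write into the buffer, so that cache maintenance cannot corrupt a shared cacheline. A minimal sketch of the two helpers these matches sit in, assuming the dma_kmalloc_safe()/dma_kmalloc_needs_bounce()/dma_kmalloc_size_aligned() semantics from <linux/dma-map-ops.h>; dev_is_untrusted() and the surrounding structure are inferred, not shown in the matches (the later sketches assume the same headers):

#include <linux/dma-map-ops.h>
#include <linux/iommu.h>
#include <linux/scatterlist.h>
#include <linux/swiotlb.h>

/*
 * Single buffer: bounce if the device is untrusted or the kmalloc()ed
 * buffer might share a cacheline with unrelated data (source line 609).
 */
static bool dev_use_swiotlb(struct device *dev, size_t size,
			    enum dma_data_direction dir)
{
	return IS_ENABLED(CONFIG_SWIOTLB) &&
	       (dev_is_untrusted(dev) ||
		dma_kmalloc_needs_bounce(dev, size, dir));
}

/*
 * Scatterlist: same idea, but any unaligned element taints the whole list
 * (the dma_kmalloc_safe() check is source line 629).
 */
static bool dev_use_sg_swiotlb(struct device *dev, struct scatterlist *sg,
			       int nents, enum dma_data_direction dir)
{
	struct scatterlist *s;
	int i;

	if (!IS_ENABLED(CONFIG_SWIOTLB))
		return false;
	if (dev_is_untrusted(dev))
		return true;

	if (!dma_kmalloc_safe(dev, dir)) {
		for_each_sg(sg, s, nents, i)
			if (!dma_kmalloc_size_aligned(s->length))
				return true;
	}
	return false;
}

DMA_TO_DEVICE is the one direction that never triggers the kmalloc() bounce: the device only reads the buffer, so neighbouring cachelines cannot be clobbered.
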
737 * @dir: Direction of DMA transfer
743 static int dma_info_to_prot(enum dma_data_direction dir, bool coherent,
751 switch (dir) {
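
Source lines 737-751 cover dma_info_to_prot(), which translates the DMA API direction into IOMMU page-table permission bits; the kernel-doc @dir line (737), the prototype (743) and the switch (dir) (751) are the matches above. A sketch of the usual translation, where IOMMU_READ/IOMMU_WRITE describe what the device may do; the IOMMU_CACHE and DMA_ATTR_PRIVILEGED handling is an assumption based on the common implementation:

static int dma_info_to_prot(enum dma_data_direction dir, bool coherent,
			    unsigned long attrs)
{
	int prot = coherent ? IOMMU_CACHE : 0;

	if (attrs & DMA_ATTR_PRIVILEGED)
		prot |= IOMMU_PRIV;

	switch (dir) {				/* source line 751 */
	case DMA_BIDIRECTIONAL:
		return prot | IOMMU_READ | IOMMU_WRITE;
	case DMA_TO_DEVICE:
		return prot | IOMMU_READ;	/* device reads memory */
	case DMA_FROM_DEVICE:
		return prot | IOMMU_WRITE;	/* device writes memory */
	default:
		return 0;
	}
}

Note the inversion relative to the names: DMA_TO_DEVICE yields IOMMU_READ and DMA_FROM_DEVICE yields IOMMU_WRITE, because the permissions are granted to the device, not the CPU.
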
1045 size_t size, enum dma_data_direction dir, gfp_t gfp,
1064 struct sg_table *sgt, enum dma_data_direction dir)
1075 dma_addr_t dma_handle, size_t size, enum dma_data_direction dir)
1079 if (dev_is_dma_coherent(dev) && !dev_use_swiotlb(dev, size, dir))
1084 arch_sync_dma_for_cpu(phys, size, dir);
1087 swiotlb_sync_single_for_cpu(dev, phys, size, dir);
1091 dma_addr_t dma_handle, size_t size, enum dma_data_direction dir)
1095 if (dev_is_dma_coherent(dev) && !dev_use_swiotlb(dev, size, dir))
1100 swiotlb_sync_single_for_device(dev, phys, size, dir);
1103 arch_sync_dma_for_device(phys, size, dir);
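
Source lines 1075-1103 are the single-buffer sync hooks. The matches already give away the ordering: syncing for the CPU does the architecture cache maintenance (1084) before copying the bounce buffer back (1087), while syncing for the device copies into the bounce buffer (1100) before the cache maintenance (1103). A condensed sketch; the iommu_iova_to_phys()/is_swiotlb_buffer() lookups are assumptions not visible in the matches:

static void iommu_dma_sync_single_for_cpu(struct device *dev,
		dma_addr_t dma_handle, size_t size, enum dma_data_direction dir)
{
	phys_addr_t phys;

	/* Coherent device and no bounce buffer: nothing to do (line 1079). */
	if (dev_is_dma_coherent(dev) && !dev_use_swiotlb(dev, size, dir))
		return;

	phys = iommu_iova_to_phys(iommu_get_dma_domain(dev), dma_handle);
	if (!dev_is_dma_coherent(dev))
		arch_sync_dma_for_cpu(phys, size, dir);		/* line 1084 */

	if (is_swiotlb_buffer(dev, phys))
		swiotlb_sync_single_for_cpu(dev, phys, size, dir); /* line 1087 */
}

static void iommu_dma_sync_single_for_device(struct device *dev,
		dma_addr_t dma_handle, size_t size, enum dma_data_direction dir)
{
	phys_addr_t phys;

	if (dev_is_dma_coherent(dev) && !dev_use_swiotlb(dev, size, dir))
		return;					/* line 1095 */

	phys = iommu_iova_to_phys(iommu_get_dma_domain(dev), dma_handle);
	if (is_swiotlb_buffer(dev, phys))
		swiotlb_sync_single_for_device(dev, phys, size, dir); /* line 1100 */

	if (!dev_is_dma_coherent(dev))
		arch_sync_dma_for_device(phys, size, dir);	/* line 1103 */
}
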
1108 enum dma_data_direction dir)
1116 sg->length, dir);
1119 arch_sync_dma_for_cpu(sg_phys(sg), sg->length, dir);
1124 enum dma_data_direction dir)
1133 sg->length, dir);
1136 arch_sync_dma_for_device(sg_phys(sg), sg->length, dir);
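
The scatterlist sync variants (source lines 1108-1136) are per-element loops: lists that went through the bounce path defer to the single-buffer helpers above, everything else gets the architecture maintenance directly on each element. A sketch of the for_cpu side (the for_device variant at 1124-1136 mirrors it with the *_for_device calls); the sg_dma_is_swiotlb() test is an assumption about how the bounce case is detected:

static void iommu_dma_sync_sg_for_cpu(struct device *dev,
		struct scatterlist *sgl, int nelems,
		enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	if (sg_dma_is_swiotlb(sgl))
		for_each_sg(sgl, sg, nelems, i)
			iommu_dma_sync_single_for_cpu(dev, sg_dma_address(sg),
						      sg->length, dir); /* line 1116 */
	else if (!dev_is_dma_coherent(dev))
		for_each_sg(sgl, sg, nelems, i)
			arch_sync_dma_for_cpu(sg_phys(sg), sg->length, dir); /* line 1119 */
}
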
1140 unsigned long offset, size_t size, enum dma_data_direction dir,
1145 int prot = dma_info_to_prot(dir, coherent, attrs);
1155 if (dev_use_swiotlb(dev, size, dir) &&
1169 iova_mask(iovad), dir, attrs);
1179 (dir == DMA_TO_DEVICE || dir == DMA_BIDIRECTIONAL)) {
1188 arch_sync_dma_for_device(phys, size, dir);
1192 swiotlb_tbl_unmap_single(dev, phys, size, dir, attrs);
1197 size_t size, enum dma_data_direction dir, unsigned long attrs)
1207 arch_sync_dma_for_cpu(phys, size, dir);
1212 swiotlb_tbl_unmap_single(dev, phys, size, dir, attrs);
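
Source lines 1140-1212 are iommu_dma_map_page() and iommu_dma_unmap_page(). The matches outline the flow: translate dir into prot (1145), bounce unaligned or untrusted buffers through SWIOTLB (1155-1169), handle the bounce-buffer padding depending on whether data flows to the device (1179), do the CPU-to-device cache maintenance unless DMA_ATTR_SKIP_CPU_SYNC (1188), and release the bounce slot again on failure (1192) or at unmap time after the CPU sync (1207, 1212). A condensed sketch of the map side; __iommu_dma_map(), the cookie/iovad plumbing and the exact swiotlb_tbl_map_single() argument list (which varies between kernel versions) are assumptions:

static dma_addr_t iommu_dma_map_page(struct device *dev, struct page *page,
		unsigned long offset, size_t size, enum dma_data_direction dir,
		unsigned long attrs)
{
	phys_addr_t phys = page_to_phys(page) + offset;
	bool coherent = dev_is_dma_coherent(dev);
	int prot = dma_info_to_prot(dir, coherent, attrs);	/* line 1145 */
	struct iommu_domain *domain = iommu_get_dma_domain(dev);
	struct iommu_dma_cookie *cookie = domain->iova_cookie;
	struct iova_domain *iovad = &cookie->iovad;
	dma_addr_t iova;

	/*
	 * Lines 1155-1179: bounce buffers SWIOTLB must protect or re-align;
	 * for device-bound directions only the padding past the payload is
	 * zeroed, so the copied-in data reaches the device intact.
	 */
	if (dev_use_swiotlb(dev, size, dir) &&
	    iova_offset(iovad, phys | size)) {
		phys = swiotlb_tbl_map_single(dev, phys, size,
					      iova_mask(iovad), dir, attrs);
		if (phys == DMA_MAPPING_ERROR)
			return DMA_MAPPING_ERROR;
	}

	if (!coherent && !(attrs & DMA_ATTR_SKIP_CPU_SYNC))
		arch_sync_dma_for_device(phys, size, dir);	/* line 1188 */

	iova = __iommu_dma_map(dev, phys, size, prot, dma_get_mask(dev));
	if (iova == DMA_MAPPING_ERROR && is_swiotlb_buffer(dev, phys))
		swiotlb_tbl_unmap_single(dev, phys, size, dir, attrs); /* line 1192 */
	return iova;
}

The unmap side (1197-1212) reverses this: look up the physical address, sync for the CPU unless DMA_ATTR_SKIP_CPU_SYNC (1207), unmap the IOVA, and hand any bounce slot back with swiotlb_tbl_unmap_single() (1212).
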
1310 int nents, enum dma_data_direction dir, unsigned long attrs)
1317 sg_dma_len(s), dir, attrs);
1321 int nents, enum dma_data_direction dir, unsigned long attrs)
1330 s->offset, s->length, dir, attrs);
1339 iommu_dma_unmap_sg_swiotlb(dev, sg, i, dir, attrs | DMA_ATTR_SKIP_CPU_SYNC);
1351 int nents, enum dma_data_direction dir, unsigned long attrs)
1357 int prot = dma_info_to_prot(dir, dev_is_dma_coherent(dev), attrs);
1372 if (dev_use_sg_swiotlb(dev, sg, nents, dir))
1373 return iommu_dma_map_sg_swiotlb(dev, sg, nents, dir, attrs);
1376 iommu_dma_sync_sg_for_device(dev, sg, nents, dir);
1471 int nents, enum dma_data_direction dir, unsigned long attrs)
1478 iommu_dma_unmap_sg_swiotlb(dev, sg, nents, dir, attrs);
1483 iommu_dma_sync_sg_for_cpu(dev, sg, nents, dir);
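
Source lines 1310-1483 are the scatterlist map/unmap paths. The matches show the element-by-element SWIOTLB fallback, iommu_dma_map_sg_swiotlb()/iommu_dma_unmap_sg_swiotlb(), which simply reuses the single-page helpers (1317, 1330) and unwinds with DMA_ATTR_SKIP_CPU_SYNC on failure (1339). A sketch of that pair; the error code and the exact unwind details are assumptions:

static void iommu_dma_unmap_sg_swiotlb(struct device *dev,
		struct scatterlist *sg, int nents,
		enum dma_data_direction dir, unsigned long attrs)
{
	struct scatterlist *s;
	int i;

	for_each_sg(sg, s, nents, i)
		iommu_dma_unmap_page(dev, sg_dma_address(s),
				     sg_dma_len(s), dir, attrs);	/* line 1317 */
}

static int iommu_dma_map_sg_swiotlb(struct device *dev, struct scatterlist *sg,
		int nents, enum dma_data_direction dir, unsigned long attrs)
{
	struct scatterlist *s;
	int i;

	for_each_sg(sg, s, nents, i) {
		sg_dma_address(s) = iommu_dma_map_page(dev, sg_page(s),
				s->offset, s->length, dir, attrs);	/* line 1330 */
		if (sg_dma_address(s) == DMA_MAPPING_ERROR)
			goto out_unmap;
		sg_dma_len(s) = s->length;
	}
	return nents;

out_unmap:
	/*
	 * Line 1339: the elements mapped so far were already synced, so
	 * skip the CPU sync while undoing them.
	 */
	iommu_dma_unmap_sg_swiotlb(dev, sg, i, dir,
				   attrs | DMA_ATTR_SKIP_CPU_SYNC);
	return -EIO;
}

iommu_dma_map_sg() itself (1351-1376) only takes this fallback when dev_use_sg_swiotlb() says so (1372-1373); otherwise it syncs the whole list for the device (1376) and builds one contiguous IOVA mapping, and iommu_dma_unmap_sg() (1471-1483) mirrors that choice on teardown (1478, 1483).
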
1521 size_t size, enum dma_data_direction dir, unsigned long attrs)
1524 dma_info_to_prot(dir, false, attrs) | IOMMU_MMIO,
1529 size_t size, enum dma_data_direction dir, unsigned long attrs)
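
Finally, source lines 1521-1529 are the resource (MMIO) mapping pair. Line 1524 shows that the same dir-to-prot translation is reused with coherent = false plus IOMMU_MMIO, and since the target is device memory rather than RAM there is no cache maintenance or bouncing to undo. A sketch, with __iommu_dma_map()/__iommu_dma_unmap() assumed as the internal IOVA helpers:

static dma_addr_t iommu_dma_map_resource(struct device *dev, phys_addr_t phys,
		size_t size, enum dma_data_direction dir, unsigned long attrs)
{
	return __iommu_dma_map(dev, phys, size,
			dma_info_to_prot(dir, false, attrs) | IOMMU_MMIO,
			dma_get_mask(dev));			/* line 1524 */
}

static void iommu_dma_unmap_resource(struct device *dev, dma_addr_t handle,
		size_t size, enum dma_data_direction dir, unsigned long attrs)
{
	__iommu_dma_unmap(dev, handle, size);	/* dir is unused on unmap */
}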