/netgear-WNDR4500v2-V1.0.0.60_1.0.38/src/linux/linux-2.6/arch/sparc64/kernel/

Lines Matching defs:iommu

17 #include <asm/iommu.h>
165 struct iommu *iommu;
183 iommu = pdev->dev.archdata.iommu;
185 spin_lock_irqsave(&iommu->lock, flags);
186 entry = pci_arena_alloc(&iommu->arena, npages);
187 spin_unlock_irqrestore(&iommu->lock, flags);
192 *dma_addrp = (iommu->page_table_map_base +
219 spin_lock(&iommu->lock);
220 pci_arena_free(&iommu->arena, entry, npages);
221 spin_unlock_irqrestore(&iommu->lock, flags);
231 struct iommu *iommu;
236 iommu = pdev->dev.archdata.iommu;
239 entry = ((dvma - iommu->page_table_map_base) >> IO_PAGE_SHIFT);
241 spin_lock_irqsave(&iommu->lock, flags);
243 pci_arena_free(&iommu->arena, entry, npages);
254 spin_unlock_irqrestore(&iommu->lock, flags);
263 struct iommu *iommu;
270 iommu = pdev->dev.archdata.iommu;
279 spin_lock_irqsave(&iommu->lock, flags);
280 entry = pci_arena_alloc(&iommu->arena, npages);
281 spin_unlock_irqrestore(&iommu->lock, flags);
286 bus_addr = (iommu->page_table_map_base +
317 spin_lock(&iommu->lock);
318 pci_arena_free(&iommu->arena, entry, npages);
319 spin_unlock_irqrestore(&iommu->lock, flags);
327 struct iommu *iommu;
338 iommu = pdev->dev.archdata.iommu;
346 spin_lock_irqsave(&iommu->lock, flags);
348 entry = (bus_addr - iommu->page_table_map_base) >> IO_PAGE_SHIFT;
349 pci_arena_free(&iommu->arena, entry, npages);
360 spin_unlock_irqrestore(&iommu->lock, flags);
462 struct iommu *iommu;
481 iommu = pdev->dev.archdata.iommu;
490 spin_lock_irqsave(&iommu->lock, flags);
491 entry = pci_arena_alloc(&iommu->arena, npages);
492 spin_unlock_irqrestore(&iommu->lock, flags);
497 dma_base = iommu->page_table_map_base +
528 spin_lock_irqsave(&iommu->lock, flags);
529 pci_arena_free(&iommu->arena, entry, npages);
530 spin_unlock_irqrestore(&iommu->lock, flags);
538 struct iommu *iommu;
548 iommu = pdev->dev.archdata.iommu;
561 entry = ((bus_addr - iommu->page_table_map_base) >> IO_PAGE_SHIFT);
563 spin_lock_irqsave(&iommu->lock, flags);
565 pci_arena_free(&iommu->arena, entry, npages);
576 spin_unlock_irqrestore(&iommu->lock, flags);
613 struct iommu *iommu)
615 struct iommu_arena *arena = &iommu->arena;
642 struct iommu *iommu = pbm->iommu;
672 spin_lock_init(&iommu->lock);
673 iommu->ctx_lowest_free = 1;
674 iommu->page_table_map_base = dma_offset;
675 iommu->dma_addr_mask = dma_mask;
680 iommu->arena.map = kzalloc(sz, GFP_KERNEL);
681 if (!iommu->arena.map) {
685 iommu->arena.limit = num_tsb_entries;
687 sz = probe_existing_entries(pbm, iommu);
1170 struct iommu *iommu;
1215 iommu = kzalloc(sizeof(struct iommu), GFP_ATOMIC);
1216 if (!iommu)
1219 p->pbm_A.iommu = iommu;
1221 iommu = kzalloc(sizeof(struct iommu), GFP_ATOMIC);
1222 if (!iommu)
1225 p->pbm_B.iommu = iommu;
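
Read together, the matches above all revolve around one pattern: each map path takes iommu->lock, carves npages of contiguous entries out of the bitmap arena with pci_arena_alloc(), drops the lock, and turns the entry index into a DVMA address by adding (entry << IO_PAGE_SHIFT) to iommu->page_table_map_base; each unmap path inverts that arithmetic to recover the entry index and hands it back with pci_arena_free() under the same lock. The stand-alone sketch below models only that arithmetic and the arena bookkeeping, not the kernel code itself: the struct layouts, the pthread mutex standing in for the spinlock, the one-byte-per-entry map, the 8K IO_PAGE_SHIFT value, and the model_* names are all illustrative assumptions.

/*
 * User-space model of the arena pattern visible in the matches above.
 * None of this is the kernel's code; it only mirrors the lock / arena /
 * address-arithmetic shape of the map and unmap paths.
 */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define IO_PAGE_SHIFT 13UL	/* 8K IOMMU pages; illustrative value */

struct model_arena {
	unsigned char *map;	/* one byte per entry, 0 = free (kernel uses a bitmap) */
	unsigned long limit;	/* number of entries, cf. iommu->arena.limit */
};

struct model_iommu {
	pthread_mutex_t lock;		/* stands in for the iommu->lock spinlock */
	struct model_arena arena;
	uint64_t page_table_map_base;	/* base DVMA of the mapped window */
};

/* First-fit scan for npages contiguous free entries, in the spirit of
 * pci_arena_alloc(); returns the first entry index, or -1 if none fit. */
static long model_arena_alloc(struct model_arena *arena, unsigned long npages)
{
	unsigned long start, i;

	for (start = 0; start + npages <= arena->limit; start++) {
		for (i = 0; i < npages; i++)
			if (arena->map[start + i])
				break;
		if (i == npages) {
			memset(arena->map + start, 1, npages);
			return (long)start;
		}
	}
	return -1;
}

/* Give the entries back, in the spirit of pci_arena_free(). */
static void model_arena_free(struct model_arena *arena,
			     unsigned long entry, unsigned long npages)
{
	memset(arena->map + entry, 0, npages);
}

/* Map path: take the lock, grab entries, drop the lock, then derive the
 * DVMA address from page_table_map_base + (entry << IO_PAGE_SHIFT). */
static uint64_t model_map(struct model_iommu *iommu, unsigned long npages)
{
	long entry;

	pthread_mutex_lock(&iommu->lock);
	entry = model_arena_alloc(&iommu->arena, npages);
	pthread_mutex_unlock(&iommu->lock);
	if (entry < 0)
		return 0;
	return iommu->page_table_map_base + ((uint64_t)entry << IO_PAGE_SHIFT);
}

/* Unmap path: invert the arithmetic to recover the entry index, then
 * free it under the lock, mirroring the unmap/free paths above. */
static void model_unmap(struct model_iommu *iommu,
			uint64_t bus_addr, unsigned long npages)
{
	unsigned long entry =
		(bus_addr - iommu->page_table_map_base) >> IO_PAGE_SHIFT;

	pthread_mutex_lock(&iommu->lock);
	model_arena_free(&iommu->arena, entry, npages);
	pthread_mutex_unlock(&iommu->lock);
}

int main(void)
{
	struct model_iommu iommu;
	uint64_t dma;

	memset(&iommu, 0, sizeof(iommu));
	pthread_mutex_init(&iommu.lock, NULL);
	iommu.page_table_map_base = 0xc0000000ULL;	/* arbitrary DVMA base */
	iommu.arena.limit = 1024;			/* arbitrary TSB size */
	iommu.arena.map = calloc(iommu.arena.limit, 1);

	dma = model_map(&iommu, 4);		/* cf. the map_single/map_sg matches */
	printf("mapped 4 pages at DVMA %#llx\n", (unsigned long long)dma);
	model_unmap(&iommu, dma, 4);		/* cf. the unmap matches */

	free(iommu.arena.map);
	pthread_mutex_destroy(&iommu.lock);
	return 0;
}

Build with "cc model.c -lpthread". The index/offset duality is what keeps both directions cheap: the allocator only ever hands out entry indices, and because the DVMA window is contiguous from page_table_map_base, an address maps back to its index with one subtraction and one shift, which is exactly what source lines 239, 348, and 561 above do.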