Lines Matching refs:iopte

468 static void iopte_free(struct omap_iommu *obj, u32 *iopte, bool dma_valid)
472 /* Note: freed iopte's must be clean ready for re-use */
473 if (iopte) {
475 pt_dma = virt_to_phys(iopte);
480 kmem_cache_free(iopte_cachep, iopte);
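
The group at 468-480 is the destructor for a second-level table. A minimal sketch of how those matched lines fit together, assuming this is drivers/iommu/omap-iommu.c, that the table was streaming-mapped with dma_map_single() when it was allocated (the dma_valid flag guards the unmap), and that the direction is DMA_TO_DEVICE:

/* Sketch only: reconstructed around the matched lines, not a verbatim copy. */
static void iopte_free(struct omap_iommu *obj, u32 *iopte, bool dma_valid)
{
	dma_addr_t pt_dma;

	/* Note: freed iopte's must be clean ready for re-use */
	if (iopte) {
		if (dma_valid) {
			/* assumption: mapped DMA_TO_DEVICE for IOPTE_TABLE_SIZE bytes */
			pt_dma = virt_to_phys(iopte);
			dma_unmap_single(obj->dev, pt_dma, IOPTE_TABLE_SIZE,
					 DMA_TO_DEVICE);
		}

		/* hand the second-level table back to its dedicated slab cache */
		kmem_cache_free(iopte_cachep, iopte);
	}
}
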
487 u32 *iopte;
498 iopte = kmem_cache_zalloc(iopte_cachep, GFP_KERNEL);
502 if (!iopte)
505 *pt_dma = dma_map_single(obj->dev, iopte, IOPTE_TABLE_SIZE,
509 iopte_free(obj, iopte, false);
517 if (WARN_ON(*pt_dma != virt_to_phys(iopte))) {
521 iopte_free(obj, iopte, false);
525 *iopgd = virt_to_phys(iopte) | IOPGD_TABLE;
528 dev_vdbg(obj->dev, "%s: a new pte:%p\n", __func__, iopte);
531 iopte_free(obj, iopte, false);
535 iopte = iopte_offset(iopgd, da);
539 __func__, da, iopgd, *iopgd, iopte, *iopte);
541 return iopte;
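
Lines 487-541 are the allocation side: carve a zeroed table from the iopte_cachep slab, hand it to the IOMMU with dma_map_single(), and hook its physical address into the first-level entry; the WARN_ON at 517 enforces that the streaming DMA address and the physical address agree, since the first-level descriptor can only hold the latter. A condensed sketch under the same assumptions; the lock-drop around the allocation, the race branch that frees a redundant table at 531, and the cache flushes are left out:

/* Sketch only: anything not shown in the matches (error paths, the pt_dma
 * handling at the end) is an assumption; locking and flushes are omitted. */
static u32 *iopte_alloc(struct omap_iommu *obj, u32 *iopgd,
			dma_addr_t *pt_dma, u32 da)
{
	u32 *iopte;

	if (*iopgd)		/* a second-level table is already hooked in */
		goto pte_ready;

	iopte = kmem_cache_zalloc(iopte_cachep, GFP_KERNEL);
	if (!iopte)
		return ERR_PTR(-ENOMEM);

	/* make the new table visible to the IOMMU hardware */
	*pt_dma = dma_map_single(obj->dev, iopte, IOPTE_TABLE_SIZE,
				 DMA_TO_DEVICE);
	if (dma_mapping_error(obj->dev, *pt_dma)) {
		iopte_free(obj, iopte, false);	/* false: never DMA-mapped */
		return ERR_PTR(-ENOMEM);
	}

	/* the first-level entry stores a physical pointer, so both must agree */
	if (WARN_ON(*pt_dma != virt_to_phys(iopte))) {
		dma_unmap_single(obj->dev, *pt_dma, IOPTE_TABLE_SIZE,
				 DMA_TO_DEVICE);
		iopte_free(obj, iopte, false);
		return ERR_PTR(-ENOMEM);
	}

	/* publish the table: physical address plus the "table" descriptor type */
	*iopgd = virt_to_phys(iopte) | IOPGD_TABLE;
	dev_vdbg(obj->dev, "%s: a new pte:%p\n", __func__, iopte);

pte_ready:
	iopte = iopte_offset(iopgd, da);
	/* also report the table's bus address so callers can flush their update */
	*pt_dma = virt_to_phys(iopte_offset(iopgd, 0));
	dev_vdbg(obj->dev, "%s: da:%08x pgd:%p *pgd:%08x pte:%p *pte:%08x\n",
		 __func__, da, iopgd, *iopgd, iopte, *iopte);

	return iopte;
}
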
582 u32 *iopte = iopte_alloc(obj, iopgd, &pt_dma, da);
585 if (IS_ERR(iopte))
586 return PTR_ERR(iopte);
588 *iopte = (pa & IOPAGE_MASK) | prot | IOPTE_SMALL;
592 __func__, da, pa, iopte, *iopte);
601 u32 *iopte = iopte_alloc(obj, iopgd, &pt_dma, da);
611 if (IS_ERR(iopte))
612 return PTR_ERR(iopte);
615 *(iopte + i) = (pa & IOLARGE_MASK) | prot | IOPTE_LARGE;
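
The two store paths at 582-592 and 601-615 differ only in the descriptor they write: one 4 KiB small-page entry versus a 64 KiB large page, which in this ARM-short-descriptor-style format is replicated across 16 consecutive slots. A sketch of both, with hypothetical function names (the matches do not show the surrounding signatures), assuming iopgd_offset() is the driver's first-level index helper and leaving out the alignment checks and the flush of the updated range:

/* Sketch only: function names are assumptions; error handling is trimmed. */
static int iopte_store_small(struct omap_iommu *obj, u32 da, u32 pa, u32 prot)
{
	u32 *iopgd = iopgd_offset(obj, da);
	dma_addr_t pt_dma;	/* used by the real driver to flush the update */
	u32 *iopte = iopte_alloc(obj, iopgd, &pt_dma, da);

	if (IS_ERR(iopte))
		return PTR_ERR(iopte);

	/* one 4 KiB descriptor: physical frame | protection | small-page type */
	*iopte = (pa & IOPAGE_MASK) | prot | IOPTE_SMALL;

	dev_vdbg(obj->dev, "%s: da:%08x pa:%08x pte:%p *pte:%08x\n",
		 __func__, da, pa, iopte, *iopte);
	return 0;
}

static int iopte_store_large(struct omap_iommu *obj, u32 da, u32 pa, u32 prot)
{
	u32 *iopgd = iopgd_offset(obj, da);
	dma_addr_t pt_dma;
	u32 *iopte = iopte_alloc(obj, iopgd, &pt_dma, da);
	int i;

	if (IS_ERR(iopte))
		return PTR_ERR(iopte);

	/* a 64 KiB page occupies 16 consecutive slots with the same target */
	for (i = 0; i < 16; i++)
		*(iopte + i) = (pa & IOLARGE_MASK) | prot | IOPTE_LARGE;

	return 0;
}
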
687 u32 *iopgd, *iopte = NULL;
694 iopte = iopte_offset(iopgd, da);
697 *ppte = iopte;
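
Lines 687-697 are the read-only walk: given a device address, return pointers to both levels so callers can inspect the entries. A sketch, assuming iopgd_is_table() is the driver's test for a first-level entry that points at a second-level table rather than holding a section mapping:

/* Sketch only: reconstructed around the matched lines. */
static void iopgtable_lookup_entry(struct omap_iommu *obj, u32 da,
				   u32 **ppgd, u32 **ppte)
{
	u32 *iopgd, *iopte = NULL;

	iopgd = iopgd_offset(obj, da);
	if (!*iopgd)
		goto out;		/* nothing mapped at this address */

	/* only descend when the first-level entry is a table pointer */
	if (iopgd_is_table(*iopgd))
		iopte = iopte_offset(iopgd, da);
out:
	*ppgd = iopgd;
	*ppte = iopte;		/* stays NULL for sections or unmapped da */
}
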
714 u32 *iopte = iopte_offset(iopgd, da);
717 if (*iopte & IOPTE_LARGE) {
720 iopte = iopte_offset(iopgd, (da & IOLARGE_MASK));
723 memset(iopte, 0, nent * sizeof(*iopte));
730 iopte = iopte_offset(iopgd, 0);
732 if (iopte[i])
735 iopte_free(obj, iopte, true);
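
Lines 714-735 are the teardown: zero the affected second-level entries (16 of them for a large page, after rewinding to its first slot), then scan the whole table and release it once nothing is left, this time passing dma_valid = true because the table had been mapped for the IOMMU. A sketch, assuming PTRS_PER_IOPTE is the entries-per-table count and IOPTE_SIZE the bytes covered by one small entry; the section/supersection branch and the flushes are omitted:

/* Sketch only: the real driver also clears section/supersection first-level
 * entries and flushes every update; both are left out here. */
static size_t iopgtable_clear_pte(struct omap_iommu *obj, u32 da)
{
	u32 *iopgd = iopgd_offset(obj, da);
	u32 *iopte;
	int i, nent = 1;

	if (!*iopgd || !iopgd_is_table(*iopgd))
		return 0;	/* unmapped, or a section handled elsewhere */

	iopte = iopte_offset(iopgd, da);
	if (*iopte & IOPTE_LARGE) {
		nent = 16;
		/* rewind to the 1st of the 16 slots backing the large page */
		iopte = iopte_offset(iopgd, (da & IOLARGE_MASK));
	}
	memset(iopte, 0, nent * sizeof(*iopte));

	/* walk the whole table; if anything is still mapped, keep it */
	iopte = iopte_offset(iopgd, 0);
	for (i = 0; i < PTRS_PER_IOPTE; i++)
		if (iopte[i])
			return nent * IOPTE_SIZE;

	/* empty: free the table (true: it was DMA-mapped) and the L1 slot */
	iopte_free(obj, iopte, true);
	*iopgd = 0;

	return nent * IOPTE_SIZE;
}
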
807 u32 *iopgd, *iopte;
833 iopte = iopte_offset(iopgd, da);
836 obj->name, errs, da, iopgd, *iopgd, iopte, *iopte);
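
The final group, 807-836, sits in the fault path: on an MMU fault at device address da the handler re-walks the page table and logs both descriptors, so the report shows whether the faulting address was mapped at all. A sketch of that dump only, assuming errs is the raw fault status word as in the matched printk; register decoding and the report to the IOMMU core are omitted:

/* Sketch only: hypothetical helper factored out of the fault handler. */
static void dump_fault_walk(struct omap_iommu *obj, u32 errs, u32 da)
{
	u32 *iopgd, *iopte;

	iopgd = iopgd_offset(obj, da);
	if (!iopgd_is_table(*iopgd)) {
		/* no second level to show: print the first-level entry only */
		dev_err(obj->dev, "%s: errs:0x%08x da:0x%08x pgd:0x%p *pgd:0x%08x\n",
			obj->name, errs, da, iopgd, *iopgd);
		return;
	}

	iopte = iopte_offset(iopgd, da);
	dev_err(obj->dev,
		"%s: errs:0x%08x da:0x%08x pgd:0x%p *pgd:0x%08x pte:0x%p *pte:0x%08x\n",
		obj->name, errs, da, iopgd, *iopgd, iopte, *iopte);
}
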