Searched refs:sgt (Results 1 - 25 of 243) sorted by relevance


/linux-master/drivers/gpu/drm/i915/
i915_mm.c 37 struct sgt_iter sgt; member in struct:remap_pfn
46 return (r->sgt.dma + r->sgt.curr + r->iobase) >> PAGE_SHIFT;
48 return r->sgt.pfn + (r->sgt.curr >> PAGE_SHIFT);
55 if (GEM_WARN_ON(!r->sgt.sgp))
63 r->sgt.curr += PAGE_SIZE;
64 if (r->sgt.curr >= r->sgt.max)
65 r->sgt
[all...]
/linux-master/drivers/spi/
internals.h 24 struct sg_table *sgt, void *buf, size_t len,
27 struct sg_table *sgt, enum dma_data_direction dir);
30 struct sg_table *sgt, void *buf, size_t len,
37 struct device *dev, struct sg_table *sgt,
29 spi_map_buf(struct spi_controller *ctlr, struct device *dev, struct sg_table *sgt, void *buf, size_t len, enum dma_data_direction dir) argument
36 spi_unmap_buf(struct spi_controller *ctlr, struct device *dev, struct sg_table *sgt, enum dma_data_direction dir) argument
/linux-master/include/linux/
intel_th.h 38 * @sgt: pointer to sg_table, can be overridden by the buffer
43 int (*alloc_window)(void *priv, struct sg_table **sgt,
45 void (*free_window)(void *priv, struct sg_table *sgt);
51 * ->ready(): window @sgt is filled up to the last block OR
59 int (*ready)(void *priv, struct sg_table *sgt, size_t bytes);
65 void intel_th_msc_window_unlock(struct device *dev, struct sg_table *sgt);
scatterlist.h 46 struct sg_table sgt; /* The scatter list table */ member in struct:sg_append_table
201 #define for_each_sgtable_sg(sgt, sg, i) \
202 for_each_sg((sgt)->sgl, sg, (sgt)->orig_nents, i)
209 #define for_each_sgtable_dma_sg(sgt, sg, i) \
210 for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
437 void sg_free_append_table(struct sg_append_table *sgt);
441 int sg_alloc_append_table_from_pages(struct sg_append_table *sgt,
446 int sg_alloc_table_from_pages_segment(struct sg_table *sgt, struc
471 sg_alloc_table_from_pages(struct sg_table *sgt, struct page **pages, unsigned int n_pages, unsigned int offset, unsigned long size, gfp_t gfp_mask) argument
[all...]
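
For orientation, a minimal sketch of how the scatterlist.h helpers hit above are typically combined: build an sg_table from a page array, DMA-map it, and walk the mapped segments with for_each_sgtable_dma_sg(). This is not code from the tree; the function name example_map_pages and its parameters are hypothetical.

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Hypothetical helper: map @n_pages pages for DMA and log each segment. */
    static int example_map_pages(struct device *dev, struct page **pages,
                                 unsigned int n_pages, size_t size)
    {
            struct sg_table sgt;
            struct scatterlist *sg;
            int i, ret;

            ret = sg_alloc_table_from_pages(&sgt, pages, n_pages, 0, size,
                                            GFP_KERNEL);
            if (ret)
                    return ret;

            ret = dma_map_sgtable(dev, &sgt, DMA_TO_DEVICE, 0);
            if (ret)
                    goto free_table;

            /* Iterate DMA segments (nents), not the CPU-side page entries. */
            for_each_sgtable_dma_sg(&sgt, sg, i)
                    dev_dbg(dev, "segment %d: %pad + %u\n", i,
                            &sg_dma_address(sg), sg_dma_len(sg));

            dma_unmap_sgtable(dev, &sgt, DMA_TO_DEVICE, 0);
    free_table:
            sg_free_table(&sgt);
            return ret;
    }
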
/linux-master/drivers/media/pci/mgb4/
mgb4_dma.h 16 u64 paddr, struct sg_table *sgt);
/linux-master/drivers/media/common/videobuf2/
videobuf2-dma-contig.c 53 static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt) argument
56 dma_addr_t expected = sg_dma_address(sgt->sgl);
60 for_each_sgtable_dma_sg(sgt, s, i) {
126 struct sg_table *sgt = buf->dma_sgt; local
140 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);
146 struct sg_table *sgt = buf->dma_sgt; local
160 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);
313 struct sg_table sgt; member in struct:vb2_dc_attachment
323 struct sg_table *sgt; local
331 sgt
359 struct sg_table *sgt; local
385 struct sg_table *sgt; local
414 vb2_dc_dmabuf_ops_unmap(struct dma_buf_attachment *db_attach, struct sg_table *sgt, enum dma_data_direction dma_dir) argument
476 struct sg_table *sgt; local
534 struct sg_table *sgt = buf->dma_sgt; local
571 struct sg_table *sgt; local
693 struct sg_table *sgt; local
733 struct sg_table *sgt = buf->dma_sgt; local
[all...]
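
The vb2_dc_get_contiguous_size() hits above check how much of a mapped table forms a single DMA-contiguous range; the idea, condensed into a hedged sketch rather than a verbatim copy of the driver code:

    #include <linux/scatterlist.h>

    /* Count the leading bytes of a mapped sg_table whose DMA addresses are
     * contiguous; if this equals the buffer size, one segment covers it all. */
    static unsigned long contiguous_prefix(struct sg_table *sgt)
    {
            dma_addr_t expected = sg_dma_address(sgt->sgl);
            struct scatterlist *s;
            unsigned long size = 0;
            unsigned int i;

            for_each_sgtable_dma_sg(sgt, s, i) {
                    if (sg_dma_address(s) != expected)
                            break;
                    expected += sg_dma_len(s);
                    size += sg_dma_len(s);
            }
            return size;
    }
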
videobuf2-dma-sg.c 105 struct sg_table *sgt; local
145 sgt = &buf->sg_table;
150 if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir,
182 struct sg_table *sgt = &buf->sg_table; local
188 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir,
204 struct sg_table *sgt = buf->dma_sgt; local
209 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);
215 struct sg_table *sgt = buf->dma_sgt; local
220 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);
227 struct sg_table *sgt; local
287 struct sg_table *sgt = &buf->sg_table; local
364 struct sg_table sgt; member in struct:vb2_dma_sg_attachment
374 struct sg_table *sgt; local
410 struct sg_table *sgt; local
429 struct sg_table *sgt; local
453 vb2_dma_sg_dmabuf_ops_unmap(struct dma_buf_attachment *db_attach, struct sg_table *sgt, enum dma_data_direction dma_dir) argument
470 struct sg_table *sgt = buf->dma_sgt; local
481 struct sg_table *sgt = buf->dma_sgt; local
554 struct sg_table *sgt; local
582 struct sg_table *sgt = buf->dma_sgt; local
[all...]
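
The prepare/finish hits above hand the mapped buffer back and forth between CPU and device; the underlying ownership pattern, as a sketch with a hypothetical function name:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Give a mapped sg_table to the device, then take it back for the CPU. */
    static void example_sync_cycle(struct device *dev, struct sg_table *sgt)
    {
            /* CPU writes are done: make them visible to the device. */
            dma_sync_sgtable_for_device(dev, sgt, DMA_TO_DEVICE);

            /* ... device DMA runs here ... */

            /* Reclaim the buffer before the CPU touches it again. */
            dma_sync_sgtable_for_cpu(dev, sgt, DMA_TO_DEVICE);
    }
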
/linux-master/drivers/gpu/drm/tests/
drm_gem_shmem_test.c 38 struct sg_table *sgt = ptr; local
40 sg_free_table(sgt);
73 * created with the shmem file node attribute equal to NULL and the sgt
83 struct sg_table *sgt; local
91 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
92 KUNIT_ASSERT_NOT_NULL(test, sgt);
94 ret = kunit_add_action_or_reset(test, kfree_wrapper, sgt);
97 ret = sg_alloc_table(sgt, 1, GFP_KERNEL);
100 ret = kunit_add_action_or_reset(test, sg_free_table_wrapper, sgt);
203 struct sg_table *sgt; local
242 struct sg_table *sgt; local
312 struct sg_table *sgt; local
[all...]
/linux-master/drivers/hwtracing/intel_th/
msu-sink.c 51 static int msu_sink_alloc_window(void *data, struct sg_table **sgt, size_t size) argument
64 ret = sg_alloc_table(*sgt, nents, GFP_KERNEL);
68 priv->sgts[priv->nr_sgts++] = *sgt;
70 for_each_sg((*sgt)->sgl, sg_ptr, nents, i) {
84 static void msu_sink_free_window(void *data, struct sg_table *sgt) argument
90 for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) {
95 sg_free_table(sgt);
99 static int msu_sink_ready(void *data, struct sg_table *sgt, size_t bytes) argument
103 intel_th_msc_window_unlock(priv->dev, sgt);
[all...]
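
msu_sink_alloc_window() above backs each scatterlist entry with its own DMA-coherent block; a hedged sketch of that shape (the function name, the PAGE_SIZE blocks, and the exact unwind path are assumptions, not the driver's code):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Allocate @nents PAGE_SIZE blocks and describe them in a new sg_table. */
    static struct sg_table *example_alloc_window(struct device *dev,
                                                 unsigned int nents)
    {
            struct scatterlist *sg;
            struct sg_table *sgt;
            unsigned int done, i;
            void *block;

            sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
            if (!sgt)
                    return NULL;
            if (sg_alloc_table(sgt, nents, GFP_KERNEL))
                    goto free_sgt;

            for_each_sg(sgt->sgl, sg, nents, i) {
                    block = dma_alloc_coherent(dev, PAGE_SIZE,
                                               &sg_dma_address(sg), GFP_KERNEL);
                    if (!block)
                            goto unwind;
                    sg_set_buf(sg, block, PAGE_SIZE);
            }
            return sgt;

    unwind:
            done = i;                /* entries 0..done-1 were populated */
            for_each_sg(sgt->sgl, sg, done, i)
                    dma_free_coherent(dev, PAGE_SIZE, sg_virt(sg),
                                      sg_dma_address(sg));
            sg_free_table(sgt);
    free_sgt:
            kfree(sgt);
            return NULL;
    }
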
/linux-master/drivers/gpu/drm/virtio/
virtgpu_vram.c 75 struct sg_table *sgt; local
79 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
80 if (!sgt)
90 return sgt;
93 ret = sg_alloc_table(sgt, 1, GFP_KERNEL);
104 sg_set_page(sgt->sgl, NULL, vram->vram_node.size, 0);
105 sg_dma_address(sgt->sgl) = addr;
106 sg_dma_len(sgt->sgl) = vram->vram_node.size;
108 return sgt;
115 virtio_gpu_vram_unmap_dma_buf(struct device *dev, struct sg_table *sgt, enum dma_data_direction dir) argument
[all...]
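
The virtio VRAM exporter above builds a one-entry table that carries only a bus address, since VRAM has no struct pages behind it; the pattern condensed into a sketch (helper name hypothetical):

    #include <linux/dma-mapping.h>
    #include <linux/err.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Wrap an already-resolved bus address range in a single-entry sg_table. */
    static struct sg_table *wrap_dma_range(dma_addr_t addr, size_t len)
    {
            struct sg_table *sgt;

            sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
            if (!sgt)
                    return ERR_PTR(-ENOMEM);

            if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
                    kfree(sgt);
                    return ERR_PTR(-ENOMEM);
            }

            /* No backing page: only the DMA address/length fields are valid. */
            sg_set_page(sgt->sgl, NULL, len, 0);
            sg_dma_address(sgt->sgl) = addr;
            sg_dma_len(sgt->sgl) = len;

            return sgt;
    }
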
/linux-master/drivers/gpu/drm/mediatek/
mtk_drm_gem.c 193 struct sg_table *sgt; local
196 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
197 if (!sgt)
200 ret = dma_get_sgtable_attrs(priv->dma_dev, sgt, mtk_gem->cookie,
204 DRM_ERROR("failed to allocate sgt, %d\n", ret);
205 kfree(sgt);
209 return sgt;
236 struct sg_table *sgt = NULL; local
242 sgt
[all...]
/linux-master/drivers/gpu/drm/xe/
xe_dma_buf.c 94 struct sg_table *sgt; local
111 sgt = drm_prime_pages_to_sg(obj->dev,
114 if (IS_ERR(sgt))
115 return sgt;
117 if (dma_map_sgtable(attach->dev, sgt, dir,
127 dir, &sgt);
135 return sgt;
138 sg_free_table(sgt);
139 kfree(sgt);
144 struct sg_table *sgt,
143 xe_dma_buf_unmap(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) argument
[all...]
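
The xe map callback above (and the amdgpu one near the end of this page) follows the common exporter pattern: convert the object's pages to an sg_table with drm_prime_pages_to_sg(), DMA-map it for the importer's device, and undo both steps on failure. A stripped-down sketch under those assumptions, with hypothetical parameters:

    #include <drm/drm_prime.h>
    #include <linux/dma-buf.h>
    #include <linux/dma-mapping.h>
    #include <linux/err.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Hypothetical exporter map: pages -> sg_table -> DMA mapping for @attach. */
    static struct sg_table *example_map(struct dma_buf_attachment *attach,
                                        struct drm_device *drm,
                                        struct page **pages,
                                        unsigned int nr_pages,
                                        enum dma_data_direction dir)
    {
            struct sg_table *sgt;
            int ret;

            sgt = drm_prime_pages_to_sg(drm, pages, nr_pages);
            if (IS_ERR(sgt))
                    return sgt;

            ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
            if (ret) {
                    sg_free_table(sgt);
                    kfree(sgt);
                    return ERR_PTR(ret);
            }

            return sgt;
    }
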
xe_ttm_vram_mgr.h 24 struct sg_table **sgt);
26 struct sg_table *sgt);
/linux-master/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c 102 struct sg_table *sgt; local
123 sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);
124 if (IS_ERR(sgt)) {
125 ret = PTR_ERR(sgt);
129 obj = omap_gem_new_dmabuf(dev, dma_buf->size, sgt);
140 dma_buf_unmap_attachment_unlocked(attach, sgt, DMA_TO_DEVICE);
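
The omapdrm lines above are the importer side of the same dma-buf dance; reduced to its skeleton (helper name hypothetical, and real code keeps the attachment around instead of tearing it down immediately):

    #include <linux/dma-buf.h>
    #include <linux/err.h>

    /* Attach to a dma-buf, map it to get an sg_table, then clean up again. */
    static int example_import(struct device *dev, struct dma_buf *buf)
    {
            struct dma_buf_attachment *attach;
            struct sg_table *sgt;

            attach = dma_buf_attach(buf, dev);
            if (IS_ERR(attach))
                    return PTR_ERR(attach);

            sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);
            if (IS_ERR(sgt)) {
                    dma_buf_detach(buf, attach);
                    return PTR_ERR(sgt);
            }

            /* ... use sg_dma_address()/sg_dma_len() on sgt here ... */

            dma_buf_unmap_attachment_unlocked(attach, sgt, DMA_TO_DEVICE);
            dma_buf_detach(buf, attach);
            return 0;
    }
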
/linux-master/drivers/gpu/drm/tegra/
gem.c 47 static inline unsigned int sgt_dma_count_chunks(struct sg_table *sgt) argument
49 return sg_dma_count_chunks(sgt->sgl, sgt->nents);
88 map->sgt = dma_buf_map_attachment_unlocked(map->attach, direction);
89 if (IS_ERR(map->sgt)) {
91 err = PTR_ERR(map->sgt);
92 map->sgt = NULL;
96 err = sgt_dma_count_chunks(map->sgt);
106 map->sgt = kzalloc(sizeof(*map->sgt), GFP_KERNE
630 struct sg_table *sgt; local
657 tegra_gem_prime_unmap_dma_buf(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) argument
[all...]
/linux-master/net/ceph/
crypto.c 154 * Dispose of @sgt with teardown_sgtable().
161 static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg, argument
173 memset(sgt, 0, sizeof(*sgt));
183 ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);
189 sgt->sgl = prealloc_sg;
190 sgt->nents = sgt->orig_nents = 1;
193 for_each_sg(sgt->sgl, sg, sgt
213 teardown_sgtable(struct sg_table *sgt) argument
223 struct sg_table sgt; local
[all...]
/linux-master/drivers/gpu/drm/i915/gem/
i915_gem_dmabuf.c 32 struct sg_table *sgt; local
37 * Make a copy of the object's sgt, so that we can make an independent
40 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
41 if (!sgt) {
46 ret = sg_alloc_table(sgt, obj->mm.pages->orig_nents, GFP_KERNEL);
50 dst = sgt->sgl;
56 ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
60 return sgt;
63 sg_free_table(sgt);
240 struct sg_table *sgt; local
269 i915_gem_object_put_pages_dmabuf(struct drm_i915_gem_object *obj, struct sg_table *sgt) argument
[all...]
/linux-master/drivers/media/platform/nvidia/tegra-vde/
dmabuf-cache.c 27 struct sg_table *sgt; member in struct:tegra_vde_cache_entry
41 dma_buf_unmap_attachment_unlocked(entry->a, entry->sgt, entry->dma_dir);
72 struct sg_table *sgt; local
93 *addrp = sg_dma_address(entry->sgt->sgl);
105 sgt = dma_buf_map_attachment_unlocked(attachment, dma_dir);
106 if (IS_ERR(sgt)) {
108 err = PTR_ERR(sgt);
112 if (!vde->domain && sgt->nents > 1) {
125 err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size);
131 *addrp = sg_dma_address(sgt
[all...]
/linux-master/drivers/infiniband/core/
umem_dmabuf.c 17 struct sg_table *sgt; local
26 if (umem_dmabuf->sgt)
29 sgt = dma_buf_map_attachment(umem_dmabuf->attach,
31 if (IS_ERR(sgt))
32 return PTR_ERR(sgt);
39 for_each_sgtable_dma_sg(sgt, sg, i) {
62 umem_dmabuf->umem.sgt_append.sgt.sgl = umem_dmabuf->first_sg;
63 umem_dmabuf->umem.sgt_append.sgt.nents = nmap;
64 umem_dmabuf->sgt = sgt;
[all...]
/linux-master/include/uapi/linux/
erspan.h 22 __be16 sgt; /* security group tag */ member in struct:erspan_md2
/linux-master/tools/include/uapi/linux/
erspan.h 22 __be16 sgt; /* security group tag */ member in struct:erspan_md2
/linux-master/include/drm/
drm_prime.h 84 struct sg_table *sgt,
97 unsigned long drm_prime_get_contiguous_size(struct sg_table *sgt);
108 int drm_prime_sg_to_page_array(struct sg_table *sgt, struct page **pages,
110 int drm_prime_sg_to_dma_addr_array(struct sg_table *sgt, dma_addr_t *addrs,
/linux-master/drivers/gpu/drm/armada/
armada_gem.c 68 if (dobj->sgt)
70 dobj->sgt, DMA_TO_DEVICE);
392 struct sg_table *sgt; local
395 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
396 if (!sgt)
404 if (sg_alloc_table(sgt, count, GFP_KERNEL))
409 for_each_sgtable_sg(sgt, sg, i) {
419 if (dma_map_sgtable(attach->dev, sgt, dir, 0))
423 if (sg_alloc_table(sgt,
452 armada_gem_prime_unmap_dma_buf(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) argument
[all...]
/linux-master/drivers/gpu/drm/
drm_gem_dma_helper.c 234 drm_prime_gem_destroy(gem_obj, dma_obj->sgt);
424 struct sg_table *sgt; local
427 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
428 if (!sgt)
431 ret = dma_get_sgtable(obj->dev->dev, sgt, dma_obj->vaddr,
436 return sgt;
439 kfree(sgt);
449 * @sgt: scatter/gather table of pinned pages
464 struct sg_table *sgt)
462 drm_gem_dma_prime_import_sg_table(struct drm_device *dev, struct dma_buf_attachment *attach, struct sg_table *sgt) argument
574 drm_gem_dma_prime_import_sg_table_vmap(struct drm_device *dev, struct dma_buf_attachment *attach, struct sg_table *sgt) argument
[all...]
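
The get_sg_table helper above describes a coherent DMA allocation as an sg_table via dma_get_sgtable(); roughly, as a hedged sketch (helper name hypothetical):

    #include <linux/dma-mapping.h>
    #include <linux/err.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Describe a dma_alloc_coherent() buffer with a newly allocated sg_table. */
    static struct sg_table *coherent_to_sgt(struct device *dev, void *vaddr,
                                            dma_addr_t dma_addr, size_t size)
    {
            struct sg_table *sgt;
            int ret;

            sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
            if (!sgt)
                    return ERR_PTR(-ENOMEM);

            ret = dma_get_sgtable(dev, sgt, vaddr, dma_addr, size);
            if (ret) {
                    kfree(sgt);
                    return ERR_PTR(ret);
            }
            return sgt;
    }
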
/linux-master/drivers/gpu/drm/amd/amdgpu/
amdgpu_dma_buf.c 150 struct sg_table *sgt; local
175 sgt = drm_prime_pages_to_sg(obj->dev,
178 if (IS_ERR(sgt))
179 return sgt;
181 if (dma_map_sgtable(attach->dev, sgt, dir,
189 dir, &sgt);
197 return sgt;
200 sg_free_table(sgt);
201 kfree(sgt);
208 * @sgt
214 amdgpu_dma_buf_unmap(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) argument
[all...]

Completed in 195 milliseconds
