Searched refs:imem (Results 1 - 25 of 32) sorted by relevance


/linux-master/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
base.c 74 nvkm_instobj_dtor(struct nvkm_instmem *imem, struct nvkm_instobj *iobj) argument
76 spin_lock(&imem->lock);
78 spin_unlock(&imem->lock);
83 struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
87 spin_lock(&imem->lock);
88 list_add_tail(&iobj->head, &imem->list);
89 spin_unlock(&imem->lock);
96 struct nvkm_instmem *imem = device->imem; local
99 if (!imem
82 nvkm_instobj_ctor(const struct nvkm_memory_func *func, struct nvkm_instmem *imem, struct nvkm_instobj *iobj) argument
111 nvkm_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero, bool preserve, struct nvkm_memory **pmemory) argument
152 nvkm_instmem_rd32(struct nvkm_instmem *imem, u32 addr) argument
158 nvkm_instmem_wr32(struct nvkm_instmem *imem, u32 addr, u32 data) argument
164 nvkm_instmem_boot(struct nvkm_instmem *imem) argument
181 struct nvkm_instmem *imem = nvkm_instmem(subdev); local
201 struct nvkm_instmem *imem = nvkm_instmem(subdev); local
218 struct nvkm_instmem *imem = nvkm_instmem(subdev); local
227 struct nvkm_instmem *imem = nvkm_instmem(subdev); local
244 nvkm_instmem_ctor(const struct nvkm_instmem_func *func, struct nvkm_device *device, enum nvkm_subdev_type type, int inst, struct nvkm_instmem *imem) argument
[all...]
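
The base.c hits above outline the lifecycle of an instance-memory object: nvkm_instobj_ctor() links the new object onto the instmem's list under imem->lock, and nvkm_instobj_dtor() unlinks it again. A minimal sketch of that pattern, assuming the nouveau nvkm headers; the list_del() in the dtor is inferred, since the line between 76 and 78 is not shown:

    /* Sketch of the ctor/dtor list handling seen in subdev/instmem/base.c. */
    static void
    sketch_instobj_dtor(struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
    {
        spin_lock(&imem->lock);
        list_del(&iobj->head);        /* inferred: unlink from imem->list */
        spin_unlock(&imem->lock);
    }

    static void
    sketch_instobj_ctor(const struct nvkm_memory_func *func,
                        struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
    {
        nvkm_memory_ctor(func, &iobj->memory);
        spin_lock(&imem->lock);
        list_add_tail(&iobj->head, &imem->list);
        spin_unlock(&imem->lock);
    }
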
nv40.c 43 struct nv40_instmem *imem; member in struct:nv40_instobj
51 iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset);
58 return ioread32_native(iobj->imem->iomem + iobj->node->offset + offset);
77 return iobj->imem->iomem + iobj->node->offset;
102 mutex_lock(&iobj->imem->base.mutex);
103 nvkm_mm_free(&iobj->imem->heap, &iobj->node);
104 mutex_unlock(&iobj->imem->base.mutex);
105 nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);
123 struct nv40_instmem *imem = nv40_instmem(base); local
131 nvkm_instobj_ctor(&nv40_instobj_func, &imem
160 struct nv40_instmem *imem = nv40_instmem(base); local
216 struct nv40_instmem *imem = nv40_instmem(base); local
241 struct nv40_instmem *imem; local
[all...]
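
The nv40 variant maps the instance-memory aperture once and then accesses objects directly through that mapping, at iomem + node->offset + offset. A rough sketch of the accessor pair, assuming the driver's private structs:

    static void
    sketch_nv40_instobj_wr32(struct nv40_instobj *iobj, u64 offset, u32 data)
    {
        iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset);
    }

    static u32
    sketch_nv40_instobj_rd32(struct nv40_instobj *iobj, u64 offset)
    {
        return ioread32_native(iobj->imem->iomem + iobj->node->offset + offset);
    }
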
nv04.c 42 struct nv04_instmem *imem; member in struct:nv04_instobj
50 struct nvkm_device *device = iobj->imem->base.subdev.device;
58 struct nvkm_device *device = iobj->imem->base.subdev.device;
77 struct nvkm_device *device = iobj->imem->base.subdev.device;
103 mutex_lock(&iobj->imem->base.mutex);
104 nvkm_mm_free(&iobj->imem->heap, &iobj->node);
105 mutex_unlock(&iobj->imem->base.mutex);
106 nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);
124 struct nv04_instmem *imem = nv04_instmem(base); local
132 nvkm_instobj_ctor(&nv04_instobj_func, &imem
147 nv04_instmem_rd32(struct nvkm_instmem *imem, u32 addr) argument
153 nv04_instmem_wr32(struct nvkm_instmem *imem, u32 addr, u32 data) argument
159 nv04_instmem_resume(struct nvkm_instmem *imem) argument
177 nv04_instmem_suspend(struct nvkm_instmem *imem) argument
203 struct nv04_instmem *imem = nv04_instmem(base); local
243 struct nv04_instmem *imem = nv04_instmem(base); local
268 struct nv04_instmem *imem; local
[all...]
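
nv04, by contrast, goes through the device's register interface rather than a dedicated mapping, and the heap node is released under the instmem mutex in the destructor. A sketch; the 0x700000 PRAMIN base is my assumption, since the listing truncates the rd32 body:

    static u32
    sketch_nv04_instobj_rd32(struct nv04_instobj *iobj, u64 offset)
    {
        struct nvkm_device *device = iobj->imem->base.subdev.device;
        /* 0x700000 (PRAMIN) is an assumption; the listing cuts the line off */
        return nvkm_rd32(device, 0x700000 + iobj->node->offset + offset);
    }

    static void
    sketch_nv04_instobj_dtor(struct nv04_instobj *iobj)
    {
        mutex_lock(&iobj->imem->base.mutex);
        nvkm_mm_free(&iobj->imem->heap, &iobj->node);
        mutex_unlock(&iobj->imem->base.mutex);
        nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);
    }
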
gk20a.c 54 struct gk20a_instmem *imem; member in struct:gk20a_instobj
145 struct gk20a_instmem *imem = obj->base.imem; local
151 imem->vaddr_use -= nvkm_memory_size(&obj->base.base.memory);
152 nvkm_debug(&imem->base.subdev, "vaddr used: %x/%x\n", imem->vaddr_use,
153 imem->vaddr_max);
160 gk20a_instmem_vaddr_gc(struct gk20a_instmem *imem, const u64 size) argument
162 while (imem->vaddr_use + size > imem
177 struct gk20a_instmem *imem = node->imem; local
189 struct gk20a_instmem *imem = node->base.imem; local
232 struct gk20a_instmem *imem = node->imem; local
244 struct gk20a_instmem *imem = node->base.imem; local
298 struct gk20a_instmem *imem = node->base.imem; local
315 struct gk20a_instmem *imem = node->base.imem; local
383 gk20a_instobj_ctor_dma(struct gk20a_instmem *imem, u32 npages, u32 align, struct gk20a_instobj **_node) argument
421 gk20a_instobj_ctor_iommu(struct gk20a_instmem *imem, u32 npages, u32 align, struct gk20a_instobj **_node) argument
518 struct gk20a_instmem *imem = gk20a_instmem(base); local
551 struct gk20a_instmem *imem = gk20a_instmem(base); local
578 struct gk20a_instmem *imem; local
[all...]
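
gk20a keeps CPU mappings of instance objects cached and accounts for them in vaddr_use/vaddr_max; gk20a_instmem_vaddr_gc() (line 160 above, truncated) evicts cached mappings until a new one of the given size fits. A sketch of that loop, with the LRU list name and the recycle helper assumed:

    static void
    sketch_gk20a_vaddr_gc(struct gk20a_instmem *imem, const u64 size)
    {
        /* Evict least-recently-used cached mappings until 'size' fits. */
        while (imem->vaddr_use + size > imem->vaddr_max) {
            if (list_empty(&imem->vaddr_lru))    /* assumed list name */
                break;
            /* assumed helper: unmaps one object and drops vaddr_use */
            gk20a_instobj_recycle_vaddr(
                list_first_entry(&imem->vaddr_lru,
                                 struct gk20a_instobj_iommu, vaddr_node));
        }
    }
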
nv50.c 48 struct nv50_instmem *imem; member in struct:nv50_instobj
60 struct nv50_instmem *imem = iobj->imem; local
61 struct nvkm_device *device = imem->base.subdev.device;
66 spin_lock_irqsave(&imem->base.lock, flags);
67 if (unlikely(imem->addr != base)) {
69 imem->addr = base;
72 spin_unlock_irqrestore(&imem->base.lock, flags);
79 struct nv50_instmem *imem = iobj->imem; local
123 struct nv50_instmem *imem = iobj->imem; local
195 struct nv50_instmem *imem = iobj->imem; local
220 struct nvkm_instmem *imem = &iobj->imem->base; local
269 struct nvkm_instmem *imem = &iobj->imem->base; local
320 struct nvkm_instmem *imem = &iobj->imem->base; local
360 struct nv50_instmem *imem = nv50_instmem(base); local
377 nv50_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero, struct nvkm_memory **pmemory) argument
425 struct nv50_instmem *imem; local
[all...]
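
nv50 slow-path accesses slide a window over VRAM: under imem->base.lock (taken with spin_lock_irqsave), the cached imem->addr is compared with the wanted 1MiB-aligned base and the window is reprogrammed only on a miss. A sketch; the 0x001700 window register and 0x700000 aperture offset are from memory, not from the listing:

    static u32
    sketch_nv50_instobj_rd32_slow(struct nv50_instobj *iobj, u64 vram_addr, u64 offset)
    {
        struct nv50_instmem *imem = iobj->imem;
        struct nvkm_device *device = imem->base.subdev.device;
        u64 base = (vram_addr + offset) & 0xffffff00000ULL;
        u64 addr = (vram_addr + offset) & 0x000000fffffULL;
        unsigned long flags;
        u32 data;

        spin_lock_irqsave(&imem->base.lock, flags);
        if (unlikely(imem->addr != base)) {
            nvkm_wr32(device, 0x001700, base >> 16);    /* assumed register */
            imem->addr = base;
        }
        data = nvkm_rd32(device, 0x700000 + addr);      /* assumed aperture */
        spin_unlock_irqrestore(&imem->base.lock, flags);
        return data;
    }
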
r535.c 210 r535_instmem_resume(struct nvkm_instmem *imem) argument
213 if (imem->rm.fbsr_valid) {
214 nvkm_gsp_sg_free(imem->subdev.device, &imem->rm.fbsr);
215 imem->rm.fbsr_valid = false;
220 r535_instmem_suspend(struct nvkm_instmem *imem) argument
222 struct nvkm_subdev *subdev = &imem->subdev;
235 list_for_each_entry(iobj, &imem->list, head) {
242 list_for_each_entry(iobj, &imem->boot, head) {
265 ret = nvkm_gsp_sg(gsp->subdev.device, fbsr.size, &imem
303 r535_instmem_dtor(struct nvkm_instmem *imem) argument
[all...]
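
r535 backs instance memory up into a GSP scatter-gather list (rm.fbsr) across suspend; resume frees that backup if it is still flagged valid. The visible lines 210-215 reconstruct to roughly:

    static void
    sketch_r535_instmem_resume(struct nvkm_instmem *imem)
    {
        /* Drop the FB save/restore backup captured at suspend time. */
        if (imem->rm.fbsr_valid) {
            nvkm_gsp_sg_free(imem->subdev.device, &imem->rm.fbsr);
            imem->rm.fbsr_valid = false;
        }
    }
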
/linux-master/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/
nv40.c 33 struct nvkm_instmem *imem = device->imem; local
37 u32 dma0 = nvkm_instmem_rd32(imem, inst + 0);
38 u32 dma1 = nvkm_instmem_rd32(imem, inst + 4);
39 u32 dma2 = nvkm_instmem_rd32(imem, inst + 8);
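
The mpeg engine uses nvkm_instmem_rd32() to pull a DMA object's first three 32-bit words straight out of instance memory when a method binds one. A sketch, with the inst = data << 4 shift assumed from the usual instance-handle encoding:

    static void
    sketch_nv40_mpeg_read_dma(struct nvkm_device *device, u32 data)
    {
        struct nvkm_instmem *imem = device->imem;
        u32 inst = data << 4;                           /* assumed encoding */
        u32 dma0 = nvkm_instmem_rd32(imem, inst + 0);   /* object word 0 */
        u32 dma1 = nvkm_instmem_rd32(imem, inst + 4);   /* object word 1 */
        u32 dma2 = nvkm_instmem_rd32(imem, inst + 8);   /* object word 2 */
        /* ... validate the object and program the engine's DMA registers ... */
        (void)dma0; (void)dma1; (void)dma2;
    }
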
/linux-master/drivers/remoteproc/
qcom_pil_info.c 33 struct resource imem; local
45 ret = of_address_to_resource(np, 0, &imem);
50 base = ioremap(imem.start, resource_size(&imem));
56 memset_io(base, 0, resource_size(&imem));
59 _reloc.num_entries = (u32)resource_size(&imem) / PIL_RELOC_ENTRY_SIZE;
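
qcom_pil_info maps a chunk of IMEM described by a DT node, clears it with memset_io(), and sizes the relocation table from the resource. A sketch of that setup; the compatible string and the node lookup are assumptions:

    static struct {
        void __iomem *base;
        u32 num_entries;
    } sketch_reloc;   /* stands in for the driver's _reloc bookkeeping */

    static int sketch_pil_reloc_map(void)
    {
        struct device_node *np;
        struct resource imem;
        void __iomem *base;
        int ret;

        /* assumed lookup; the real driver may resolve the node differently */
        np = of_find_compatible_node(NULL, NULL, "qcom,pil-reloc-info");
        if (!np)
            return -ENOENT;

        ret = of_address_to_resource(np, 0, &imem);
        of_node_put(np);
        if (ret < 0)
            return ret;

        base = ioremap(imem.start, resource_size(&imem));
        if (!base)
            return -ENOMEM;

        /* wipe stale entries, then record how many fixed-size slots fit */
        memset_io(base, 0, resource_size(&imem));
        sketch_reloc.base = base;
        sketch_reloc.num_entries = (u32)resource_size(&imem) / PIL_RELOC_ENTRY_SIZE;
        return 0;
    }
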
/linux-master/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
nv40.c 41 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
114 struct nvkm_instmem *imem = fifo->engine.subdev.device->imem; local
119 hash = nvkm_ramht_insert(imem->ramht, eobj, chan->id, 4, eobj->handle, context);
129 struct nvkm_memory *ramfc = device->imem->ramfc;
185 struct nvkm_instmem *imem = device->imem; local
186 struct nvkm_ramht *ramht = imem->ramht;
187 struct nvkm_memory *ramro = imem->ramro;
188 struct nvkm_memory *ramfc = imem
[all...]
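
The pre-nv50 fifo code resolves RAMHT/RAMRO/RAMFC through device->imem and registers engine objects by hashing them into RAMHT against the channel id. A sketch of the insert step; the layout of the context value is an assumption:

    static int
    sketch_nv40_eobj_attach(struct nvkm_fifo *fifo, struct nvkm_object *eobj,
                            struct nvkm_chan *chan, u32 context)
    {
        struct nvkm_instmem *imem = fifo->engine.subdev.device->imem;

        /* context packs the object's instance address and engine id (assumed) */
        return nvkm_ramht_insert(imem->ramht, eobj, chan->id, 4,
                                 eobj->handle, context);
    }
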
nv17.c 40 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
96 struct nvkm_instmem *imem = device->imem; local
97 struct nvkm_ramht *ramht = imem->ramht;
98 struct nvkm_memory *ramro = imem->ramro;
99 struct nvkm_memory *ramfc = imem->ramfc;
nv04.c 45 struct nvkm_memory *fctx = device->imem->ramfc;
105 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
118 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
184 struct nvkm_instmem *imem = fifo->engine.subdev.device->imem; local
187 nvkm_ramht_remove(imem->ramht, hash);
195 struct nvkm_instmem *imem = fifo->engine.subdev.device->imem; local
200 hash = nvkm_ramht_insert(imem->ramht, eobj, chan->id, 4, eobj->handle, context);
469 struct nvkm_instmem *imem local
[all...]
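
nv04 shows the matching teardown: the engine-object dtor removes the previously returned hash from RAMHT, again reaching RAMHT through device->imem. Sketch:

    static void
    sketch_nv04_eobj_detach(struct nvkm_fifo *fifo, int hash)
    {
        struct nvkm_instmem *imem = fifo->engine.subdev.device->imem;

        if (hash >= 0)
            nvkm_ramht_remove(imem->ramht, hash);
    }
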
nv10.c 39 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
/linux-master/drivers/net/wwan/iosm/
iosm_ipc_mux.c 13 channel_id = ipc_imem_channel_alloc(ipc_mux->imem, ipc_mux->instance_id,
25 ipc_mux->channel = ipc_imem_channel_open(ipc_mux->imem, channel_id,
187 ipc_imem_channel_close(ipc_mux->imem, ipc_mux->channel_id);
223 ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);
227 ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);
236 ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);
240 ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);
279 struct iosm_imem *imem)
295 ipc_mux->pcie = imem->pcie;
296 ipc_mux->imem
278 ipc_mux_init(struct ipc_mux_config *mux_cfg, struct iosm_imem *imem) argument
[all...]
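
On the IOSM WWAN side, the mux layer always reaches the device core through its ipc_mux->imem pointer; lines 223-240 show it bracketing a state change by suspending and then resuming the TD update timer. A sketch, assuming struct iosm_mux from the driver's headers:

    static void sketch_mux_freeze_td_timer(struct iosm_mux *ipc_mux)
    {
        /* pause the TD-update timer while reconfiguring (assumed context) */
        ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);

        /* ... update mux session state here ... */

        ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);
    }
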
iosm_ipc_task_queue.h 82 * @imem: Pointer to iosm_imem struct
92 int ipc_task_queue_send_task(struct iosm_imem *imem,
iosm_ipc_protocol.c 17 int index = ipc_protocol_msg_prep(ipc_protocol->imem, msg_type,
25 ipc_protocol_msg_hp_update(ipc_protocol->imem);
83 index = ipc_task_queue_send_task(ipc_protocol->imem,
100 ipc_task_queue_send_task(ipc_protocol->imem,
182 ipc_task_queue_send_task(ipc_protocol->imem,
235 ipc_protocol->imem = ipc_imem;
iosm_ipc_task_queue.c 140 int ipc_task_queue_send_task(struct iosm_imem *imem, argument
157 ret = ipc_task_queue_add_task(imem, arg, copy, func,
160 dev_err(imem->ipc_task->dev,
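
All deferred work in the driver funnels through ipc_task_queue_send_task(), which takes the iosm_imem instance plus a callback and internally wraps ipc_task_queue_add_task(). A usage sketch; the callback shape and the trailing arguments (arg, msg, size, wait) are my assumption from other call sites, not from the lines shown:

    /* assumed callback shape */
    static int sketch_tq_work(struct iosm_imem *ipc_imem, int arg, void *msg,
                              size_t size)
    {
        /* runs later, in the imem task context */
        return 0;
    }

    static int sketch_defer_work(struct iosm_imem *imem)
    {
        /* the last three arguments (msg, size, wait) are assumptions */
        return ipc_task_queue_send_task(imem, sketch_tq_work, 0, NULL, 0, false);
    }
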
iosm_ipc_pcie.c 43 ipc_imem_cleanup(ipc_pcie->imem);
53 kfree(ipc_pcie->imem);
313 /* Establish the link to the imem layer. */
314 ipc_pcie->imem = ipc_imem_init(ipc_pcie, pci->device,
316 if (!ipc_pcie->imem) {
317 dev_err(ipc_pcie->dev, "failed to init imem");
355 ipc_imem_pm_s2idle_sleep(ipc_pcie->imem, true);
366 ipc_imem_pm_s2idle_sleep(ipc_pcie->imem, false);
381 ipc_imem_pm_suspend(ipc_pcie->imem);
392 ipc_imem_pm_resume(ipc_pcie->imem);
[all...]
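
iosm_ipc_pcie.c shows the imem lifetime: ipc_imem_init() during probe (with ipc_imem_cleanup() plus kfree() on remove) and thin PM callbacks that forward to ipc_imem_pm_suspend()/ipc_imem_pm_resume(). A sketch of the PM half, assuming the iosm_pcie instance is stored as drvdata:

    static int sketch_iosm_suspend(struct device *dev)
    {
        struct iosm_pcie *ipc_pcie = dev_get_drvdata(dev);   /* assumed */

        ipc_imem_pm_suspend(ipc_pcie->imem);
        return 0;
    }

    static int sketch_iosm_resume(struct device *dev)
    {
        struct iosm_pcie *ipc_pcie = dev_get_drvdata(dev);   /* assumed */

        ipc_imem_pm_resume(ipc_pcie->imem);
        return 0;
    }
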
iosm_ipc_pcie.h 54 * @imem: Pointer to imem data struct
69 struct iosm_imem *imem; member in struct:iosm_pcie
iosm_ipc_protocol.h 111 * @imem: Pointer to struct iosm_imem
122 struct iosm_imem *imem; member in struct:iosm_protocol
iosm_ipc_irq.c 44 ipc_imem_irq_process(ipc_pcie->imem, instance);
iosm_ipc_mux_codec.c 20 ipc_imem_ul_send(ipc_mux->imem);
28 int ret = ipc_task_queue_send_task(ipc_mux->imem, ipc_mux_tq_cmd_send,
49 ipc_uevent_send(ipc_mux->imem->dev, UEVENT_MDM_TIMEOUT);
252 adb_timer = &ipc_mux->imem->adb_timer;
900 ul_data_pend = ipc_imem_ul_write_td(ipc_mux->imem);
904 ipc_imem_td_update_timer_start(ipc_mux->imem);
1003 (void)ipc_imem_ul_write_td(ipc_mux->imem);
1161 (void)ipc_imem_ul_write_td(ipc_mux->imem);
1491 ipc_imem_adb_timer_start(ipc_mux->imem);
1494 ipc_imem_td_update_timer_start(ipc_mux->imem);
[all...]
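
The mux codec follows a consistent pattern after queueing uplink data: push TDs via ipc_imem_ul_write_td() and, if anything is still pending (the conditional is inferred), re-arm the TD update timer. Sketch:

    static void sketch_mux_kick_ul(struct iosm_mux *ipc_mux)
    {
        bool ul_data_pend;

        ul_data_pend = ipc_imem_ul_write_td(ipc_mux->imem);
        if (ul_data_pend)
            ipc_imem_td_update_timer_start(ipc_mux->imem);
    }
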
/linux-master/drivers/gpu/drm/nouveau/nvkm/core/
memory.c 141 struct nvkm_instmem *imem = device->imem; local
146 if (unlikely(!imem))
159 ret = nvkm_instobj_new(imem, size, align, zero, preserve, &memory);
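
nvkm_memory_new() is where the rest of nvkm reaches instance memory: when the requested target is instance memory it bails out if the subdev is absent and otherwise defers to nvkm_instobj_new(). A sketch of that branch; the -ENOSYS errno is an assumption:

    static int
    sketch_memory_new_inst(struct nvkm_device *device, u32 size, u32 align,
                           bool zero, bool preserve, struct nvkm_memory **pmemory)
    {
        struct nvkm_instmem *imem = device->imem;

        if (unlikely(!imem))
            return -ENOSYS;   /* assumed errno */

        return nvkm_instobj_new(imem, size, align, zero, preserve, pmemory);
    }
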
/linux-master/drivers/gpu/drm/nouveau/nvkm/engine/device/
base.c 85 .imem = { 0x00000001, nv04_instmem_new },
106 .imem = { 0x00000001, nv04_instmem_new },
128 .imem = { 0x00000001, nv04_instmem_new },
148 .imem = { 0x00000001, nv04_instmem_new },
170 .imem = { 0x00000001, nv04_instmem_new },
192 .imem = { 0x00000001, nv04_instmem_new },
214 .imem = { 0x00000001, nv04_instmem_new },
236 .imem = { 0x00000001, nv04_instmem_new },
258 .imem = { 0x00000001, nv04_instmem_new },
280 .imem
[all...]
user.c 78 struct nvkm_instmem *imem = device->imem; local
162 if (imem && args->v0.ram_size > 0)
163 args->v0.ram_user = args->v0.ram_user - imem->reserved;
/linux-master/drivers/memory/
brcmstb_dpfe.c 184 void __iomem *imem; member in struct:brcmstb_dpfe_priv
571 u32 __iomem *imem = priv->imem; local
590 sum += readl_relaxed(imem + i);
626 const u32 *dmem, *imem; local
676 imem = fw_blob;
683 ret = __write_firmware(priv->imem, imem, imem_size, is_big_endian);
884 priv->imem = devm_platform_ioremap_resource_byname(pdev, "dpfe-imem");
[all...]
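
brcmstb_dpfe maps its "dpfe-imem" region with devm_platform_ioremap_resource_byname(), copies the firmware's IMEM section into it, and validates it by summing words with readl_relaxed(). A sketch of the checksum walk:

    static u32 sketch_dpfe_imem_sum(u32 __iomem *imem, unsigned int words)
    {
        u32 sum = 0;
        unsigned int i;

        /* imem is a u32 pointer, so 'imem + i' advances one word at a time */
        for (i = 0; i < words; i++)
            sum += readl_relaxed(imem + i);

        return sum;
    }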

Completed in 176 milliseconds
