Lines matching refs: v3d (the leading number on each line is its line number in the matched source file)

41 struct v3d_dev *v3d =
43 struct drm_device *dev = &v3d->drm;
63 spin_lock_irqsave(&v3d->job_lock, irqflags);
64 if (!v3d->bin_job) {
65 spin_unlock_irqrestore(&v3d->job_lock, irqflags);
70 list_add_tail(&bo->unref_head, &v3d->bin_job->render->unref_list);
71 spin_unlock_irqrestore(&v3d->job_lock, irqflags);
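
The fragments at source lines 41-71 outline the binner-overflow work item: it recovers the struct v3d_dev from its work_struct, takes job_lock, and only hands the freshly allocated overflow buffer to the hardware if a bin job is still outstanding, parking the BO on that bin job's render job unref_list so it stays alive until rendering retires. A minimal sketch of how these fragments plausibly fit together, assuming the driver's internal definitions (v3d_bo_create(), the allocation size, and the V3D_PTB_* register writes are not part of the listing):

static void v3d_overflow_mem_work(struct work_struct *work)
{
        /* Sketch only, reconstructed around the listed fragments. */
        struct v3d_dev *v3d =
                container_of(work, struct v3d_dev, overflow_mem_work);
        struct drm_device *dev = &v3d->drm;
        struct v3d_bo *bo = v3d_bo_create(dev, NULL, 256 * 1024); /* size assumed */
        struct drm_gem_object *obj;
        unsigned long irqflags;

        if (IS_ERR(bo))
                return;
        obj = &bo->base.base;

        spin_lock_irqsave(&v3d->job_lock, irqflags);
        if (!v3d->bin_job) {
                /* The bin job already finished; drop the buffer again. */
                spin_unlock_irqrestore(&v3d->job_lock, irqflags);
                goto out;
        }

        /* Keep the BO referenced until the matching render job retires. */
        drm_gem_object_get(obj);
        list_add_tail(&bo->unref_head, &v3d->bin_job->render->unref_list);
        spin_unlock_irqrestore(&v3d->job_lock, irqflags);

        /* Point the binner's overflow pool at the new buffer (register
         * names are an assumption, not shown in the listing).
         */
        V3D_CORE_WRITE(0, V3D_PTB_BPOA, bo->node.start << V3D_MMU_PAGE_SHIFT);
        V3D_CORE_WRITE(0, V3D_PTB_BPOS, obj->size);

out:
        drm_gem_object_put(obj);
}
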
83 struct v3d_dev *v3d = arg;
98 schedule_work(&v3d->overflow_mem_work);
104 to_v3d_fence(v3d->bin_job->base.irq_fence);
105 struct v3d_file_priv *file = v3d->bin_job->base.file->driver_priv;
109 v3d->queue[V3D_BIN].jobs_sent++;
112 v3d->queue[V3D_BIN].start_ns = 0;
115 v3d->queue[V3D_BIN].enabled_ns += runtime;
117 trace_v3d_bcl_irq(&v3d->drm, fence->seqno);
124 to_v3d_fence(v3d->render_job->base.irq_fence);
125 struct v3d_file_priv *file = v3d->render_job->base.file->driver_priv;
129 v3d->queue[V3D_RENDER].jobs_sent++;
132 v3d->queue[V3D_RENDER].start_ns = 0;
135 v3d->queue[V3D_RENDER].enabled_ns += runtime;
137 trace_v3d_rcl_irq(&v3d->drm, fence->seqno);
142 if (intsts & V3D_INT_CSDDONE(v3d->ver)) {
144 to_v3d_fence(v3d->csd_job->base.irq_fence);
145 struct v3d_file_priv *file = v3d->csd_job->base.file->driver_priv;
149 v3d->queue[V3D_CSD].jobs_sent++;
152 v3d->queue[V3D_CSD].start_ns = 0;
155 v3d->queue[V3D_CSD].enabled_ns += runtime;
157 trace_v3d_csd_irq(&v3d->drm, fence->seqno);
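
Source lines 104-157 repeat one completion pattern per queue inside the core interrupt handler: look up the job's irq_fence, bump the queue's jobs_sent counter, fold the elapsed runtime into enabled_ns, clear start_ns, emit the queue's trace event, and signal the fence. A hedged sketch of that pattern for the bin queue (the local_clock() timebase and the per-file counters are assumptions inferred from the file pointer in the listing; the render and CSD branches differ only in interrupt bit, queue index, and trace event):

if (intsts & V3D_INT_FLDONE) { /* bin (flush) done; bit name assumed from the register header */
        struct v3d_fence *fence =
                to_v3d_fence(v3d->bin_job->base.irq_fence);
        struct v3d_file_priv *file = v3d->bin_job->base.file->driver_priv;
        u64 runtime = local_clock() - file->start_ns[V3D_BIN]; /* assumed timebase */

        file->jobs_sent[V3D_BIN]++;              /* per-client stats: assumed fields */
        v3d->queue[V3D_BIN].jobs_sent++;         /* per-device stats, as listed */

        file->start_ns[V3D_BIN] = 0;
        v3d->queue[V3D_BIN].start_ns = 0;

        file->enabled_ns[V3D_BIN] += runtime;
        v3d->queue[V3D_BIN].enabled_ns += runtime;

        trace_v3d_bcl_irq(&v3d->drm, fence->seqno);
        dma_fence_signal(&fence->base);
        status = IRQ_HANDLED;
}
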
165 if (v3d->ver < 71 && (intsts & V3D_INT_GMPV))
166 dev_err(v3d->drm.dev, "GMP violation\n");
171 if (v3d->single_irq_line && status == IRQ_NONE)
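
Around those per-queue blocks, lines 83-171 give the overall shape of the core handler: read and acknowledge the core interrupt status, schedule the overflow work on an out-of-memory interrupt, report GMP violations here on pre-7.1 hardware, and, when hub and core share one interrupt line (single_irq_line), fall through to the hub handler if nothing matched. A sketch of that skeleton, with the status and clear register names assumed from the driver's register header:

static irqreturn_t v3d_irq(int irq, void *arg)
{
        struct v3d_dev *v3d = arg;
        irqreturn_t status = IRQ_NONE;
        u32 intsts = V3D_CORE_READ(0, V3D_CTL_INT_STS);

        /* Acknowledge everything we are about to handle. */
        V3D_CORE_WRITE(0, V3D_CTL_INT_CLR, intsts);

        if (intsts & V3D_INT_OUTOMEM) {
                /* The binner ran out of memory; top it up from process context. */
                schedule_work(&v3d->overflow_mem_work);
                status = IRQ_HANDLED;
        }

        /* ... FLDONE / FRDONE / CSDDONE completion blocks as sketched above ... */

        if (v3d->ver < 71 && (intsts & V3D_INT_GMPV))
                dev_err(v3d->drm.dev, "GMP violation\n");

        /* On a shared line, an unhandled core interrupt may really be a
         * hub interrupt.
         */
        if (v3d->single_irq_line && status == IRQ_NONE)
                return v3d_hub_irq(irq, arg);

        return status;
}
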
180 struct v3d_dev *v3d = arg;
191 to_v3d_fence(v3d->tfu_job->base.irq_fence);
192 struct v3d_file_priv *file = v3d->tfu_job->base.file->driver_priv;
196 v3d->queue[V3D_TFU].jobs_sent++;
199 v3d->queue[V3D_TFU].start_ns = 0;
202 v3d->queue[V3D_TFU].enabled_ns += runtime;
204 trace_v3d_tfu_irq(&v3d->drm, fence->seqno);
214 (v3d->va_width - 32));
229 if (v3d->ver >= 41) {
235 dev_err(v3d->drm.dev, "MMU error from client %s (%d) at 0x%llx%s%s%s\n",
246 if (v3d->ver >= 71 && (intsts & V3D_V7_HUB_INT_GMPV)) {
247 dev_err(v3d->drm.dev, "GMP Violation\n");
255 v3d_irq_init(struct v3d_dev *v3d)
259 INIT_WORK(&v3d->overflow_mem_work, v3d_overflow_mem_work);
264 for (core = 0; core < v3d->cores; core++)
265 V3D_CORE_WRITE(core, V3D_CTL_INT_CLR, V3D_CORE_IRQS(v3d->ver));
266 V3D_WRITE(V3D_HUB_INT_CLR, V3D_HUB_IRQS(v3d->ver));
268 irq1 = platform_get_irq_optional(v3d_to_pdev(v3d), 1);
272 ret = devm_request_irq(v3d->drm.dev, irq1,
274 "v3d_core0", v3d);
277 ret = devm_request_irq(v3d->drm.dev,
278 platform_get_irq(v3d_to_pdev(v3d), 0),
280 "v3d_hub", v3d);
284 v3d->single_irq_line = true;
286 ret = devm_request_irq(v3d->drm.dev,
287 platform_get_irq(v3d_to_pdev(v3d), 0),
289 "v3d", v3d);
294 v3d_irq_enable(v3d);
299 dev_err(v3d->drm.dev, "IRQ setup failed: %d\n", ret);
304 v3d_irq_enable(struct v3d_dev *v3d)
309 for (core = 0; core < v3d->cores; core++) {
310 V3D_CORE_WRITE(core, V3D_CTL_INT_MSK_SET, ~V3D_CORE_IRQS(v3d->ver));
311 V3D_CORE_WRITE(core, V3D_CTL_INT_MSK_CLR, V3D_CORE_IRQS(v3d->ver));
314 V3D_WRITE(V3D_HUB_INT_MSK_SET, ~V3D_HUB_IRQS(v3d->ver));
315 V3D_WRITE(V3D_HUB_INT_MSK_CLR, V3D_HUB_IRQS(v3d->ver));
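
The enable path (lines 304-315) programs the mask registers on every core and on the hub in two steps: writing ~V3D_CORE_IRQS()/~V3D_HUB_IRQS() to the MSK_SET register masks every source the driver does not handle, and writing the handled set to MSK_CLR unmasks exactly those bits. A short sketch of the function the listing points at:

void v3d_irq_enable(struct v3d_dev *v3d)
{
        int core;

        /* Mask everything we do not handle, then unmask our own set. */
        for (core = 0; core < v3d->cores; core++) {
                V3D_CORE_WRITE(core, V3D_CTL_INT_MSK_SET, ~V3D_CORE_IRQS(v3d->ver));
                V3D_CORE_WRITE(core, V3D_CTL_INT_MSK_CLR, V3D_CORE_IRQS(v3d->ver));
        }

        V3D_WRITE(V3D_HUB_INT_MSK_SET, ~V3D_HUB_IRQS(v3d->ver));
        V3D_WRITE(V3D_HUB_INT_MSK_CLR, V3D_HUB_IRQS(v3d->ver));
}
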
319 v3d_irq_disable(struct v3d_dev *v3d)
324 for (core = 0; core < v3d->cores; core++)
329 for (core = 0; core < v3d->cores; core++)
330 V3D_CORE_WRITE(core, V3D_CTL_INT_CLR, V3D_CORE_IRQS(v3d->ver));
331 V3D_WRITE(V3D_HUB_INT_CLR, V3D_HUB_IRQS(v3d->ver));
333 cancel_work_sync(&v3d->overflow_mem_work);
337 void v3d_irq_reset(struct v3d_dev *v3d)
339 v3d_irq_enable(v3d);
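
The teardown fragments (lines 319-339) mirror the enable path: v3d_irq_disable() masks every core and hub interrupt, clears whatever is still latched, and then uses cancel_work_sync() so the overflow work item cannot run after the interrupt sources are gone, while v3d_irq_reset() simply re-enables the interrupt set after a GPU reset. A sketch of the disable side; the "mask everything" writes of ~0 are an assumption, since the listing only shows the per-core loops and the clears:

void v3d_irq_disable(struct v3d_dev *v3d)
{
        int core;

        /* Mask every interrupt source on all cores and on the hub. */
        for (core = 0; core < v3d->cores; core++)
                V3D_CORE_WRITE(core, V3D_CTL_INT_MSK_SET, ~0);
        V3D_WRITE(V3D_HUB_INT_MSK_SET, ~0);

        /* Then clear anything that fired before the mask took effect. */
        for (core = 0; core < v3d->cores; core++)
                V3D_CORE_WRITE(core, V3D_CTL_INT_CLR, V3D_CORE_IRQS(v3d->ver));
        V3D_WRITE(V3D_HUB_INT_CLR, V3D_HUB_IRQS(v3d->ver));

        /* The overflow work must not outlive the interrupt handlers. */
        cancel_work_sync(&v3d->overflow_mem_work);
}

/* Reprogram the interrupt masks after a GPU reset. */
void v3d_irq_reset(struct v3d_dev *v3d)
{
        v3d_irq_enable(v3d);
}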