Lines matching refs:disp
(uses of the struct nv50_disp / struct nvkm_disp pointer "disp" in the GV100 display engine interrupt, supervisor, and init code; the leading number on each line is the source-file line number of the match)

37 gv100_disp_wndw_cnt(struct nvkm_disp *disp, unsigned long *pmask)
39 struct nvkm_device *device = disp->engine.subdev.device;
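The two matches above fall inside gv100_disp_wndw_cnt(), which reports how many windows the hardware exposes and which ones are present. A minimal sketch of that contract, assuming the nvkm types and helpers visible in the matches; GV100_DISP_WNDW_MASK_REG is a hypothetical name standing in for the real register offset, which the listing does not show, and deriving the count from the mask is a simplification:

    /* Sketch only: fill *pmask with the window-present mask and return the
     * window count.  GV100_DISP_WNDW_MASK_REG is hypothetical; the real
     * code may read the count from a separate register.
     */
    static int
    gv100_disp_wndw_cnt_sketch(struct nvkm_disp *disp, unsigned long *pmask)
    {
            struct nvkm_device *device = disp->engine.subdev.device;

            *pmask = nvkm_rd32(device, GV100_DISP_WNDW_MASK_REG);
            return hweight_long(*pmask);
    }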
47 struct nv50_disp *disp =
49 struct nvkm_subdev *subdev = &disp->base.engine.subdev;
55 nvkm_debug(subdev, "supervisor %d: %08x\n", ffs(disp->super), stat);
56 list_for_each_entry(head, &disp->base.head, head) {
61 if (disp->super & 0x00000001) {
62 nv50_disp_chan_mthd(disp->chan[0], NV_DBG_DEBUG);
63 nv50_disp_super_1(disp);
64 list_for_each_entry(head, &disp->base.head, head) {
67 nv50_disp_super_1_0(disp, head);
70 if (disp->super & 0x00000002) {
71 list_for_each_entry(head, &disp->base.head, head) {
74 nv50_disp_super_2_0(disp, head);
76 nvkm_outp_route(&disp->base);
77 list_for_each_entry(head, &disp->base.head, head) {
80 nv50_disp_super_2_1(disp, head);
82 list_for_each_entry(head, &disp->base.head, head) {
85 nv50_disp_super_2_2(disp, head);
88 if (disp->super & 0x00000004) {
89 list_for_each_entry(head, &disp->base.head, head) {
92 nv50_disp_super_3_0(disp, head);
96 list_for_each_entry(head, &disp->base.head, head)
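Lines 47-96 all belong to the supervisor worker: it runs from disp->wq, logs which supervisor stage fired (disp->super), and then walks disp->base.head once per stage. A condensed sketch of that staging, assuming the nv50_disp layout and helpers visible above; the per-head status words read in the first list_for_each_entry loop, which gate each helper call in the real worker, and the trailing register writes are elided:

    /* Condensed sketch of the supervisor staging seen above.  In the real
     * worker each per-head helper only runs for heads whose status bit is
     * set; that gating is elided here.
     */
    static void
    gv100_disp_super_sketch(struct nv50_disp *disp)
    {
            struct nvkm_head *head;

            if (disp->super & 0x00000001) {
                    /* Stage 1: dump core channel state, then per-head prep. */
                    nv50_disp_chan_mthd(disp->chan[0], NV_DBG_DEBUG);
                    nv50_disp_super_1(disp);
                    list_for_each_entry(head, &disp->base.head, head)
                            nv50_disp_super_1_0(disp, head);
            } else if (disp->super & 0x00000002) {
                    /* Stage 2: per-head updates around output routing. */
                    list_for_each_entry(head, &disp->base.head, head)
                            nv50_disp_super_2_0(disp, head);
                    nvkm_outp_route(&disp->base);
                    list_for_each_entry(head, &disp->base.head, head)
                            nv50_disp_super_2_1(disp, head);
                    list_for_each_entry(head, &disp->base.head, head)
                            nv50_disp_super_2_2(disp, head);
            } else if (disp->super & 0x00000004) {
                    /* Stage 3: post-update work per head. */
                    list_for_each_entry(head, &disp->base.head, head)
                            nv50_disp_super_3_0(disp, head);
            }
    }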
102 gv100_disp_exception(struct nv50_disp *disp, int chid)
104 struct nvkm_subdev *subdev = &disp->base.engine.subdev;
130 if (chid < ARRAY_SIZE(disp->chan) && disp->chan[chid]) {
133 nv50_disp_chan_mthd(disp->chan[chid], NV_DBG_ERROR);
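gv100_disp_exception() (lines 102-133) is the common sink for the exception routers below: given a channel id, it reports the fault and, when the id maps to an allocated channel, dumps that channel's method state at error level. A minimal sketch, assuming the nvkm helpers above; the decoding of the exception status registers, which the matches do not show, is left out:

    /* Sketch of the per-channel exception sink.  The real function also
     * reads and decodes the faulting method/data before clearing the
     * exception; that part is not visible in the matches above.
     */
    static void
    gv100_disp_exception_sketch(struct nv50_disp *disp, int chid)
    {
            struct nvkm_subdev *subdev = &disp->base.engine.subdev;

            nvkm_error(subdev, "chid %d: exception\n", chid); /* message text assumed */

            if (chid < ARRAY_SIZE(disp->chan) && disp->chan[chid])
                    nv50_disp_chan_mthd(disp->chan[chid], NV_DBG_ERROR);
    }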
144 gv100_disp_intr_ctrl_disp(struct nv50_disp *disp)
146 struct nvkm_subdev *subdev = &disp->base.engine.subdev;
151 disp->super = (stat & 0x00000007);
152 queue_work(disp->wq, &disp->supervisor);
153 nvkm_wr32(device, 0x611860, disp->super);
179 nv50_disp_chan_uevent_send(disp, 0);
182 for_each_set_bit(wndw, &wndws, disp->wndw.nr) {
183 nv50_disp_chan_uevent_send(disp, 1 + wndw);
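The CTRL_DISP handler (lines 144-183) is where supervisor handling gets deferred: it latches the pending stage bits into disp->super, queues the supervisor work item, acks the interrupt, and forwards completion notifications to the core (event index 0) and window (1 + wndw) channels. A sketch of that flow, assuming the nv50_disp fields shown above; the status and notifier reads, and the other status bits the real handler deals with, are elided, so stat and wndws are passed in here:

    /* Sketch: supervisor latching and channel-notifier fan-out.  stat and
     * wndws stand in for values the real handler reads from controller
     * status registers that are not part of the matches above.
     */
    static void
    gv100_disp_intr_ctrl_disp_sketch(struct nv50_disp *disp, u32 stat,
                                     unsigned long wndws)
    {
            struct nvkm_device *device = disp->base.engine.subdev.device;
            int wndw;

            if (stat & 0x00000007) {
                    disp->super = (stat & 0x00000007);        /* which supervisor fired */
                    queue_work(disp->wq, &disp->supervisor);  /* defer to process context */
                    nvkm_wr32(device, 0x611860, disp->super); /* ack, per line 153 */
            }

            /* Wake waiters: core channel is event index 0, windows are 1 + wndw. */
            nv50_disp_chan_uevent_send(disp, 0);
            for_each_set_bit(wndw, &wndws, disp->wndw.nr)
                    nv50_disp_chan_uevent_send(disp, 1 + wndw);
    }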
192 gv100_disp_intr_exc_other(struct nv50_disp *disp)
194 struct nvkm_subdev *subdev = &disp->base.engine.subdev;
202 gv100_disp_exception(disp, 0);
207 for_each_set_bit(head, &mask, disp->wndw.nr) {
209 gv100_disp_exception(disp, 73 + head);
221 gv100_disp_intr_exc_winim(struct nv50_disp *disp)
223 struct nvkm_subdev *subdev = &disp->base.engine.subdev;
228 for_each_set_bit(wndw, &stat, disp->wndw.nr) {
230 gv100_disp_exception(disp, 33 + wndw);
241 gv100_disp_intr_exc_win(struct nv50_disp *disp)
243 struct nvkm_subdev *subdev = &disp->base.engine.subdev;
248 for_each_set_bit(wndw, &stat, disp->wndw.nr) {
250 gv100_disp_exception(disp, 1 + wndw);
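The three routers above (lines 192-250) share one shape: read an exception status word and turn each set bit into a channel id before handing off to gv100_disp_exception(). The bases visible in the matches give the GV100 channel-id layout: window channels start at 1, window-immediate channels at 33, the per-head channels handled by the "other" router at 73, and the core channel is id 0 (line 202). A generic sketch of that shared pattern, assuming the types shown above:

    /* Sketch of the dispatch pattern shared by exc_win (base 1), exc_winim
     * (base 33) and the per-head part of exc_other (base 73): each set bit
     * in the status word becomes "base + bit" in channel-id space.
     */
    static void
    gv100_disp_exc_route_sketch(struct nv50_disp *disp, unsigned long stat,
                                int base)
    {
            int unit;

            for_each_set_bit(unit, &stat, disp->wndw.nr)
                    gv100_disp_exception(disp, base + unit);
    }

The real routers presumably also write the status back to hardware to acknowledge it before or while looping; that part is not visible in the matches.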
261 gv100_disp_intr_head_timing(struct nv50_disp *disp, int head)
263 struct nvkm_subdev *subdev = &disp->base.engine.subdev;
274 nvkm_disp_vblank(&disp->base, head);
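The head-timing handler (lines 261-274) is what feeds vblank events up to the common display core: when a head's vblank status bit is set, it calls nvkm_disp_vblank(). A minimal sketch, with the per-head status read and ack elided; the bit value is an assumption, not taken from the matches:

    /* Sketch: forward a head's vblank interrupt to the common disp layer.
     * stat stands in for the per-head timing status; 0x00000004 as the
     * vblank bit is an assumption.
     */
    static void
    gv100_disp_intr_head_timing_sketch(struct nv50_disp *disp, int head, u32 stat)
    {
            if (stat & 0x00000004)
                    nvkm_disp_vblank(&disp->base, head);
    }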
286 gv100_disp_intr(struct nv50_disp *disp)
288 struct nvkm_subdev *subdev = &disp->base.engine.subdev;
296 gv100_disp_intr_head_timing(disp, head);
302 gv100_disp_intr_exc_win(disp);
307 gv100_disp_intr_exc_winim(disp);
312 gv100_disp_intr_exc_other(disp);
317 gv100_disp_intr_ctrl_disp(disp);
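gv100_disp_intr() (lines 286-317) is the top-level dispatcher: it reads the master interrupt status once and fans each group of bits out to the leaf handlers matched above. A sketch of that fan-out, assuming the handlers shown; the status register offset and the exact bit assignments are assumptions used for illustration, only the call order is taken from the matches:

    /* Sketch of the top-level fan-out.  The bit positions below are
     * assumptions; only the handler call order comes from the matches.
     */
    static void
    gv100_disp_intr_sketch(struct nv50_disp *disp, u32 stat)
    {
            unsigned long heads = stat & 0x000000ff;  /* assumed: one bit per head */
            int head;

            for_each_set_bit(head, &heads, 8)
                    gv100_disp_intr_head_timing(disp, head);

            if (stat & 0x00000200)                    /* assumed bit: window exceptions */
                    gv100_disp_intr_exc_win(disp);
            if (stat & 0x00000400)                    /* assumed bit: winim exceptions */
                    gv100_disp_intr_exc_winim(disp);
            if (stat & 0x00000800)                    /* assumed bit: other exceptions */
                    gv100_disp_intr_exc_other(disp);
            if (stat & 0x00001000)                    /* assumed bit: ctrl_disp */
                    gv100_disp_intr_ctrl_disp(disp);
    }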
326 gv100_disp_fini(struct nv50_disp *disp)
328 struct nvkm_device *device = disp->base.engine.subdev.device;
333 gv100_disp_init(struct nv50_disp *disp)
335 struct nvkm_device *device = disp->base.engine.subdev.device;
355 for (i = 0; i < disp->sor.nr; i++) {
362 list_for_each_entry(head, &disp->base.head, head) {
377 for (i = 0; i < disp->wndw.nr; i++) {
394 switch (nvkm_memory_target(disp->inst->memory)) {
402 nvkm_wr32(device, 0x610014, disp->inst->addr >> 16);
409 nvkm_wr32(device, 0x611cec, disp->head.mask << 16 |
414 nvkm_wr32(device, 0x611ce8, disp->wndw.mask); /* MSK. */
418 nvkm_wr32(device, 0x611ce4, disp->wndw.mask); /* MSK. */
422 list_for_each_entry(head, &disp->base.head, head) {
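The init matches (lines 333-422) cover per-SOR, per-head and per-window setup loops, then the tail that points the display controller at its instance memory and unmasks the per-head and per-window interrupts. A sketch of that tail, assuming the register semantics implied by the /* MSK. */ comments in the matches; the aperture bits programmed in each switch case, the setup loops, and the low half of the head-mask write (truncated at line 409) are elided:

    /* Sketch of the init tail: reject unknown apertures, program the
     * instance base, then unmask head and window interrupts.  Per-case
     * aperture programming and the SOR/head/window setup loops are elided.
     */
    static int
    gv100_disp_init_tail_sketch(struct nv50_disp *disp)
    {
            struct nvkm_device *device = disp->base.engine.subdev.device;

            switch (nvkm_memory_target(disp->inst->memory)) {
            case NVKM_MEM_TARGET_VRAM:
            case NVKM_MEM_TARGET_NCOH:
            case NVKM_MEM_TARGET_HOST:
                    break;                  /* aperture selection bits elided */
            default:
                    return -EINVAL;         /* assumed error path */
            }
            nvkm_wr32(device, 0x610014, disp->inst->addr >> 16); /* instance base */

            nvkm_wr32(device, 0x611cec, disp->head.mask << 16);  /* head intr MSK, low bits elided */
            nvkm_wr32(device, 0x611ce8, disp->wndw.mask);        /* MSK, per line 414 */
            nvkm_wr32(device, 0x611ce4, disp->wndw.mask);        /* MSK, per line 418 */
            return 0;
    }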