Lines Matching refs:dev_priv

64 	dev_priv->dma_low += 8;					\
72 dev_priv->dma_low += 8;
74 static void via_cmdbuf_start(drm_via_private_t * dev_priv);
75 static void via_cmdbuf_pause(drm_via_private_t * dev_priv);
76 static void via_cmdbuf_reset(drm_via_private_t * dev_priv);
77 static void via_cmdbuf_rewind(drm_via_private_t * dev_priv);
78 static int via_wait_idle(drm_via_private_t * dev_priv);
79 static void via_pad_cache(drm_via_private_t * dev_priv, int qwords);
85 static uint32_t via_cmdbuf_space(drm_via_private_t * dev_priv)
87 uint32_t agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
88 uint32_t hw_addr = *(dev_priv->hw_addr_ptr) - agp_base;
90 return ((hw_addr <= dev_priv->dma_low) ?
91 (dev_priv->dma_high + hw_addr - dev_priv->dma_low) :
92 (hw_addr - dev_priv->dma_low));
99 static uint32_t via_cmdbuf_lag(drm_via_private_t * dev_priv)
101 uint32_t agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
102 uint32_t hw_addr = *(dev_priv->hw_addr_ptr) - agp_base;
104 return ((hw_addr <= dev_priv->dma_low) ?
105 (dev_priv->dma_low - hw_addr) :
106 (dev_priv->dma_wrap + dev_priv->dma_low - hw_addr));
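The two helpers above are the ring's basic distance calculations: via_cmdbuf_space() returns how many bytes are free between the CPU write offset (dma_low) and the hardware read pointer, while via_cmdbuf_lag() returns how far the hardware still trails the CPU. In both, agp_base (dma_offset + agpAddr) converts the hardware pointer into a buffer-relative offset. A minimal standalone sketch of the same arithmetic, with hw, low, high and wrap standing in for hw_addr, dma_low, dma_high and dma_wrap:

#include <stdint.h>

/* Free bytes between the CPU write offset and the GPU read pointer. */
static uint32_t ring_space(uint32_t hw, uint32_t low, uint32_t high)
{
	return (hw <= low) ? (high + hw - low) : (hw - low);
}

/* Bytes the GPU must still consume before it catches up with the CPU;
 * "wrap" is the offset at which the buffer last wrapped around. */
static uint32_t ring_lag(uint32_t hw, uint32_t low, uint32_t wrap)
{
	return (hw <= low) ? (low - hw) : (wrap + low - hw);
}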
114 via_cmdbuf_wait(drm_via_private_t * dev_priv, unsigned int size)
116 uint32_t agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
120 hw_addr_ptr = dev_priv->hw_addr_ptr;
121 cur_addr = dev_priv->dma_low;
145 static inline uint32_t *via_check_dma(drm_via_private_t * dev_priv,
148 if ((dev_priv->dma_low + size + 4 * CMDBUF_ALIGNMENT_SIZE) >
149 dev_priv->dma_high) {
150 via_cmdbuf_rewind(dev_priv);
152 if (via_cmdbuf_wait(dev_priv, size) != 0) {
156 return (uint32_t *) (dev_priv->dma_ptr + dev_priv->dma_low);
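via_check_dma() is the gatekeeper for every write: it rewinds the ring when the pending request would run past the end, waits until the hardware has freed enough bytes, and only then hands back a write pointer at dma_ptr + dma_low. A sketch of the overrun test, with the alignment slack passed in as a parameter rather than hard-coded (the listing uses 4 * CMDBUF_ALIGNMENT_SIZE):

#include <stdint.h>

/* True when the request plus alignment slack would overrun the ring,
 * i.e. when via_cmdbuf_rewind() must jump back to the buffer start. */
static int need_rewind(uint32_t dma_low, uint32_t size,
		       uint32_t slack, uint32_t dma_high)
{
	return dma_low + size + slack > dma_high;
}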
165 drm_via_private_t *dev_priv =
168 if (dev_priv->ring.virtual_start) {
169 via_cmdbuf_reset(dev_priv);
171 drm_core_ioremapfree(&dev_priv->ring.map, dev);
172 dev_priv->ring.virtual_start = NULL;
176 blitq = dev_priv->blit_queues + i;
185 drm_via_private_t * dev_priv,
188 if (!dev_priv || !dev_priv->mmio) {
193 if (dev_priv->ring.virtual_start != NULL) {
203 if (dev_priv->chipset == VIA_DX9_0) {
208 dev_priv->ring.map.offset = dev->agp->base + init->offset;
209 dev_priv->ring.map.size = init->size;
210 dev_priv->ring.map.type = 0;
211 dev_priv->ring.map.flags = 0;
212 dev_priv->ring.map.mtrr = 0;
214 drm_core_ioremap_wc(&dev_priv->ring.map, dev);
216 if (dev_priv->ring.map.virtual == NULL) {
223 dev_priv->ring.virtual_start = dev_priv->ring.map.virtual;
225 dev_priv->dma_ptr = dev_priv->ring.virtual_start;
226 dev_priv->dma_low = 0;
227 dev_priv->dma_high = init->size;
228 dev_priv->dma_wrap = init->size;
229 dev_priv->dma_offset = init->offset;
230 dev_priv->last_pause_ptr = NULL;
231 dev_priv->hw_addr_ptr =
232 (volatile uint32_t *)((char *)dev_priv->mmio->virtual +
235 via_cmdbuf_start(dev_priv);
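via_initialize() maps the AGP ring into the kernel and resets the software bookkeeping: the write offset starts at zero, and both dma_high and dma_wrap start at the full buffer size. A hedged sketch of that state (the struct is a reduced, illustrative stand-in for the relevant drm_via_private_t fields):

#include <stdint.h>

struct ring_state {
	char *dma_ptr;       /* CPU mapping of the ring */
	uint32_t dma_low;    /* next CPU write offset */
	uint32_t dma_high;   /* ring size in bytes */
	uint32_t dma_wrap;   /* last wrap offset, starts at the size */
	uint32_t dma_offset; /* ring offset inside the AGP aperture */
};

static void ring_init(struct ring_state *r, char *virt,
		      uint32_t size, uint32_t offset)
{
	r->dma_ptr = virt;
	r->dma_low = 0;
	r->dma_high = size;
	r->dma_wrap = size;
	r->dma_offset = offset;
}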
242 drm_via_private_t *dev_priv = (drm_via_private_t *) dev->dev_private;
251 retcode = via_initialize(dev, dev_priv, init);
260 retcode = (dev_priv->ring.virtual_start != NULL) ?
273 drm_via_private_t *dev_priv;
277 dev_priv = (drm_via_private_t *) dev->dev_private;
279 if (dev_priv->ring.virtual_start == NULL) {
288 if (DRM_COPY_FROM_USER(dev_priv->pci_buf, cmd->buf, cmd->size))
298 via_verify_command_stream((uint32_t *) dev_priv->pci_buf,
303 vb = via_check_dma(dev_priv, (cmd->size < 0x100) ? 0x102 : cmd->size);
308 memcpy(vb, dev_priv->pci_buf, cmd->size);
310 dev_priv->dma_low += cmd->size;
318 via_pad_cache(dev_priv, (0x100 - cmd->size) >> 3);
319 via_cmdbuf_pause(dev_priv);
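The PCI command path above copies the user buffer through pci_buf, verifies it, and then pads anything shorter than 0x100 bytes: the reservation asks for slightly more than the padded size (0x102 bytes), and via_pad_cache() fills (0x100 - cmd->size) >> 3 quadwords. A small sketch of that padding calculation (the minimum-size-burst rationale is inferred from the via_pad_cache() name, not stated in the listing):

#include <stdint.h>

/* Quadwords (8-byte units) needed to bring a short command buffer up
 * to 0x100 bytes, matching (0x100 - cmd->size) >> 3 above. */
static unsigned int pad_qwords(unsigned int cmd_size)
{
	return cmd_size < 0x100 ? (0x100 - cmd_size) >> 3 : 0;
}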
326 drm_via_private_t *dev_priv = dev->dev_private;
328 if (!via_wait_idle(dev_priv)) {
362 drm_via_private_t *dev_priv = dev->dev_private;
368 if (DRM_COPY_FROM_USER(dev_priv->pci_buf, cmd->buf, cmd->size))
372 via_verify_command_stream((uint32_t *) dev_priv->pci_buf,
378 via_parse_command_stream(dev, (const uint32_t *)dev_priv->pci_buf,
400 static inline uint32_t *via_align_buffer(drm_via_private_t * dev_priv,
414 static inline uint32_t *via_get_dma(drm_via_private_t * dev_priv)
416 return (uint32_t *) (dev_priv->dma_ptr + dev_priv->dma_low);
424 static int via_hook_segment(drm_via_private_t * dev_priv,
429 volatile uint32_t *paused_at = dev_priv->last_pause_ptr;
435 (void) *(volatile uint32_t *)(via_get_dma(dev_priv) - 1);
441 reader = *(dev_priv->hw_addr_ptr);
442 ptr = ((volatile char *)paused_at - dev_priv->dma_ptr) +
443 dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr + 4;
445 dev_priv->last_pause_ptr = via_get_dma(dev_priv) - 1;
454 diff = (uint32_t) (ptr - reader) - dev_priv->dma_diff;
460 reader = *(dev_priv->hw_addr_ptr);
461 diff = (uint32_t) (ptr - reader) - dev_priv->dma_diff;
467 reader = *(dev_priv->hw_addr_ptr);
468 diff = (uint32_t) (ptr - reader) - dev_priv->dma_diff;
469 diff &= (dev_priv->dma_high - 1);
470 if (diff != 0 && diff < (dev_priv->dma_high >> 1)) {
473 ptr, reader, dev_priv->dma_diff);
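via_hook_segment() patches a previously emitted pause/jump command and then checks where the engine actually stopped. The masking with (dma_high - 1) assumes dma_high is a power of two, folding the pointer difference back into the ring; a small nonzero remainder means the engine paused somewhere other than the expected command, which is what triggers the DRM_ERROR whose arguments appear at source line 473. A standalone sketch of that test:

#include <stdint.h>

/* True when the engine's reported read address (reader) is a small
 * nonzero ring distance away from the expected pause address (ptr),
 * after subtracting the calibrated constant offset dma_diff. */
static int paused_at_wrong_addr(uint32_t ptr, uint32_t reader,
				uint32_t dma_diff, uint32_t dma_high)
{
	uint32_t diff = (uint32_t)(ptr - reader) - dma_diff;

	diff &= dma_high - 1;
	return diff != 0 && diff < (dma_high >> 1);
}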
490 static int via_wait_idle(drm_via_private_t * dev_priv)
504 static uint32_t *via_align_cmd(drm_via_private_t * dev_priv, uint32_t cmd_type,
514 via_cmdbuf_wait(dev_priv, 2 * CMDBUF_ALIGNMENT_SIZE);
516 vb = via_get_dma(dev_priv);
519 agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
521 ((dev_priv->dma_low & CMDBUF_ALIGNMENT_MASK) >> 3);
524 agp_base + dev_priv->dma_low - 8 + (qw_pad_count << 3);
529 vb = via_align_buffer(dev_priv, vb, qw_pad_count - 1);
534 static void via_cmdbuf_start(drm_via_private_t * dev_priv)
545 dev_priv->dma_low = 0;
547 agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
549 end_addr = agp_base + dev_priv->dma_high;
556 dev_priv->last_pause_ptr =
557 via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0,
561 (void) *(volatile uint32_t *)dev_priv->last_pause_ptr;
574 dev_priv->dma_diff = 0;
579 reader = *(dev_priv->hw_addr_ptr);
580 ptr = ((volatile char *)dev_priv->last_pause_ptr - dev_priv->dma_ptr) +
581 dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr + 4;
590 dev_priv->dma_diff = ptr - reader;
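via_cmdbuf_start() ends by calibrating dma_diff: it computes the AGP address of the pause command it just emitted (ptr) and reads back the address the hardware reports (reader); the constant difference is stored and subtracted by every later distance check. A trivial standalone rendering:

#include <stdint.h>

/* One-time calibration: the hardware reports read addresses with a
 * constant offset from the CPU-computed AGP addresses. */
static uint32_t calibrate_dma_diff(uint32_t ptr, uint32_t reader)
{
	return ptr - reader;
}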
593 static void via_pad_cache(drm_via_private_t * dev_priv, int qwords)
597 via_cmdbuf_wait(dev_priv, qwords + 2);
598 vb = via_get_dma(dev_priv);
600 via_align_buffer(dev_priv, vb, qwords);
603 static inline void via_dummy_bitblt(drm_via_private_t * dev_priv)
605 uint32_t *vb = via_get_dma(dev_priv);
611 static void via_cmdbuf_jump(drm_via_private_t * dev_priv)
619 agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr;
620 via_align_cmd(dev_priv, HC_HAGPBpID_JUMP, 0, &jump_addr_hi,
623 dev_priv->dma_wrap = dev_priv->dma_low;
629 dev_priv->dma_low = 0;
630 if (via_cmdbuf_wait(dev_priv, CMDBUF_ALIGNMENT_SIZE) != 0) {
634 via_dummy_bitblt(dev_priv);
635 via_dummy_bitblt(dev_priv);
638 via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0, &pause_addr_hi,
640 via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0, &pause_addr_hi,
644 dma_low_save1 = dev_priv->dma_low;
656 via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0, &pause_addr_hi,
658 via_align_cmd(dev_priv, HC_HAGPBpID_PAUSE, 0, &pause_addr_hi,
662 dma_low_save2 = dev_priv->dma_low;
663 dev_priv->dma_low = dma_low_save1;
664 via_hook_segment(dev_priv, jump_addr_hi, jump_addr_lo, 0);
665 dev_priv->dma_low = dma_low_save2;
666 via_hook_segment(dev_priv, pause_addr_hi, pause_addr_lo, 0);
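via_cmdbuf_jump() wraps the ring: it records the wrap point in dma_wrap, restarts writing at offset 0, then hooks two commands at saved positions, the JUMP at the old buffer end (dma_low_save1) and the trailing PAUSE at the new tail (dma_low_save2), by resetting dma_low before each via_hook_segment() call. A hypothetical helper (not in the driver) that captures the rewind-emit-restore idiom:

#include <stdint.h>

/* Rewind the software tail to a previously saved offset, emit or patch
 * a command there, then restore the real tail.  via_cmdbuf_jump()
 * above inlines this pattern twice, once per saved offset. */
static void emit_at(uint32_t *dma_low, uint32_t pos,
		    void (*emit)(void *ctx), void *ctx)
{
	uint32_t saved = *dma_low;

	*dma_low = pos;
	emit(ctx);
	*dma_low = saved;
}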
670 static void via_cmdbuf_rewind(drm_via_private_t * dev_priv)
672 via_cmdbuf_jump(dev_priv);
675 static void via_cmdbuf_flush(drm_via_private_t * dev_priv, uint32_t cmd_type)
679 via_align_cmd(dev_priv, cmd_type, 0, &pause_addr_hi, &pause_addr_lo, 0);
680 via_hook_segment(dev_priv, pause_addr_hi, pause_addr_lo, 0);
683 static void via_cmdbuf_pause(drm_via_private_t * dev_priv)
685 via_cmdbuf_flush(dev_priv, HC_HAGPBpID_PAUSE);
688 static void via_cmdbuf_reset(drm_via_private_t * dev_priv)
690 via_cmdbuf_flush(dev_priv, HC_HAGPBpID_STOP);
691 via_wait_idle(dev_priv);
703 drm_via_private_t *dev_priv;
708 dev_priv = (drm_via_private_t *) dev->dev_private;
710 if (dev_priv->ring.virtual_start == NULL) {
719 while (((tmp_size = via_cmdbuf_space(dev_priv)) < d_siz->size)
731 while (((tmp_size = via_cmdbuf_lag(dev_priv)) > d_siz->size)
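The via_cmdbuf_size ioctl at the bottom exposes both distance functions to user space: one loop spins until via_cmdbuf_space() reports at least d_siz->size free bytes, the other until via_cmdbuf_lag() drops to d_siz->size or below (presumably the VIA_CMDBUF_SPACE and VIA_CMDBUF_LAG request modes; the listing elides the switch). A sketch of the shared polling pattern, with the timeout bound the driver also tests folded into a count:

#include <stdint.h>

/* Poll a ring-distance function until the wait condition holds or the
 * bounded count expires.  want_below selects between the two modes:
 * lag <= size (want_below) versus free space >= size. */
static uint32_t poll_until(uint32_t (*dist)(void *ring), void *ring,
			   int want_below, uint32_t size, int count)
{
	uint32_t d = dist(ring);

	while ((want_below ? d > size : d < size) && --count > 0)
		d = dist(ring);
	return d;
}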