Lines matching refs: dev_priv

41 savage_bci_wait_fifo_shadow(drm_savage_private_t *dev_priv, unsigned int n)
43 uint32_t mask = dev_priv->status_used_mask;
44 uint32_t threshold = dev_priv->bci_threshold_hi;
49 if (n > dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - threshold)
56 status = dev_priv->status_ptr[0];
70 savage_bci_wait_fifo_s3d(drm_savage_private_t *dev_priv, unsigned int n)
72 uint32_t maxUsed = dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - n;
91 savage_bci_wait_fifo_s4(drm_savage_private_t *dev_priv, unsigned int n)
93 uint32_t maxUsed = dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - n;
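
The three FIFO waits (lines 41-93) all spin until the command FIFO plus COB can absorb n more entries. The shadow variant polls word 0 of a status page the chip writes back to system memory and compares against a precomputed high-water threshold; the s3d/s4 variants read the chip's status register and derive the limit from n itself. A user-space model of both checks (COB_SIZE, FIFO_SIZE, and the callback are stand-ins, not the driver's values):

#include <stdint.h>

/* Illustrative sizes only; the driver gets the real COB size from the
 * X server at init time (init->cob_size). */
#define COB_SIZE   0x400u
#define FIFO_SIZE  256u

/* Shadow path: poll the mirrored status word against a fixed
 * high-water threshold. */
static int wait_fifo_shadow(volatile const uint32_t *status_page,
                            uint32_t used_mask, uint32_t threshold,
                            unsigned int n, unsigned int timeout)
{
        /* The threshold only guarantees COB_SIZE + FIFO_SIZE - threshold
         * free entries (cf. the check at line 49). */
        if (n > COB_SIZE + FIFO_SIZE - threshold)
                return -1;
        while (timeout--) {
                if ((status_page[0] & used_mask) < threshold)
                        return 0;
        }
        return -1;
}

/* Direct path (s3d/s4): no fixed threshold; compute the largest
 * tolerable occupancy from the request size and poll the status
 * register, modeled here by a callback. */
static int wait_fifo_direct(uint32_t (*read_status)(void),
                            uint32_t used_mask, unsigned int n,
                            unsigned int timeout)
{
        uint32_t max_used = COB_SIZE + FIFO_SIZE - n;

        while (timeout--) {
                if ((read_status() & used_mask) <= max_used)
                        return 0;
        }
        return -1;
}
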
123 savage_bci_wait_event_shadow(drm_savage_private_t *dev_priv, uint16_t e)
130 status = dev_priv->status_ptr[1];
146 savage_bci_wait_event_reg(drm_savage_private_t *dev_priv, uint16_t e)
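
The two event waits (lines 123-146) differ only in where the current 16-bit count comes from: word 1 of the shadow status page versus a chip register. Either way the loop spins until the count passes the target. A sketch of the comparison, using a signed 16-bit difference in place of the driver's masked arithmetic:

#include <stdint.h>

/* Has the free-running 16-bit hardware count reached target e?  A
 * signed 16-bit difference is correct across a single wrap; full
 * wrap-arounds are tracked separately in event_wrap (see below). */
static int event_passed(uint16_t hw, uint16_t e)
{
        return (int16_t)(hw - e) >= 0;
}

/* Shadow variant: the current count lives in the low half of
 * status_ptr[1]; the register variant only reads elsewhere. */
static int wait_event_shadow(volatile const uint32_t *status_page,
                             uint16_t e, unsigned int timeout)
{
        while (timeout--) {
                if (event_passed(status_page[1] & 0xffff, e))
                        return 0;
        }
        return -1;
}
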
167 uint16_t savage_bci_emit_event(drm_savage_private_t *dev_priv,
173 if (dev_priv->status_ptr) {
175 count = dev_priv->status_ptr[1023];
176 if (count < dev_priv->event_counter)
177 dev_priv->event_wrap++;
179 count = dev_priv->event_counter;
184 dev_priv->event_wrap++;
186 dev_priv->event_counter = count;
187 if (dev_priv->status_ptr)
188 dev_priv->status_ptr[1023] = (uint32_t)count;
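
savage_bci_emit_event (lines 167-188) advances a 16-bit event counter and counts its wrap-arounds in event_wrap; slot 1023 of the status page is shared with the X server, so a value that went backwards means the server wrapped the counter in the meantime, and count 0 is skipped (the driver reserves it). A self-contained model of the counter update, with the actual BCI emission omitted:

#include <stdint.h>

struct event_state {
        uint16_t counter;   /* last event number emitted */
        unsigned wrap;      /* times the 16-bit counter has wrapped */
};

/* shared_slot mirrors status_ptr[1023]: the X server bumps the same
 * counter, so resync from it and detect wraps it caused. */
static uint16_t emit_event(struct event_state *s, uint32_t *shared_slot)
{
        uint16_t count;

        if (shared_slot) {
                count = (uint16_t)*shared_slot;
                if (count < s->counter)   /* someone else wrapped it */
                        s->wrap++;
        } else {
                count = s->counter;
        }
        count = (count + 1) & 0xffff;
        if (count == 0) {                 /* 0 is reserved; skip it */
                count++;
                s->wrap++;
        }
        s->counter = count;
        if (shared_slot)
                *shared_slot = count;
        return count;
}
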
211 drm_savage_private_t *dev_priv = dev->dev_private;
218 dev_priv->head.next = &dev_priv->tail;
219 dev_priv->head.prev = NULL;
220 dev_priv->head.buf = NULL;
222 dev_priv->tail.next = NULL;
223 dev_priv->tail.prev = &dev_priv->head;
224 dev_priv->tail.buf = NULL;
233 entry->next = dev_priv->head.next;
234 entry->prev = &dev_priv->head;
235 dev_priv->head.next->prev = entry;
236 dev_priv->head.next = entry;
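
The buffer freelist (lines 211-236) is a doubly linked list bracketed by head and tail sentinel entries, so insertions and removals never have to NULL-check their neighbours. The same structure, standalone:

#include <stddef.h>

struct buf_entry {
        struct buf_entry *next, *prev;
        void *buf;                 /* NULL in the two sentinels */
};

struct freelist {
        struct buf_entry head, tail;
};

static void freelist_init(struct freelist *fl)
{
        fl->head.next = &fl->tail;
        fl->head.prev = NULL;
        fl->head.buf  = NULL;
        fl->tail.next = NULL;
        fl->tail.prev = &fl->head;
        fl->tail.buf  = NULL;
}

/* Insert at the head: newest buffers first, oldest drain from the tail. */
static void freelist_add(struct freelist *fl, struct buf_entry *e)
{
        e->next = fl->head.next;
        e->prev = &fl->head;
        fl->head.next->prev = e;
        fl->head.next = e;
}
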
244 drm_savage_private_t *dev_priv = dev->dev_private;
245 drm_savage_buf_priv_t *tail = dev_priv->tail.prev;
251 if (dev_priv->status_ptr)
252 event = dev_priv->status_ptr[1] & 0xffff;
255 wrap = dev_priv->event_wrap;
256 if (event > dev_priv->event_counter)
277 drm_savage_private_t *dev_priv = dev->dev_private;
287 prev = &dev_priv->head;
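
savage_freelist_get (lines 244-256) only recycles a buffer once the hardware's event counter has passed the (event, wrap) age stamped on it; when the raw 16-bit count runs ahead of the software counter, the hardware has not yet crossed the most recent wrap, so the comparison wrap is decremented first. A standalone sketch of that age logic (the struct and function names are mine, standing in for the driver's TEST_AGE macro):

#include <stdint.h>

struct age {
        uint16_t event;   /* 16-bit event count at emit time */
        unsigned wrap;    /* software wrap count at emit time */
};

/* Has the hardware, now at (event, wrap), passed this age? */
static int age_passed(const struct age *a, uint16_t event, unsigned wrap)
{
        return a->wrap < wrap || (a->wrap == wrap && a->event <= event);
}

/* Deriving the comparison point, as in savage_freelist_get: a raw
 * count ahead of the software counter means the hardware has not
 * crossed the last wrap yet. */
static void current_age(uint16_t hw_event, uint16_t sw_counter,
                        unsigned sw_wrap, uint16_t *event, unsigned *wrap)
{
        *event = hw_event;
        *wrap = sw_wrap;
        if (hw_event > sw_counter)
                (*wrap)--;
}
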
298 static int savage_dma_init(drm_savage_private_t *dev_priv)
302 dev_priv->nr_dma_pages = dev_priv->cmd_dma->size /
304 dev_priv->dma_pages = drm_alloc(sizeof(drm_savage_dma_page_t) *
305 dev_priv->nr_dma_pages, DRM_MEM_DRIVER);
306 if (dev_priv->dma_pages == NULL)
309 for (i = 0; i < dev_priv->nr_dma_pages; ++i) {
310 SET_AGE(&dev_priv->dma_pages[i].age, 0, 0);
311 dev_priv->dma_pages[i].used = 0;
312 dev_priv->dma_pages[i].flushed = 0;
314 SET_AGE(&dev_priv->last_dma_age, 0, 0);
316 dev_priv->first_dma_page = 0;
317 dev_priv->current_dma_page = 0;
322 void savage_dma_reset(drm_savage_private_t *dev_priv)
326 event = savage_bci_emit_event(dev_priv, 0);
327 wrap = dev_priv->event_wrap;
328 for (i = 0; i < dev_priv->nr_dma_pages; ++i) {
329 SET_AGE(&dev_priv->dma_pages[i].age, event, wrap);
330 dev_priv->dma_pages[i].used = 0;
331 dev_priv->dma_pages[i].flushed = 0;
333 SET_AGE(&dev_priv->last_dma_age, event, wrap);
334 dev_priv->first_dma_page = dev_priv->current_dma_page = 0;
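
savage_dma_init and savage_dma_reset (lines 298-334) carve the command-DMA map into fixed-size pages, each carrying used/flushed word counts and an (event, wrap) age; reset stamps every page with a freshly emitted event so none is reused before the hardware drains. A user-space model of the bookkeeping (DMA_PAGE_SIZE and the struct layout are illustrative, not the driver's):

#include <stdint.h>
#include <stdlib.h>

#define DMA_PAGE_SIZE 1024u   /* words per page; illustrative only */

struct dma_page {
        struct { uint16_t event; unsigned wrap; } age;
        unsigned used;      /* words written into this page */
        unsigned flushed;   /* words already handed to the hardware */
};

struct dma_state {
        struct dma_page *pages;
        unsigned nr_pages;
        unsigned first, current;   /* window of pages still in flight */
};

static int dma_init(struct dma_state *d, size_t dma_bytes)
{
        d->nr_pages = dma_bytes / (DMA_PAGE_SIZE * 4);
        d->pages = calloc(d->nr_pages, sizeof(*d->pages));
        if (!d->pages)
                return -1;
        d->first = d->current = 0;   /* calloc: all pages idle, age 0 */
        return 0;
}

/* Reset: stamp every page with a just-emitted fence, so nothing is
 * reused until the hardware has drained all earlier commands. */
static void dma_reset(struct dma_state *d, uint16_t event, unsigned wrap)
{
        for (unsigned i = 0; i < d->nr_pages; ++i) {
                d->pages[i].age.event = event;
                d->pages[i].age.wrap = wrap;
                d->pages[i].used = d->pages[i].flushed = 0;
        }
        d->first = d->current = 0;
}
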
337 void savage_dma_wait(drm_savage_private_t *dev_priv, unsigned int page)
343 if (dev_priv->cmd_dma == &dev_priv->fake_dma)
347 if (dev_priv->status_ptr)
348 event = dev_priv->status_ptr[1] & 0xffff;
351 wrap = dev_priv->event_wrap;
352 if (event > dev_priv->event_counter)
355 if (dev_priv->dma_pages[page].age.wrap > wrap ||
356 (dev_priv->dma_pages[page].age.wrap == wrap &&
357 dev_priv->dma_pages[page].age.event > event)) {
358 if (dev_priv->wait_evnt(dev_priv,
359 dev_priv->dma_pages[page].age.event)
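
savage_dma_wait (lines 337-359) blocks on a page only when its recorded age still lies in the future of the current (event, wrap) position, i.e. when the fence stamped on the page has not retired yet. Condensed, with wait_for_event standing in for the dev_priv->wait_evnt hook:

#include <stdint.h>

struct age { uint16_t event; unsigned wrap; };

static int dma_wait_page(const struct age *page_age,
                         uint16_t event, unsigned wrap,
                         int (*wait_for_event)(uint16_t))
{
        /* Exact negation of the reuse test in savage_freelist_get. */
        if (page_age->wrap > wrap ||
            (page_age->wrap == wrap && page_age->event > event))
                return wait_for_event(page_age->event);
        return 0;   /* already retired; no wait needed */
}
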
365 uint32_t *savage_dma_alloc(drm_savage_private_t *dev_priv, unsigned int n)
367 unsigned int cur = dev_priv->current_dma_page;
369 dev_priv->dma_pages[cur].used;
376 cur, dev_priv->dma_pages[cur].used, n, rest, nr_pages);
378 if (cur + nr_pages < dev_priv->nr_dma_pages) {
379 dma_ptr = (uint32_t *)dev_priv->cmd_dma->virtual +
380 cur * SAVAGE_DMA_PAGE_SIZE + dev_priv->dma_pages[cur].used;
383 dev_priv->dma_pages[cur].used += rest;
387 dev_priv->dma_flush(dev_priv);
390 for (i = cur; i < dev_priv->nr_dma_pages; ++i) {
391 dev_priv->dma_pages[i].age = dev_priv->last_dma_age;
392 dev_priv->dma_pages[i].used = 0;
393 dev_priv->dma_pages[i].flushed = 0;
395 dma_ptr = (uint32_t *)dev_priv->cmd_dma->virtual;
396 dev_priv->first_dma_page = cur = 0;
400 if (dev_priv->dma_pages[i].used) {
402 i, dev_priv->dma_pages[i].used);
406 dev_priv->dma_pages[i].used = SAVAGE_DMA_PAGE_SIZE;
408 dev_priv->dma_pages[i].used = n;
411 dev_priv->current_dma_page = --i;
414 i, dev_priv->dma_pages[i].used, n);
416 savage_dma_wait(dev_priv, dev_priv->current_dma_page);
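
savage_dma_alloc (lines 365-416) hands back a pointer to n contiguous words: it continues in the current page when the request fits before the end of the buffer, and otherwise flushes and wraps to page 0. A compact model of just the placement arithmetic, with the flush/wait plumbing elided (the names and the explicit underflow-free branch are mine):

#include <stdint.h>

#define DMA_PAGE_SIZE 1024u   /* words per page; illustrative */

struct placement {
        unsigned start_page;   /* page the allocation begins in */
        unsigned start_word;   /* offset within that page */
        unsigned extra_pages;  /* fresh pages needed past the rest */
        int needs_wrap;        /* must flush and restart at page 0 */
};

static struct placement place(unsigned cur, unsigned cur_used,
                              unsigned nr_pages, unsigned n)
{
        struct placement p = { cur, cur_used, 0, 0 };
        unsigned rest = DMA_PAGE_SIZE - cur_used;

        /* Ceiling division for the part that spills past this page. */
        if (n > rest)
                p.extra_pages = (n - rest + DMA_PAGE_SIZE - 1) /
                                DMA_PAGE_SIZE;

        if (cur + p.extra_pages >= nr_pages) {
                /* Doesn't fit before the end of the buffer: flush
                 * everything and start over from page 0. */
                p.needs_wrap = 1;
                p.start_page = 0;
                p.start_word = 0;
                p.extra_pages = (n + DMA_PAGE_SIZE - 1) / DMA_PAGE_SIZE;
        }
        return p;
}
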
421 static void savage_dma_flush(drm_savage_private_t *dev_priv)
423 unsigned int first = dev_priv->first_dma_page;
424 unsigned int cur = dev_priv->current_dma_page;
431 dev_priv->dma_pages[cur].used == dev_priv->dma_pages[cur].flushed)
436 pad = -dev_priv->dma_pages[cur].used & 1;
437 align = -(dev_priv->dma_pages[cur].used + pad) & 7;
441 first, cur, dev_priv->dma_pages[first].flushed,
442 dev_priv->dma_pages[cur].used, pad, align);
446 uint32_t *dma_ptr = (uint32_t *)dev_priv->cmd_dma->virtual +
447 cur * SAVAGE_DMA_PAGE_SIZE + dev_priv->dma_pages[cur].used;
448 dev_priv->dma_pages[cur].used += pad;
458 phys_addr = dev_priv->cmd_dma->offset +
460 dev_priv->dma_pages[first].flushed) * 4;
462 dev_priv->dma_pages[cur].used - dev_priv->dma_pages[first].flushed;
465 phys_addr | dev_priv->dma_type, len);
469 BCI_WRITE(phys_addr | dev_priv->dma_type);
473 dev_priv->dma_pages[cur].used += align;
476 event = savage_bci_emit_event(dev_priv, 0);
477 wrap = dev_priv->event_wrap;
479 SET_AGE(&dev_priv->dma_pages[i].age, event, wrap);
480 dev_priv->dma_pages[i].used = 0;
481 dev_priv->dma_pages[i].flushed = 0;
484 if (dev_priv->dma_pages[cur].used == SAVAGE_DMA_PAGE_SIZE) {
485 SET_AGE(&dev_priv->dma_pages[cur].age, event, wrap);
486 dev_priv->dma_pages[cur].used = 0;
487 dev_priv->dma_pages[cur].flushed = 0;
490 if (cur == dev_priv->nr_dma_pages)
492 dev_priv->first_dma_page = dev_priv->current_dma_page = cur;
494 dev_priv->first_dma_page = cur;
495 dev_priv->dma_pages[cur].flushed = dev_priv->dma_pages[i].used;
497 SET_AGE(&dev_priv->last_dma_age, event, wrap);
500 dev_priv->dma_pages[cur].used,
501 dev_priv->dma_pages[cur].flushed);
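
savage_dma_flush (lines 421-501) pads the pending words to an even count and then aligns the start of the next DMA block to 8 words, using the two's-complement identity at lines 436-437: for a power of two m, -x & (m - 1) is exactly the fill needed to round x up to a multiple of m. A quick standalone demonstration:

#include <stdio.h>

int main(void)
{
        for (unsigned used = 0; used < 10; ++used) {
                unsigned pad   = -used & 1;          /* round up to even */
                unsigned align = -(used + pad) & 7;  /* then to 8 words */
                printf("used=%u pad=%u align=%u -> next block at %u\n",
                       used, pad, align, used + pad + align);
        }
        return 0;
}
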
504 static void savage_fake_dma_flush(drm_savage_private_t *dev_priv)
509 if (dev_priv->first_dma_page == dev_priv->current_dma_page &&
510 dev_priv->dma_pages[dev_priv->current_dma_page].used == 0)
514 dev_priv->first_dma_page, dev_priv->current_dma_page,
515 dev_priv->dma_pages[dev_priv->current_dma_page].used);
517 for (i = dev_priv->first_dma_page;
518 i <= dev_priv->current_dma_page && dev_priv->dma_pages[i].used;
520 uint32_t *dma_ptr = (uint32_t *)dev_priv->cmd_dma->virtual +
524 if (i < dev_priv->current_dma_page &&
525 dev_priv->dma_pages[i].used != SAVAGE_DMA_PAGE_SIZE) {
527 i, dev_priv->dma_pages[i].used);
530 BEGIN_BCI(dev_priv->dma_pages[i].used);
531 for (j = 0; j < dev_priv->dma_pages[i].used; ++j) {
534 dev_priv->dma_pages[i].used = 0;
538 dev_priv->first_dma_page = dev_priv->current_dma_page = 0;
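
With no real DMA channel, savage_fake_dma_flush (lines 504-538) drains the fake buffer by pushing each page's words through the BCI port one at a time, which is what the BEGIN_BCI/BCI_WRITE pair at lines 530-531 amounts to. The same shape as a plain function, with write_bci standing in for the driver's BCI_WRITE onto the register window:

#include <stdint.h>

static void fake_flush_page(const uint32_t *page_words, unsigned used,
                            void (*write_bci)(uint32_t))
{
        for (unsigned j = 0; j < used; ++j)
                write_bci(page_words[j]);
}
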
543 drm_savage_private_t *dev_priv;
545 dev_priv = drm_alloc(sizeof(drm_savage_private_t), DRM_MEM_DRIVER);
546 if (dev_priv == NULL)
549 memset(dev_priv, 0, sizeof(drm_savage_private_t));
550 dev->dev_private = (void *)dev_priv;
552 dev_priv->chipset = (enum savage_family)chipset;
565 drm_savage_private_t *dev_priv = dev->dev_private;
574 dev_priv->mtrr[0].handle = -1;
575 dev_priv->mtrr[1].handle = -1;
576 dev_priv->mtrr[2].handle = -1;
577 if (S3_SAVAGE3D_SERIES(dev_priv->chipset)) {
588 dev_priv->mtrr[0].base = fb_base;
589 dev_priv->mtrr[0].size = 0x01000000;
590 dev_priv->mtrr[0].handle =
591 drm_mtrr_add(dev_priv->mtrr[0].base,
592 dev_priv->mtrr[0].size, DRM_MTRR_WC);
593 dev_priv->mtrr[1].base = fb_base + 0x02000000;
594 dev_priv->mtrr[1].size = 0x02000000;
595 dev_priv->mtrr[1].handle =
596 drm_mtrr_add(dev_priv->mtrr[1].base,
597 dev_priv->mtrr[1].size, DRM_MTRR_WC);
598 dev_priv->mtrr[2].base = fb_base + 0x04000000;
599 dev_priv->mtrr[2].size = 0x04000000;
600 dev_priv->mtrr[2].handle =
601 drm_mtrr_add(dev_priv->mtrr[2].base,
602 dev_priv->mtrr[2].size, DRM_MTRR_WC);
607 } else if (dev_priv->chipset != S3_SUPERSAVAGE &&
608 dev_priv->chipset != S3_SAVAGE2000) {
619 dev_priv->mtrr[0].base = fb_base;
620 dev_priv->mtrr[0].size = 0x08000000;
621 dev_priv->mtrr[0].handle =
622 drm_mtrr_add(dev_priv->mtrr[0].base,
623 dev_priv->mtrr[0].size, DRM_MTRR_WC);
639 _DRM_READ_ONLY, &dev_priv->mmio);
644 _DRM_WRITE_COMBINING, &dev_priv->fb);
650 &dev_priv->aperture);
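
The MTRR setup in savage_driver_firstopen (lines 574-623) honours the x86 rule that an MTRR must cover a power-of-two size aligned to that size: on Savage3D-series chips the write-combined span is built from 16 + 32 + 64 MB pieces, leaving [base + 16 MB, base + 32 MB) uncovered (presumably the MMIO window, which must not be write-combined), while the later chips get a single 128 MB range. A helper that checks those constraints (illustrative, not driver code):

#include <stdint.h>

/* An MTRR range is valid iff its size is a power of two and its base
 * is aligned to that size. */
static int mtrr_range_ok(uint64_t base, uint64_t size)
{
        return size && (size & (size - 1)) == 0 &&
               (base & (size - 1)) == 0;
}

/* The Savage3D carving, assuming fb_base is 128 MB aligned:
 *   [fb_base + 0x00000000, +16 MB)  write-combined
 *   [fb_base + 0x01000000, +16 MB)  hole, left uncached
 *   [fb_base + 0x02000000, +32 MB)  write-combined
 *   [fb_base + 0x04000000, +64 MB)  write-combined
 * Each covered piece individually satisfies mtrr_range_ok(). */
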
662 drm_savage_private_t *dev_priv = dev->dev_private;
666 if (dev_priv->mtrr[i].handle >= 0)
667 drm_mtrr_del(dev_priv->mtrr[i].handle,
668 dev_priv->mtrr[i].base,
669 dev_priv->mtrr[i].size, DRM_MTRR_WC);
674 drm_savage_private_t *dev_priv = dev->dev_private;
676 drm_free(dev_priv, sizeof(drm_savage_private_t), DRM_MEM_DRIVER);
683 drm_savage_private_t *dev_priv = dev->dev_private;
699 dev_priv->cob_size = init->cob_size;
700 dev_priv->bci_threshold_lo = init->bci_threshold_lo;
701 dev_priv->bci_threshold_hi = init->bci_threshold_hi;
702 dev_priv->dma_type = init->dma_type;
704 dev_priv->fb_bpp = init->fb_bpp;
705 dev_priv->front_offset = init->front_offset;
706 dev_priv->front_pitch = init->front_pitch;
707 dev_priv->back_offset = init->back_offset;
708 dev_priv->back_pitch = init->back_pitch;
709 dev_priv->depth_bpp = init->depth_bpp;
710 dev_priv->depth_offset = init->depth_offset;
711 dev_priv->depth_pitch = init->depth_pitch;
713 dev_priv->texture_offset = init->texture_offset;
714 dev_priv->texture_size = init->texture_size;
716 dev_priv->sarea = drm_getsarea(dev);
717 if (!dev_priv->sarea) {
723 dev_priv->status = drm_core_findmap(dev, init->status_offset);
724 if (!dev_priv->status) {
730 dev_priv->status = NULL;
732 if (dev_priv->dma_type == SAVAGE_DMA_AGP && init->buffers_offset) {
749 dev_priv->agp_textures =
751 if (!dev_priv->agp_textures) {
757 dev_priv->agp_textures = NULL;
761 if (S3_SAVAGE3D_SERIES(dev_priv->chipset)) {
773 dev_priv->cmd_dma = drm_core_findmap(dev, init->cmd_dma_offset);
774 if (!dev_priv->cmd_dma) {
779 if (dev_priv->dma_type == SAVAGE_DMA_AGP) {
780 if (dev_priv->cmd_dma->type != _DRM_AGP) {
786 drm_core_ioremap(dev_priv->cmd_dma, dev);
787 if (!dev_priv->cmd_dma->virtual) {
793 } else if (dev_priv->cmd_dma->type != _DRM_CONSISTENT) {
800 dev_priv->cmd_dma = NULL;
803 dev_priv->dma_flush = savage_dma_flush;
804 if (!dev_priv->cmd_dma) {
806 dev_priv->fake_dma.offset = 0;
807 dev_priv->fake_dma.size = SAVAGE_FAKE_DMA_SIZE;
808 dev_priv->fake_dma.type = _DRM_SHM;
809 dev_priv->fake_dma.virtual = drm_alloc(SAVAGE_FAKE_DMA_SIZE,
811 if (!dev_priv->fake_dma.virtual) {
816 dev_priv->cmd_dma = &dev_priv->fake_dma;
817 dev_priv->dma_flush = savage_fake_dma_flush;
820 dev_priv->sarea_priv =
821 (drm_savage_sarea_t *)((uint8_t *)dev_priv->sarea->virtual +
829 if (dev_priv->chipset <= S3_SAVAGE4) {
830 color_tile_format = dev_priv->fb_bpp == 16 ?
832 depth_tile_format = dev_priv->depth_bpp == 16 ?
838 front_stride = dev_priv->front_pitch / (dev_priv->fb_bpp / 8);
839 back_stride = dev_priv->back_pitch / (dev_priv->fb_bpp / 8);
841 dev_priv->depth_pitch / (dev_priv->depth_bpp / 8);
843 dev_priv->front_bd = front_stride | SAVAGE_BD_BW_DISABLE |
844 (dev_priv->fb_bpp << SAVAGE_BD_BPP_SHIFT) |
847 dev_priv->back_bd = back_stride | SAVAGE_BD_BW_DISABLE |
848 (dev_priv->fb_bpp << SAVAGE_BD_BPP_SHIFT) |
851 dev_priv->depth_bd = depth_stride | SAVAGE_BD_BW_DISABLE |
852 (dev_priv->depth_bpp << SAVAGE_BD_BPP_SHIFT) |
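
Lines 829-852 derive the hardware buffer descriptors: the pitch in bytes is converted to a stride in pixels, then packed with bits-per-pixel and tile-format fields. A model of the packing with placeholder shift values (the real SAVAGE_BD_* constants live in the driver headers and are not reproduced here):

#include <stdint.h>

#define BD_BPP_SHIFT   16           /* placeholder field positions */
#define BD_TILE_SHIFT  24
#define BD_BW_DISABLE  (1u << 28)

static uint32_t make_bd(unsigned pitch_bytes, unsigned bpp,
                        unsigned tile_format)
{
        unsigned stride = pitch_bytes / (bpp / 8);  /* pitch in pixels */

        return stride | BD_BW_DISABLE |
               (bpp << BD_BPP_SHIFT) |
               (tile_format << BD_TILE_SHIFT);
}
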
857 dev_priv->event_counter = 0;
858 dev_priv->event_wrap = 0;
859 dev_priv->bci_ptr = (volatile uint32_t *)
860 ((uint8_t *)dev_priv->mmio->virtual + SAVAGE_BCI_OFFSET);
861 if (S3_SAVAGE3D_SERIES(dev_priv->chipset)) {
862 dev_priv->status_used_mask = SAVAGE_FIFO_USED_MASK_S3D;
864 dev_priv->status_used_mask = SAVAGE_FIFO_USED_MASK_S4;
866 if (dev_priv->status != NULL) {
867 dev_priv->status_ptr =
868 (volatile uint32_t *)dev_priv->status->virtual;
869 dev_priv->wait_fifo = savage_bci_wait_fifo_shadow;
870 dev_priv->wait_evnt = savage_bci_wait_event_shadow;
871 dev_priv->status_ptr[1023] = dev_priv->event_counter;
873 dev_priv->status_ptr = NULL;
874 if (S3_SAVAGE3D_SERIES(dev_priv->chipset)) {
875 dev_priv->wait_fifo = savage_bci_wait_fifo_s3d;
877 dev_priv->wait_fifo = savage_bci_wait_fifo_s4;
879 dev_priv->wait_evnt = savage_bci_wait_event_reg;
883 if (S3_SAVAGE3D_SERIES(dev_priv->chipset))
884 dev_priv->emit_clip_rect = savage_emit_clip_rect_s3d;
886 dev_priv->emit_clip_rect = savage_emit_clip_rect_s4;
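
Lines 857-886 select the chip-specific implementations once at init time and store them in function pointers (wait_fifo, wait_evnt, emit_clip_rect), so the hot paths never re-test the chip family or the presence of a shadow status page. The dispatch pattern in isolation (all names here are stand-ins):

#include <stddef.h>

struct chip;   /* stands in for drm_savage_private_t */

typedef int (*wait_fifo_fn)(struct chip *, unsigned n);

static int fifo_shadow(struct chip *c, unsigned n) { (void)c; (void)n; return 0; }
static int fifo_s3d(struct chip *c, unsigned n)    { (void)c; (void)n; return 0; }
static int fifo_s4(struct chip *c, unsigned n)     { (void)c; (void)n; return 0; }

/* Bound once at init, like dev_priv->wait_fifo; callers then go
 * through the pointer without branching on the chip family. */
static wait_fifo_fn pick_wait_fifo(int have_shadow_status, int is_s3d_family)
{
        if (have_shadow_status)
                return fifo_shadow;
        return is_s3d_family ? fifo_s3d : fifo_s4;
}
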
894 if (savage_dma_init(dev_priv) < 0) {
905 drm_savage_private_t *dev_priv = dev->dev_private;
907 if (dev_priv->cmd_dma == &dev_priv->fake_dma) {
908 if (dev_priv->fake_dma.virtual)
909 drm_free(dev_priv->fake_dma.virtual,
911 } else if (dev_priv->cmd_dma && dev_priv->cmd_dma->virtual &&
912 dev_priv->cmd_dma->type == _DRM_AGP &&
913 dev_priv->dma_type == SAVAGE_DMA_AGP)
914 drm_core_ioremapfree(dev_priv->cmd_dma, dev);
916 if (dev_priv->dma_type == SAVAGE_DMA_AGP &&
925 if (dev_priv->dma_pages)
926 drm_free(dev_priv->dma_pages,
927 sizeof(drm_savage_dma_page_t)*dev_priv->nr_dma_pages,
951 drm_savage_private_t *dev_priv = dev->dev_private;
958 event->count = savage_bci_emit_event(dev_priv, event->flags);
959 event->count |= dev_priv->event_wrap << 16;
966 drm_savage_private_t *dev_priv = dev->dev_private;
974 if (dev_priv->status_ptr)
975 hw_e = dev_priv->status_ptr[1] & 0xffff;
978 hw_w = dev_priv->event_wrap;
979 if (hw_e > dev_priv->event_counter)
992 return dev_priv->wait_evnt(dev_priv, event_e);
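
The event ioctls (lines 951-992) hand userspace a single 32-bit handle: the 16-bit event count in the low half and the wrap count in the high half. Splitting it back apart on the wait side looks like this (a sketch; the real ioctl structs live in the savage DRM headers):

#include <stdint.h>

/* Pack: low 16 bits = event count, high 16 bits = wrap count. */
static uint32_t event_handle(uint16_t count, uint16_t wrap)
{
        return (uint32_t)count | ((uint32_t)wrap << 16);
}

/* Unpack on the wait side. */
static void event_split(uint32_t handle, uint16_t *count, uint16_t *wrap)
{
        *count = handle & 0xffff;
        *wrap = handle >> 16;
}
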
1061 drm_savage_private_t *dev_priv = dev->dev_private;
1066 if (!dev_priv)
1079 event = savage_bci_emit_event(dev_priv, SAVAGE_WAIT_3D);
1080 SET_AGE(&buf_priv->age, event, dev_priv->event_wrap);