Source tree: /netgear-WNDR4500v2-V1.0.0.60_1.0.38/src/linux/linux-2.6/drivers/char/drm/

Lines Matching refs:dev_priv (from the S3 Savage DRM driver, savage_bci.c)

38 savage_bci_wait_fifo_shadow(drm_savage_private_t * dev_priv, unsigned int n)
40 uint32_t mask = dev_priv->status_used_mask;
41 uint32_t threshold = dev_priv->bci_threshold_hi;
46 if (n > dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - threshold)
53 status = dev_priv->status_ptr[0];
67 savage_bci_wait_fifo_s3d(drm_savage_private_t * dev_priv, unsigned int n)
69 uint32_t maxUsed = dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - n;
88 savage_bci_wait_fifo_s4(drm_savage_private_t * dev_priv, unsigned int n)
90 uint32_t maxUsed = dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - n;
120 savage_bci_wait_event_shadow(drm_savage_private_t * dev_priv, uint16_t e)
127 status = dev_priv->status_ptr[1];
143 savage_bci_wait_event_reg(drm_savage_private_t * dev_priv, uint16_t e)
164 uint16_t savage_bci_emit_event(drm_savage_private_t * dev_priv,
170 if (dev_priv->status_ptr) {
172 count = dev_priv->status_ptr[1023];
173 if (count < dev_priv->event_counter)
174 dev_priv->event_wrap++;
176 count = dev_priv->event_counter;
181 dev_priv->event_wrap++;
183 dev_priv->event_counter = count;
184 if (dev_priv->status_ptr)
185 dev_priv->status_ptr[1023] = (uint32_t) count;
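
The matches at lines 164-185 are from savage_bci_emit_event(): the hardware event counter is only 16 bits wide, so the driver keeps a software wrap count and mirrors the last emitted value in status_ptr[1023]. A minimal sketch of that wrap tracking, with illustrative names standing in for the driver's state (the branches not shown among the matches are reconstructed, not quoted):

    #include <stdint.h>

    struct event_state {
            uint16_t event_counter;     /* last event number emitted */
            unsigned int event_wrap;    /* times the 16-bit count wrapped */
            volatile uint32_t *shadow;  /* shadow status page, or NULL */
    };

    static uint16_t emit_event_sketch(struct event_state *s)
    {
            unsigned int count;

            if (s->shadow) {
                    /* Re-read the mirrored count; if it went backwards,
                     * the 16-bit counter wrapped since the last emit. */
                    count = s->shadow[1023];
                    if (count < s->event_counter)
                            s->event_wrap++;
            } else {
                    count = s->event_counter;
            }
            count = (count + 1) & 0xffff;
            if (count == 0) {       /* event 0 is reserved: wrap again */
                    count++;
                    s->event_wrap++;
            }
            s->event_counter = count;
            if (s->shadow)
                    s->shadow[1023] = count;
            return count;
    }
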
208 drm_savage_private_t *dev_priv = dev->dev_private;
215 dev_priv->head.next = &dev_priv->tail;
216 dev_priv->head.prev = NULL;
217 dev_priv->head.buf = NULL;
219 dev_priv->tail.next = NULL;
220 dev_priv->tail.prev = &dev_priv->head;
221 dev_priv->tail.buf = NULL;
230 entry->next = dev_priv->head.next;
231 entry->prev = &dev_priv->head;
232 dev_priv->head.next->prev = entry;
233 dev_priv->head.next = entry;
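
Lines 215-233 build the buffer freelist around two sentinel entries embedded in dev_priv: head and tail bracket the list, so front insertion never has to test for an empty list or a NULL neighbor. A simplified sketch of the same pattern (the node type is a stand-in for drm_savage_buf_priv_t):

    struct node {
            struct node *next, *prev;
            void *buf;
    };

    static void freelist_init(struct node *head, struct node *tail)
    {
            head->next = tail;
            head->prev = NULL;
            head->buf = NULL;
            tail->next = NULL;
            tail->prev = head;
            tail->buf = NULL;
    }

    static void freelist_add_front(struct node *head, struct node *e)
    {
            e->next = head->next;
            e->prev = head;
            head->next->prev = e;   /* old first node (or tail) links back */
            head->next = e;
    }
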
241 drm_savage_private_t *dev_priv = dev->dev_private;
242 drm_savage_buf_priv_t *tail = dev_priv->tail.prev;
248 if (dev_priv->status_ptr)
249 event = dev_priv->status_ptr[1] & 0xffff;
252 wrap = dev_priv->event_wrap;
253 if (event > dev_priv->event_counter)
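
The read-back at lines 248-253 (repeated at 344-349 and 985-990) fetches the completed event from the shadow status page as status_ptr[1] & 0xffff. Because only 16 bits come back, the wrap epoch must be reconstructed against the software counter; the adjustment branch itself is not among the matches, so this is a hedged reading of the rule:

    #include <stdint.h>

    /* If the completed count appears ahead of the last count emitted,
     * it must predate the most recent wrap. */
    static unsigned int completed_wrap(uint16_t hw_event,
                                       uint16_t last_emitted,
                                       unsigned int cur_wrap)
    {
            return hw_event > last_emitted ? cur_wrap - 1 : cur_wrap;
    }
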
274 drm_savage_private_t *dev_priv = dev->dev_private;
284 prev = &dev_priv->head;
295 static int savage_dma_init(drm_savage_private_t * dev_priv)
299 dev_priv->nr_dma_pages = dev_priv->cmd_dma->size /
301 dev_priv->dma_pages = drm_alloc(sizeof(drm_savage_dma_page_t) *
302 dev_priv->nr_dma_pages, DRM_MEM_DRIVER);
303 if (dev_priv->dma_pages == NULL)
306 for (i = 0; i < dev_priv->nr_dma_pages; ++i) {
307 SET_AGE(&dev_priv->dma_pages[i].age, 0, 0);
308 dev_priv->dma_pages[i].used = 0;
309 dev_priv->dma_pages[i].flushed = 0;
311 SET_AGE(&dev_priv->last_dma_age, 0, 0);
313 dev_priv->first_dma_page = 0;
314 dev_priv->current_dma_page = 0;
319 void savage_dma_reset(drm_savage_private_t * dev_priv)
323 event = savage_bci_emit_event(dev_priv, 0);
324 wrap = dev_priv->event_wrap;
325 for (i = 0; i < dev_priv->nr_dma_pages; ++i) {
326 SET_AGE(&dev_priv->dma_pages[i].age, event, wrap);
327 dev_priv->dma_pages[i].used = 0;
328 dev_priv->dma_pages[i].flushed = 0;
330 SET_AGE(&dev_priv->last_dma_age, event, wrap);
331 dev_priv->first_dma_page = dev_priv->current_dma_page = 0;
334 void savage_dma_wait(drm_savage_private_t * dev_priv, unsigned int page)
340 if (dev_priv->cmd_dma == &dev_priv->fake_dma)
344 if (dev_priv->status_ptr)
345 event = dev_priv->status_ptr[1] & 0xffff;
348 wrap = dev_priv->event_wrap;
349 if (event > dev_priv->event_counter)
352 if (dev_priv->dma_pages[page].age.wrap > wrap ||
353 (dev_priv->dma_pages[page].age.wrap == wrap &&
354 dev_priv->dma_pages[page].age.event > event)) {
355 if (dev_priv->wait_evnt(dev_priv,
356 dev_priv->dma_pages[page].age.event)
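
In savage_dma_wait() the test at lines 352-354 orders ages lexicographically as (wrap, event) pairs: the page is still in flight only if its stamped age is strictly newer than the last event the hardware completed. The same predicate as a standalone function, under those assumptions:

    #include <stdint.h>

    struct age {
            uint16_t event;      /* event number stamped on the page */
            unsigned int wrap;   /* wrap count at stamping time */
    };

    static int age_pending(const struct age *a,
                           uint16_t done_event, unsigned int done_wrap)
    {
            return a->wrap > done_wrap ||
                   (a->wrap == done_wrap && a->event > done_event);
    }
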
362 uint32_t *savage_dma_alloc(drm_savage_private_t * dev_priv, unsigned int n)
364 unsigned int cur = dev_priv->current_dma_page;
366 dev_priv->dma_pages[cur].used;
373 cur, dev_priv->dma_pages[cur].used, n, rest, nr_pages);
375 if (cur + nr_pages < dev_priv->nr_dma_pages) {
376 dma_ptr = (uint32_t *) dev_priv->cmd_dma->handle +
377 cur * SAVAGE_DMA_PAGE_SIZE + dev_priv->dma_pages[cur].used;
380 dev_priv->dma_pages[cur].used += rest;
384 dev_priv->dma_flush(dev_priv);
387 for (i = cur; i < dev_priv->nr_dma_pages; ++i) {
388 dev_priv->dma_pages[i].age = dev_priv->last_dma_age;
389 dev_priv->dma_pages[i].used = 0;
390 dev_priv->dma_pages[i].flushed = 0;
392 dma_ptr = (uint32_t *) dev_priv->cmd_dma->handle;
393 dev_priv->first_dma_page = cur = 0;
397 if (dev_priv->dma_pages[i].used) {
399 i, dev_priv->dma_pages[i].used);
403 dev_priv->dma_pages[i].used = SAVAGE_DMA_PAGE_SIZE;
405 dev_priv->dma_pages[i].used = n;
408 dev_priv->current_dma_page = --i;
411 i, dev_priv->dma_pages[i].used, n);
413 savage_dma_wait(dev_priv, dev_priv->current_dma_page);
418 static void savage_dma_flush(drm_savage_private_t * dev_priv)
420 unsigned int first = dev_priv->first_dma_page;
421 unsigned int cur = dev_priv->current_dma_page;
428 dev_priv->dma_pages[cur].used == dev_priv->dma_pages[cur].flushed)
433 pad = -dev_priv->dma_pages[cur].used & 1;
434 align = -(dev_priv->dma_pages[cur].used + pad) & 7;
438 first, cur, dev_priv->dma_pages[first].flushed,
439 dev_priv->dma_pages[cur].used, pad, align);
443 uint32_t *dma_ptr = (uint32_t *) dev_priv->cmd_dma->handle +
444 cur * SAVAGE_DMA_PAGE_SIZE + dev_priv->dma_pages[cur].used;
445 dev_priv->dma_pages[cur].used += pad;
455 phys_addr = dev_priv->cmd_dma->offset +
457 dev_priv->dma_pages[first].flushed) * 4;
459 dev_priv->dma_pages[cur].used - dev_priv->dma_pages[first].flushed;
462 phys_addr | dev_priv->dma_type, len);
466 BCI_WRITE(phys_addr | dev_priv->dma_type);
470 dev_priv->dma_pages[cur].used += align;
473 event = savage_bci_emit_event(dev_priv, 0);
474 wrap = dev_priv->event_wrap;
476 SET_AGE(&dev_priv->dma_pages[i].age, event, wrap);
477 dev_priv->dma_pages[i].used = 0;
478 dev_priv->dma_pages[i].flushed = 0;
481 if (dev_priv->dma_pages[cur].used == SAVAGE_DMA_PAGE_SIZE) {
482 SET_AGE(&dev_priv->dma_pages[cur].age, event, wrap);
483 dev_priv->dma_pages[cur].used = 0;
484 dev_priv->dma_pages[cur].flushed = 0;
487 if (cur == dev_priv->nr_dma_pages)
489 dev_priv->first_dma_page = dev_priv->current_dma_page = cur;
491 dev_priv->first_dma_page = cur;
492 dev_priv->dma_pages[cur].flushed = dev_priv->dma_pages[i].used;
494 SET_AGE(&dev_priv->last_dma_age, event, wrap);
497 dev_priv->dma_pages[cur].used,
498 dev_priv->dma_pages[cur].flushed);
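
Inside savage_dma_flush() the pad and align values at lines 433-434 use the -x & (n - 1) idiom: pad rounds the used word count up to an even number, then align rounds the padded total up to a multiple of eight words. A worked sketch of just that arithmetic:

    /* For used = 13: pad = -13 & 1 = 1, align = -(13 + 1) & 7 = 2,
     * so 13 + 1 + 2 = 16 words are flushed. Negating an unsigned
     * value is well-defined modular arithmetic in C. */
    static unsigned int padded_words(unsigned int used)
    {
            unsigned int pad   = -used & 1;
            unsigned int align = -(used + pad) & 7;
            return used + pad + align;
    }
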
501 static void savage_fake_dma_flush(drm_savage_private_t * dev_priv)
506 if (dev_priv->first_dma_page == dev_priv->current_dma_page &&
507 dev_priv->dma_pages[dev_priv->current_dma_page].used == 0)
511 dev_priv->first_dma_page, dev_priv->current_dma_page,
512 dev_priv->dma_pages[dev_priv->current_dma_page].used);
514 for (i = dev_priv->first_dma_page;
515 i <= dev_priv->current_dma_page && dev_priv->dma_pages[i].used;
517 uint32_t *dma_ptr = (uint32_t *) dev_priv->cmd_dma->handle +
521 if (i < dev_priv->current_dma_page &&
522 dev_priv->dma_pages[i].used != SAVAGE_DMA_PAGE_SIZE) {
524 i, dev_priv->dma_pages[i].used);
527 BEGIN_BCI(dev_priv->dma_pages[i].used);
528 for (j = 0; j < dev_priv->dma_pages[i].used; ++j) {
531 dev_priv->dma_pages[i].used = 0;
535 dev_priv->first_dma_page = dev_priv->current_dma_page = 0;
540 drm_savage_private_t *dev_priv;
542 dev_priv = drm_alloc(sizeof(drm_savage_private_t), DRM_MEM_DRIVER);
543 if (dev_priv == NULL)
546 memset(dev_priv, 0, sizeof(drm_savage_private_t));
547 dev->dev_private = (void *)dev_priv;
549 dev_priv->chipset = (enum savage_family)chipset;
563 drm_savage_private_t *dev_priv = dev->dev_private;
572 dev_priv->mtrr[0].handle = -1;
573 dev_priv->mtrr[1].handle = -1;
574 dev_priv->mtrr[2].handle = -1;
575 if (S3_SAVAGE3D_SERIES(dev_priv->chipset)) {
586 dev_priv->mtrr[0].base = fb_base;
587 dev_priv->mtrr[0].size = 0x01000000;
588 dev_priv->mtrr[0].handle =
589 drm_mtrr_add(dev_priv->mtrr[0].base,
590 dev_priv->mtrr[0].size, DRM_MTRR_WC);
591 dev_priv->mtrr[1].base = fb_base + 0x02000000;
592 dev_priv->mtrr[1].size = 0x02000000;
593 dev_priv->mtrr[1].handle =
594 drm_mtrr_add(dev_priv->mtrr[1].base,
595 dev_priv->mtrr[1].size, DRM_MTRR_WC);
596 dev_priv->mtrr[2].base = fb_base + 0x04000000;
597 dev_priv->mtrr[2].size = 0x04000000;
598 dev_priv->mtrr[2].handle =
599 drm_mtrr_add(dev_priv->mtrr[2].base,
600 dev_priv->mtrr[2].size, DRM_MTRR_WC);
605 } else if (dev_priv->chipset != S3_SUPERSAVAGE &&
606 dev_priv->chipset != S3_SAVAGE2000) {
617 dev_priv->mtrr[0].base = fb_base;
618 dev_priv->mtrr[0].size = 0x08000000;
619 dev_priv->mtrr[0].handle =
620 drm_mtrr_add(dev_priv->mtrr[0].base,
621 dev_priv->mtrr[0].size, DRM_MTRR_WC);
637 _DRM_READ_ONLY, &dev_priv->mmio);
642 _DRM_WRITE_COMBINING, &dev_priv->fb);
648 &dev_priv->aperture);
660 drm_savage_private_t *dev_priv = dev->dev_private;
664 if (dev_priv->mtrr[i].handle >= 0)
665 drm_mtrr_del(dev_priv->mtrr[i].handle,
666 dev_priv->mtrr[i].base,
667 dev_priv->mtrr[i].size, DRM_MTRR_WC);
672 drm_savage_private_t *dev_priv = dev->dev_private;
674 drm_free(dev_priv, sizeof(drm_savage_private_t), DRM_MEM_DRIVER);
681 drm_savage_private_t *dev_priv = dev->dev_private;
697 dev_priv->cob_size = init->cob_size;
698 dev_priv->bci_threshold_lo = init->bci_threshold_lo;
699 dev_priv->bci_threshold_hi = init->bci_threshold_hi;
700 dev_priv->dma_type = init->dma_type;
702 dev_priv->fb_bpp = init->fb_bpp;
703 dev_priv->front_offset = init->front_offset;
704 dev_priv->front_pitch = init->front_pitch;
705 dev_priv->back_offset = init->back_offset;
706 dev_priv->back_pitch = init->back_pitch;
707 dev_priv->depth_bpp = init->depth_bpp;
708 dev_priv->depth_offset = init->depth_offset;
709 dev_priv->depth_pitch = init->depth_pitch;
711 dev_priv->texture_offset = init->texture_offset;
712 dev_priv->texture_size = init->texture_size;
715 if (!dev_priv->sarea) {
721 dev_priv->status = drm_core_findmap(dev, init->status_offset);
722 if (!dev_priv->status) {
728 dev_priv->status = NULL;
730 if (dev_priv->dma_type == SAVAGE_DMA_AGP && init->buffers_offset) {
747 dev_priv->agp_textures =
749 if (!dev_priv->agp_textures) {
755 dev_priv->agp_textures = NULL;
759 if (S3_SAVAGE3D_SERIES(dev_priv->chipset)) {
771 dev_priv->cmd_dma = drm_core_findmap(dev, init->cmd_dma_offset);
772 if (!dev_priv->cmd_dma) {
777 if (dev_priv->dma_type == SAVAGE_DMA_AGP) {
778 if (dev_priv->cmd_dma->type != _DRM_AGP) {
784 drm_core_ioremap(dev_priv->cmd_dma, dev);
785 if (!dev_priv->cmd_dma->handle) {
791 } else if (dev_priv->cmd_dma->type != _DRM_CONSISTENT) {
798 dev_priv->cmd_dma = NULL;
801 dev_priv->dma_flush = savage_dma_flush;
802 if (!dev_priv->cmd_dma) {
804 dev_priv->fake_dma.offset = 0;
805 dev_priv->fake_dma.size = SAVAGE_FAKE_DMA_SIZE;
806 dev_priv->fake_dma.type = _DRM_SHM;
807 dev_priv->fake_dma.handle = drm_alloc(SAVAGE_FAKE_DMA_SIZE,
809 if (!dev_priv->fake_dma.handle) {
814 dev_priv->cmd_dma = &dev_priv->fake_dma;
815 dev_priv->dma_flush = savage_fake_dma_flush;
818 dev_priv->sarea_priv =
819 (drm_savage_sarea_t *) ((uint8_t *) dev_priv->sarea->handle +
827 if (dev_priv->chipset <= S3_SAVAGE4) {
828 color_tile_format = dev_priv->fb_bpp == 16 ?
830 depth_tile_format = dev_priv->depth_bpp == 16 ?
836 front_stride = dev_priv->front_pitch / (dev_priv->fb_bpp / 8);
837 back_stride = dev_priv->back_pitch / (dev_priv->fb_bpp / 8);
839 dev_priv->depth_pitch / (dev_priv->depth_bpp / 8);
841 dev_priv->front_bd = front_stride | SAVAGE_BD_BW_DISABLE |
842 (dev_priv->fb_bpp << SAVAGE_BD_BPP_SHIFT) |
845 dev_priv->back_bd = back_stride | SAVAGE_BD_BW_DISABLE |
846 (dev_priv->fb_bpp << SAVAGE_BD_BPP_SHIFT) |
849 dev_priv->depth_bd = depth_stride | SAVAGE_BD_BW_DISABLE |
850 (dev_priv->depth_bpp << SAVAGE_BD_BPP_SHIFT) |
855 dev_priv->event_counter = 0;
856 dev_priv->event_wrap = 0;
857 dev_priv->bci_ptr = (volatile uint32_t *)
858 ((uint8_t *) dev_priv->mmio->handle + SAVAGE_BCI_OFFSET);
859 if (S3_SAVAGE3D_SERIES(dev_priv->chipset)) {
860 dev_priv->status_used_mask = SAVAGE_FIFO_USED_MASK_S3D;
862 dev_priv->status_used_mask = SAVAGE_FIFO_USED_MASK_S4;
864 if (dev_priv->status != NULL) {
865 dev_priv->status_ptr =
866 (volatile uint32_t *)dev_priv->status->handle;
867 dev_priv->wait_fifo = savage_bci_wait_fifo_shadow;
868 dev_priv->wait_evnt = savage_bci_wait_event_shadow;
869 dev_priv->status_ptr[1023] = dev_priv->event_counter;
871 dev_priv->status_ptr = NULL;
872 if (S3_SAVAGE3D_SERIES(dev_priv->chipset)) {
873 dev_priv->wait_fifo = savage_bci_wait_fifo_s3d;
875 dev_priv->wait_fifo = savage_bci_wait_fifo_s4;
877 dev_priv->wait_evnt = savage_bci_wait_event_reg;
881 if (S3_SAVAGE3D_SERIES(dev_priv->chipset))
882 dev_priv->emit_clip_rect = savage_emit_clip_rect_s3d;
884 dev_priv->emit_clip_rect = savage_emit_clip_rect_s4;
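
Lines 857-884 select backends once at init time: wait_fifo, wait_evnt, and emit_clip_rect are stored as function pointers in dev_priv, chosen by chipset and by whether a shadow status page exists, so the hot paths never re-test the chipset. A minimal sketch of that dispatch pattern (the stubs and names are illustrative, not the driver's):

    typedef int (*wait_fn)(void *priv, unsigned int n);

    /* Stubs standing in for the three FIFO-wait implementations. */
    static int wait_fifo_shadow(void *p, unsigned int n) { return 0; }
    static int wait_fifo_s3d(void *p, unsigned int n)    { return 0; }
    static int wait_fifo_s4(void *p, unsigned int n)     { return 0; }

    struct priv {
            wait_fn wait_fifo;
    };

    static void pick_wait_fifo(struct priv *p, int has_shadow, int is_s3d)
    {
            if (has_shadow)
                    p->wait_fifo = wait_fifo_shadow;
            else
                    p->wait_fifo = is_s3d ? wait_fifo_s3d : wait_fifo_s4;
    }
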
892 if (savage_dma_init(dev_priv) < 0) {
903 drm_savage_private_t *dev_priv = dev->dev_private;
905 if (dev_priv->cmd_dma == &dev_priv->fake_dma) {
906 if (dev_priv->fake_dma.handle)
907 drm_free(dev_priv->fake_dma.handle,
909 } else if (dev_priv->cmd_dma && dev_priv->cmd_dma->handle &&
910 dev_priv->cmd_dma->type == _DRM_AGP &&
911 dev_priv->dma_type == SAVAGE_DMA_AGP)
912 drm_core_ioremapfree(dev_priv->cmd_dma, dev);
914 if (dev_priv->dma_type == SAVAGE_DMA_AGP &&
923 if (dev_priv->dma_pages)
924 drm_free(dev_priv->dma_pages,
925 sizeof(drm_savage_dma_page_t) * dev_priv->nr_dma_pages,
954 drm_savage_private_t *dev_priv = dev->dev_private;
964 event.count = savage_bci_emit_event(dev_priv, event.flags);
965 event.count |= dev_priv->event_wrap << 16;
974 drm_savage_private_t *dev_priv = dev->dev_private;
985 if (dev_priv->status_ptr)
986 hw_e = dev_priv->status_ptr[1] & 0xffff;
989 hw_w = dev_priv->event_wrap;
990 if (hw_e > dev_priv->event_counter)
1003 return dev_priv->wait_evnt(dev_priv, event_e);
1075 drm_savage_private_t *dev_priv = dev->dev_private;
1080 if (!dev_priv)
1095 event = savage_bci_emit_event(dev_priv, SAVAGE_WAIT_3D);
1096 SET_AGE(&buf_priv->age, event, dev_priv->event_wrap);