Lines matching refs:dev_priv
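
Every match below dereferences the per-device private structure. As a reading aid, the following sketch reconstructs drm_mach64_private_t purely from the fields referenced in the listing; member types, their ordering, and the descriptor-ring layout are assumptions (the authoritative definition lives in the driver's private header), not a copy of the real declaration.

/*
 * Illustrative reconstruction of drm_mach64_private_t, assembled only from
 * fields that appear in the reference lines below.  Types and ordering are
 * assumptions; consult the driver's private header for the real definition.
 */
typedef struct drm_mach64_descriptor_ring {
	void *start;		/* CPU address of the ring (line 1102) */
	u32 start_addr;		/* bus address of the ring (line 1103) */
	int size;		/* 16KB ring (line 1101) */
	u32 head_addr;		/* bus address the engine reads from (line 1147) */
	int head, tail;		/* read/write indices (line 1148) */
	u32 tail_mask;		/* size / sizeof(u32) - 1 (line 1149) */
	int space;		/* bytes free for new commands (line 1150) */
} drm_mach64_descriptor_ring_t;

typedef struct drm_mach64_private {
	drm_mach64_sarea_t *sarea_priv;		/* line 1032 */
	int is_pci;				/* PCI vs. AGP card (line 976) */
	int driver_mode;			/* MMIO vs. bus-master DMA (line 1077) */
	int usec_timeout;			/* poll budget, 1000000 (line 995) */

	drm_mach64_descriptor_ring_t ring;	/* command/descriptor ring state */
	int ring_running;			/* line 1144 */

	struct list_head free_list;		/* idle DMA buffers (line 998) */
	struct list_head placeholders;		/* entries loaned to clients (line 999) */
	struct list_head pending;		/* buffers queued on the ring (line 1000) */

	unsigned int fb_bpp, depth_bpp;
	unsigned int front_offset, front_pitch, front_offset_pitch;
	unsigned int back_offset, back_pitch, back_offset_pitch;
	unsigned int depth_offset, depth_pitch, depth_offset_pitch;
	u32 frame_ofs[3];			/* size assumed; ~0 marks a placeholder (line 1161) */

	drm_local_map_t *sarea, *fb, *mmio;	/* lines 1002, 1009, 1016 */
	drm_local_map_t *ring_map, *dev_buffers, *agp_textures;
} drm_mach64_private_t;
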

57  * \param dev_priv pointer to device private data structure.
63 int mach64_do_wait_for_fifo(drm_mach64_private_t *dev_priv, int entries)
67 for (i = 0; i < dev_priv->usec_timeout; i++) {
81 int mach64_do_wait_for_idle(drm_mach64_private_t *dev_priv)
85 ret = mach64_do_wait_for_fifo(dev_priv, 16);
89 for (i = 0; i < dev_priv->usec_timeout; i++) {
96 mach64_dump_ring_info(dev_priv);
111 * \param dev_priv pointer to device private data structure.
119 int mach64_wait_ring(drm_mach64_private_t *dev_priv, int n)
121 drm_mach64_descriptor_ring_t *ring = &dev_priv->ring;
124 for (i = 0; i < dev_priv->usec_timeout; i++) {
125 mach64_update_ring_snapshot(dev_priv);
136 mach64_dump_ring_info(dev_priv);
145 static int mach64_ring_idle(drm_mach64_private_t *dev_priv)
147 drm_mach64_descriptor_ring_t *ring = &dev_priv->ring;
153 while (i < dev_priv->usec_timeout) {
154 mach64_update_ring_snapshot(dev_priv);
171 mach64_dump_ring_info(dev_priv);
180 static void mach64_ring_reset(drm_mach64_private_t *dev_priv)
182 drm_mach64_descriptor_ring_t *ring = &dev_priv->ring;
184 mach64_do_release_used_buffers(dev_priv);
192 dev_priv->ring_running = 0;
198 int mach64_do_dma_flush(drm_mach64_private_t *dev_priv)
204 return mach64_ring_idle(dev_priv);
210 int mach64_do_dma_idle(drm_mach64_private_t *dev_priv)
215 if ((ret = mach64_ring_idle(dev_priv)) < 0) {
218 dev_priv->ring.tail);
222 mach64_ring_stop(dev_priv);
225 mach64_do_release_used_buffers(dev_priv);
232 int mach64_do_engine_reset(drm_mach64_private_t *dev_priv)
261 mach64_ring_reset(dev_priv);
276 void mach64_dump_engine_info(drm_mach64_private_t *dev_priv)
279 if (!dev_priv->is_pci) {
417 static void mach64_dump_buf_info(drm_mach64_private_t *dev_priv,
477 void mach64_dump_ring_info(drm_mach64_private_t *dev_priv)
479 drm_mach64_descriptor_ring_t *ring = &dev_priv->ring;
519 list_for_each(ptr, &dev_priv->pending) {
527 mach64_dump_buf_info(dev_priv, buf);
623 if (dev_priv->ring.space <= (n) * sizeof(u32)) { \
625 if ((ret = mach64_wait_ring( dev_priv, (n) * sizeof(u32))) < 0 ) { \
627 mach64_dump_engine_info( dev_priv ); \
628 mach64_do_engine_reset( dev_priv ); \
632 dev_priv->ring.space -= (n) * sizeof(u32); \
633 _ring = (u32 *) dev_priv->ring.start; \
634 _ring_tail = _ring_write = dev_priv->ring.tail; \
635 _ring_mask = dev_priv->ring.tail_mask; \
657 dev_priv->ring.tail = _ring_write; \
658 mach64_ring_tick( dev_priv, &(dev_priv)->ring ); \
664 int mach64_add_buf_to_ring(drm_mach64_private_t *dev_priv,
711 int mach64_add_hostdata_buf_to_ring(drm_mach64_private_t *dev_priv,
778 drm_mach64_private_t *dev_priv = dev->dev_private;
788 table = (u32 *) dev_priv->ring.start;
816 mach64_do_wait_for_fifo(dev_priv, 3);
822 mach64_do_wait_for_idle(dev_priv);
831 mach64_do_engine_reset(dev_priv);
879 if ((i = mach64_do_wait_for_idle(dev_priv))) {
882 mach64_do_engine_reset(dev_priv);
883 mach64_do_wait_for_fifo(dev_priv, 3);
897 DRM_DEBUG("table bus addr = 0x%08x\n", dev_priv->ring.start_addr);
901 dev_priv->ring.start_addr | MACH64_CIRCULAR_BUF_SIZE_16KB);
913 if ((i = mach64_do_wait_for_idle(dev_priv))) {
916 mach64_dump_engine_info(dev_priv);
918 mach64_do_engine_reset(dev_priv);
919 mach64_do_wait_for_fifo(dev_priv, 3);
931 mach64_do_wait_for_fifo(dev_priv, 1);
947 mach64_do_wait_for_fifo(dev_priv, 2);
964 drm_mach64_private_t *dev_priv;
970 dev_priv = drm_alloc(sizeof(drm_mach64_private_t), DRM_MEM_DRIVER);
971 if (dev_priv == NULL)
974 memset(dev_priv, 0, sizeof(drm_mach64_private_t));
976 dev_priv->is_pci = init->is_pci;
978 dev_priv->fb_bpp = init->fb_bpp;
979 dev_priv->front_offset = init->front_offset;
980 dev_priv->front_pitch = init->front_pitch;
981 dev_priv->back_offset = init->back_offset;
982 dev_priv->back_pitch = init->back_pitch;
984 dev_priv->depth_bpp = init->depth_bpp;
985 dev_priv->depth_offset = init->depth_offset;
986 dev_priv->depth_pitch = init->depth_pitch;
988 dev_priv->front_offset_pitch = (((dev_priv->front_pitch / 8) << 22) |
989 (dev_priv->front_offset >> 3));
990 dev_priv->back_offset_pitch = (((dev_priv->back_pitch / 8) << 22) |
991 (dev_priv->back_offset >> 3));
992 dev_priv->depth_offset_pitch = (((dev_priv->depth_pitch / 8) << 22) |
993 (dev_priv->depth_offset >> 3));
995 dev_priv->usec_timeout = 1000000;
998 INIT_LIST_HEAD(&dev_priv->free_list);
999 INIT_LIST_HEAD(&dev_priv->placeholders);
1000 INIT_LIST_HEAD(&dev_priv->pending);
1002 dev_priv->sarea = drm_getsarea(dev);
1003 if (!dev_priv->sarea) {
1005 dev->dev_private = (void *)dev_priv;
1009 dev_priv->fb = drm_core_findmap(dev, init->fb_offset);
1010 if (!dev_priv->fb) {
1012 dev->dev_private = (void *)dev_priv;
1016 dev_priv->mmio = drm_core_findmap(dev, init->mmio_offset);
1017 if (!dev_priv->mmio) {
1019 dev->dev_private = (void *)dev_priv;
1024 dev_priv->ring_map = drm_core_findmap(dev, init->ring_offset);
1025 if (!dev_priv->ring_map) {
1027 dev->dev_private = (void *)dev_priv;
1032 dev_priv->sarea_priv = (drm_mach64_sarea_t *)
1033 ((u8 *) dev_priv->sarea->virtual + init->sarea_priv_offset);
1035 if (!dev_priv->is_pci) {
1036 drm_core_ioremap(dev_priv->ring_map, dev);
1037 if (!dev_priv->ring_map->virtual) {
1040 dev->dev_private = (void *)dev_priv;
1049 dev->dev_private = (void *)dev_priv;
1055 dev_priv->dev_buffers = dev->agp_buffer_map;
1061 dev->dev_private = (void *)dev_priv;
1065 dev_priv->agp_textures =
1067 if (!dev_priv->agp_textures) {
1069 dev->dev_private = (void *)dev_priv;
1075 dev->dev_private = (void *)dev_priv;
1077 dev_priv->driver_mode = init->dma_mode;
1084 if ((ret = mach64_do_wait_for_idle(dev_priv))) {
1093 if ((ret = mach64_do_wait_for_idle(dev_priv))) {
1101 dev_priv->ring.size = 0x4000; /* 16KB */
1102 dev_priv->ring.start = dev_priv->ring_map->virtual;
1103 dev_priv->ring.start_addr = (u32) dev_priv->ring_map->offset;
1105 memset(dev_priv->ring.start, 0, dev_priv->ring.size);
1107 dev_priv->ring.start, dev_priv->ring.start_addr);
1110 if (dev_priv->driver_mode != MACH64_MODE_MMIO) {
1120 dev_priv->driver_mode = MACH64_MODE_MMIO;
1124 switch (dev_priv->driver_mode) {
1144 dev_priv->ring_running = 0;
1147 dev_priv->ring.head_addr = dev_priv->ring.start_addr;
1148 dev_priv->ring.head = dev_priv->ring.tail = 0;
1149 dev_priv->ring.tail_mask = (dev_priv->ring.size / sizeof(u32)) - 1;
1150 dev_priv->ring.space = dev_priv->ring.size;
1153 mach64_do_wait_for_fifo(dev_priv, 1);
1155 (dev_priv->ring.
1159 dev_priv->sarea_priv->frames_queued = 0;
1161 dev_priv->frame_ofs[i] = ~0; /* All ones indicates placeholder */
1178 int mach64_do_dispatch_pseudo_dma(drm_mach64_private_t *dev_priv)
1180 drm_mach64_descriptor_ring_t *ring = &dev_priv->ring;
1192 if ((ret = mach64_do_wait_for_idle(dev_priv)) < 0) {
1194 mach64_dump_engine_info(dev_priv);
1195 mach64_do_engine_reset(dev_priv);
1227 list_for_each(ptr, &dev_priv->pending) {
1241 mach64_dump_ring_info(dev_priv);
1242 mach64_do_engine_reset(dev_priv);
1281 (dev_priv, 16)) < 0) {
1288 (dev_priv)) < 0) {
1308 if ((ret = mach64_do_wait_for_idle(dev_priv)) < 0) {
1337 drm_mach64_private_t *dev_priv = dev->dev_private;
1339 if (!dev_priv->is_pci) {
1340 if (dev_priv->ring_map)
1341 drm_core_ioremapfree(dev_priv->ring_map, dev);
1351 drm_free(dev_priv, sizeof(drm_mach64_private_t),
1388 drm_mach64_private_t *dev_priv = dev->dev_private;
1394 return mach64_do_dma_idle(dev_priv);
1400 drm_mach64_private_t *dev_priv = dev->dev_private;
1406 return mach64_do_dma_flush(dev_priv);
1412 drm_mach64_private_t *dev_priv = dev->dev_private;
1418 return mach64_do_engine_reset(dev_priv);
1431 drm_mach64_private_t *dev_priv = dev->dev_private;
1447 list_add_tail(ptr, &dev_priv->free_list);
1455 drm_mach64_private_t *dev_priv = dev->dev_private;
1462 list_for_each_safe(ptr, tmp, &dev_priv->pending) {
1467 list_for_each_safe(ptr, tmp, &dev_priv->placeholders) {
1473 list_for_each_safe(ptr, tmp, &dev_priv->free_list) {
1483 int mach64_do_release_used_buffers(drm_mach64_private_t *dev_priv)
1490 if (list_empty(&dev_priv->pending))
1495 list_for_each_safe(ptr, tmp, &dev_priv->pending) {
1500 list_add_tail(ptr, &dev_priv->free_list);
1510 static int mach64_do_reclaim_completed(drm_mach64_private_t *dev_priv)
1512 drm_mach64_descriptor_ring_t *ring = &dev_priv->ring;
1518 mach64_ring_tick(dev_priv, ring);
1526 mach64_dump_ring_info(dev_priv);
1531 mach64_do_release_used_buffers(dev_priv);
1533 if (list_empty(&dev_priv->free_list)) {
1543 list_for_each_safe(ptr, tmp, &dev_priv->pending) {
1563 mach64_dump_ring_info(dev_priv);
1571 list_add_tail(ptr, &dev_priv->free_list);
1583 struct drm_buf *mach64_freelist_get(drm_mach64_private_t *dev_priv)
1585 drm_mach64_descriptor_ring_t *ring = &dev_priv->ring;
1590 if (list_empty(&dev_priv->free_list)) {
1591 if (list_empty(&dev_priv->pending)) {
1595 list_for_each(ptr, &dev_priv->placeholders) {
1602 for (t = 0; t < dev_priv->usec_timeout; t++) {
1605 ret = mach64_do_reclaim_completed(dev_priv);
1613 mach64_dump_ring_info(dev_priv);
1621 ptr = dev_priv->free_list.next;
1625 list_add_tail(ptr, &dev_priv->placeholders);
1629 int mach64_freelist_put(drm_mach64_private_t *dev_priv, struct drm_buf *copy_buf)
1635 list_for_each(ptr, &dev_priv->pending) {
1643 ptr = dev_priv->placeholders.next;
1650 list_add_tail(ptr, &dev_priv->free_list);
1668 drm_mach64_private_t *dev_priv = dev->dev_private;
1671 buf = mach64_freelist_get(dev_priv);
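
The wait routines referenced above (mach64_do_wait_for_fifo, mach64_do_wait_for_idle, mach64_wait_ring, and the reclaim loop in mach64_freelist_get) share the polling pattern visible at lines 67, 89, 124, 153 and 1602: iterate up to dev_priv->usec_timeout times, test an engine condition, and delay one microsecond per pass, dumping engine or ring state when the budget expires. A minimal, self-contained sketch of that pattern follows; the engine_busy callback and the stand-alone signature are hypothetical stand-ins, not driver code.

#include <linux/delay.h>
#include <linux/errno.h>

/*
 * Minimal sketch of the usec_timeout polling loop shared by the wait
 * routines listed above.  A caller would pass dev_priv->usec_timeout;
 * engine_busy() is a hypothetical stand-in for the real checks (FIFO
 * depth, GUI_STAT bits, or a ring head/tail comparison).
 */
static int wait_with_timeout(int usec_timeout,
			     int (*engine_busy)(void *ctx), void *ctx)
{
	int i;

	for (i = 0; i < usec_timeout; i++) {
		if (!engine_busy(ctx))
			return 0;	/* condition met within the budget */
		udelay(1);		/* DRM_UDELAY(1) in the driver */
	}

	/* On expiry the driver dumps engine/ring state and returns an error. */
	return -EBUSY;
}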