Lines matching refs:dev_priv in /netgear-R7000-V1.0.7.12_1.2.5/components/opensource/linux/linux-2.6.36/drivers/gpu/drm/i915/ (the leading number on each matched line is its line number in the source file)


53 drm_i915_private_t *dev_priv = dev->dev_private;
55 dev_priv->status_page_dmah =
58 if (!dev_priv->status_page_dmah) {
62 dev_priv->render_ring.status_page.page_addr
63 = dev_priv->status_page_dmah->vaddr;
64 dev_priv->dma_status_page = dev_priv->status_page_dmah->busaddr;
66 memset(dev_priv->render_ring.status_page.page_addr, 0, PAGE_SIZE);
69 dev_priv->dma_status_page |= (dev_priv->dma_status_page >> 28) &
72 I915_WRITE(HWS_PGA, dev_priv->dma_status_page);
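Note: lines 53-72 above all belong to the setup of the physical hardware status page. A minimal sketch of that sequence, assuming the function wrapper, the error handling, and the 965 address fixup (the _sketch name, the IS_I965G() check and the 0xf0 mask are reconstructions, not taken from the listing):

/* Sketch of the status-page setup implied by lines 53-72. */
static int init_phys_hws_sketch(struct drm_device *dev)
{
	drm_i915_private_t *dev_priv = dev->dev_private;

	/* One page of DMA-coherent memory for the hardware status page. */
	dev_priv->status_page_dmah =
		drm_pci_alloc(dev, PAGE_SIZE, PAGE_SIZE);
	if (!dev_priv->status_page_dmah) {
		DRM_ERROR("Can not allocate hardware status page\n");
		return -ENOMEM;
	}

	/* Keep both the CPU mapping and the bus address, then clear the page. */
	dev_priv->render_ring.status_page.page_addr
		= dev_priv->status_page_dmah->vaddr;
	dev_priv->dma_status_page = dev_priv->status_page_dmah->busaddr;
	memset(dev_priv->render_ring.status_page.page_addr, 0, PAGE_SIZE);

	/* Line 69 folds the high address bits into bits 7:4 (assumed 965-only). */
	if (IS_I965G(dev))
		dev_priv->dma_status_page |= (dev_priv->dma_status_page >> 28) &
					     0xf0;

	/* Point the hardware at the page. */
	I915_WRITE(HWS_PGA, dev_priv->dma_status_page);
	return 0;
}

Lines 840-854 further down show the alternative path where userspace supplies a GTT offset for the status page instead.
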
83 drm_i915_private_t *dev_priv = dev->dev_private;
84 if (dev_priv->status_page_dmah) {
85 drm_pci_free(dev, dev_priv->status_page_dmah);
86 dev_priv->status_page_dmah = NULL;
89 if (dev_priv->render_ring.status_page.gfx_addr) {
90 dev_priv->render_ring.status_page.gfx_addr = 0;
91 drm_core_ioremapfree(&dev_priv->hws_map, dev);
100 drm_i915_private_t *dev_priv = dev->dev_private;
102 struct intel_ring_buffer *ring = &dev_priv->render_ring;
127 drm_i915_private_t *dev_priv = dev->dev_private;
136 intel_cleanup_ring_buffer(dev, &dev_priv->render_ring);
138 intel_cleanup_ring_buffer(dev, &dev_priv->bsd_ring);
150 drm_i915_private_t *dev_priv = dev->dev_private;
162 if (dev_priv->render_ring.gem_object != NULL) {
169 dev_priv->render_ring.size = init->ring_size;
171 dev_priv->render_ring.map.offset = init->ring_start;
172 dev_priv->render_ring.map.size = init->ring_size;
173 dev_priv->render_ring.map.type = 0;
174 dev_priv->render_ring.map.flags = 0;
175 dev_priv->render_ring.map.mtrr = 0;
177 drm_core_ioremap_wc(&dev_priv->render_ring.map, dev);
179 if (dev_priv->render_ring.map.handle == NULL) {
187 dev_priv->render_ring.virtual_start = dev_priv->render_ring.map.handle;
189 dev_priv->cpp = init->cpp;
190 dev_priv->back_offset = init->back_offset;
191 dev_priv->front_offset = init->front_offset;
192 dev_priv->current_page = 0;
198 dev_priv->allow_batchbuffer = 1;
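Note: lines 150-198 belong to the legacy (DRI1) ring setup; read together, they map the userspace-supplied ring region write-combined and keep the kernel virtual address in virtual_start. A sketch of that step with the gaps filled in (the _sketch wrapper, the error message and the return values are assumptions):

/* Illustrative sketch of the ring mapping implied by lines 169-187. */
static int map_render_ring_sketch(struct drm_device *dev, drm_i915_init_t *init)
{
	drm_i915_private_t *dev_priv = dev->dev_private;

	dev_priv->render_ring.size = init->ring_size;

	dev_priv->render_ring.map.offset = init->ring_start;
	dev_priv->render_ring.map.size = init->ring_size;
	dev_priv->render_ring.map.type = 0;
	dev_priv->render_ring.map.flags = 0;
	dev_priv->render_ring.map.mtrr = 0;

	/* Map the ring write-combined; .handle receives the CPU address. */
	drm_core_ioremap_wc(&dev_priv->render_ring.map, dev);
	if (dev_priv->render_ring.map.handle == NULL) {
		DRM_ERROR("can not ioremap virtual address for ring buffer\n");
		return -ENOMEM;
	}

	dev_priv->render_ring.virtual_start = dev_priv->render_ring.map.handle;
	return 0;
}
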
205 drm_i915_private_t *dev_priv = (drm_i915_private_t *) dev->dev_private;
210 ring = &dev_priv->render_ring;
228 I915_WRITE(HWS_PGA, dev_priv->dma_status_page);
337 drm_i915_private_t *dev_priv = dev->dev_private;
340 if ((dwords+1) * sizeof(int) >= dev_priv->render_ring.size - 8)
405 drm_i915_private_t *dev_priv = dev->dev_private;
408 dev_priv->counter++;
409 if (dev_priv->counter > 0x7FFFFFFFUL)
410 dev_priv->counter = 0;
412 master_priv->sarea_priv->last_enqueue = dev_priv->counter;
417 OUT_RING(dev_priv->counter);
514 drm_i915_private_t *dev_priv = dev->dev_private;
523 dev_priv->current_page,
536 if (dev_priv->current_page == 0) {
537 OUT_RING(dev_priv->back_offset);
538 dev_priv->current_page = 1;
540 OUT_RING(dev_priv->front_offset);
541 dev_priv->current_page = 0;
551 master_priv->sarea_priv->last_enqueue = dev_priv->counter++;
556 OUT_RING(dev_priv->counter);
560 master_priv->sarea_priv->pf_current_page = dev_priv->current_page;
566 drm_i915_private_t *dev_priv = dev->dev_private;
569 return intel_wait_ring_buffer(dev, &dev_priv->render_ring,
570 dev_priv->render_ring.size - 8);
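Note: lines 566-570 wait for the render ring to drain by requesting all but the last eight bytes of ring space. A sketch of the surrounding caller, assuming the function name and the lost-context call (neither appears in the listing):

/* Sketch of the quiescing helper implied by lines 566-570. */
static int quiescent_sketch(struct drm_device *dev)
{
	drm_i915_private_t *dev_priv = dev->dev_private;

	i915_kernel_lost_context(dev);	/* assumed: resynchronize ring pointers */
	return intel_wait_ring_buffer(dev, &dev_priv->render_ring,
				      dev_priv->render_ring.size - 8);
}
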
590 drm_i915_private_t *dev_priv = (drm_i915_private_t *) dev->dev_private;
598 if (!dev_priv->allow_batchbuffer) {
632 sarea_priv->last_dispatch = READ_BREADCRUMB(dev_priv);
643 drm_i915_private_t *dev_priv = (drm_i915_private_t *) dev->dev_private;
696 sarea_priv->last_dispatch = READ_BREADCRUMB(dev_priv);
725 drm_i915_private_t *dev_priv = dev->dev_private;
729 if (!dev_priv) {
739 value = dev_priv->allow_batchbuffer ? 1 : 0;
742 value = READ_BREADCRUMB(dev_priv);
748 value = dev_priv->has_gem;
751 value = dev_priv->num_fence_regs - dev_priv->fence_reg_start;
754 value = dev_priv->overlay ? 1 : 0;
761 value = dev_priv->has_gem;
783 drm_i915_private_t *dev_priv = dev->dev_private;
786 if (!dev_priv) {
795 dev_priv->tex_lru_log_granularity = param->value;
798 dev_priv->allow_batchbuffer = param->value;
801 if (param->value > dev_priv->num_fence_regs ||
805 dev_priv->fence_reg_start = param->value;
819 drm_i915_private_t *dev_priv = dev->dev_private;
821 struct intel_ring_buffer *ring = &dev_priv->render_ring;
826 if (!dev_priv) {
840 dev_priv->hws_map.offset = dev->agp->base + hws->addr;
841 dev_priv->hws_map.size = 4*1024;
842 dev_priv->hws_map.type = 0;
843 dev_priv->hws_map.flags = 0;
844 dev_priv->hws_map.mtrr = 0;
846 drm_core_ioremap_wc(&dev_priv->hws_map, dev);
847 if (dev_priv->hws_map.handle == NULL) {
854 ring->status_page.page_addr = dev_priv->hws_map.handle;
867 struct drm_i915_private *dev_priv = dev->dev_private;
869 dev_priv->bridge_dev = pci_get_bus_and_slot(0, PCI_DEVFN(0,0));
870 if (!dev_priv->bridge_dev) {
888 drm_i915_private_t *dev_priv = dev->dev_private;
895 pci_read_config_dword(dev_priv->bridge_dev, reg + 4, &temp_hi);
896 pci_read_config_dword(dev_priv->bridge_dev, reg, &temp_lo);
907 dev_priv->mch_res.name = "i915 MCHBAR";
908 dev_priv->mch_res.flags = IORESOURCE_MEM;
909 ret = pci_bus_alloc_resource(dev_priv->bridge_dev->bus,
910 &dev_priv->mch_res,
914 dev_priv->bridge_dev);
917 dev_priv->mch_res.start = 0;
922 pci_write_config_dword(dev_priv->bridge_dev, reg + 4,
923 upper_32_bits(dev_priv->mch_res.start));
925 pci_write_config_dword(dev_priv->bridge_dev, reg,
926 lower_32_bits(dev_priv->mch_res.start));
934 drm_i915_private_t *dev_priv = dev->dev_private;
939 dev_priv->mchbar_need_disable = false;
942 pci_read_config_dword(dev_priv->bridge_dev, DEVEN_REG, &temp);
945 pci_read_config_dword(dev_priv->bridge_dev, mchbar_reg, &temp);
956 dev_priv->mchbar_need_disable = true;
960 pci_write_config_dword(dev_priv->bridge_dev, DEVEN_REG,
963 pci_read_config_dword(dev_priv->bridge_dev, mchbar_reg, &temp);
964 pci_write_config_dword(dev_priv->bridge_dev, mchbar_reg, temp | 1);
971 drm_i915_private_t *dev_priv = dev->dev_private;
975 if (dev_priv->mchbar_need_disable) {
977 pci_read_config_dword(dev_priv->bridge_dev, DEVEN_REG, &temp);
979 pci_write_config_dword(dev_priv->bridge_dev, DEVEN_REG, temp);
981 pci_read_config_dword(dev_priv->bridge_dev, mchbar_reg, &temp);
983 pci_write_config_dword(dev_priv->bridge_dev, mchbar_reg, temp);
987 if (dev_priv->mch_res.start)
988 release_resource(&dev_priv->mch_res);
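Note: lines 888-988 cover MCHBAR handling: reserving address space via the bridge device, enabling the BAR, and undoing both at teardown. A sketch of the enable logic implied by lines 934-964, where the register names (MCHBAR_I915/MCHBAR_I965, DEVEN_MCHBAR_EN), the platform checks, and intel_alloc_mchbar_resource() are assumptions standing in for the allocation path shown at lines 888-926:

/* Sketch of the MCHBAR enable path implied by lines 934-964. */
static void setup_mchbar_sketch(struct drm_device *dev)
{
	drm_i915_private_t *dev_priv = dev->dev_private;
	int mchbar_reg = IS_I965G(dev) ? MCHBAR_I965 : MCHBAR_I915;
	u32 temp;
	bool enabled;

	dev_priv->mchbar_need_disable = false;

	/* The enable bit lives in DEVEN on 915-class parts, in MCHBAR itself elsewhere. */
	if (IS_I915G(dev) || IS_I915GM(dev)) {
		pci_read_config_dword(dev_priv->bridge_dev, DEVEN_REG, &temp);
		enabled = !!(temp & DEVEN_MCHBAR_EN);
	} else {
		pci_read_config_dword(dev_priv->bridge_dev, mchbar_reg, &temp);
		enabled = temp & 1;
	}

	/* Already enabled by firmware: nothing to do, and nothing to undo later. */
	if (enabled)
		return;

	/* Reserve address space, then flip the enable bit and remember to
	 * disable it again at teardown (lines 971-988). */
	if (intel_alloc_mchbar_resource(dev))
		return;

	dev_priv->mchbar_need_disable = true;

	if (IS_I915G(dev) || IS_I915GM(dev)) {
		pci_write_config_dword(dev_priv->bridge_dev, DEVEN_REG,
				       temp | DEVEN_MCHBAR_EN);
	} else {
		pci_read_config_dword(dev_priv->bridge_dev, mchbar_reg, &temp);
		pci_write_config_dword(dev_priv->bridge_dev, mchbar_reg, temp | 1);
	}
}
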
1005 struct drm_i915_private *dev_priv = dev->dev_private;
1011 pci_read_config_word(dev_priv->bridge_dev, INTEL_GMCH_CTRL, &tmp);
1247 struct drm_i915_private *dev_priv = dev->dev_private;
1253 compressed_fb = drm_mm_search_free(&dev_priv->vram, size, 4096, 0);
1255 dev_priv->no_fbc_reason = FBC_STOLEN_TOO_SMALL;
1263 dev_priv->no_fbc_reason = FBC_STOLEN_TOO_SMALL;
1274 compressed_llb = drm_mm_search_free(&dev_priv->vram, 4096,
1295 dev_priv->cfb_size = size;
1298 dev_priv->compressed_fb = compressed_fb;
1306 dev_priv->compressed_llb = compressed_llb;
1315 struct drm_i915_private *dev_priv = dev->dev_private;
1317 drm_mm_put_block(dev_priv->compressed_fb);
1318 if (dev_priv->compressed_llb)
1319 drm_mm_put_block(dev_priv->compressed_llb);
1368 struct drm_i915_private *dev_priv = dev->dev_private;
1376 drm_mm_init(&dev_priv->vram, 0, prealloc_size);
1380 dev_priv->mm.suspended = 0;
1413 dev_priv->allow_batchbuffer = 1;
1438 dev_priv->flip_pending_is_done = true;
1502 drm_i915_private_t *dev_priv = dev->dev_private;
1509 dev_priv->fsb_freq = 533; /* 133*4 */
1512 dev_priv->fsb_freq = 800; /* 200*4 */
1515 dev_priv->fsb_freq = 667; /* 167*4 */
1518 dev_priv->fsb_freq = 400; /* 100*4 */
1524 dev_priv->mem_freq = 533;
1527 dev_priv->mem_freq = 667;
1530 dev_priv->mem_freq = 800;
1536 dev_priv->is_ddr3 = (tmp & CSHRDDR3CTL_DDR3) ? 1 : 0;
1541 drm_i915_private_t *dev_priv = dev->dev_private;
1549 dev_priv->mem_freq = 800;
1552 dev_priv->mem_freq = 1066;
1555 dev_priv->mem_freq = 1333;
1558 dev_priv->mem_freq = 1600;
1563 dev_priv->mem_freq = 0;
1567 dev_priv->r_t = dev_priv->mem_freq;
1571 dev_priv->fsb_freq = 3200;
1574 dev_priv->fsb_freq = 3733;
1577 dev_priv->fsb_freq = 4266;
1580 dev_priv->fsb_freq = 4800;
1583 dev_priv->fsb_freq = 5333;
1586 dev_priv->fsb_freq = 5866;
1589 dev_priv->fsb_freq = 6400;
1594 dev_priv->fsb_freq = 0;
1598 if (dev_priv->fsb_freq == 3200) {
1599 dev_priv->c_m = 0;
1600 } else if (dev_priv->fsb_freq > 3200 && dev_priv->fsb_freq <= 4800) {
1601 dev_priv->c_m = 1;
1603 dev_priv->c_m = 2;
1761 unsigned long i915_chipset_val(struct drm_i915_private *dev_priv)
1768 diff1 = now - dev_priv->last_time1;
1776 if (total_count < dev_priv->last_count1) {
1777 diff = ~0UL - dev_priv->last_count1;
1780 diff = total_count - dev_priv->last_count1;
1784 if (cparams[i].i == dev_priv->c_m &&
1785 cparams[i].t == dev_priv->r_t) {
1796 dev_priv->last_count1 = total_count;
1797 dev_priv->last_time1 = now;
1802 unsigned long i915_mch_val(struct drm_i915_private *dev_priv)
1817 static unsigned long pvid_to_extvid(struct drm_i915_private *dev_priv, u8 pxvid)
1824 if (IS_MOBILE(dev_priv->dev))
1834 void i915_update_gfx_val(struct drm_i915_private *dev_priv)
1842 diff1 = timespec_sub(now, dev_priv->last_time2);
1851 if (count < dev_priv->last_count2) {
1852 diff = ~0UL - dev_priv->last_count2;
1855 diff = count - dev_priv->last_count2;
1858 dev_priv->last_count2 = count;
1859 dev_priv->last_time2 = now;
1864 dev_priv->gfx_power = diff;
1867 unsigned long i915_gfx_val(struct drm_i915_private *dev_priv)
1872 pxvid = I915_READ(PXVFREQ_BASE + (dev_priv->cur_delay * 4));
1874 ext_v = pvid_to_extvid(dev_priv, pxvid);
1878 t = i915_mch_val(dev_priv);
1892 corr2 = (corr * dev_priv->corr);
1897 i915_update_gfx_val(dev_priv);
1899 return dev_priv->gfx_power + state2;
1907 * - dev_priv->max_delay
1908 * - dev_priv->min_delay
1909 * - dev_priv->fmax
1910 * - dev_priv->gpu_busy
1922 struct drm_i915_private *dev_priv;
1928 dev_priv = i915_mch_dev;
1930 chipset_val = i915_chipset_val(dev_priv);
1931 graphics_val = i915_gfx_val(dev_priv);
1949 struct drm_i915_private *dev_priv;
1957 dev_priv = i915_mch_dev;
1959 if (dev_priv->max_delay > dev_priv->fmax)
1960 dev_priv->max_delay--;
1977 struct drm_i915_private *dev_priv;
1985 dev_priv = i915_mch_dev;
1987 if (dev_priv->max_delay < dev_priv->min_delay)
1988 dev_priv->max_delay++;
2004 struct drm_i915_private *dev_priv;
2010 dev_priv = i915_mch_dev;
2012 ret = dev_priv->busy;
2029 struct drm_i915_private *dev_priv;
2037 dev_priv = i915_mch_dev;
2039 dev_priv->max_delay = dev_priv->fstart;
2041 if (!ironlake_set_drps(dev_priv->dev, dev_priv->fstart))
2084 struct drm_i915_private *dev_priv;
2095 dev_priv = kzalloc(sizeof(drm_i915_private_t), GFP_KERNEL);
2096 if (dev_priv == NULL)
2099 dev->dev_private = (void *)dev_priv;
2100 dev_priv->dev = dev;
2101 dev_priv->info = (struct intel_device_info *) flags;
2117 dev_priv->regs = ioremap(base, size);
2118 if (!dev_priv->regs) {
2124 dev_priv->mm.gtt_mapping =
2127 if (dev_priv->mm.gtt_mapping == NULL) {
2137 dev_priv->mm.gtt_mtrr = mtrr_add(dev->agp->base,
2141 if (dev_priv->mm.gtt_mtrr < 0) {
2156 dev_priv->wq = create_singlethread_workqueue("i915");
2157 if (dev_priv->wq == NULL) {
2164 dev_priv->has_gem = 1;
2172 dev_priv->has_gem = 0;
2175 if (dev_priv->has_gem == 0 &&
2220 spin_lock_init(&dev_priv->user_irq_lock);
2221 spin_lock_init(&dev_priv->error_lock);
2222 dev_priv->trace_irq_seqno = 0;
2232 dev_priv->mm.suspended = 1;
2248 setup_timer(&dev_priv->hangcheck_timer, i915_hangcheck_elapsed,
2252 i915_mch_dev = dev_priv;
2253 dev_priv->mchdev_lock = &mchdev_lock;
2263 destroy_workqueue(dev_priv->wq);
2265 io_mapping_free(dev_priv->mm.gtt_mapping);
2267 iounmap(dev_priv->regs);
2269 pci_dev_put(dev_priv->bridge_dev);
2271 kfree(dev_priv);
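Note: lines 2084-2271 belong to driver load and its error unwind. A sketch of the early allocation and register-mapping steps implied by lines 2095-2118, where the _sketch wrapper, the MMIO BAR selection, and the simplified error handling are assumptions (the real code unwinds through goto labels, ending at the kfree on line 2271):

/* Sketch of the dev_priv allocation and MMIO mapping implied by lines 2095-2118. */
static int load_private_sketch(struct drm_device *dev, unsigned long flags)
{
	struct drm_i915_private *dev_priv;
	resource_size_t base, size;
	int mmio_bar;

	dev_priv = kzalloc(sizeof(drm_i915_private_t), GFP_KERNEL);
	if (dev_priv == NULL)
		return -ENOMEM;

	dev->dev_private = (void *)dev_priv;
	dev_priv->dev = dev;
	dev_priv->info = (struct intel_device_info *) flags;

	/* Assumed: register BAR 0 on i9xx-class parts, BAR 1 on gen2. */
	mmio_bar = IS_I9XX(dev) ? 0 : 1;
	base = pci_resource_start(dev->pdev, mmio_bar);
	size = pci_resource_len(dev->pdev, mmio_bar);

	dev_priv->regs = ioremap(base, size);
	if (!dev_priv->regs) {
		DRM_ERROR("failed to map registers\n");
		kfree(dev_priv);	/* simplified; see the unwind at line 2271 */
		return -EIO;
	}
	return 0;
}
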
2277 struct drm_i915_private *dev_priv = dev->dev_private;
2285 destroy_workqueue(dev_priv->wq);
2286 del_timer_sync(&dev_priv->hangcheck_timer);
2288 io_mapping_free(dev_priv->mm.gtt_mapping);
2289 if (dev_priv->mm.gtt_mtrr >= 0) {
2290 mtrr_del(dev_priv->mm.gtt_mtrr, dev->agp->base,
2292 dev_priv->mm.gtt_mtrr = -1;
2302 if (dev_priv->child_dev && dev_priv->child_dev_num) {
2303 kfree(dev_priv->child_dev);
2304 dev_priv->child_dev = NULL;
2305 dev_priv->child_dev_num = 0;
2315 if (dev_priv->regs != NULL)
2316 iounmap(dev_priv->regs);
2328 drm_mm_takedown(&dev_priv->vram);
2339 pci_dev_put(dev_priv->bridge_dev);
2377 drm_i915_private_t *dev_priv = dev->dev_private;
2379 if (!dev_priv || drm_core_check_feature(dev, DRIVER_MODESET)) {
2387 if (dev_priv->agp_heap)
2388 i915_mem_takedown(&(dev_priv->agp_heap));
2395 drm_i915_private_t *dev_priv = dev->dev_private;
2398 i915_mem_release(dev, file_priv, dev_priv->agp_heap);