Lines Matching refs:rdev

51 static void evergreen_gpu_init(struct radeon_device *rdev);
53 void evergreen_fini(struct radeon_device *rdev);
55 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
57 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
92 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
97 err = pci_find_cap(rdev->dev, PCIY_EXPRESS, &cap);
103 ctl = pci_read_config(rdev->dev, cap, 2);
113 pci_write_config(rdev->dev, cap, ctl, 2);
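
Lines 92-113 clamp the PCIe Max Read Request Size through FreeBSD's pci(9) accessors. A minimal sketch of the read-modify-write involved, assuming the PCIER_DEVICE_CTL offset and PCIEM_CTL_MAX_READ_REQUEST mask from <dev/pci/pcireg.h>; the driver caps the field at the 512-byte encoding (2) when firmware programmed something larger:

    #include <sys/param.h>
    #include <sys/bus.h>
    #include <dev/pci/pcireg.h>
    #include <dev/pci/pcivar.h>

    /* Sketch only: cap MaxReadReq at 512 bytes (encoding 2 in bits 14:12). */
    static void
    clamp_max_read_req(device_t dev)
    {
            int cap;
            uint16_t ctl;

            if (pci_find_cap(dev, PCIY_EXPRESS, &cap) != 0)
                    return;         /* not a PCIe function */
            ctl = pci_read_config(dev, cap + PCIER_DEVICE_CTL, 2);
            if ((ctl & PCIEM_CTL_MAX_READ_REQUEST) > (2 << 12)) {
                    ctl &= ~PCIEM_CTL_MAX_READ_REQUEST;
                    ctl |= 2 << 12;
                    pci_write_config(dev, cap + PCIER_DEVICE_CTL, ctl, 2);
            }
    }
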
117 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
125 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
141 * @rdev: radeon_device pointer
146 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
150 if (crtc >= rdev->num_crtc)
159 while (dce4_is_in_vblank(rdev, crtc)) {
161 if (!dce4_is_counter_moving(rdev, crtc))
166 while (!dce4_is_in_vblank(rdev, crtc)) {
168 if (!dce4_is_counter_moving(rdev, crtc))
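
The dce4_wait_for_vblank() matches at lines 146-168 show a two-phase wait: first drain any vblank already in progress, then wait for the next one to begin, so the caller returns at the start of a fresh vblank. A sketch of that control flow; the every-100-spins cadence for the dce4_is_counter_moving() guard is an assumption, and the guard breaks out when the CRTC is disabled or scanout is frozen:

    void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
    {
            unsigned i = 0;

            if (crtc >= rdev->num_crtc)
                    return;
            /* Phase 1: if we woke up inside a vblank, let it finish. */
            while (dce4_is_in_vblank(rdev, crtc)) {
                    if (i++ % 100 == 0 && !dce4_is_counter_moving(rdev, crtc))
                            break;
            }
            /* Phase 2: wait for the next vblank to start. */
            while (!dce4_is_in_vblank(rdev, crtc)) {
                    if (i++ % 100 == 0 && !dce4_is_counter_moving(rdev, crtc))
                            break;
            }
    }
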
177 * @rdev: radeon_device pointer
183 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
186 radeon_irq_kms_pflip_irq_get(rdev, crtc);
192 * @rdev: radeon_device pointer
198 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
201 radeon_irq_kms_pflip_irq_put(rdev, crtc);
207 * @rdev: radeon_device pointer
217 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
219 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
239 for (i = 0; i < rdev->usec_timeout; i++) {
255 int evergreen_get_temp(struct radeon_device *rdev)
260 if (rdev->family == CHIP_JUNIPER) {
293 int sumo_get_temp(struct radeon_device *rdev)
304 * @rdev: radeon_device pointer
310 void sumo_pm_init_profile(struct radeon_device *rdev)
315 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
316 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
317 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
318 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
321 if (rdev->flags & RADEON_IS_MOBILITY)
322 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
324 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
326 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
327 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
328 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
329 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
331 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
332 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
333 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
334 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
336 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
337 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
338 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
339 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
341 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
342 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
343 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
344 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
347 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
348 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
349 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
350 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
351 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
352 rdev->pm.power_state[idx].num_clock_modes - 1;
354 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
355 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
356 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
357 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
358 rdev->pm.power_state[idx].num_clock_modes - 1;
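
The sumo_pm_init_profile() matches above (lines 315-358) write the same four fields for every profile slot. A hypothetical helper (not present in the driver) that captures the pattern; only the profile slot, power-state index, and clock-mode indices vary:

    /* Hypothetical refactoring sketch: one PM profile entry carries a
     * power-state index plus clock-mode indices for dpms-off and dpms-on. */
    static void
    pm_profile_set(struct radeon_device *rdev, int profile, int ps_idx,
                   int off_cm_idx, int on_cm_idx)
    {
            rdev->pm.profiles[profile].dpms_off_ps_idx = ps_idx;
            rdev->pm.profiles[profile].dpms_on_ps_idx = ps_idx;
            rdev->pm.profiles[profile].dpms_off_cm_idx = off_cm_idx;
            rdev->pm.profiles[profile].dpms_on_cm_idx = on_cm_idx;
    }

    /* e.g. the LOW_SH block at lines 326-329 becomes: */
    pm_profile_set(rdev, PM_PROFILE_LOW_SH_IDX, idx, 0, 0);
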
364 * @rdev: radeon_device pointer
370 void btc_pm_init_profile(struct radeon_device *rdev)
375 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
376 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
377 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
378 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
383 if (rdev->flags & RADEON_IS_MOBILITY)
384 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
386 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
388 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
389 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
390 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
391 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
393 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
394 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
395 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
396 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
398 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
399 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
400 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
401 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
403 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
404 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
405 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
406 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
408 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
409 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
410 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
411 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
413 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
414 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
415 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
416 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
422 * @rdev: radeon_device pointer
427 void evergreen_pm_misc(struct radeon_device *rdev)
429 int req_ps_idx = rdev->pm.requested_power_state_index;
430 int req_cm_idx = rdev->pm.requested_clock_mode_index;
431 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
438 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
439 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
440 rdev->pm.current_vddc = voltage->voltage;
448 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
449 (rdev->family >= CHIP_BARTS) &&
450 rdev->pm.active_crtc_count &&
451 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
452 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
453 voltage = &rdev->pm.power_state[req_ps_idx].
454 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
459 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
460 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
461 rdev->pm.current_vddci = voltage->vddci;
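
evergreen_pm_misc() (lines 427-461) applies the same write-avoidance pattern to both rails: program a voltage only when the requested value is nonzero and differs from the cached current value, then refresh the cache. Pulled together from the fragments above:

    if (voltage->voltage && voltage->voltage != rdev->pm.current_vddc) {
            radeon_atom_set_voltage(rdev, voltage->voltage,
                                    SET_VOLTAGE_TYPE_ASIC_VDDC);
            rdev->pm.current_vddc = voltage->voltage;
    }
    if (voltage->vddci && voltage->vddci != rdev->pm.current_vddci) {
            radeon_atom_set_voltage(rdev, voltage->vddci,
                                    SET_VOLTAGE_TYPE_ASIC_VDDCI);
            rdev->pm.current_vddci = voltage->vddci;
    }

Skipping the redundant writes avoids re-running the ATOM BIOS voltage tables on every state change.
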
470 * @rdev: radeon_device pointer
474 void evergreen_pm_prepare(struct radeon_device *rdev)
476 struct drm_device *ddev = rdev->ddev;
495 * @rdev: radeon_device pointer
499 void evergreen_pm_finish(struct radeon_device *rdev)
501 struct drm_device *ddev = rdev->ddev;
520 * @rdev: radeon_device pointer
526 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
565 * @rdev: radeon_device pointer
570 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
574 bool connected = evergreen_hpd_sense(rdev, hpd);
633 * @rdev: radeon_device pointer
638 void evergreen_hpd_init(struct radeon_device *rdev)
640 struct drm_device *dev = rdev->ddev;
680 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
683 radeon_irq_kms_enable_hpd(rdev, enabled);
689 * @rdev: radeon_device pointer
694 void evergreen_hpd_fini(struct radeon_device *rdev)
696 struct drm_device *dev = rdev->ddev;
726 radeon_irq_kms_disable_hpd(rdev, disabled);
731 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
776 if (ASIC_IS_DCE5(rdev))
782 if (ASIC_IS_DCE5(rdev))
788 if (ASIC_IS_DCE5(rdev))
794 if (ASIC_IS_DCE5(rdev))
805 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1049 static void evergreen_program_watermarks(struct radeon_device *rdev,
1071 wm.yclk = rdev->pm.current_mclk * 10;
1072 wm.sclk = rdev->pm.current_sclk * 10;
1086 wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
1100 (rdev->disp_priority == 2)) {
1160 * @rdev: radeon_device pointer
1165 void evergreen_bandwidth_update(struct radeon_device *rdev)
1172 radeon_update_display_priority(rdev);
1174 for (i = 0; i < rdev->num_crtc; i++) {
1175 if (rdev->mode_info.crtcs[i]->base.enabled)
1178 for (i = 0; i < rdev->num_crtc; i += 2) {
1179 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
1180 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
1181 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
1182 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
1183 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
1184 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
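
evergreen_bandwidth_update() (lines 1165-1184) walks the CRTCs in pairs because heads 2i and 2i+1 share one line buffer: each head's allocation depends on whether its partner also has a mode set, hence the swapped (mode0, mode1) arguments. The loop body, reassembled from the matches above:

    for (i = 0; i < rdev->num_crtc; i += 2) {
            mode0 = &rdev->mode_info.crtcs[i]->base.mode;
            mode1 = &rdev->mode_info.crtcs[i + 1]->base.mode;
            /* Each head is sized against its partner's mode. */
            lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i],
                                                   mode0, mode1);
            evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i],
                                         lb_size, num_heads);
            lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i + 1],
                                                   mode1, mode0);
            evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i + 1],
                                         lb_size, num_heads);
    }
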
1191 * @rdev: radeon_device pointer
1197 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
1202 for (i = 0; i < rdev->usec_timeout; i++) {
1215 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
1223 for (i = 0; i < rdev->usec_timeout; i++) {
1238 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
1243 if (rdev->gart.robj == NULL) {
1244 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
1247 r = radeon_gart_table_vram_pin(rdev);
1250 radeon_gart_restore(rdev);
1262 if (rdev->flags & RADEON_IS_IGP) {
1270 if ((rdev->family == CHIP_JUNIPER) ||
1271 (rdev->family == CHIP_CYPRESS) ||
1272 (rdev->family == CHIP_HEMLOCK) ||
1273 (rdev->family == CHIP_BARTS))
1280 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
1281 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
1282 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
1286 (u32)(rdev->dummy_page.addr >> 12));
1289 evergreen_pcie_gart_tlb_flush(rdev);
1291 (unsigned)(rdev->mc.gtt_size >> 20),
1292 (unsigned long long)rdev->gart.table_addr);
1293 rdev->gart.ready = true;
1297 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
1319 radeon_gart_table_vram_unpin(rdev);
1322 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
1324 evergreen_pcie_gart_disable(rdev);
1325 radeon_gart_table_vram_free(rdev);
1326 radeon_gart_fini(rdev);
1330 static void evergreen_agp_enable(struct radeon_device *rdev)
1356 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
1367 for (i = 0; i < rdev->num_crtc; i++) {
1371 if (ASIC_IS_DCE6(rdev)) {
1374 radeon_wait_for_vblank(rdev, i);
1382 radeon_wait_for_vblank(rdev, i);
1390 frame_count = radeon_get_vblank_counter(rdev, i);
1391 for (j = 0; j < rdev->usec_timeout; j++) {
1392 if (radeon_get_vblank_counter(rdev, i) != frame_count)
1410 radeon_mc_wait_for_idle(rdev);
1424 for (i = 0; i < rdev->num_crtc; i++) {
1440 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
1446 for (i = 0; i < rdev->num_crtc; i++) {
1448 upper_32_bits(rdev->mc.vram_start));
1450 upper_32_bits(rdev->mc.vram_start));
1452 (u32)rdev->mc.vram_start);
1454 (u32)rdev->mc.vram_start);
1456 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
1457 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
1460 for (i = 0; i < rdev->num_crtc; i++) {
1477 for (j = 0; j < rdev->usec_timeout; j++) {
1493 for (i = 0; i < rdev->num_crtc; i++) {
1495 if (ASIC_IS_DCE6(rdev)) {
1509 frame_count = radeon_get_vblank_counter(rdev, i);
1510 for (j = 0; j < rdev->usec_timeout; j++) {
1511 if (radeon_get_vblank_counter(rdev, i) != frame_count)
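
Both evergreen_mc_stop() and evergreen_mc_resume() confirm that a full frame has passed by sampling the vblank counter and polling until it advances, bounded by rdev->usec_timeout (lines 1390-1392 and 1509-1511). A sketch of that bounded wait; the 1 µs polling step is an assumption consistent with the timeout's unit:

    frame_count = radeon_get_vblank_counter(rdev, i);
    for (j = 0; j < rdev->usec_timeout; j++) {
            if (radeon_get_vblank_counter(rdev, i) != frame_count)
                    break;          /* counter moved: a new frame started */
            udelay(1);
    }
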
1523 void evergreen_mc_program(struct radeon_device *rdev)
1539 evergreen_mc_stop(rdev, &save);
1540 if (evergreen_mc_wait_for_idle(rdev)) {
1541 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1546 if (rdev->flags & RADEON_IS_AGP) {
1547 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
1550 rdev->mc.vram_start >> 12);
1552 rdev->mc.gtt_end >> 12);
1556 rdev->mc.gtt_start >> 12);
1558 rdev->mc.vram_end >> 12);
1562 rdev->mc.vram_start >> 12);
1564 rdev->mc.vram_end >> 12);
1566 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
1568 if ((rdev->family == CHIP_PALM) ||
1569 (rdev->family == CHIP_SUMO) ||
1570 (rdev->family == CHIP_SUMO2)) {
1572 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
1573 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
1576 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
1577 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
1579 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
1582 if (rdev->flags & RADEON_IS_AGP) {
1583 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
1584 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
1585 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
1591 if (evergreen_mc_wait_for_idle(rdev)) {
1592 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1594 evergreen_mc_resume(rdev, &save);
1597 rv515_vga_render_disable(rdev);
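
evergreen_mc_program() (lines 1523-1597) brackets the aperture reprogramming with the stop/resume pair so nothing scans out of VRAM while the memory controller windows move. The shape of the function, reduced to its bracket:

    struct evergreen_mc_save save;

    evergreen_mc_stop(rdev, &save);             /* blank displays, save state */
    if (evergreen_mc_wait_for_idle(rdev))
            dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
    /* ... WREG32() the VRAM/GTT/AGP apertures while the MC is quiet ... */
    if (evergreen_mc_wait_for_idle(rdev))
            dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
    evergreen_mc_resume(rdev, &save);           /* restore scanout */
    rv515_vga_render_disable(rdev);             /* keep VGA off our VRAM */
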
1603 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
1605 struct radeon_ring *ring = &rdev->ring[ib->ring];
1618 } else if (rdev->wb.enabled) {
1638 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
1643 if (!rdev->me_fw || !rdev->pfp_fw)
1646 r700_cp_stop(rdev);
1653 fw_data = (const __be32 *)rdev->pfp_fw->data;
1659 fw_data = (const __be32 *)rdev->me_fw->data;
1670 static int evergreen_cp_start(struct radeon_device *rdev)
1672 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1676 r = radeon_ring_lock(rdev, ring, 7);
1684 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
1688 radeon_ring_unlock_commit(rdev, ring);
1693 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
1731 radeon_ring_unlock_commit(rdev, ring);
1736 static int evergreen_cp_resume(struct radeon_device *rdev)
1738 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1776 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
1777 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
1778 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
1780 if (rdev->wb.enabled)
1795 evergreen_cp_start(rdev);
1797 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
1808 static void evergreen_gpu_init(struct radeon_device *rdev)
1829 switch (rdev->family) {
1832 rdev->config.evergreen.num_ses = 2;
1833 rdev->config.evergreen.max_pipes = 4;
1834 rdev->config.evergreen.max_tile_pipes = 8;
1835 rdev->config.evergreen.max_simds = 10;
1836 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1837 rdev->config.evergreen.max_gprs = 256;
1838 rdev->config.evergreen.max_threads = 248;
1839 rdev->config.evergreen.max_gs_threads = 32;
1840 rdev->config.evergreen.max_stack_entries = 512;
1841 rdev->config.evergreen.sx_num_of_sets = 4;
1842 rdev->config.evergreen.sx_max_export_size = 256;
1843 rdev->config.evergreen.sx_max_export_pos_size = 64;
1844 rdev->config.evergreen.sx_max_export_smx_size = 192;
1845 rdev->config.evergreen.max_hw_contexts = 8;
1846 rdev->config.evergreen.sq_num_cf_insts = 2;
1848 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1849 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1850 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1854 rdev->config.evergreen.num_ses = 1;
1855 rdev->config.evergreen.max_pipes = 4;
1856 rdev->config.evergreen.max_tile_pipes = 4;
1857 rdev->config.evergreen.max_simds = 10;
1858 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1859 rdev->config.evergreen.max_gprs = 256;
1860 rdev->config.evergreen.max_threads = 248;
1861 rdev->config.evergreen.max_gs_threads = 32;
1862 rdev->config.evergreen.max_stack_entries = 512;
1863 rdev->config.evergreen.sx_num_of_sets = 4;
1864 rdev->config.evergreen.sx_max_export_size = 256;
1865 rdev->config.evergreen.sx_max_export_pos_size = 64;
1866 rdev->config.evergreen.sx_max_export_smx_size = 192;
1867 rdev->config.evergreen.max_hw_contexts = 8;
1868 rdev->config.evergreen.sq_num_cf_insts = 2;
1870 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1871 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1872 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1876 rdev->config.evergreen.num_ses = 1;
1877 rdev->config.evergreen.max_pipes = 4;
1878 rdev->config.evergreen.max_tile_pipes = 4;
1879 rdev->config.evergreen.max_simds = 5;
1880 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1881 rdev->config.evergreen.max_gprs = 256;
1882 rdev->config.evergreen.max_threads = 248;
1883 rdev->config.evergreen.max_gs_threads = 32;
1884 rdev->config.evergreen.max_stack_entries = 256;
1885 rdev->config.evergreen.sx_num_of_sets = 4;
1886 rdev->config.evergreen.sx_max_export_size = 256;
1887 rdev->config.evergreen.sx_max_export_pos_size = 64;
1888 rdev->config.evergreen.sx_max_export_smx_size = 192;
1889 rdev->config.evergreen.max_hw_contexts = 8;
1890 rdev->config.evergreen.sq_num_cf_insts = 2;
1892 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1893 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1894 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1899 rdev->config.evergreen.num_ses = 1;
1900 rdev->config.evergreen.max_pipes = 2;
1901 rdev->config.evergreen.max_tile_pipes = 2;
1902 rdev->config.evergreen.max_simds = 2;
1903 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1904 rdev->config.evergreen.max_gprs = 256;
1905 rdev->config.evergreen.max_threads = 192;
1906 rdev->config.evergreen.max_gs_threads = 16;
1907 rdev->config.evergreen.max_stack_entries = 256;
1908 rdev->config.evergreen.sx_num_of_sets = 4;
1909 rdev->config.evergreen.sx_max_export_size = 128;
1910 rdev->config.evergreen.sx_max_export_pos_size = 32;
1911 rdev->config.evergreen.sx_max_export_smx_size = 96;
1912 rdev->config.evergreen.max_hw_contexts = 4;
1913 rdev->config.evergreen.sq_num_cf_insts = 1;
1915 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1916 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1917 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1921 rdev->config.evergreen.num_ses = 1;
1922 rdev->config.evergreen.max_pipes = 2;
1923 rdev->config.evergreen.max_tile_pipes = 2;
1924 rdev->config.evergreen.max_simds = 2;
1925 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1926 rdev->config.evergreen.max_gprs = 256;
1927 rdev->config.evergreen.max_threads = 192;
1928 rdev->config.evergreen.max_gs_threads = 16;
1929 rdev->config.evergreen.max_stack_entries = 256;
1930 rdev->config.evergreen.sx_num_of_sets = 4;
1931 rdev->config.evergreen.sx_max_export_size = 128;
1932 rdev->config.evergreen.sx_max_export_pos_size = 32;
1933 rdev->config.evergreen.sx_max_export_smx_size = 96;
1934 rdev->config.evergreen.max_hw_contexts = 4;
1935 rdev->config.evergreen.sq_num_cf_insts = 1;
1937 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1938 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1939 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1943 rdev->config.evergreen.num_ses = 1;
1944 rdev->config.evergreen.max_pipes = 4;
1945 rdev->config.evergreen.max_tile_pipes = 4;
1946 if (rdev->ddev->pci_device == 0x9648)
1947 rdev->config.evergreen.max_simds = 3;
1948 else if ((rdev->ddev->pci_device == 0x9647) ||
1949 (rdev->ddev->pci_device == 0x964a))
1950 rdev->config.evergreen.max_simds = 4;
1952 rdev->config.evergreen.max_simds = 5;
1953 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1954 rdev->config.evergreen.max_gprs = 256;
1955 rdev->config.evergreen.max_threads = 248;
1956 rdev->config.evergreen.max_gs_threads = 32;
1957 rdev->config.evergreen.max_stack_entries = 256;
1958 rdev->config.evergreen.sx_num_of_sets = 4;
1959 rdev->config.evergreen.sx_max_export_size = 256;
1960 rdev->config.evergreen.sx_max_export_pos_size = 64;
1961 rdev->config.evergreen.sx_max_export_smx_size = 192;
1962 rdev->config.evergreen.max_hw_contexts = 8;
1963 rdev->config.evergreen.sq_num_cf_insts = 2;
1965 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1966 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1967 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1971 rdev->config.evergreen.num_ses = 1;
1972 rdev->config.evergreen.max_pipes = 4;
1973 rdev->config.evergreen.max_tile_pipes = 4;
1974 rdev->config.evergreen.max_simds = 2;
1975 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1976 rdev->config.evergreen.max_gprs = 256;
1977 rdev->config.evergreen.max_threads = 248;
1978 rdev->config.evergreen.max_gs_threads = 32;
1979 rdev->config.evergreen.max_stack_entries = 512;
1980 rdev->config.evergreen.sx_num_of_sets = 4;
1981 rdev->config.evergreen.sx_max_export_size = 256;
1982 rdev->config.evergreen.sx_max_export_pos_size = 64;
1983 rdev->config.evergreen.sx_max_export_smx_size = 192;
1984 rdev->config.evergreen.max_hw_contexts = 8;
1985 rdev->config.evergreen.sq_num_cf_insts = 2;
1987 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1988 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1989 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1993 rdev->config.evergreen.num_ses = 2;
1994 rdev->config.evergreen.max_pipes = 4;
1995 rdev->config.evergreen.max_tile_pipes = 8;
1996 rdev->config.evergreen.max_simds = 7;
1997 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1998 rdev->config.evergreen.max_gprs = 256;
1999 rdev->config.evergreen.max_threads = 248;
2000 rdev->config.evergreen.max_gs_threads = 32;
2001 rdev->config.evergreen.max_stack_entries = 512;
2002 rdev->config.evergreen.sx_num_of_sets = 4;
2003 rdev->config.evergreen.sx_max_export_size = 256;
2004 rdev->config.evergreen.sx_max_export_pos_size = 64;
2005 rdev->config.evergreen.sx_max_export_smx_size = 192;
2006 rdev->config.evergreen.max_hw_contexts = 8;
2007 rdev->config.evergreen.sq_num_cf_insts = 2;
2009 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2010 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2011 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2015 rdev->config.evergreen.num_ses = 1;
2016 rdev->config.evergreen.max_pipes = 4;
2017 rdev->config.evergreen.max_tile_pipes = 4;
2018 rdev->config.evergreen.max_simds = 6;
2019 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2020 rdev->config.evergreen.max_gprs = 256;
2021 rdev->config.evergreen.max_threads = 248;
2022 rdev->config.evergreen.max_gs_threads = 32;
2023 rdev->config.evergreen.max_stack_entries = 256;
2024 rdev->config.evergreen.sx_num_of_sets = 4;
2025 rdev->config.evergreen.sx_max_export_size = 256;
2026 rdev->config.evergreen.sx_max_export_pos_size = 64;
2027 rdev->config.evergreen.sx_max_export_smx_size = 192;
2028 rdev->config.evergreen.max_hw_contexts = 8;
2029 rdev->config.evergreen.sq_num_cf_insts = 2;
2031 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2032 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2033 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2037 rdev->config.evergreen.num_ses = 1;
2038 rdev->config.evergreen.max_pipes = 2;
2039 rdev->config.evergreen.max_tile_pipes = 2;
2040 rdev->config.evergreen.max_simds = 2;
2041 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2042 rdev->config.evergreen.max_gprs = 256;
2043 rdev->config.evergreen.max_threads = 192;
2044 rdev->config.evergreen.max_gs_threads = 16;
2045 rdev->config.evergreen.max_stack_entries = 256;
2046 rdev->config.evergreen.sx_num_of_sets = 4;
2047 rdev->config.evergreen.sx_max_export_size = 128;
2048 rdev->config.evergreen.sx_max_export_pos_size = 32;
2049 rdev->config.evergreen.sx_max_export_smx_size = 96;
2050 rdev->config.evergreen.max_hw_contexts = 4;
2051 rdev->config.evergreen.sq_num_cf_insts = 1;
2053 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2054 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2055 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2071 evergreen_fix_pci_max_read_req_size(rdev);
2074 if ((rdev->family == CHIP_PALM) ||
2075 (rdev->family == CHIP_SUMO) ||
2076 (rdev->family == CHIP_SUMO2))
2088 rdev->config.evergreen.tile_config = 0;
2089 switch (rdev->config.evergreen.max_tile_pipes) {
2092 rdev->config.evergreen.tile_config |= (0 << 0);
2095 rdev->config.evergreen.tile_config |= (1 << 0);
2098 rdev->config.evergreen.tile_config |= (2 << 0);
2101 rdev->config.evergreen.tile_config |= (3 << 0);
2105 if (rdev->flags & RADEON_IS_IGP)
2106 rdev->config.evergreen.tile_config |= 1 << 4;
2110 rdev->config.evergreen.tile_config |= 0 << 4;
2113 rdev->config.evergreen.tile_config |= 1 << 4;
2117 rdev->config.evergreen.tile_config |= 2 << 4;
2121 rdev->config.evergreen.tile_config |= 0 << 8;
2122 rdev->config.evergreen.tile_config |=
2127 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
2139 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
2160 if ((rdev->config.evergreen.max_backends == 1) &&
2161 (rdev->flags & RADEON_IS_IGP)) {
2171 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
2199 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
2202 if (rdev->family <= CHIP_SUMO2)
2205 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
2206 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
2207 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
2209 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
2210 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
2211 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
2218 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
2235 switch (rdev->family) {
2250 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
2251 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
2253 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
2254 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
2255 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
2256 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
2258 switch (rdev->family) {
2271 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2272 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2273 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2274 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2275 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2277 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2278 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2279 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2280 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2281 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2282 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2299 switch (rdev->family) {
2363 int evergreen_mc_init(struct radeon_device *rdev)
2369 rdev->mc.vram_is_ddr = true;
2370 if ((rdev->family == CHIP_PALM) ||
2371 (rdev->family == CHIP_SUMO) ||
2372 (rdev->family == CHIP_SUMO2))
2399 rdev->mc.vram_width = numchan * chansize;
2401 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
2402 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
2404 if ((rdev->family == CHIP_PALM) ||
2405 (rdev->family == CHIP_SUMO) ||
2406 (rdev->family == CHIP_SUMO2)) {
2408 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
2409 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
2412 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2413 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2415 rdev->mc.visible_vram_size = rdev->mc.aper_size;
2416 r700_vram_gtt_location(rdev, &rdev->mc);
2417 radeon_update_bandwidth_info(rdev);
2422 bool evergreen_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
2437 radeon_ring_force_activity(rdev, ring);
2438 return radeon_ring_test_lockup(rdev, ring);
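
The lockup check at lines 2437-2438 follows the r600-family pattern: if the graphics engine reports idle the ring cannot be hung; otherwise force some CP activity and see whether the read pointer moves. A sketch consistent with those calls; the GUI_ACTIVE test and the radeon_ring_lockup_update() signature are assumptions drawn from that pattern:

    bool evergreen_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
    {
            if (!(RREG32(GRBM_STATUS) & GUI_ACTIVE)) {
                    radeon_ring_lockup_update(ring);
                    return false;   /* engine idle: not a lockup */
            }
            /* Force CP activity, then compare rptr over a time window. */
            radeon_ring_force_activity(rdev, ring);
            return radeon_ring_test_lockup(rdev, ring);
    }
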
2441 static void evergreen_gpu_soft_reset_gfx(struct radeon_device *rdev)
2448 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
2450 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
2452 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
2454 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
2456 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
2458 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
2460 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
2462 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
2482 dev_info(rdev->dev, " GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
2489 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
2491 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
2493 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
2495 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
2497 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
2499 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
2501 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
2503 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
2507 static void evergreen_gpu_soft_reset_dma(struct radeon_device *rdev)
2514 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
2528 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
2532 static int evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
2545 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
2547 evergreen_mc_stop(rdev, &save);
2548 if (evergreen_mc_wait_for_idle(rdev)) {
2549 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2553 evergreen_gpu_soft_reset_gfx(rdev);
2556 evergreen_gpu_soft_reset_dma(rdev);
2561 evergreen_mc_resume(rdev, &save);
2565 int evergreen_asic_reset(struct radeon_device *rdev)
2567 return evergreen_gpu_soft_reset(rdev, (RADEON_RESET_GFX |
2574 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
2576 if (crtc >= rdev->num_crtc)
2582 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
2586 if (rdev->family >= CHIP_CAYMAN) {
2587 cayman_cp_int_cntl_setup(rdev, 0,
2589 cayman_cp_int_cntl_setup(rdev, 1, 0);
2590 cayman_cp_int_cntl_setup(rdev, 2, 0);
2600 if (rdev->num_crtc >= 4) {
2604 if (rdev->num_crtc >= 6) {
2611 if (rdev->num_crtc >= 4) {
2615 if (rdev->num_crtc >= 6) {
2621 if (!ASIC_IS_DCE6(rdev))
2640 int evergreen_irq_set(struct radeon_device *rdev)
2651 if (!rdev->irq.installed) {
2652 dev_warn(rdev->dev, "Can't enable IRQ/MSI because no handler is installed\n");
2656 if (!rdev->ih.enabled) {
2657 r600_disable_interrupts(rdev);
2659 evergreen_disable_interrupt_state(rdev);
2679 if (rdev->family >= CHIP_CAYMAN) {
2681 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
2685 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
2689 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
2694 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
2701 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
2706 if (rdev->family >= CHIP_CAYMAN) {
2708 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
2714 if (rdev->irq.crtc_vblank_int[0] ||
2715 atomic_read(&rdev->irq.pflip[0])) {
2719 if (rdev->irq.crtc_vblank_int[1] ||
2720 atomic_read(&rdev->irq.pflip[1])) {
2724 if (rdev->irq.crtc_vblank_int[2] ||
2725 atomic_read(&rdev->irq.pflip[2])) {
2729 if (rdev->irq.crtc_vblank_int[3] ||
2730 atomic_read(&rdev->irq.pflip[3])) {
2734 if (rdev->irq.crtc_vblank_int[4] ||
2735 atomic_read(&rdev->irq.pflip[4])) {
2739 if (rdev->irq.crtc_vblank_int[5] ||
2740 atomic_read(&rdev->irq.pflip[5])) {
2744 if (rdev->irq.hpd[0]) {
2748 if (rdev->irq.hpd[1]) {
2752 if (rdev->irq.hpd[2]) {
2756 if (rdev->irq.hpd[3]) {
2760 if (rdev->irq.hpd[4]) {
2764 if (rdev->irq.hpd[5]) {
2768 if (rdev->irq.afmt[0]) {
2772 if (rdev->irq.afmt[1]) {
2776 if (rdev->irq.afmt[2]) {
2780 if (rdev->irq.afmt[3]) {
2784 if (rdev->irq.afmt[4]) {
2788 if (rdev->irq.afmt[5]) {
2793 if (rdev->family >= CHIP_CAYMAN) {
2794 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
2795 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
2796 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
2802 if (rdev->family >= CHIP_CAYMAN)
2809 if (rdev->num_crtc >= 4) {
2813 if (rdev->num_crtc >= 6) {
2820 if (rdev->num_crtc >= 4) {
2824 if (rdev->num_crtc >= 6) {
2846 static void evergreen_irq_ack(struct radeon_device *rdev)
2850 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
2851 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
2852 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
2853 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
2854 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
2855 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
2856 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
2857 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
2858 if (rdev->num_crtc >= 4) {
2859 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
2860 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
2862 if (rdev->num_crtc >= 6) {
2863 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
2864 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2867 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
2868 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
2869 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
2870 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
2871 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
2872 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2874 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
2876 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
2878 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
2880 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
2882 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
2884 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
2887 if (rdev->num_crtc >= 4) {
2888 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
2890 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
2892 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
2894 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
2896 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
2898 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
2902 if (rdev->num_crtc >= 6) {
2903 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
2905 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
2907 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
2909 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
2911 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
2913 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
2917 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
2922 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
2927 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
2932 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
2937 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
2942 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
2947 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
2952 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
2957 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
2962 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
2967 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
2972 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
2979 static void evergreen_irq_disable(struct radeon_device *rdev)
2981 r600_disable_interrupts(rdev);
2984 evergreen_irq_ack(rdev);
2985 evergreen_disable_interrupt_state(rdev);
2988 void evergreen_irq_suspend(struct radeon_device *rdev)
2990 evergreen_irq_disable(rdev);
2991 r600_rlc_stop(rdev);
2994 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
2998 if (rdev->wb.enabled)
2999 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
3008 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
3009 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
3010 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
3015 return (wptr & rdev->ih.ptr_mask);
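
evergreen_get_ih_wptr() (lines 2994-3015) recovers from an interrupt-ring overflow by restarting 16 bytes past the writer, the last vector guaranteed not to have been overwritten, and masking everything to the ring size. Reassembled from the matches above; the overflow-ack step (which touches no rdev field, so the grep does not show it) is filled in as an assumption:

    if (wptr & RB_OVERFLOW) {
            /* The writer lapped the reader; resume at wptr + 16 and mask. */
            dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
                     wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
            rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
            tmp = RREG32(IH_RB_CNTL);           /* assumed overflow ack */
            tmp |= IH_WPTR_OVERFLOW_CLEAR;
            WREG32(IH_RB_CNTL, tmp);
    }
    return (wptr & rdev->ih.ptr_mask);
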
3018 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
3027 if (!rdev->ih.enabled || rdev->shutdown)
3030 wptr = evergreen_get_ih_wptr(rdev);
3034 if (atomic_xchg(&rdev->ih.lock, 1))
3037 rptr = rdev->ih.rptr;
3044 evergreen_irq_ack(rdev);
3049 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
3050 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
3056 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
3057 if (rdev->irq.crtc_vblank_int[0]) {
3058 drm_handle_vblank(rdev->ddev, 0);
3059 rdev->pm.vblank_sync = true;
3060 DRM_WAKEUP(&rdev->irq.vblank_queue);
3062 if (atomic_read(&rdev->irq.pflip[0]))
3063 radeon_crtc_handle_flip(rdev, 0);
3064 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
3069 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
3070 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
3082 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
3083 if (rdev->irq.crtc_vblank_int[1]) {
3084 drm_handle_vblank(rdev->ddev, 1);
3085 rdev->pm.vblank_sync = true;
3086 DRM_WAKEUP(&rdev->irq.vblank_queue);
3088 if (atomic_read(&rdev->irq.pflip[1]))
3089 radeon_crtc_handle_flip(rdev, 1);
3090 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
3095 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
3096 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
3108 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
3109 if (rdev->irq.crtc_vblank_int[2]) {
3110 drm_handle_vblank(rdev->ddev, 2);
3111 rdev->pm.vblank_sync = true;
3112 DRM_WAKEUP(&rdev->irq.vblank_queue);
3114 if (atomic_read(&rdev->irq.pflip[2]))
3115 radeon_crtc_handle_flip(rdev, 2);
3116 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
3121 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
3122 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
3134 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
3135 if (rdev->irq.crtc_vblank_int[3]) {
3136 drm_handle_vblank(rdev->ddev, 3);
3137 rdev->pm.vblank_sync = true;
3138 DRM_WAKEUP(&rdev->irq.vblank_queue);
3140 if (atomic_read(&rdev->irq.pflip[3]))
3141 radeon_crtc_handle_flip(rdev, 3);
3142 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
3147 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
3148 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
3160 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
3161 if (rdev->irq.crtc_vblank_int[4]) {
3162 drm_handle_vblank(rdev->ddev, 4);
3163 rdev->pm.vblank_sync = true;
3164 DRM_WAKEUP(&rdev->irq.vblank_queue);
3166 if (atomic_read(&rdev->irq.pflip[4]))
3167 radeon_crtc_handle_flip(rdev, 4);
3168 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
3173 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
3174 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
3186 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
3187 if (rdev->irq.crtc_vblank_int[5]) {
3188 drm_handle_vblank(rdev->ddev, 5);
3189 rdev->pm.vblank_sync = true;
3190 DRM_WAKEUP(&rdev->irq.vblank_queue);
3192 if (atomic_read(&rdev->irq.pflip[5]))
3193 radeon_crtc_handle_flip(rdev, 5);
3194 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
3199 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
3200 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
3212 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
3213 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
3219 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
3220 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
3226 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
3227 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
3233 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
3234 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
3240 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
3241 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
3247 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
3248 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
3261 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
3262 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
3268 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
3269 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
3275 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
3276 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
3282 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
3283 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
3289 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
3290 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
3296 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
3297 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
3309 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
3310 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
3312 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
3321 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
3325 if (rdev->family >= CHIP_CAYMAN) {
3328 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
3331 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
3334 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
3338 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
3342 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
3348 if (rdev->family >= CHIP_CAYMAN) {
3350 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
3360 rptr &= rdev->ih.ptr_mask;
3363 taskqueue_enqueue(rdev->tq, &rdev->hotplug_work);
3365 taskqueue_enqueue(rdev->tq, &rdev->audio_work);
3366 rdev->ih.rptr = rptr;
3367 WREG32(IH_RB_RPTR, rdev->ih.rptr);
3368 atomic_set(&rdev->ih.lock, 0);
3371 wptr = evergreen_get_ih_wptr(rdev);
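
evergreen_irq_process() (lines 3018-3371) drains 16-byte IH vectors under a one-bit lock taken with atomic_xchg() at line 3034. A skeleton of the loop: decode src_id/src_data, dispatch, wrap the read pointer with ptr_mask, publish it to the hardware, drop the lock, then re-read the write pointer in case vectors landed while draining:

    restart_ih:
            while (rptr != wptr) {
                    ring_index = rptr / 4;
                    src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
                    src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
                    /* ... switch (src_id): vblank, vline, hpd, fences, faults ... */
                    rptr += 16;
                    rptr &= rdev->ih.ptr_mask;
            }
            rdev->ih.rptr = rptr;
            WREG32(IH_RB_RPTR, rdev->ih.rptr);
            atomic_set(&rdev->ih.lock, 0);
            /* Catch vectors that arrived while we were draining. */
            wptr = evergreen_get_ih_wptr(rdev);
            if (wptr != rptr)
                    goto restart_ih;
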
3381 * @rdev: radeon_device pointer
3388 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
3391 struct radeon_ring *ring = &rdev->ring[fence->ring];
3392 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3409 * @rdev: radeon_device pointer
3414 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
3417 struct radeon_ring *ring = &rdev->ring[ib->ring];
3419 if (rdev->wb.enabled) {
3444 * @rdev: radeon_device pointer
3454 int evergreen_copy_dma(struct radeon_device *rdev,
3460 int ring_index = rdev->asic->copy.dma_ring_index;
3461 struct radeon_ring *ring = &rdev->ring[ring_index];
3466 r = radeon_semaphore_create(rdev, &sem);
3474 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
3477 radeon_semaphore_free(rdev, &sem, NULL);
3482 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
3486 radeon_semaphore_free(rdev, &sem, NULL);
3503 r = radeon_fence_emit(rdev, fence, ring->idx);
3505 radeon_ring_unlock_undo(rdev, ring);
3509 radeon_ring_unlock_commit(rdev, ring);
3510 radeon_semaphore_free(rdev, &sem, *fence);
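
evergreen_copy_dma() (lines 3454-3510) splits the copy into DMA packets and reserves ring space up front: 5 dwords per copy packet plus 11 for the semaphore sync and fence, which explains the num_loops * 5 + 11 at line 3474. A sketch of the sizing and the per-loop packet, assuming the evergreen DMA copy packet moves at most 0xFFFFF dwords:

    size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
    num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
    r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);

    for (i = 0; i < num_loops; i++) {
            cur_size_in_dw = size_in_dw;
            if (cur_size_in_dw > 0xFFFFF)
                    cur_size_in_dw = 0xFFFFF;   /* packet payload limit */
            size_in_dw -= cur_size_in_dw;
            radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, 0, cur_size_in_dw));
            radeon_ring_write(ring, dst_offset & 0xfffffffc);
            radeon_ring_write(ring, src_offset & 0xfffffffc);
            radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
            radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
            src_offset += cur_size_in_dw * 4;
            dst_offset += cur_size_in_dw * 4;
    }
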
3515 static int evergreen_startup(struct radeon_device *rdev)
3517 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3521 evergreen_pcie_gen2_enable(rdev);
3523 if (ASIC_IS_DCE5(rdev)) {
3524 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
3525 r = ni_init_microcode(rdev);
3531 r = ni_mc_load_microcode(rdev);
3537 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
3538 r = r600_init_microcode(rdev);
3546 r = r600_vram_scratch_init(rdev);
3550 evergreen_mc_program(rdev);
3551 if (rdev->flags & RADEON_IS_AGP) {
3552 evergreen_agp_enable(rdev);
3554 r = evergreen_pcie_gart_enable(rdev);
3558 evergreen_gpu_init(rdev);
3560 r = evergreen_blit_init(rdev);
3562 r600_blit_fini(rdev);
3563 rdev->asic->copy.copy = NULL;
3564 dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
3568 r = radeon_wb_init(rdev);
3572 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
3574 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
3578 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
3580 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
3585 r = r600_irq_init(rdev);
3588 radeon_irq_kms_fini(rdev);
3591 evergreen_irq_set(rdev);
3593 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
3599 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
3600 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
3606 r = evergreen_cp_load_microcode(rdev);
3609 r = evergreen_cp_resume(rdev);
3612 r = r600_dma_resume(rdev);
3616 r = radeon_ib_pool_init(rdev);
3618 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
3622 r = r600_audio_init(rdev);
3631 int evergreen_resume(struct radeon_device *rdev)
3638 if (radeon_asic_reset(rdev))
3639 dev_warn(rdev->dev, "GPU reset failed !\n");
3645 atom_asic_init(rdev->mode_info.atom_context);
3647 rdev->accel_working = true;
3648 r = evergreen_startup(rdev);
3651 rdev->accel_working = false;
3659 int evergreen_suspend(struct radeon_device *rdev)
3661 r600_audio_fini(rdev);
3662 r700_cp_stop(rdev);
3663 r600_dma_stop(rdev);
3664 evergreen_irq_suspend(rdev);
3665 radeon_wb_disable(rdev);
3666 evergreen_pcie_gart_disable(rdev);
3677 int evergreen_init(struct radeon_device *rdev)
3682 if (!radeon_get_bios(rdev)) {
3683 if (ASIC_IS_AVIVO(rdev))
3687 if (!rdev->is_atom_bios) {
3688 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
3691 r = radeon_atombios_init(rdev);
3697 if (radeon_asic_reset(rdev))
3698 dev_warn(rdev->dev, "GPU reset failed !\n");
3700 if (!radeon_card_posted(rdev)) {
3701 if (!rdev->bios) {
3702 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
3706 atom_asic_init(rdev->mode_info.atom_context);
3709 r600_scratch_init(rdev);
3711 radeon_surface_init(rdev);
3713 radeon_get_clock_info(rdev->ddev);
3715 r = radeon_fence_driver_init(rdev);
3719 if (rdev->flags & RADEON_IS_AGP) {
3720 r = radeon_agp_init(rdev);
3722 radeon_agp_disable(rdev);
3725 r = evergreen_mc_init(rdev);
3729 r = radeon_bo_init(rdev);
3733 r = radeon_irq_kms_init(rdev);
3737 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
3738 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
3740 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
3741 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
3743 rdev->ih.ring_obj = NULL;
3744 r600_ih_ring_init(rdev, 64 * 1024);
3746 r = r600_pcie_gart_init(rdev);
3750 rdev->accel_working = true;
3751 r = evergreen_startup(rdev);
3753 dev_err(rdev->dev, "disabling GPU acceleration\n");
3754 r700_cp_fini(rdev);
3755 r600_dma_fini(rdev);
3756 r600_irq_fini(rdev);
3757 radeon_wb_fini(rdev);
3758 radeon_ib_pool_fini(rdev);
3759 radeon_irq_kms_fini(rdev);
3760 evergreen_pcie_gart_fini(rdev);
3761 rdev->accel_working = false;
3768 if (ASIC_IS_DCE5(rdev)) {
3769 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
3778 void evergreen_fini(struct radeon_device *rdev)
3780 r600_audio_fini(rdev);
3781 r600_blit_fini(rdev);
3782 r700_cp_fini(rdev);
3783 r600_dma_fini(rdev);
3784 r600_irq_fini(rdev);
3785 radeon_wb_fini(rdev);
3786 radeon_ib_pool_fini(rdev);
3787 radeon_irq_kms_fini(rdev);
3788 evergreen_pcie_gart_fini(rdev);
3789 r600_vram_scratch_fini(rdev);
3790 radeon_gem_fini(rdev);
3791 radeon_fence_driver_fini(rdev);
3792 radeon_agp_fini(rdev);
3793 radeon_bo_fini(rdev);
3794 radeon_atombios_fini(rdev);
3795 if (ASIC_IS_DCE5(rdev))
3796 ni_fini_microcode(rdev);
3798 r600_fini_microcode(rdev);
3799 free(rdev->bios, DRM_MEM_DRIVER);
3800 rdev->bios = NULL;
3803 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
3811 if (rdev->flags & RADEON_IS_IGP)
3814 if (!(rdev->flags & RADEON_IS_PCIE))
3818 if (ASIC_IS_X2(rdev))
3821 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);