/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD: releng/10.2/sys/dev/drm2/radeon/evergreen.c 282199 2015-04-28 19:35:05Z dumbbell $");

#include <dev/drm2/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include <dev/drm2/radeon/radeon_drm.h>
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"

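/* CP microcode sizes (PFP and PM4/ME), in 32-bit dwords */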
#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376

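/* per-CRTC register block offsets, indexed by crtc id */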
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};

static void evergreen_gpu_init(struct radeon_device *rdev);
#ifdef FREEBSD_WIP /* FreeBSD: to please GCC 4.2. */
void evergreen_fini(struct radeon_device *rdev);
#endif
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
#ifdef FREEBSD_WIP /* FreeBSD: to please GCC 4.2. */
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
#endif

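/* Decode the bank width/height, macro tile aspect, and tile split fields
 * from the radeon tiling flags into their surface register encodings.
 */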
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}

void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	u16 ctl, v;
	int err, cap;

	err = pci_find_cap(rdev->dev, PCIY_EXPRESS, &cap);
	if (err)
		return;

	cap += PCIER_DEVICE_CTL;

	ctl = pci_read_config(rdev->dev, cap, 2);

	v = (ctl & PCIEM_CTL_MAX_READ_REQUEST) >> 12;

	/* if the BIOS or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7)) {
		ctl &= ~PCIEM_CTL_MAX_READ_REQUEST;
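		/* default to 512 bytes (encoded field value 2) */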
		ctl |= (2 << 12);
		pci_write_config(rdev->dev, cap, ctl, 2);
	}
}

static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
{
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
		return true;
	else
		return false;
}

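/* two back-to-back reads of the scanout position; if they differ, the
 * CRTC's position counter is still advancing.
 */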
static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
{
	u32 pos1, pos2;

	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);

	if (pos1 != pos2)
		return true;
	else
		return false;
}

/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}

/**
 * evergreen_pre_page_flip - pre-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to prepare for pageflip on
 *
 * Pre-pageflip callback (evergreen+).
 * Enables the pageflip irq (vblank irq).
 */
void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	/* enable the pflip int */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}

/**
 * evergreen_post_page_flip - post-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to cleanup pageflip on
 *
 * Post-pageflip callback (evergreen+).
 * Disables the pageflip irq (vblank irq).
 */
void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}

/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Does the actual pageflip (evergreen+).
 * During vblank we take the crtc lock and wait for the update_pending
 * bit to go high, when it does, we release the lock, and allow the
 * double buffered update to take place.
 * Returns the current update pending status.
 */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high. */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}

/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
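			/* sign-extend the 9-bit two's-complement reading */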
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}

int sumo_get_temp(struct radeon_device *rdev)
{
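	/* the raw status value carries a fixed 49 degree C offset */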
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49;

	return actual_temp * 1000;
}

/**
 * sumo_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (sumo, trinity, SI).
 * Used for profile mode only.
 */
void sumo_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;

	/* low,mid sh/mh */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);

	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;

	/* high sh/mh */
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;

	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;
}

/**
 * btc_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (BTC, cayman).
 * Used for profile mode only.
 */
void btc_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
	/* starting with BTC, there is one state that is used for both
	 * MH and SH.  Difference is that we always use the high clock index for
	 * mclk.
	 */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	/* low sh */
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
	/* mid sh */
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
	/* high sh */
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
	/* low mh */
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
	/* mid mh */
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
	/* high mh */
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
}

/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->voltage == 0xff01)
			return;
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->vddci == 0xff01)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}

/**
 * evergreen_pm_prepare - pre-power state change callback.
 *
 * @rdev: radeon_device pointer
 *
 * Prepare for a power state change (evergreen+).
 */
void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

/**
 * evergreen_pm_finish - post-power state change callback.
 *
 * @rdev: radeon_device pointer
 *
 * Clean up after a power state change (evergreen+).
 */
void evergreen_pm_finish(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* enable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

/**
 * evergreen_hpd_sense - hpd sense callback.
 *
 * @rdev: radeon_device pointer
 * @hpd: hpd (hotplug detect) pin
 *
 * Checks if a digital monitor is connected (evergreen+).
 * Returns true if connected, false if not connected.
 */
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	switch (hpd) {
	case RADEON_HPD_1:
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_2:
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_3:
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_4:
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_5:
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_6:
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	default:
		break;
	}

	return connected;
}

/**
 * evergreen_hpd_set_polarity - hpd set polarity callback.
 *
 * @rdev: radeon_device pointer
 * @hpd: hpd (hotplug detect) pin
 *
 * Set the polarity of the hpd pin (evergreen+).
 */
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = evergreen_hpd_sense(rdev, hpd);

	switch (hpd) {
	case RADEON_HPD_1:
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_2:
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_3:
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_4:
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_5:
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_6:
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		break;
	default:
		break;
	}
}

/**
 * evergreen_hpd_init - hpd setup callback.
 *
 * @rdev: radeon_device pointer
 *
 * Setup the hpd pins used by the card (evergreen+).
 * Enable the pin, set the polarity, and enable the hpd interrupts.
 */
void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned enabled = 0;
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS to avoid breaking the
			 * aux dp channel on iMacs; this helps (but does not completely fix)
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
			 * and also avoids interrupt storms during dpms.
			 */
			continue;
		}
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			break;
		default:
			break;
		}
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
		enabled |= 1 << radeon_connector->hpd.hpd;
	}
	radeon_irq_kms_enable_hpd(rdev, enabled);
}

/**
 * evergreen_hpd_fini - hpd tear down callback.
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the hpd pins used by the card (evergreen+).
 * Disable the hpd interrupts.
 */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disabled = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			break;
		default:
			break;
		}
		disabled |= 1 << radeon_connector->hpd.hpd;
	}
	radeon_irq_kms_disable_hpd(rdev, disabled);
}

/* watermark setup */

static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode)
			tmp = 0; /* 1/2 */
		else
			tmp = 2; /* whole */
	} else
		tmp = 0;

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}

u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
{
	u32 tmp = RREG32(MC_SHARED_CHMAP);

	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		return 1;
	case 1:
		return 2;
	case 2:
		return 4;
	case 3:
		return 8;
	}
}

struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;   /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};

static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate DRAM Bandwidth and the part allocated to display. */
	fixed20_12 dram_efficiency; /* 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	dram_efficiency.full = dfixed_const(7);
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	/* Calculate DRAM Bandwidth and the part allocated to display. */
	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the display Data return Bandwidth */
	fixed20_12 return_efficiency; /* 0.8 */
	fixed20_12 sclk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	sclk.full = dfixed_const(wm->sclk);
	sclk.full = dfixed_div(sclk, a);
	a.full = dfixed_const(10);
	return_efficiency.full = dfixed_const(8);
	return_efficiency.full = dfixed_div(return_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, sclk);
	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the DMIF Request Bandwidth */
	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
	fixed20_12 disp_clk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	disp_clk.full = dfixed_const(wm->disp_clk);
	disp_clk.full = dfixed_div(disp_clk, a);
	a.full = dfixed_const(10);
	disp_clk_request_efficiency.full = dfixed_const(8);
	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, disp_clk);
	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the Available bandwidth. Display can use this temporarily but not on average. */
	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);

	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
}

static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the display mode Average Bandwidth
	 * DisplayMode should contain the source and destination dimensions,
	 * timing, etc.
	 */
	fixed20_12 bpp;
	fixed20_12 line_time;
	fixed20_12 src_width;
	fixed20_12 bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
	line_time.full = dfixed_div(line_time, a);
	bpp.full = dfixed_const(wm->bytes_per_pixel);
	src_width.full = dfixed_const(wm->src_width);
	bandwidth.full = dfixed_mul(src_width, bpp);
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
	bandwidth.full = dfixed_div(bandwidth, line_time);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}

static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	if (evergreen_average_bandwidth(wm) <=
	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
		return true;
	else
		return false;
}

static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
{
	if (evergreen_average_bandwidth(wm) <=
	    (evergreen_available_bandwidth(wm) / wm->num_heads))
		return true;
	else
		return false;
}

static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
{
	u32 lb_partitions = wm->lb_size / wm->src_width;
	u32 line_time = wm->active_time + wm->blank_time;
	u32 latency_tolerant_lines;
	u32 latency_hiding;
	fixed20_12 a;

	a.full = dfixed_const(1);
	if (wm->vsc.full > a.full)
		latency_tolerant_lines = 1;
	else {
		if (lb_partitions <= (wm->vtaps + 1))
			latency_tolerant_lines = 1;
		else
			latency_tolerant_lines = 2;
	}

	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);

	if (evergreen_latency_watermark(wm) <= latency_hiding)
		return true;
	else
		return false;
}

static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

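		/* radeon PM clocks are stored in 10 kHz units; convert to kHz */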
1071254885Sdumbbell		wm.yclk = rdev->pm.current_mclk * 10;
1072254885Sdumbbell		wm.sclk = rdev->pm.current_sclk * 10;
1073254885Sdumbbell		wm.disp_clk = mode->clock;
1074254885Sdumbbell		wm.src_width = mode->crtc_hdisplay;
1075254885Sdumbbell		wm.active_time = mode->crtc_hdisplay * pixel_period;
1076254885Sdumbbell		wm.blank_time = line_time - wm.active_time;
1077254885Sdumbbell		wm.interlaced = false;
1078254885Sdumbbell		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1079254885Sdumbbell			wm.interlaced = true;
1080254885Sdumbbell		wm.vsc = radeon_crtc->vsc;
1081254885Sdumbbell		wm.vtaps = 1;
1082254885Sdumbbell		if (radeon_crtc->rmx_type != RMX_OFF)
1083254885Sdumbbell			wm.vtaps = 2;
1084254885Sdumbbell		wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
1085254885Sdumbbell		wm.lb_size = lb_size;
1086254885Sdumbbell		wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
1087254885Sdumbbell		wm.num_heads = num_heads;
1088254885Sdumbbell
1089254885Sdumbbell		/* set for high clocks */
1090254885Sdumbbell		latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
1091254885Sdumbbell		/* set for low clocks */
1092254885Sdumbbell		/* wm.yclk = low clk; wm.sclk = low clk */
1093254885Sdumbbell		latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);
1094254885Sdumbbell
1095254885Sdumbbell		/* possibly force display priority to high */
1096254885Sdumbbell		/* should really do this at mode validation time... */
1097254885Sdumbbell		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
1098254885Sdumbbell		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
1099254885Sdumbbell		    !evergreen_check_latency_hiding(&wm) ||
1100254885Sdumbbell		    (rdev->disp_priority == 2)) {
1101254885Sdumbbell			DRM_DEBUG_KMS("force priority to high\n");
1102254885Sdumbbell			priority_a_cnt |= PRIORITY_ALWAYS_ON;
1103254885Sdumbbell			priority_b_cnt |= PRIORITY_ALWAYS_ON;
1104254885Sdumbbell		}
1105254885Sdumbbell
1106254885Sdumbbell		a.full = dfixed_const(1000);
1107254885Sdumbbell		b.full = dfixed_const(mode->clock);
1108254885Sdumbbell		b.full = dfixed_div(b, a);
1109254885Sdumbbell		c.full = dfixed_const(latency_watermark_a);
1110254885Sdumbbell		c.full = dfixed_mul(c, b);
1111254885Sdumbbell		c.full = dfixed_mul(c, radeon_crtc->hsc);
1112254885Sdumbbell		c.full = dfixed_div(c, a);
1113254885Sdumbbell		a.full = dfixed_const(16);
1114254885Sdumbbell		c.full = dfixed_div(c, a);
1115254885Sdumbbell		priority_a_mark = dfixed_trunc(c);
1116254885Sdumbbell		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
1117254885Sdumbbell
1118254885Sdumbbell		a.full = dfixed_const(1000);
1119254885Sdumbbell		b.full = dfixed_const(mode->clock);
1120254885Sdumbbell		b.full = dfixed_div(b, a);
1121254885Sdumbbell		c.full = dfixed_const(latency_watermark_b);
1122254885Sdumbbell		c.full = dfixed_mul(c, b);
1123254885Sdumbbell		c.full = dfixed_mul(c, radeon_crtc->hsc);
1124254885Sdumbbell		c.full = dfixed_div(c, a);
1125254885Sdumbbell		a.full = dfixed_const(16);
1126254885Sdumbbell		c.full = dfixed_div(c, a);
1127254885Sdumbbell		priority_b_mark = dfixed_trunc(c);
1128254885Sdumbbell		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
1129254885Sdumbbell	}
1130254885Sdumbbell
1131254885Sdumbbell	/* select wm A */
1132254885Sdumbbell	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
1133254885Sdumbbell	tmp = arb_control3;
1134254885Sdumbbell	tmp &= ~LATENCY_WATERMARK_MASK(3);
1135254885Sdumbbell	tmp |= LATENCY_WATERMARK_MASK(1);
1136254885Sdumbbell	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
1137254885Sdumbbell	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
1138254885Sdumbbell	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
1139254885Sdumbbell		LATENCY_HIGH_WATERMARK(line_time)));
1140254885Sdumbbell	/* select wm B */
1141254885Sdumbbell	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
1142254885Sdumbbell	tmp &= ~LATENCY_WATERMARK_MASK(3);
1143254885Sdumbbell	tmp |= LATENCY_WATERMARK_MASK(2);
1144254885Sdumbbell	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
1145254885Sdumbbell	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
1146254885Sdumbbell	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
1147254885Sdumbbell		LATENCY_HIGH_WATERMARK(line_time)));
1148254885Sdumbbell	/* restore original selection */
1149254885Sdumbbell	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
1150254885Sdumbbell
1151254885Sdumbbell	/* write the priority marks */
1152254885Sdumbbell	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
1153254885Sdumbbell	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
1154254885Sdumbbell
1155254885Sdumbbell}
1156254885Sdumbbell
1157254885Sdumbbell/**
1158254885Sdumbbell * evergreen_bandwidth_update - update display watermarks callback.
1159254885Sdumbbell *
1160254885Sdumbbell * @rdev: radeon_device pointer
1161254885Sdumbbell *
1162254885Sdumbbell * Update the display watermarks based on the requested mode(s)
1163254885Sdumbbell * (evergreen+).
1164254885Sdumbbell */
1165254885Sdumbbellvoid evergreen_bandwidth_update(struct radeon_device *rdev)
1166254885Sdumbbell{
1167254885Sdumbbell	struct drm_display_mode *mode0 = NULL;
1168254885Sdumbbell	struct drm_display_mode *mode1 = NULL;
1169254885Sdumbbell	u32 num_heads = 0, lb_size;
1170254885Sdumbbell	int i;
1171254885Sdumbbell
1172254885Sdumbbell	radeon_update_display_priority(rdev);
1173254885Sdumbbell
1174254885Sdumbbell	for (i = 0; i < rdev->num_crtc; i++) {
1175254885Sdumbbell		if (rdev->mode_info.crtcs[i]->base.enabled)
1176254885Sdumbbell			num_heads++;
1177254885Sdumbbell	}
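	/* line buffers are shared between pairs of display controllers, so
	 * split the buffer and program watermarks two heads at a time */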
1178254885Sdumbbell	for (i = 0; i < rdev->num_crtc; i += 2) {
1179254885Sdumbbell		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
1180254885Sdumbbell		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
1181254885Sdumbbell		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
1182254885Sdumbbell		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
1183254885Sdumbbell		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
1184254885Sdumbbell		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
1185254885Sdumbbell	}
1186254885Sdumbbell}
1187254885Sdumbbell
1188254885Sdumbbell/**
1189254885Sdumbbell * evergreen_mc_wait_for_idle - wait for MC idle callback.
1190254885Sdumbbell *
1191254885Sdumbbell * @rdev: radeon_device pointer
1192254885Sdumbbell *
1193254885Sdumbbell * Wait for the MC (memory controller) to be idle.
1194254885Sdumbbell * (evergreen+).
1195254885Sdumbbell * Returns 0 if the MC is idle, -1 if not.
1196254885Sdumbbell */
1197254885Sdumbbellint evergreen_mc_wait_for_idle(struct radeon_device *rdev)
1198254885Sdumbbell{
1199254885Sdumbbell	unsigned i;
1200254885Sdumbbell	u32 tmp;
1201254885Sdumbbell
1202254885Sdumbbell	for (i = 0; i < rdev->usec_timeout; i++) {
1203254885Sdumbbell		/* check the MC busy bits in SRBM_STATUS */
1204254885Sdumbbell		tmp = RREG32(SRBM_STATUS) & 0x1F00;
1205254885Sdumbbell		if (!tmp)
1206254885Sdumbbell			return 0;
1207282199Sdumbbell		udelay(1);
1208254885Sdumbbell	}
1209254885Sdumbbell	return -1;
1210254885Sdumbbell}
1211254885Sdumbbell
1212254885Sdumbbell/*
1213254885Sdumbbell * GART
1214254885Sdumbbell */
1215254885Sdumbbellvoid evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
1216254885Sdumbbell{
1217254885Sdumbbell	unsigned i;
1218254885Sdumbbell	u32 tmp;
1219254885Sdumbbell
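	/* flush the HDP cache so CPU updates to the GART table reach memory */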
1220254885Sdumbbell	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
1221254885Sdumbbell
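	/* request a context0 page table flush from the VM block and wait for
	 * it to respond: a response type of 2 means the flush failed */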
1222254885Sdumbbell	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
1223254885Sdumbbell	for (i = 0; i < rdev->usec_timeout; i++) {
1224254885Sdumbbell		/* poll the VM context0 request/response register */
1225254885Sdumbbell		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
1226254885Sdumbbell		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
1227254885Sdumbbell		if (tmp == 2) {
1228254885Sdumbbell			DRM_ERROR("[drm] evergreen flush TLB failed\n");
1229254885Sdumbbell			return;
1230254885Sdumbbell		}
1231254885Sdumbbell		if (tmp) {
1232254885Sdumbbell			return;
1233254885Sdumbbell		}
1234282199Sdumbbell		udelay(1);
1235254885Sdumbbell	}
1236254885Sdumbbell}
1237254885Sdumbbell
1238254885Sdumbbellstatic int evergreen_pcie_gart_enable(struct radeon_device *rdev)
1239254885Sdumbbell{
1240254885Sdumbbell	u32 tmp;
1241254885Sdumbbell	int r;
1242254885Sdumbbell
1243254885Sdumbbell	if (rdev->gart.robj == NULL) {
1244254885Sdumbbell		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
1245254885Sdumbbell		return -EINVAL;
1246254885Sdumbbell	}
1247254885Sdumbbell	r = radeon_gart_table_vram_pin(rdev);
1248254885Sdumbbell	if (r)
1249254885Sdumbbell		return r;
1250254885Sdumbbell	radeon_gart_restore(rdev);
1251254885Sdumbbell	/* Setup L2 cache */
1252254885Sdumbbell	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
1253254885Sdumbbell				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
1254254885Sdumbbell				EFFECTIVE_L2_QUEUE_SIZE(7));
1255254885Sdumbbell	WREG32(VM_L2_CNTL2, 0);
1256254885Sdumbbell	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
1257254885Sdumbbell	/* Setup TLB control */
1258254885Sdumbbell	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
1259254885Sdumbbell		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
1260254885Sdumbbell		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
1261254885Sdumbbell		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
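	/* fusion (IGP) parts expose the MD L1 TLB controls through the FUS_
	 * register variants */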
1262254885Sdumbbell	if (rdev->flags & RADEON_IS_IGP) {
1263254885Sdumbbell		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
1264254885Sdumbbell		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
1265254885Sdumbbell		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
1266254885Sdumbbell	} else {
1267254885Sdumbbell		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
1268254885Sdumbbell		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
1269254885Sdumbbell		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
1270254885Sdumbbell		if ((rdev->family == CHIP_JUNIPER) ||
1271254885Sdumbbell		    (rdev->family == CHIP_CYPRESS) ||
1272254885Sdumbbell		    (rdev->family == CHIP_HEMLOCK) ||
1273254885Sdumbbell		    (rdev->family == CHIP_BARTS))
1274254885Sdumbbell			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
1275254885Sdumbbell	}
1276254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
1277254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
1278254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
1279254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
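	/* map the whole GTT aperture through VM context0, point it at the GART
	 * page table and route faults to the dummy page */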
1280254885Sdumbbell	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
1281254885Sdumbbell	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
1282254885Sdumbbell	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
1283254885Sdumbbell	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
1284254885Sdumbbell				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
1285254885Sdumbbell	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
1286254885Sdumbbell			(u32)(rdev->dummy_page.addr >> 12));
1287254885Sdumbbell	WREG32(VM_CONTEXT1_CNTL, 0);
1288254885Sdumbbell
1289254885Sdumbbell	evergreen_pcie_gart_tlb_flush(rdev);
1290254885Sdumbbell	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
1291254885Sdumbbell		 (unsigned)(rdev->mc.gtt_size >> 20),
1292254885Sdumbbell		 (unsigned long long)rdev->gart.table_addr);
1293254885Sdumbbell	rdev->gart.ready = true;
1294254885Sdumbbell	return 0;
1295254885Sdumbbell}
1296254885Sdumbbell
1297254885Sdumbbellstatic void evergreen_pcie_gart_disable(struct radeon_device *rdev)
1298254885Sdumbbell{
1299254885Sdumbbell	u32 tmp;
1300254885Sdumbbell
1301254885Sdumbbell	/* Disable all tables */
1302254885Sdumbbell	WREG32(VM_CONTEXT0_CNTL, 0);
1303254885Sdumbbell	WREG32(VM_CONTEXT1_CNTL, 0);
1304254885Sdumbbell
1305254885Sdumbbell	/* Setup L2 cache */
1306254885Sdumbbell	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
1307254885Sdumbbell				EFFECTIVE_L2_QUEUE_SIZE(7));
1308254885Sdumbbell	WREG32(VM_L2_CNTL2, 0);
1309254885Sdumbbell	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
1310254885Sdumbbell	/* Setup TLB control */
1311254885Sdumbbell	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
1312254885Sdumbbell	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
1313254885Sdumbbell	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
1314254885Sdumbbell	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
1315254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
1316254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
1317254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
1318254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
1319254885Sdumbbell	radeon_gart_table_vram_unpin(rdev);
1320254885Sdumbbell}
1321254885Sdumbbell
1322254885Sdumbbellstatic void evergreen_pcie_gart_fini(struct radeon_device *rdev)
1323254885Sdumbbell{
1324254885Sdumbbell	evergreen_pcie_gart_disable(rdev);
1325254885Sdumbbell	radeon_gart_table_vram_free(rdev);
1326254885Sdumbbell	radeon_gart_fini(rdev);
1327254885Sdumbbell}
1328254885Sdumbbell
1329254885Sdumbbell
1330254885Sdumbbellstatic void evergreen_agp_enable(struct radeon_device *rdev)
1331254885Sdumbbell{
1332254885Sdumbbell	u32 tmp;
1333254885Sdumbbell
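	/* AGP mode: program the caches and TLBs but leave both VM contexts
	 * disabled so no page table translation takes place */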
1334254885Sdumbbell	/* Setup L2 cache */
1335254885Sdumbbell	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
1336254885Sdumbbell				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
1337254885Sdumbbell				EFFECTIVE_L2_QUEUE_SIZE(7));
1338254885Sdumbbell	WREG32(VM_L2_CNTL2, 0);
1339254885Sdumbbell	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
1340254885Sdumbbell	/* Setup TLB control */
1341254885Sdumbbell	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
1342254885Sdumbbell		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
1343254885Sdumbbell		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
1344254885Sdumbbell		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
1345254885Sdumbbell	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
1346254885Sdumbbell	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
1347254885Sdumbbell	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
1348254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
1349254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
1350254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
1351254885Sdumbbell	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
1352254885Sdumbbell	WREG32(VM_CONTEXT0_CNTL, 0);
1353254885Sdumbbell	WREG32(VM_CONTEXT1_CNTL, 0);
1354254885Sdumbbell}
1355254885Sdumbbell
1356254885Sdumbbellvoid evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
1357254885Sdumbbell{
1358254885Sdumbbell	u32 crtc_enabled, tmp, frame_count, blackout;
1359254885Sdumbbell	int i, j;
1360254885Sdumbbell
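	/* save the VGA state, blank every active CRTC and black out the memory
	 * controller so the framebuffer can be moved safely */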
1361254885Sdumbbell	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
1362254885Sdumbbell	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
1363254885Sdumbbell
1364254885Sdumbbell	/* disable VGA render */
1365254885Sdumbbell	WREG32(VGA_RENDER_CONTROL, 0);
1366254885Sdumbbell	/* blank the display controllers */
1367254885Sdumbbell	for (i = 0; i < rdev->num_crtc; i++) {
1368254885Sdumbbell		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
1369254885Sdumbbell		if (crtc_enabled) {
1370254885Sdumbbell			save->crtc_enabled[i] = true;
1371254885Sdumbbell			if (ASIC_IS_DCE6(rdev)) {
1372254885Sdumbbell				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
1373254885Sdumbbell				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
1374254885Sdumbbell					radeon_wait_for_vblank(rdev, i);
1375282199Sdumbbell					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
1376254885Sdumbbell					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
1377254885Sdumbbell					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
1378254885Sdumbbell				}
1379254885Sdumbbell			} else {
1380254885Sdumbbell				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
1381254885Sdumbbell				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
1382254885Sdumbbell					radeon_wait_for_vblank(rdev, i);
1383282199Sdumbbell					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
1384254885Sdumbbell					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1385254885Sdumbbell					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
1386254885Sdumbbell					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
1387254885Sdumbbell				}
1388254885Sdumbbell			}
1389254885Sdumbbell			/* wait for the next frame */
1390254885Sdumbbell			frame_count = radeon_get_vblank_counter(rdev, i);
1391254885Sdumbbell			for (j = 0; j < rdev->usec_timeout; j++) {
1392254885Sdumbbell				if (radeon_get_vblank_counter(rdev, i) != frame_count)
1393254885Sdumbbell					break;
1394282199Sdumbbell				udelay(1);
1395254885Sdumbbell			}
1396282199Sdumbbell
1397282199Sdumbbell			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
1398282199Sdumbbell			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
1399282199Sdumbbell			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
1400282199Sdumbbell			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
1401282199Sdumbbell			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
1402282199Sdumbbell			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
1403282199Sdumbbell			save->crtc_enabled[i] = false;
1404282199Sdumbbell			/* ***** */
1405254885Sdumbbell		} else {
1406254885Sdumbbell			save->crtc_enabled[i] = false;
1407254885Sdumbbell		}
1408254885Sdumbbell	}
1409254885Sdumbbell
1410254885Sdumbbell	radeon_mc_wait_for_idle(rdev);
1411254885Sdumbbell
1412254885Sdumbbell	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
1413254885Sdumbbell	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
1414254885Sdumbbell		/* Block CPU access */
1415254885Sdumbbell		WREG32(BIF_FB_EN, 0);
1416254885Sdumbbell		/* blackout the MC */
1417254885Sdumbbell		blackout &= ~BLACKOUT_MODE_MASK;
1418254885Sdumbbell		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
1419254885Sdumbbell	}
1420254885Sdumbbell	/* wait for the MC to settle */
1421282199Sdumbbell	udelay(100);
1422282199Sdumbbell
1423282199Sdumbbell	/* lock double buffered regs */
1424282199Sdumbbell	for (i = 0; i < rdev->num_crtc; i++) {
1425282199Sdumbbell		if (save->crtc_enabled[i]) {
1426282199Sdumbbell			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
1427282199Sdumbbell			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
1428282199Sdumbbell				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1429282199Sdumbbell				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
1430282199Sdumbbell			}
1431282199Sdumbbell			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
1432282199Sdumbbell			if (!(tmp & 1)) {
1433282199Sdumbbell				tmp |= 1;
1434282199Sdumbbell				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
1435282199Sdumbbell			}
1436282199Sdumbbell		}
1437282199Sdumbbell	}
1438254885Sdumbbell}
1439254885Sdumbbell
1440254885Sdumbbellvoid evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
1441254885Sdumbbell{
1442254885Sdumbbell	u32 tmp, frame_count;
1443254885Sdumbbell	int i, j;
1444254885Sdumbbell
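	/* point the CRTCs and the VGA aperture at the new VRAM base, unlock
	 * the double buffered registers, lift the MC blackout and finally
	 * unblank the displays */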
1445254885Sdumbbell	/* update crtc base addresses */
1446254885Sdumbbell	for (i = 0; i < rdev->num_crtc; i++) {
1447254885Sdumbbell		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
1448254885Sdumbbell		       upper_32_bits(rdev->mc.vram_start));
1449254885Sdumbbell		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
1450254885Sdumbbell		       upper_32_bits(rdev->mc.vram_start));
1451254885Sdumbbell		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
1452254885Sdumbbell		       (u32)rdev->mc.vram_start);
1453254885Sdumbbell		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
1454254885Sdumbbell		       (u32)rdev->mc.vram_start);
1455254885Sdumbbell	}
1456254885Sdumbbell	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
1457254885Sdumbbell	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
1458254885Sdumbbell
1459282199Sdumbbell	/* unlock regs and wait for update */
1460282199Sdumbbell	for (i = 0; i < rdev->num_crtc; i++) {
1461282199Sdumbbell		if (save->crtc_enabled[i]) {
1462282199Sdumbbell			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
1463282199Sdumbbell			if ((tmp & 0x3) != 0) {
1464282199Sdumbbell				tmp &= ~0x3;
1465282199Sdumbbell				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
1466282199Sdumbbell			}
1467282199Sdumbbell			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
1468282199Sdumbbell			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
1469282199Sdumbbell				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1470282199Sdumbbell				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
1471282199Sdumbbell			}
1472282199Sdumbbell			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
1473282199Sdumbbell			if (tmp & 1) {
1474282199Sdumbbell				tmp &= ~1;
1475282199Sdumbbell				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
1476282199Sdumbbell			}
1477282199Sdumbbell			for (j = 0; j < rdev->usec_timeout; j++) {
1478282199Sdumbbell				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
1479282199Sdumbbell				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
1480282199Sdumbbell					break;
1481282199Sdumbbell				udelay(1);
1482282199Sdumbbell			}
1483282199Sdumbbell		}
1484282199Sdumbbell	}
1485282199Sdumbbell
1486254885Sdumbbell	/* unblackout the MC */
1487254885Sdumbbell	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
1488254885Sdumbbell	tmp &= ~BLACKOUT_MODE_MASK;
1489254885Sdumbbell	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
1490254885Sdumbbell	/* allow CPU access */
1491254885Sdumbbell	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
1492254885Sdumbbell
1493254885Sdumbbell	for (i = 0; i < rdev->num_crtc; i++) {
1494254885Sdumbbell		if (save->crtc_enabled[i]) {
1495254885Sdumbbell			if (ASIC_IS_DCE6(rdev)) {
1496254885Sdumbbell				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
1497254885Sdumbbell				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
1498254885Sdumbbell				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
1499254885Sdumbbell				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
1500254885Sdumbbell				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
1501254885Sdumbbell			} else {
1502254885Sdumbbell				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
1503254885Sdumbbell				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1504254885Sdumbbell				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
1505254885Sdumbbell				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
1506254885Sdumbbell				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
1507254885Sdumbbell			}
1508254885Sdumbbell			/* wait for the next frame */
1509254885Sdumbbell			frame_count = radeon_get_vblank_counter(rdev, i);
1510254885Sdumbbell			for (j = 0; j < rdev->usec_timeout; j++) {
1511254885Sdumbbell				if (radeon_get_vblank_counter(rdev, i) != frame_count)
1512254885Sdumbbell					break;
1513282199Sdumbbell				udelay(1);
1514254885Sdumbbell			}
1515254885Sdumbbell		}
1516254885Sdumbbell	}
1517254885Sdumbbell	/* Unlock vga access */
1518254885Sdumbbell	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
1519282199Sdumbbell	mdelay(1);
1520254885Sdumbbell	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
1521254885Sdumbbell}
1522254885Sdumbbell
1523254885Sdumbbellvoid evergreen_mc_program(struct radeon_device *rdev)
1524254885Sdumbbell{
1525254885Sdumbbell	struct evergreen_mc_save save;
1526254885Sdumbbell	u32 tmp;
1527254885Sdumbbell	int i, j;
1528254885Sdumbbell
1529254885Sdumbbell	/* Initialize HDP */
1530254885Sdumbbell	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
1531254885Sdumbbell		WREG32((0x2c14 + j), 0x00000000);
1532254885Sdumbbell		WREG32((0x2c18 + j), 0x00000000);
1533254885Sdumbbell		WREG32((0x2c1c + j), 0x00000000);
1534254885Sdumbbell		WREG32((0x2c20 + j), 0x00000000);
1535254885Sdumbbell		WREG32((0x2c24 + j), 0x00000000);
1536254885Sdumbbell	}
1537254885Sdumbbell	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
1538254885Sdumbbell
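	/* stop the displays and black out the MC before changing the
	 * framebuffer location */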
1539254885Sdumbbell	evergreen_mc_stop(rdev, &save);
1540254885Sdumbbell	if (evergreen_mc_wait_for_idle(rdev)) {
1541254885Sdumbbell		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1542254885Sdumbbell	}
1543254885Sdumbbell	/* Lockout access through VGA aperture */
1544254885Sdumbbell	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
1545254885Sdumbbell	/* Update configuration */
1546254885Sdumbbell	if (rdev->flags & RADEON_IS_AGP) {
1547254885Sdumbbell		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
1548254885Sdumbbell			/* VRAM before AGP */
1549254885Sdumbbell			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1550254885Sdumbbell				rdev->mc.vram_start >> 12);
1551254885Sdumbbell			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1552254885Sdumbbell				rdev->mc.gtt_end >> 12);
1553254885Sdumbbell		} else {
1554254885Sdumbbell			/* VRAM after AGP */
1555254885Sdumbbell			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1556254885Sdumbbell				rdev->mc.gtt_start >> 12);
1557254885Sdumbbell			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1558254885Sdumbbell				rdev->mc.vram_end >> 12);
1559254885Sdumbbell		}
1560254885Sdumbbell	} else {
1561254885Sdumbbell		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1562254885Sdumbbell			rdev->mc.vram_start >> 12);
1563254885Sdumbbell		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1564254885Sdumbbell			rdev->mc.vram_end >> 12);
1565254885Sdumbbell	}
1566254885Sdumbbell	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
1567254885Sdumbbell	/* llano/ontario only */
1568254885Sdumbbell	if ((rdev->family == CHIP_PALM) ||
1569254885Sdumbbell	    (rdev->family == CHIP_SUMO) ||
1570254885Sdumbbell	    (rdev->family == CHIP_SUMO2)) {
1571254885Sdumbbell		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
1572254885Sdumbbell		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
1573254885Sdumbbell		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
1574254885Sdumbbell		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
1575254885Sdumbbell	}
1576254885Sdumbbell	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
1577254885Sdumbbell	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
1578254885Sdumbbell	WREG32(MC_VM_FB_LOCATION, tmp);
1579254885Sdumbbell	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
1580254885Sdumbbell	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
1581254885Sdumbbell	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
1582254885Sdumbbell	if (rdev->flags & RADEON_IS_AGP) {
1583254885Sdumbbell		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
1584254885Sdumbbell		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
1585254885Sdumbbell		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
1586254885Sdumbbell	} else {
1587254885Sdumbbell		WREG32(MC_VM_AGP_BASE, 0);
1588254885Sdumbbell		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
1589254885Sdumbbell		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
1590254885Sdumbbell	}
1591254885Sdumbbell	if (evergreen_mc_wait_for_idle(rdev)) {
1592254885Sdumbbell		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1593254885Sdumbbell	}
1594254885Sdumbbell	evergreen_mc_resume(rdev, &save);
1595254885Sdumbbell	/* we need to own VRAM, so turn off the VGA renderer here
1596254885Sdumbbell	 * to stop it overwriting our objects */
1597254885Sdumbbell	rv515_vga_render_disable(rdev);
1598254885Sdumbbell}
1599254885Sdumbbell
1600254885Sdumbbell/*
1601254885Sdumbbell * CP.
1602254885Sdumbbell */
1603254885Sdumbbellvoid evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
1604254885Sdumbbell{
1605254885Sdumbbell	struct radeon_ring *ring = &rdev->ring[ib->ring];
1606254885Sdumbbell	u32 next_rptr;
1607254885Sdumbbell
1608254885Sdumbbell	/* set to DX10/11 mode */
1609254885Sdumbbell	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
1610254885Sdumbbell	radeon_ring_write(ring, 1);
1611254885Sdumbbell
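	/* optionally record where the read pointer will end up once this IB
	 * has been consumed, either through a scratch register or a memory
	 * write-back packet; the +3/+5 below is the size of that packet and
	 * the +4 is the INDIRECT_BUFFER packet that follows */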
1612254885Sdumbbell	if (ring->rptr_save_reg) {
1613254885Sdumbbell		next_rptr = ring->wptr + 3 + 4;
1614254885Sdumbbell		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
1615254885Sdumbbell		radeon_ring_write(ring, ((ring->rptr_save_reg -
1616254885Sdumbbell					  PACKET3_SET_CONFIG_REG_START) >> 2));
1617254885Sdumbbell		radeon_ring_write(ring, next_rptr);
1618254885Sdumbbell	} else if (rdev->wb.enabled) {
1619254885Sdumbbell		next_rptr = ring->wptr + 5 + 4;
1620254885Sdumbbell		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
1621254885Sdumbbell		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
1622254885Sdumbbell		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
1623254885Sdumbbell		radeon_ring_write(ring, next_rptr);
1624254885Sdumbbell		radeon_ring_write(ring, 0);
1625254885Sdumbbell	}
1626254885Sdumbbell
1627254885Sdumbbell	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
1628254885Sdumbbell	radeon_ring_write(ring,
1629254885Sdumbbell#ifdef __BIG_ENDIAN
1630254885Sdumbbell			  (2 << 0) |
1631254885Sdumbbell#endif
1632254885Sdumbbell			  (ib->gpu_addr & 0xFFFFFFFC));
1633254885Sdumbbell	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
1634254885Sdumbbell	radeon_ring_write(ring, ib->length_dw);
1635254885Sdumbbell}
1636254885Sdumbbell
1637254885Sdumbbell
1638254885Sdumbbellstatic int evergreen_cp_load_microcode(struct radeon_device *rdev)
1639254885Sdumbbell{
1640254885Sdumbbell	const __be32 *fw_data;
1641254885Sdumbbell	int i;
1642254885Sdumbbell
1643254885Sdumbbell	if (!rdev->me_fw || !rdev->pfp_fw)
1644254885Sdumbbell		return -EINVAL;
1645254885Sdumbbell
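	/* halt the CP and upload the PFP and ME microcode images a dword at
	 * a time through their ucode data ports */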
1646254885Sdumbbell	r700_cp_stop(rdev);
1647254885Sdumbbell	WREG32(CP_RB_CNTL,
1648254885Sdumbbell#ifdef __BIG_ENDIAN
1649254885Sdumbbell	       BUF_SWAP_32BIT |
1650254885Sdumbbell#endif
1651254885Sdumbbell	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
1652254885Sdumbbell
1653254885Sdumbbell	fw_data = (const __be32 *)rdev->pfp_fw->data;
1654254885Sdumbbell	WREG32(CP_PFP_UCODE_ADDR, 0);
1655254885Sdumbbell	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
1656254885Sdumbbell		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
1657254885Sdumbbell	WREG32(CP_PFP_UCODE_ADDR, 0);
1658254885Sdumbbell
1659254885Sdumbbell	fw_data = (const __be32 *)rdev->me_fw->data;
1660254885Sdumbbell	WREG32(CP_ME_RAM_WADDR, 0);
1661254885Sdumbbell	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
1662254885Sdumbbell		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
1663254885Sdumbbell
1664254885Sdumbbell	WREG32(CP_PFP_UCODE_ADDR, 0);
1665254885Sdumbbell	WREG32(CP_ME_RAM_WADDR, 0);
1666254885Sdumbbell	WREG32(CP_ME_RAM_RADDR, 0);
1667254885Sdumbbell	return 0;
1668254885Sdumbbell}
1669254885Sdumbbell
1670254885Sdumbbellstatic int evergreen_cp_start(struct radeon_device *rdev)
1671254885Sdumbbell{
1672254885Sdumbbell	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1673254885Sdumbbell	int r, i;
1674254885Sdumbbell	uint32_t cp_me;
1675254885Sdumbbell
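	/* emit ME_INITIALIZE, un-halt the micro engine, then emit the default
	 * clear state so the CP starts from a known context */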
1676254885Sdumbbell	r = radeon_ring_lock(rdev, ring, 7);
1677254885Sdumbbell	if (r) {
1678254885Sdumbbell		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
1679254885Sdumbbell		return r;
1680254885Sdumbbell	}
1681254885Sdumbbell	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
1682254885Sdumbbell	radeon_ring_write(ring, 0x1);
1683254885Sdumbbell	radeon_ring_write(ring, 0x0);
1684254885Sdumbbell	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
1685254885Sdumbbell	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
1686254885Sdumbbell	radeon_ring_write(ring, 0);
1687254885Sdumbbell	radeon_ring_write(ring, 0);
1688254885Sdumbbell	radeon_ring_unlock_commit(rdev, ring);
1689254885Sdumbbell
1690254885Sdumbbell	cp_me = 0xff;
1691254885Sdumbbell	WREG32(CP_ME_CNTL, cp_me);
1692254885Sdumbbell
1693254885Sdumbbell	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
1694254885Sdumbbell	if (r) {
1695254885Sdumbbell		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
1696254885Sdumbbell		return r;
1697254885Sdumbbell	}
1698254885Sdumbbell
1699254885Sdumbbell	/* setup clear context state */
1700254885Sdumbbell	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
1701254885Sdumbbell	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
1702254885Sdumbbell
1703254885Sdumbbell	for (i = 0; i < evergreen_default_size; i++)
1704254885Sdumbbell		radeon_ring_write(ring, evergreen_default_state[i]);
1705254885Sdumbbell
1706254885Sdumbbell	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
1707254885Sdumbbell	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
1708254885Sdumbbell
1709254885Sdumbbell	/* set clear context state */
1710254885Sdumbbell	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
1711254885Sdumbbell	radeon_ring_write(ring, 0);
1712254885Sdumbbell
1713254885Sdumbbell	/* SQ_VTX_BASE_VTX_LOC */
1714254885Sdumbbell	radeon_ring_write(ring, 0xc0026f00);
1715254885Sdumbbell	radeon_ring_write(ring, 0x00000000);
1716254885Sdumbbell	radeon_ring_write(ring, 0x00000000);
1717254885Sdumbbell	radeon_ring_write(ring, 0x00000000);
1718254885Sdumbbell
1719254885Sdumbbell	/* Clear consts */
1720254885Sdumbbell	radeon_ring_write(ring, 0xc0036f00);
1721254885Sdumbbell	radeon_ring_write(ring, 0x00000bc4);
1722254885Sdumbbell	radeon_ring_write(ring, 0xffffffff);
1723254885Sdumbbell	radeon_ring_write(ring, 0xffffffff);
1724254885Sdumbbell	radeon_ring_write(ring, 0xffffffff);
1725254885Sdumbbell
1726254885Sdumbbell	radeon_ring_write(ring, 0xc0026900);
1727254885Sdumbbell	radeon_ring_write(ring, 0x00000316);
1728254885Sdumbbell	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
1729254885Sdumbbell	radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
1730254885Sdumbbell
1731254885Sdumbbell	radeon_ring_unlock_commit(rdev, ring);
1732254885Sdumbbell
1733254885Sdumbbell	return 0;
1734254885Sdumbbell}
1735254885Sdumbbell
1736254885Sdumbbellstatic int evergreen_cp_resume(struct radeon_device *rdev)
1737254885Sdumbbell{
1738254885Sdumbbell	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1739254885Sdumbbell	u32 tmp;
1740254885Sdumbbell	u32 rb_bufsz;
1741254885Sdumbbell	int r;
1742254885Sdumbbell
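	/* soft reset the CP together with the blocks that must be reset with
	 * it, then reprogram the ring buffer, read pointer write-back and
	 * scratch register shadowing */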
1743254885Sdumbbell	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
1744254885Sdumbbell	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
1745254885Sdumbbell				 SOFT_RESET_PA |
1746254885Sdumbbell				 SOFT_RESET_SH |
1747254885Sdumbbell				 SOFT_RESET_VGT |
1748254885Sdumbbell				 SOFT_RESET_SPI |
1749254885Sdumbbell				 SOFT_RESET_SX));
1750254885Sdumbbell	RREG32(GRBM_SOFT_RESET);
1751282199Sdumbbell	mdelay(15);
1752254885Sdumbbell	WREG32(GRBM_SOFT_RESET, 0);
1753254885Sdumbbell	RREG32(GRBM_SOFT_RESET);
1754254885Sdumbbell
1755254885Sdumbbell	/* Set ring buffer size (RB_BUFSZ is log2 of the ring size in qwords) */
1756254885Sdumbbell	rb_bufsz = drm_order(ring->ring_size / 8);
1757254885Sdumbbell	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
1758254885Sdumbbell#ifdef __BIG_ENDIAN
1759254885Sdumbbell	tmp |= BUF_SWAP_32BIT;
1760254885Sdumbbell#endif
1761254885Sdumbbell	WREG32(CP_RB_CNTL, tmp);
1762254885Sdumbbell	WREG32(CP_SEM_WAIT_TIMER, 0x0);
1763254885Sdumbbell	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
1764254885Sdumbbell
1765254885Sdumbbell	/* Set the write pointer delay */
1766254885Sdumbbell	WREG32(CP_RB_WPTR_DELAY, 0);
1767254885Sdumbbell
1768254885Sdumbbell	/* Initialize the ring buffer's read and write pointers */
1769254885Sdumbbell	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
1770254885Sdumbbell	WREG32(CP_RB_RPTR_WR, 0);
1771254885Sdumbbell	ring->wptr = 0;
1772254885Sdumbbell	WREG32(CP_RB_WPTR, ring->wptr);
1773254885Sdumbbell
1774254885Sdumbbell	/* set the wb address whether it's enabled or not */
1775254885Sdumbbell	WREG32(CP_RB_RPTR_ADDR,
1776254885Sdumbbell	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
1777254885Sdumbbell	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
1778254885Sdumbbell	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
1779254885Sdumbbell
1780254885Sdumbbell	if (rdev->wb.enabled)
1781254885Sdumbbell		WREG32(SCRATCH_UMSK, 0xff);
1782254885Sdumbbell	else {
1783254885Sdumbbell		tmp |= RB_NO_UPDATE;
1784254885Sdumbbell		WREG32(SCRATCH_UMSK, 0);
1785254885Sdumbbell	}
1786254885Sdumbbell
1787282199Sdumbbell	mdelay(1);
1788254885Sdumbbell	WREG32(CP_RB_CNTL, tmp);
1789254885Sdumbbell
1790254885Sdumbbell	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
1791254885Sdumbbell	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
1792254885Sdumbbell
1793254885Sdumbbell	ring->rptr = RREG32(CP_RB_RPTR);
1794254885Sdumbbell
1795254885Sdumbbell	evergreen_cp_start(rdev);
1796254885Sdumbbell	ring->ready = true;
1797254885Sdumbbell	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
1798254885Sdumbbell	if (r) {
1799254885Sdumbbell		ring->ready = false;
1800254885Sdumbbell		return r;
1801254885Sdumbbell	}
1802254885Sdumbbell	return 0;
1803254885Sdumbbell}
1804254885Sdumbbell
1805254885Sdumbbell/*
1806254885Sdumbbell * Core functions
1807254885Sdumbbell */
1808254885Sdumbbellstatic void evergreen_gpu_init(struct radeon_device *rdev)
1809254885Sdumbbell{
1810254885Sdumbbell	u32 gb_addr_config;
1811254885Sdumbbell	u32 mc_shared_chmap, mc_arb_ramcfg;
1812254885Sdumbbell	u32 sx_debug_1;
1813254885Sdumbbell	u32 smx_dc_ctl0;
1814254885Sdumbbell	u32 sq_config;
1815254885Sdumbbell	u32 sq_lds_resource_mgmt;
1816254885Sdumbbell	u32 sq_gpr_resource_mgmt_1;
1817254885Sdumbbell	u32 sq_gpr_resource_mgmt_2;
1818254885Sdumbbell	u32 sq_gpr_resource_mgmt_3;
1819254885Sdumbbell	u32 sq_thread_resource_mgmt;
1820254885Sdumbbell	u32 sq_thread_resource_mgmt_2;
1821254885Sdumbbell	u32 sq_stack_resource_mgmt_1;
1822254885Sdumbbell	u32 sq_stack_resource_mgmt_2;
1823254885Sdumbbell	u32 sq_stack_resource_mgmt_3;
1824254885Sdumbbell	u32 vgt_cache_invalidation;
1825254885Sdumbbell	u32 hdp_host_path_cntl, tmp;
1826254885Sdumbbell	u32 disabled_rb_mask;
1827254885Sdumbbell	int i, j, num_shader_engines, ps_thread_count;
1828254885Sdumbbell
1829254885Sdumbbell	switch (rdev->family) {
1830254885Sdumbbell	case CHIP_CYPRESS:
1831254885Sdumbbell	case CHIP_HEMLOCK:
1832254885Sdumbbell		rdev->config.evergreen.num_ses = 2;
1833254885Sdumbbell		rdev->config.evergreen.max_pipes = 4;
1834254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 8;
1835254885Sdumbbell		rdev->config.evergreen.max_simds = 10;
1836254885Sdumbbell		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1837254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
1838254885Sdumbbell		rdev->config.evergreen.max_threads = 248;
1839254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 32;
1840254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 512;
1841254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
1842254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 256;
1843254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 64;
1844254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 192;
1845254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 8;
1846254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 2;
1847254885Sdumbbell
1848254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1849254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1850254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1851254885Sdumbbell		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
1852254885Sdumbbell		break;
1853254885Sdumbbell	case CHIP_JUNIPER:
1854254885Sdumbbell		rdev->config.evergreen.num_ses = 1;
1855254885Sdumbbell		rdev->config.evergreen.max_pipes = 4;
1856254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 4;
1857254885Sdumbbell		rdev->config.evergreen.max_simds = 10;
1858254885Sdumbbell		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1859254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
1860254885Sdumbbell		rdev->config.evergreen.max_threads = 248;
1861254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 32;
1862254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 512;
1863254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
1864254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 256;
1865254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 64;
1866254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 192;
1867254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 8;
1868254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 2;
1869254885Sdumbbell
1870254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1871254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1872254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1873254885Sdumbbell		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
1874254885Sdumbbell		break;
1875254885Sdumbbell	case CHIP_REDWOOD:
1876254885Sdumbbell		rdev->config.evergreen.num_ses = 1;
1877254885Sdumbbell		rdev->config.evergreen.max_pipes = 4;
1878254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 4;
1879254885Sdumbbell		rdev->config.evergreen.max_simds = 5;
1880254885Sdumbbell		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1881254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
1882254885Sdumbbell		rdev->config.evergreen.max_threads = 248;
1883254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 32;
1884254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 256;
1885254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
1886254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 256;
1887254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 64;
1888254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 192;
1889254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 8;
1890254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 2;
1891254885Sdumbbell
1892254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1893254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1894254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1895254885Sdumbbell		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
1896254885Sdumbbell		break;
1897254885Sdumbbell	case CHIP_CEDAR:
1898254885Sdumbbell	default:
1899254885Sdumbbell		rdev->config.evergreen.num_ses = 1;
1900254885Sdumbbell		rdev->config.evergreen.max_pipes = 2;
1901254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 2;
1902254885Sdumbbell		rdev->config.evergreen.max_simds = 2;
1903254885Sdumbbell		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1904254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
1905254885Sdumbbell		rdev->config.evergreen.max_threads = 192;
1906254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 16;
1907254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 256;
1908254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
1909254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 128;
1910254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 32;
1911254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 96;
1912254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 4;
1913254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 1;
1914254885Sdumbbell
1915254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1916254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1917254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1918254885Sdumbbell		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
1919254885Sdumbbell		break;
1920254885Sdumbbell	case CHIP_PALM:
1921254885Sdumbbell		rdev->config.evergreen.num_ses = 1;
1922254885Sdumbbell		rdev->config.evergreen.max_pipes = 2;
1923254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 2;
1924254885Sdumbbell		rdev->config.evergreen.max_simds = 2;
1925254885Sdumbbell		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1926254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
1927254885Sdumbbell		rdev->config.evergreen.max_threads = 192;
1928254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 16;
1929254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 256;
1930254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
1931254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 128;
1932254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 32;
1933254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 96;
1934254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 4;
1935254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 1;
1936254885Sdumbbell
1937254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1938254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1939254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1940254885Sdumbbell		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
1941254885Sdumbbell		break;
1942254885Sdumbbell	case CHIP_SUMO:
1943254885Sdumbbell		rdev->config.evergreen.num_ses = 1;
1944254885Sdumbbell		rdev->config.evergreen.max_pipes = 4;
1945254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 4;
1946254885Sdumbbell		if (rdev->ddev->pci_device == 0x9648)
1947254885Sdumbbell			rdev->config.evergreen.max_simds = 3;
1948254885Sdumbbell		else if ((rdev->ddev->pci_device == 0x9647) ||
1949254885Sdumbbell			 (rdev->ddev->pci_device == 0x964a))
1950254885Sdumbbell			rdev->config.evergreen.max_simds = 4;
1951254885Sdumbbell		else
1952254885Sdumbbell			rdev->config.evergreen.max_simds = 5;
1953254885Sdumbbell		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1954254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
1955254885Sdumbbell		rdev->config.evergreen.max_threads = 248;
1956254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 32;
1957254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 256;
1958254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
1959254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 256;
1960254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 64;
1961254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 192;
1962254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 8;
1963254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 2;
1964254885Sdumbbell
1965254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1966254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1967254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1968254885Sdumbbell		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
1969254885Sdumbbell		break;
1970254885Sdumbbell	case CHIP_SUMO2:
1971254885Sdumbbell		rdev->config.evergreen.num_ses = 1;
1972254885Sdumbbell		rdev->config.evergreen.max_pipes = 4;
1973254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 4;
1974254885Sdumbbell		rdev->config.evergreen.max_simds = 2;
1975254885Sdumbbell		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1976254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
1977254885Sdumbbell		rdev->config.evergreen.max_threads = 248;
1978254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 32;
1979254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 512;
1980254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
1981254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 256;
1982254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 64;
1983254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 192;
1984254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 8;
1985254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 2;
1986254885Sdumbbell
1987254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1988254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1989254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1990254885Sdumbbell		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
1991254885Sdumbbell		break;
1992254885Sdumbbell	case CHIP_BARTS:
1993254885Sdumbbell		rdev->config.evergreen.num_ses = 2;
1994254885Sdumbbell		rdev->config.evergreen.max_pipes = 4;
1995254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 8;
1996254885Sdumbbell		rdev->config.evergreen.max_simds = 7;
1997254885Sdumbbell		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1998254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
1999254885Sdumbbell		rdev->config.evergreen.max_threads = 248;
2000254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 32;
2001254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 512;
2002254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
2003254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 256;
2004254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 64;
2005254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 192;
2006254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 8;
2007254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 2;
2008254885Sdumbbell
2009254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2010254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2011254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2012254885Sdumbbell		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
2013254885Sdumbbell		break;
2014254885Sdumbbell	case CHIP_TURKS:
2015254885Sdumbbell		rdev->config.evergreen.num_ses = 1;
2016254885Sdumbbell		rdev->config.evergreen.max_pipes = 4;
2017254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 4;
2018254885Sdumbbell		rdev->config.evergreen.max_simds = 6;
2019254885Sdumbbell		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2020254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
2021254885Sdumbbell		rdev->config.evergreen.max_threads = 248;
2022254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 32;
2023254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 256;
2024254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
2025254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 256;
2026254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 64;
2027254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 192;
2028254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 8;
2029254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 2;
2030254885Sdumbbell
2031254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2032254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2033254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2034254885Sdumbbell		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
2035254885Sdumbbell		break;
2036254885Sdumbbell	case CHIP_CAICOS:
2037254885Sdumbbell		rdev->config.evergreen.num_ses = 1;
2038254885Sdumbbell		rdev->config.evergreen.max_pipes = 2;
2039254885Sdumbbell		rdev->config.evergreen.max_tile_pipes = 2;
2040254885Sdumbbell		rdev->config.evergreen.max_simds = 2;
2041254885Sdumbbell		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2042254885Sdumbbell		rdev->config.evergreen.max_gprs = 256;
2043254885Sdumbbell		rdev->config.evergreen.max_threads = 192;
2044254885Sdumbbell		rdev->config.evergreen.max_gs_threads = 16;
2045254885Sdumbbell		rdev->config.evergreen.max_stack_entries = 256;
2046254885Sdumbbell		rdev->config.evergreen.sx_num_of_sets = 4;
2047254885Sdumbbell		rdev->config.evergreen.sx_max_export_size = 128;
2048254885Sdumbbell		rdev->config.evergreen.sx_max_export_pos_size = 32;
2049254885Sdumbbell		rdev->config.evergreen.sx_max_export_smx_size = 96;
2050254885Sdumbbell		rdev->config.evergreen.max_hw_contexts = 4;
2051254885Sdumbbell		rdev->config.evergreen.sq_num_cf_insts = 1;
2052254885Sdumbbell
2053254885Sdumbbell		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2054254885Sdumbbell		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2055254885Sdumbbell		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2056254885Sdumbbell		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
2057254885Sdumbbell		break;
2058254885Sdumbbell	}
2059254885Sdumbbell
2060254885Sdumbbell	/* Initialize HDP */
2061254885Sdumbbell	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2062254885Sdumbbell		WREG32((0x2c14 + j), 0x00000000);
2063254885Sdumbbell		WREG32((0x2c18 + j), 0x00000000);
2064254885Sdumbbell		WREG32((0x2c1c + j), 0x00000000);
2065254885Sdumbbell		WREG32((0x2c20 + j), 0x00000000);
2066254885Sdumbbell		WREG32((0x2c24 + j), 0x00000000);
2067254885Sdumbbell	}
2068254885Sdumbbell
2069254885Sdumbbell	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
2070254885Sdumbbell
2071254885Sdumbbell	evergreen_fix_pci_max_read_req_size(rdev);
2072254885Sdumbbell
2073254885Sdumbbell	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
2074254885Sdumbbell	if ((rdev->family == CHIP_PALM) ||
2075254885Sdumbbell	    (rdev->family == CHIP_SUMO) ||
2076254885Sdumbbell	    (rdev->family == CHIP_SUMO2))
2077254885Sdumbbell		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
2078254885Sdumbbell	else
2079254885Sdumbbell		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
2080254885Sdumbbell
2081254885Sdumbbell	/* setup tiling info dword.  gb_addr_config is not adequate since it does
2082254885Sdumbbell	 * not have bank info, so create a custom tiling dword.
2083254885Sdumbbell	 * bits 3:0   num_pipes
2084254885Sdumbbell	 * bits 7:4   num_banks
2085254885Sdumbbell	 * bits 11:8  group_size
2086254885Sdumbbell	 * bits 15:12 row_size
2087254885Sdumbbell	 */
2088254885Sdumbbell	rdev->config.evergreen.tile_config = 0;
2089254885Sdumbbell	switch (rdev->config.evergreen.max_tile_pipes) {
2090254885Sdumbbell	case 1:
2091254885Sdumbbell	default:
2092254885Sdumbbell		rdev->config.evergreen.tile_config |= (0 << 0);
2093254885Sdumbbell		break;
2094254885Sdumbbell	case 2:
2095254885Sdumbbell		rdev->config.evergreen.tile_config |= (1 << 0);
2096254885Sdumbbell		break;
2097254885Sdumbbell	case 4:
2098254885Sdumbbell		rdev->config.evergreen.tile_config |= (2 << 0);
2099254885Sdumbbell		break;
2100254885Sdumbbell	case 8:
2101254885Sdumbbell		rdev->config.evergreen.tile_config |= (3 << 0);
2102254885Sdumbbell		break;
2103254885Sdumbbell	}
2104254885Sdumbbell	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
2105254885Sdumbbell	if (rdev->flags & RADEON_IS_IGP)
2106254885Sdumbbell		rdev->config.evergreen.tile_config |= 1 << 4;
2107254885Sdumbbell	else {
2108254885Sdumbbell		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
2109254885Sdumbbell		case 0: /* four banks */
2110254885Sdumbbell			rdev->config.evergreen.tile_config |= 0 << 4;
2111254885Sdumbbell			break;
2112254885Sdumbbell		case 1: /* eight banks */
2113254885Sdumbbell			rdev->config.evergreen.tile_config |= 1 << 4;
2114254885Sdumbbell			break;
2115254885Sdumbbell		case 2: /* sixteen banks */
2116254885Sdumbbell		default:
2117254885Sdumbbell			rdev->config.evergreen.tile_config |= 2 << 4;
2118254885Sdumbbell			break;
2119254885Sdumbbell		}
2120254885Sdumbbell	}
2121254885Sdumbbell	rdev->config.evergreen.tile_config |= 0 << 8;
2122254885Sdumbbell	rdev->config.evergreen.tile_config |=
2123254885Sdumbbell		((gb_addr_config & 0x30000000) >> 28) << 12;
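	/* for example, a 4 pipe, 8 bank part with a row_size field of 1 would
	 * encode as (2 << 0) | (1 << 4) | (0 << 8) | (1 << 12) = 0x1012 */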
2124254885Sdumbbell
2125254885Sdumbbell	num_shader_engines = ((gb_addr_config & NUM_SHADER_ENGINES(3)) >> 12) + 1;
2126254885Sdumbbell
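	/* work out which render backends are fused off: original evergreen
	 * parts (CEDAR..HEMLOCK) report this through the RCU efuse straps,
	 * everything else through CC_RB_BACKEND_DISABLE read per shader
	 * engine */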
2127254885Sdumbbell	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
2128254885Sdumbbell		u32 efuse_straps_4;
2129254885Sdumbbell		u32 efuse_straps_3;
2130254885Sdumbbell
2131254885Sdumbbell		WREG32(RCU_IND_INDEX, 0x204);
2132254885Sdumbbell		efuse_straps_4 = RREG32(RCU_IND_DATA);
2133254885Sdumbbell		WREG32(RCU_IND_INDEX, 0x203);
2134254885Sdumbbell		efuse_straps_3 = RREG32(RCU_IND_DATA);
2135254885Sdumbbell		tmp = (((efuse_straps_4 & 0xf) << 4) |
2136254885Sdumbbell		      ((efuse_straps_3 & 0xf0000000) >> 28));
2137254885Sdumbbell	} else {
2138254885Sdumbbell		tmp = 0;
2139254885Sdumbbell		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
2140254885Sdumbbell			u32 rb_disable_bitmap;
2141254885Sdumbbell
2142254885Sdumbbell			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
2143254885Sdumbbell			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
2144254885Sdumbbell			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
2145254885Sdumbbell			tmp <<= 4;
2146254885Sdumbbell			tmp |= rb_disable_bitmap;
2147254885Sdumbbell		}
2148254885Sdumbbell	}
2149254885Sdumbbell	/* the enabled RBs are just the ones not disabled :) */
2150254885Sdumbbell	disabled_rb_mask = tmp;
2151254885Sdumbbell
2152254885Sdumbbell	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
2153254885Sdumbbell	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
2154254885Sdumbbell
2155254885Sdumbbell	WREG32(GB_ADDR_CONFIG, gb_addr_config);
2156254885Sdumbbell	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
2157254885Sdumbbell	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
2158254885Sdumbbell	WREG32(DMA_TILING_CONFIG, gb_addr_config);
2159254885Sdumbbell
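	/* build the render backend map, remapping around any disabled RBs */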
2160254885Sdumbbell	if ((rdev->config.evergreen.max_backends == 1) &&
2161254885Sdumbbell	    (rdev->flags & RADEON_IS_IGP)) {
2162254885Sdumbbell		if ((disabled_rb_mask & 3) == 1) {
2163254885Sdumbbell			/* RB0 disabled, RB1 enabled */
2164254885Sdumbbell			tmp = 0x11111111;
2165254885Sdumbbell		} else {
2166254885Sdumbbell			/* RB1 disabled, RB0 enabled */
2167254885Sdumbbell			tmp = 0x00000000;
2168254885Sdumbbell		}
2169254885Sdumbbell	} else {
2170254885Sdumbbell		tmp = gb_addr_config & NUM_PIPES_MASK;
2171254885Sdumbbell		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
2172254885Sdumbbell						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
2173254885Sdumbbell	}
2174254885Sdumbbell	WREG32(GB_BACKEND_MAP, tmp);
2175254885Sdumbbell
2176254885Sdumbbell	WREG32(CGTS_SYS_TCC_DISABLE, 0);
2177254885Sdumbbell	WREG32(CGTS_TCC_DISABLE, 0);
2178254885Sdumbbell	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
2179254885Sdumbbell	WREG32(CGTS_USER_TCC_DISABLE, 0);
2180254885Sdumbbell
2181254885Sdumbbell	/* set HW defaults for 3D engine */
2182254885Sdumbbell	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
2183254885Sdumbbell				     ROQ_IB2_START(0x2b)));
2184254885Sdumbbell
2185254885Sdumbbell	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
2186254885Sdumbbell
2187254885Sdumbbell	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
2188254885Sdumbbell			     SYNC_GRADIENT |
2189254885Sdumbbell			     SYNC_WALKER |
2190254885Sdumbbell			     SYNC_ALIGNER));
2191254885Sdumbbell
2192254885Sdumbbell	sx_debug_1 = RREG32(SX_DEBUG_1);
2193254885Sdumbbell	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
2194254885Sdumbbell	WREG32(SX_DEBUG_1, sx_debug_1);
2195254885Sdumbbell
2196254885Sdumbbell
2197254885Sdumbbell	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
2198254885Sdumbbell	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
2199254885Sdumbbell	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
2200254885Sdumbbell	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
2201254885Sdumbbell
2202254885Sdumbbell	if (rdev->family <= CHIP_SUMO2)
2203254885Sdumbbell		WREG32(SMX_SAR_CTL0, 0x00010000);
2204254885Sdumbbell
2205254885Sdumbbell	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
2206254885Sdumbbell					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
2207254885Sdumbbell					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
2208254885Sdumbbell
2209254885Sdumbbell	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
2210254885Sdumbbell				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
2211254885Sdumbbell				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
2212254885Sdumbbell
2213254885Sdumbbell	WREG32(VGT_NUM_INSTANCES, 1);
2214254885Sdumbbell	WREG32(SPI_CONFIG_CNTL, 0);
2215254885Sdumbbell	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
2216254885Sdumbbell	WREG32(CP_PERFMON_CNTL, 0);
2217254885Sdumbbell
2218254885Sdumbbell	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
2219254885Sdumbbell				  FETCH_FIFO_HIWATER(0x4) |
2220254885Sdumbbell				  DONE_FIFO_HIWATER(0xe0) |
2221254885Sdumbbell				  ALU_UPDATE_FIFO_HIWATER(0x8)));
2222254885Sdumbbell
2223254885Sdumbbell	sq_config = RREG32(SQ_CONFIG);
2224254885Sdumbbell	sq_config &= ~(PS_PRIO(3) |
2225254885Sdumbbell		       VS_PRIO(3) |
2226254885Sdumbbell		       GS_PRIO(3) |
2227254885Sdumbbell		       ES_PRIO(3));
2228254885Sdumbbell	sq_config |= (VC_ENABLE |
2229254885Sdumbbell		      EXPORT_SRC_C |
2230254885Sdumbbell		      PS_PRIO(0) |
2231254885Sdumbbell		      VS_PRIO(1) |
2232254885Sdumbbell		      GS_PRIO(2) |
2233254885Sdumbbell		      ES_PRIO(3));
2234254885Sdumbbell
2235254885Sdumbbell	switch (rdev->family) {
2236254885Sdumbbell	case CHIP_CEDAR:
2237254885Sdumbbell	case CHIP_PALM:
2238254885Sdumbbell	case CHIP_SUMO:
2239254885Sdumbbell	case CHIP_SUMO2:
2240254885Sdumbbell	case CHIP_CAICOS:
2241254885Sdumbbell		/* no vertex cache */
2242254885Sdumbbell		sq_config &= ~VC_ENABLE;
2243254885Sdumbbell		break;
2244254885Sdumbbell	default:
2245254885Sdumbbell		break;
2246254885Sdumbbell	}
2247254885Sdumbbell
2248254885Sdumbbell	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
2249254885Sdumbbell
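	/* partition the GPRs (after reserving the clause temporaries), threads
	 * and stack entries between the PS/VS/GS/ES/HS/LS shader stages */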
2250254885Sdumbbell	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
2251254885Sdumbbell	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
2252254885Sdumbbell	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
2253254885Sdumbbell	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
2254254885Sdumbbell	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
2255254885Sdumbbell	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
2256254885Sdumbbell	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
2257254885Sdumbbell
2258254885Sdumbbell	switch (rdev->family) {
2259254885Sdumbbell	case CHIP_CEDAR:
2260254885Sdumbbell	case CHIP_PALM:
2261254885Sdumbbell	case CHIP_SUMO:
2262254885Sdumbbell	case CHIP_SUMO2:
2263254885Sdumbbell		ps_thread_count = 96;
2264254885Sdumbbell		break;
2265254885Sdumbbell	default:
2266254885Sdumbbell		ps_thread_count = 128;
2267254885Sdumbbell		break;
2268254885Sdumbbell	}
2269254885Sdumbbell
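	/*
	 * The PS keeps its fixed thread budget; the remaining threads are
	 * split evenly across the other five stages, rounded down to a
	 * multiple of 8.  For example, with max_threads = 248 and
	 * ps_thread_count = 128, each of the other stages gets
	 * (((248 - 128) / 6) / 8) * 8 = 16 threads.
	 */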
2270254885Sdumbbell	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
2271254885Sdumbbell	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2272254885Sdumbbell	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2273254885Sdumbbell	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2274254885Sdumbbell	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2275254885Sdumbbell	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2276254885Sdumbbell
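	/* each of the six shader stages gets 1/6 of the stack entries */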
2277254885Sdumbbell	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2278254885Sdumbbell	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2279254885Sdumbbell	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2280254885Sdumbbell	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2281254885Sdumbbell	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2282254885Sdumbbell	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2283254885Sdumbbell
2284254885Sdumbbell	WREG32(SQ_CONFIG, sq_config);
2285254885Sdumbbell	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
2286254885Sdumbbell	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
2287254885Sdumbbell	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
2288254885Sdumbbell	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
2289254885Sdumbbell	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
2290254885Sdumbbell	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
2291254885Sdumbbell	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
2292254885Sdumbbell	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
2293254885Sdumbbell	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
2294254885Sdumbbell	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
2295254885Sdumbbell
2296254885Sdumbbell	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
2297254885Sdumbbell					  FORCE_EOV_MAX_REZ_CNT(255)));
2298254885Sdumbbell
2299254885Sdumbbell	switch (rdev->family) {
2300254885Sdumbbell	case CHIP_CEDAR:
2301254885Sdumbbell	case CHIP_PALM:
2302254885Sdumbbell	case CHIP_SUMO:
2303254885Sdumbbell	case CHIP_SUMO2:
2304254885Sdumbbell	case CHIP_CAICOS:
2305254885Sdumbbell		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
2306254885Sdumbbell		break;
2307254885Sdumbbell	default:
2308254885Sdumbbell		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
2309254885Sdumbbell		break;
2310254885Sdumbbell	}
2311254885Sdumbbell	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
2312254885Sdumbbell	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
2313254885Sdumbbell
2314254885Sdumbbell	WREG32(VGT_GS_VERTEX_REUSE, 16);
2315254885Sdumbbell	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
2316254885Sdumbbell	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
2317254885Sdumbbell
2318254885Sdumbbell	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
2319254885Sdumbbell	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
2320254885Sdumbbell
2321254885Sdumbbell	WREG32(CB_PERF_CTR0_SEL_0, 0);
2322254885Sdumbbell	WREG32(CB_PERF_CTR0_SEL_1, 0);
2323254885Sdumbbell	WREG32(CB_PERF_CTR1_SEL_0, 0);
2324254885Sdumbbell	WREG32(CB_PERF_CTR1_SEL_1, 0);
2325254885Sdumbbell	WREG32(CB_PERF_CTR2_SEL_0, 0);
2326254885Sdumbbell	WREG32(CB_PERF_CTR2_SEL_1, 0);
2327254885Sdumbbell	WREG32(CB_PERF_CTR3_SEL_0, 0);
2328254885Sdumbbell	WREG32(CB_PERF_CTR3_SEL_1, 0);
2329254885Sdumbbell
2330254885Sdumbbell	/* clear render buffer base addresses */
2331254885Sdumbbell	WREG32(CB_COLOR0_BASE, 0);
2332254885Sdumbbell	WREG32(CB_COLOR1_BASE, 0);
2333254885Sdumbbell	WREG32(CB_COLOR2_BASE, 0);
2334254885Sdumbbell	WREG32(CB_COLOR3_BASE, 0);
2335254885Sdumbbell	WREG32(CB_COLOR4_BASE, 0);
2336254885Sdumbbell	WREG32(CB_COLOR5_BASE, 0);
2337254885Sdumbbell	WREG32(CB_COLOR6_BASE, 0);
2338254885Sdumbbell	WREG32(CB_COLOR7_BASE, 0);
2339254885Sdumbbell	WREG32(CB_COLOR8_BASE, 0);
2340254885Sdumbbell	WREG32(CB_COLOR9_BASE, 0);
2341254885Sdumbbell	WREG32(CB_COLOR10_BASE, 0);
2342254885Sdumbbell	WREG32(CB_COLOR11_BASE, 0);
2343254885Sdumbbell
2344254885Sdumbbell	/* set the shader const cache sizes to 0 */
2345254885Sdumbbell	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
2346254885Sdumbbell		WREG32(i, 0);
2347254885Sdumbbell	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
2348254885Sdumbbell		WREG32(i, 0);
2349254885Sdumbbell
2350254885Sdumbbell	tmp = RREG32(HDP_MISC_CNTL);
2351254885Sdumbbell	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
2352254885Sdumbbell	WREG32(HDP_MISC_CNTL, tmp);
2353254885Sdumbbell
2354254885Sdumbbell	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
2355254885Sdumbbell	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
2356254885Sdumbbell
2357254885Sdumbbell	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
2358254885Sdumbbell
2359282199Sdumbbell	udelay(50);
2360254885Sdumbbell
2361254885Sdumbbell}
2362254885Sdumbbell
2363254885Sdumbbellint evergreen_mc_init(struct radeon_device *rdev)
2364254885Sdumbbell{
2365254885Sdumbbell	u32 tmp;
2366254885Sdumbbell	int chansize, numchan;
2367254885Sdumbbell
2368254885Sdumbbell	/* Get VRAM information */
2369254885Sdumbbell	rdev->mc.vram_is_ddr = true;
2370254885Sdumbbell	if ((rdev->family == CHIP_PALM) ||
2371254885Sdumbbell	    (rdev->family == CHIP_SUMO) ||
2372254885Sdumbbell	    (rdev->family == CHIP_SUMO2))
2373254885Sdumbbell		tmp = RREG32(FUS_MC_ARB_RAMCFG);
2374254885Sdumbbell	else
2375254885Sdumbbell		tmp = RREG32(MC_ARB_RAMCFG);
2376254885Sdumbbell	if (tmp & CHANSIZE_OVERRIDE) {
2377254885Sdumbbell		chansize = 16;
2378254885Sdumbbell	} else if (tmp & CHANSIZE_MASK) {
2379254885Sdumbbell		chansize = 64;
2380254885Sdumbbell	} else {
2381254885Sdumbbell		chansize = 32;
2382254885Sdumbbell	}
2383254885Sdumbbell	tmp = RREG32(MC_SHARED_CHMAP);
2384254885Sdumbbell	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2385254885Sdumbbell	case 0:
2386254885Sdumbbell	default:
2387254885Sdumbbell		numchan = 1;
2388254885Sdumbbell		break;
2389254885Sdumbbell	case 1:
2390254885Sdumbbell		numchan = 2;
2391254885Sdumbbell		break;
2392254885Sdumbbell	case 2:
2393254885Sdumbbell		numchan = 4;
2394254885Sdumbbell		break;
2395254885Sdumbbell	case 3:
2396254885Sdumbbell		numchan = 8;
2397254885Sdumbbell		break;
2398254885Sdumbbell	}
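	/* memory bus width = number of channels * bits per channel */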
2399254885Sdumbbell	rdev->mc.vram_width = numchan * chansize;
2400254885Sdumbbell	/* Could the aperture size report 0? */
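	/* the VRAM aperture is exposed through PCI BAR 0 */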
2401254885Sdumbbell	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
2402254885Sdumbbell	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
2403254885Sdumbbell	/* Setup GPU memory space */
2404254885Sdumbbell	if ((rdev->family == CHIP_PALM) ||
2405254885Sdumbbell	    (rdev->family == CHIP_SUMO) ||
2406254885Sdumbbell	    (rdev->family == CHIP_SUMO2)) {
2407254885Sdumbbell		/* size in bytes on fusion */
2408254885Sdumbbell		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
2409254885Sdumbbell		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
2410254885Sdumbbell	} else {
2411254885Sdumbbell		/* size in MB on evergreen/cayman/tn */
2412254885Sdumbbell		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2413254885Sdumbbell		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2414254885Sdumbbell	}
2415254885Sdumbbell	rdev->mc.visible_vram_size = rdev->mc.aper_size;
2416254885Sdumbbell	r700_vram_gtt_location(rdev, &rdev->mc);
2417254885Sdumbbell	radeon_update_bandwidth_info(rdev);
2418254885Sdumbbell
2419254885Sdumbbell	return 0;
2420254885Sdumbbell}
2421254885Sdumbbell
2422254885Sdumbbellbool evergreen_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
2423254885Sdumbbell{
2424254885Sdumbbell	u32 srbm_status;
2425254885Sdumbbell	u32 grbm_status;
2426254885Sdumbbell	u32 grbm_status_se0, grbm_status_se1;
2427254885Sdumbbell
2428254885Sdumbbell	srbm_status = RREG32(SRBM_STATUS);
2429254885Sdumbbell	grbm_status = RREG32(GRBM_STATUS);
2430254885Sdumbbell	grbm_status_se0 = RREG32(GRBM_STATUS_SE0);
2431254885Sdumbbell	grbm_status_se1 = RREG32(GRBM_STATUS_SE1);
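	/* no GUI activity means idle, not hung; refresh the lockup tracker */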
2432254885Sdumbbell	if (!(grbm_status & GUI_ACTIVE)) {
2433254885Sdumbbell		radeon_ring_lockup_update(ring);
2434254885Sdumbbell		return false;
2435254885Sdumbbell	}
2436254885Sdumbbell	/* force CP activity */
2437254885Sdumbbell	radeon_ring_force_activity(rdev, ring);
2438254885Sdumbbell	return radeon_ring_test_lockup(rdev, ring);
2439254885Sdumbbell}
2440254885Sdumbbell
2441254885Sdumbbellstatic void evergreen_gpu_soft_reset_gfx(struct radeon_device *rdev)
2442254885Sdumbbell{
2443254885Sdumbbell	u32 grbm_reset = 0;
2444254885Sdumbbell
2445254885Sdumbbell	if (!(RREG32(GRBM_STATUS) & GUI_ACTIVE))
2446254885Sdumbbell		return;
2447254885Sdumbbell
2448254885Sdumbbell	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
2449254885Sdumbbell		RREG32(GRBM_STATUS));
2450254885Sdumbbell	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
2451254885Sdumbbell		RREG32(GRBM_STATUS_SE0));
2452254885Sdumbbell	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
2453254885Sdumbbell		RREG32(GRBM_STATUS_SE1));
2454254885Sdumbbell	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
2455254885Sdumbbell		RREG32(SRBM_STATUS));
2456254885Sdumbbell	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
2457254885Sdumbbell		RREG32(CP_STALLED_STAT1));
2458254885Sdumbbell	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
2459254885Sdumbbell		RREG32(CP_STALLED_STAT2));
2460254885Sdumbbell	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
2461254885Sdumbbell		RREG32(CP_BUSY_STAT));
2462254885Sdumbbell	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
2463254885Sdumbbell		RREG32(CP_STAT));
2464254885Sdumbbell
2465254885Sdumbbell	/* Disable CP parsing/prefetching */
2466254885Sdumbbell	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
2467254885Sdumbbell
2468254885Sdumbbell	/* reset all the gfx blocks */
2469254885Sdumbbell	grbm_reset = (SOFT_RESET_CP |
2470254885Sdumbbell		      SOFT_RESET_CB |
2471254885Sdumbbell		      SOFT_RESET_DB |
2472254885Sdumbbell		      SOFT_RESET_PA |
2473254885Sdumbbell		      SOFT_RESET_SC |
2474254885Sdumbbell		      SOFT_RESET_SPI |
2475254885Sdumbbell		      SOFT_RESET_SH |
2476254885Sdumbbell		      SOFT_RESET_SX |
2477254885Sdumbbell		      SOFT_RESET_TC |
2478254885Sdumbbell		      SOFT_RESET_TA |
2479254885Sdumbbell		      SOFT_RESET_VC |
2480254885Sdumbbell		      SOFT_RESET_VGT);
2481254885Sdumbbell
2482254885Sdumbbell	dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
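	/*
	 * Assert the reset, read back so the write posts, then release it
	 * again after a short delay.
	 */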
2483254885Sdumbbell	WREG32(GRBM_SOFT_RESET, grbm_reset);
2484254885Sdumbbell	(void)RREG32(GRBM_SOFT_RESET);
2485282199Sdumbbell	udelay(50);
2486254885Sdumbbell	WREG32(GRBM_SOFT_RESET, 0);
2487254885Sdumbbell	(void)RREG32(GRBM_SOFT_RESET);
2488254885Sdumbbell
2489254885Sdumbbell	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
2490254885Sdumbbell		RREG32(GRBM_STATUS));
2491254885Sdumbbell	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
2492254885Sdumbbell		RREG32(GRBM_STATUS_SE0));
2493254885Sdumbbell	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
2494254885Sdumbbell		RREG32(GRBM_STATUS_SE1));
2495254885Sdumbbell	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
2496254885Sdumbbell		RREG32(SRBM_STATUS));
2497254885Sdumbbell	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
2498254885Sdumbbell		RREG32(CP_STALLED_STAT1));
2499254885Sdumbbell	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
2500254885Sdumbbell		RREG32(CP_STALLED_STAT2));
2501254885Sdumbbell	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
2502254885Sdumbbell		RREG32(CP_BUSY_STAT));
2503254885Sdumbbell	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
2504254885Sdumbbell		RREG32(CP_STAT));
2505254885Sdumbbell}
2506254885Sdumbbell
2507254885Sdumbbellstatic void evergreen_gpu_soft_reset_dma(struct radeon_device *rdev)
2508254885Sdumbbell{
2509254885Sdumbbell	u32 tmp;
2510254885Sdumbbell
2511254885Sdumbbell	if (RREG32(DMA_STATUS_REG) & DMA_IDLE)
2512254885Sdumbbell		return;
2513254885Sdumbbell
2514254885Sdumbbell	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
2515254885Sdumbbell		RREG32(DMA_STATUS_REG));
2516254885Sdumbbell
2517254885Sdumbbell	/* Disable DMA */
2518254885Sdumbbell	tmp = RREG32(DMA_RB_CNTL);
2519254885Sdumbbell	tmp &= ~DMA_RB_ENABLE;
2520254885Sdumbbell	WREG32(DMA_RB_CNTL, tmp);
2521254885Sdumbbell
2522254885Sdumbbell	/* Reset dma */
2523254885Sdumbbell	WREG32(SRBM_SOFT_RESET, SOFT_RESET_DMA);
2524254885Sdumbbell	RREG32(SRBM_SOFT_RESET);
2525282199Sdumbbell	udelay(50);
2526254885Sdumbbell	WREG32(SRBM_SOFT_RESET, 0);
2527254885Sdumbbell
2528254885Sdumbbell	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
2529254885Sdumbbell		RREG32(DMA_STATUS_REG));
2530254885Sdumbbell}
2531254885Sdumbbell
2532254885Sdumbbellstatic int evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
2533254885Sdumbbell{
2534254885Sdumbbell	struct evergreen_mc_save save;
2535254885Sdumbbell
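	/* drop reset requests for blocks that are already idle */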
2536254885Sdumbbell	if (!(RREG32(GRBM_STATUS) & GUI_ACTIVE))
2537254885Sdumbbell		reset_mask &= ~(RADEON_RESET_GFX | RADEON_RESET_COMPUTE);
2538254885Sdumbbell
2539254885Sdumbbell	if (RREG32(DMA_STATUS_REG) & DMA_IDLE)
2540254885Sdumbbell		reset_mask &= ~RADEON_RESET_DMA;
2541254885Sdumbbell
2542254885Sdumbbell	if (reset_mask == 0)
2543254885Sdumbbell		return 0;
2544254885Sdumbbell
2545254885Sdumbbell	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
2546254885Sdumbbell
2547254885Sdumbbell	evergreen_mc_stop(rdev, &save);
2548254885Sdumbbell	if (evergreen_mc_wait_for_idle(rdev)) {
2549254885Sdumbbell		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2550254885Sdumbbell	}
2551254885Sdumbbell
2552254885Sdumbbell	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE))
2553254885Sdumbbell		evergreen_gpu_soft_reset_gfx(rdev);
2554254885Sdumbbell
2555254885Sdumbbell	if (reset_mask & RADEON_RESET_DMA)
2556254885Sdumbbell		evergreen_gpu_soft_reset_dma(rdev);
2557254885Sdumbbell
2558254885Sdumbbell	/* Wait a little for things to settle down */
2559282199Sdumbbell	udelay(50);
2560254885Sdumbbell
2561254885Sdumbbell	evergreen_mc_resume(rdev, &save);
2562254885Sdumbbell	return 0;
2563254885Sdumbbell}
2564254885Sdumbbell
2565254885Sdumbbellint evergreen_asic_reset(struct radeon_device *rdev)
2566254885Sdumbbell{
2567254885Sdumbbell	return evergreen_gpu_soft_reset(rdev, (RADEON_RESET_GFX |
2568254885Sdumbbell					       RADEON_RESET_COMPUTE |
2569254885Sdumbbell					       RADEON_RESET_DMA));
2570254885Sdumbbell}
2571254885Sdumbbell
2572254885Sdumbbell/* Interrupts */
2573254885Sdumbbell
2574254885Sdumbbellu32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
2575254885Sdumbbell{
2576254885Sdumbbell	if (crtc >= rdev->num_crtc)
2577254885Sdumbbell		return 0;
2578254885Sdumbbell	else
2579254885Sdumbbell		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
2580254885Sdumbbell}
2581254885Sdumbbell
2582254885Sdumbbellvoid evergreen_disable_interrupt_state(struct radeon_device *rdev)
2583254885Sdumbbell{
2584254885Sdumbbell	u32 tmp;
2585254885Sdumbbell
2586254885Sdumbbell	if (rdev->family >= CHIP_CAYMAN) {
2587254885Sdumbbell		cayman_cp_int_cntl_setup(rdev, 0,
2588254885Sdumbbell					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
2589254885Sdumbbell		cayman_cp_int_cntl_setup(rdev, 1, 0);
2590254885Sdumbbell		cayman_cp_int_cntl_setup(rdev, 2, 0);
2591254885Sdumbbell		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
2592254885Sdumbbell		WREG32(CAYMAN_DMA1_CNTL, tmp);
2593254885Sdumbbell	} else
2594254885Sdumbbell		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
2595254885Sdumbbell	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
2596254885Sdumbbell	WREG32(DMA_CNTL, tmp);
2597254885Sdumbbell	WREG32(GRBM_INT_CNTL, 0);
2598254885Sdumbbell	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
2599254885Sdumbbell	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2600254885Sdumbbell	if (rdev->num_crtc >= 4) {
2601254885Sdumbbell		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
2602254885Sdumbbell		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2603254885Sdumbbell	}
2604254885Sdumbbell	if (rdev->num_crtc >= 6) {
2605254885Sdumbbell		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2606254885Sdumbbell		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2607254885Sdumbbell	}
2608254885Sdumbbell
2609254885Sdumbbell	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
2610254885Sdumbbell	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2611254885Sdumbbell	if (rdev->num_crtc >= 4) {
2612254885Sdumbbell		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
2613254885Sdumbbell		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2614254885Sdumbbell	}
2615254885Sdumbbell	if (rdev->num_crtc >= 6) {
2616254885Sdumbbell		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2617254885Sdumbbell		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2618254885Sdumbbell	}
2619254885Sdumbbell
2620254885Sdumbbell	/* only one DAC on DCE6 */
2621254885Sdumbbell	if (!ASIC_IS_DCE6(rdev))
2622254885Sdumbbell		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
2623254885Sdumbbell	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
2624254885Sdumbbell
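	/* mask the HPD interrupts but preserve the programmed polarity bits */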
2625254885Sdumbbell	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2626254885Sdumbbell	WREG32(DC_HPD1_INT_CONTROL, tmp);
2627254885Sdumbbell	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2628254885Sdumbbell	WREG32(DC_HPD2_INT_CONTROL, tmp);
2629254885Sdumbbell	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2630254885Sdumbbell	WREG32(DC_HPD3_INT_CONTROL, tmp);
2631254885Sdumbbell	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2632254885Sdumbbell	WREG32(DC_HPD4_INT_CONTROL, tmp);
2633254885Sdumbbell	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2634254885Sdumbbell	WREG32(DC_HPD5_INT_CONTROL, tmp);
2635254885Sdumbbell	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2636254885Sdumbbell	WREG32(DC_HPD6_INT_CONTROL, tmp);
2637254885Sdumbbell
2638254885Sdumbbell}
2639254885Sdumbbell
2640254885Sdumbbellint evergreen_irq_set(struct radeon_device *rdev)
2641254885Sdumbbell{
2642254885Sdumbbell	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2643254885Sdumbbell	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
2644254885Sdumbbell	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
2645254885Sdumbbell	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
2646254885Sdumbbell	u32 grbm_int_cntl = 0;
2647254885Sdumbbell	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
2648254885Sdumbbell	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
2649254885Sdumbbell	u32 dma_cntl, dma_cntl1 = 0;
2650254885Sdumbbell
2651254885Sdumbbell	if (!rdev->irq.installed) {
2652254885Sdumbbell		dev_warn(rdev->dev, "Can't enable IRQ/MSI because no handler is installed\n");
2653254885Sdumbbell		return -EINVAL;
2654254885Sdumbbell	}
2655254885Sdumbbell	/* don't enable anything if the ih is disabled */
2656254885Sdumbbell	if (!rdev->ih.enabled) {
2657254885Sdumbbell		r600_disable_interrupts(rdev);
2658254885Sdumbbell		/* force the active interrupt state to all disabled */
2659254885Sdumbbell		evergreen_disable_interrupt_state(rdev);
2660254885Sdumbbell		return 0;
2661254885Sdumbbell	}
2662254885Sdumbbell
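	/*
	 * Read the current HPD and AFMT control registers with their enable
	 * and trigger-mask bits cleared; the requested sources are OR'd back
	 * in below and everything is written out at the end.
	 */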
2663254885Sdumbbell	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
2664254885Sdumbbell	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
2665254885Sdumbbell	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
2666254885Sdumbbell	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
2667254885Sdumbbell	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
2668254885Sdumbbell	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
2669254885Sdumbbell
2670254885Sdumbbell	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
2671254885Sdumbbell	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
2672254885Sdumbbell	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
2673254885Sdumbbell	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
2674254885Sdumbbell	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
2675254885Sdumbbell	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
2676254885Sdumbbell
2677254885Sdumbbell	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
2678254885Sdumbbell
2679254885Sdumbbell	if (rdev->family >= CHIP_CAYMAN) {
2680254885Sdumbbell		/* enable CP interrupts on all rings */
2681254885Sdumbbell		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
2682254885Sdumbbell			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
2683254885Sdumbbell			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
2684254885Sdumbbell		}
2685254885Sdumbbell		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
2686254885Sdumbbell			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
2687254885Sdumbbell			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
2688254885Sdumbbell		}
2689254885Sdumbbell		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
2690254885Sdumbbell			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
2691254885Sdumbbell			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
2692254885Sdumbbell		}
2693254885Sdumbbell	} else {
2694254885Sdumbbell		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
2695254885Sdumbbell			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
2696254885Sdumbbell			cp_int_cntl |= RB_INT_ENABLE;
2697254885Sdumbbell			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
2698254885Sdumbbell		}
2699254885Sdumbbell	}
2700254885Sdumbbell
2701254885Sdumbbell	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
2702254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: sw int dma\n");
2703254885Sdumbbell		dma_cntl |= TRAP_ENABLE;
2704254885Sdumbbell	}
2705254885Sdumbbell
2706254885Sdumbbell	if (rdev->family >= CHIP_CAYMAN) {
2707254885Sdumbbell		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
2708254885Sdumbbell		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
2709254885Sdumbbell			DRM_DEBUG("evergreen_irq_set: sw int dma1\n");
2710254885Sdumbbell			dma_cntl1 |= TRAP_ENABLE;
2711254885Sdumbbell		}
2712254885Sdumbbell	}
2713254885Sdumbbell
2714254885Sdumbbell	if (rdev->irq.crtc_vblank_int[0] ||
2715254885Sdumbbell	    atomic_read(&rdev->irq.pflip[0])) {
2716254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
2717254885Sdumbbell		crtc1 |= VBLANK_INT_MASK;
2718254885Sdumbbell	}
2719254885Sdumbbell	if (rdev->irq.crtc_vblank_int[1] ||
2720254885Sdumbbell	    atomic_read(&rdev->irq.pflip[1])) {
2721254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
2722254885Sdumbbell		crtc2 |= VBLANK_INT_MASK;
2723254885Sdumbbell	}
2724254885Sdumbbell	if (rdev->irq.crtc_vblank_int[2] ||
2725254885Sdumbbell	    atomic_read(&rdev->irq.pflip[2])) {
2726254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
2727254885Sdumbbell		crtc3 |= VBLANK_INT_MASK;
2728254885Sdumbbell	}
2729254885Sdumbbell	if (rdev->irq.crtc_vblank_int[3] ||
2730254885Sdumbbell	    atomic_read(&rdev->irq.pflip[3])) {
2731254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
2732254885Sdumbbell		crtc4 |= VBLANK_INT_MASK;
2733254885Sdumbbell	}
2734254885Sdumbbell	if (rdev->irq.crtc_vblank_int[4] ||
2735254885Sdumbbell	    atomic_read(&rdev->irq.pflip[4])) {
2736254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
2737254885Sdumbbell		crtc5 |= VBLANK_INT_MASK;
2738254885Sdumbbell	}
2739254885Sdumbbell	if (rdev->irq.crtc_vblank_int[5] ||
2740254885Sdumbbell	    atomic_read(&rdev->irq.pflip[5])) {
2741254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
2742254885Sdumbbell		crtc6 |= VBLANK_INT_MASK;
2743254885Sdumbbell	}
2744254885Sdumbbell	if (rdev->irq.hpd[0]) {
2745254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
2746254885Sdumbbell		hpd1 |= DC_HPDx_INT_EN;
2747254885Sdumbbell	}
2748254885Sdumbbell	if (rdev->irq.hpd[1]) {
2749254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
2750254885Sdumbbell		hpd2 |= DC_HPDx_INT_EN;
2751254885Sdumbbell	}
2752254885Sdumbbell	if (rdev->irq.hpd[2]) {
2753254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
2754254885Sdumbbell		hpd3 |= DC_HPDx_INT_EN;
2755254885Sdumbbell	}
2756254885Sdumbbell	if (rdev->irq.hpd[3]) {
2757254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
2758254885Sdumbbell		hpd4 |= DC_HPDx_INT_EN;
2759254885Sdumbbell	}
2760254885Sdumbbell	if (rdev->irq.hpd[4]) {
2761254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
2762254885Sdumbbell		hpd5 |= DC_HPDx_INT_EN;
2763254885Sdumbbell	}
2764254885Sdumbbell	if (rdev->irq.hpd[5]) {
2765254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
2766254885Sdumbbell		hpd6 |= DC_HPDx_INT_EN;
2767254885Sdumbbell	}
2768254885Sdumbbell	if (rdev->irq.afmt[0]) {
2769254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
2770254885Sdumbbell		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2771254885Sdumbbell	}
2772254885Sdumbbell	if (rdev->irq.afmt[1]) {
2773254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
2774254885Sdumbbell		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2775254885Sdumbbell	}
2776254885Sdumbbell	if (rdev->irq.afmt[2]) {
2777254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
2778254885Sdumbbell		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2779254885Sdumbbell	}
2780254885Sdumbbell	if (rdev->irq.afmt[3]) {
2781254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
2782254885Sdumbbell		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2783254885Sdumbbell	}
2784254885Sdumbbell	if (rdev->irq.afmt[4]) {
2785254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
2786254885Sdumbbell		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2787254885Sdumbbell	}
2788254885Sdumbbell	if (rdev->irq.afmt[5]) {
2789254885Sdumbbell		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
2790254885Sdumbbell		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
2791254885Sdumbbell	}
2792254885Sdumbbell
2793254885Sdumbbell	if (rdev->family >= CHIP_CAYMAN) {
2794254885Sdumbbell		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
2795254885Sdumbbell		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
2796254885Sdumbbell		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
2797254885Sdumbbell	} else
2798254885Sdumbbell		WREG32(CP_INT_CNTL, cp_int_cntl);
2799254885Sdumbbell
2800254885Sdumbbell	WREG32(DMA_CNTL, dma_cntl);
2801254885Sdumbbell
2802254885Sdumbbell	if (rdev->family >= CHIP_CAYMAN)
2803254885Sdumbbell		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
2804254885Sdumbbell
2805254885Sdumbbell	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
2806254885Sdumbbell
2807254885Sdumbbell	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
2808254885Sdumbbell	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
2809254885Sdumbbell	if (rdev->num_crtc >= 4) {
2810254885Sdumbbell		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
2811254885Sdumbbell		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
2812254885Sdumbbell	}
2813254885Sdumbbell	if (rdev->num_crtc >= 6) {
2814254885Sdumbbell		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
2815254885Sdumbbell		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
2816254885Sdumbbell	}
2817254885Sdumbbell
2818254885Sdumbbell	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
2819254885Sdumbbell	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
2820254885Sdumbbell	if (rdev->num_crtc >= 4) {
2821254885Sdumbbell		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
2822254885Sdumbbell		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
2823254885Sdumbbell	}
2824254885Sdumbbell	if (rdev->num_crtc >= 6) {
2825254885Sdumbbell		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
2826254885Sdumbbell		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
2827254885Sdumbbell	}
2828254885Sdumbbell
2829254885Sdumbbell	WREG32(DC_HPD1_INT_CONTROL, hpd1);
2830254885Sdumbbell	WREG32(DC_HPD2_INT_CONTROL, hpd2);
2831254885Sdumbbell	WREG32(DC_HPD3_INT_CONTROL, hpd3);
2832254885Sdumbbell	WREG32(DC_HPD4_INT_CONTROL, hpd4);
2833254885Sdumbbell	WREG32(DC_HPD5_INT_CONTROL, hpd5);
2834254885Sdumbbell	WREG32(DC_HPD6_INT_CONTROL, hpd6);
2835254885Sdumbbell
2836254885Sdumbbell	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
2837254885Sdumbbell	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
2838254885Sdumbbell	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
2839254885Sdumbbell	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
2840254885Sdumbbell	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
2841254885Sdumbbell	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
2842254885Sdumbbell
2843254885Sdumbbell	return 0;
2844254885Sdumbbell}
2845254885Sdumbbell
2846254885Sdumbbellstatic void evergreen_irq_ack(struct radeon_device *rdev)
2847254885Sdumbbell{
2848254885Sdumbbell	u32 tmp;
2849254885Sdumbbell
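	/* latch the display interrupt status, then ack every source that fired */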
2850254885Sdumbbell	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
2851254885Sdumbbell	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
2852254885Sdumbbell	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
2853254885Sdumbbell	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
2854254885Sdumbbell	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
2855254885Sdumbbell	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
2856254885Sdumbbell	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
2857254885Sdumbbell	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
2858254885Sdumbbell	if (rdev->num_crtc >= 4) {
2859254885Sdumbbell		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
2860254885Sdumbbell		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
2861254885Sdumbbell	}
2862254885Sdumbbell	if (rdev->num_crtc >= 6) {
2863254885Sdumbbell		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
2864254885Sdumbbell		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2865254885Sdumbbell	}
2866254885Sdumbbell
2867254885Sdumbbell	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
2868254885Sdumbbell	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
2869254885Sdumbbell	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
2870254885Sdumbbell	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
2871254885Sdumbbell	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
2872254885Sdumbbell	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2873254885Sdumbbell
2874254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
2875254885Sdumbbell		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2876254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
2877254885Sdumbbell		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2878254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
2879254885Sdumbbell		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
2880254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
2881254885Sdumbbell		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
2882254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
2883254885Sdumbbell		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
2884254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
2885254885Sdumbbell		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
2886254885Sdumbbell
2887254885Sdumbbell	if (rdev->num_crtc >= 4) {
2888254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
2889254885Sdumbbell			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2890254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
2891254885Sdumbbell			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2892254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
2893254885Sdumbbell			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
2894254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
2895254885Sdumbbell			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
2896254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
2897254885Sdumbbell			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
2898254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
2899254885Sdumbbell			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
2900254885Sdumbbell	}
2901254885Sdumbbell
2902254885Sdumbbell	if (rdev->num_crtc >= 6) {
2903254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
2904254885Sdumbbell			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2905254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
2906254885Sdumbbell			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2907254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
2908254885Sdumbbell			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
2909254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
2910254885Sdumbbell			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
2911254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
2912254885Sdumbbell			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
2913254885Sdumbbell		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
2914254885Sdumbbell			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
2915254885Sdumbbell	}
2916254885Sdumbbell
2917254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
2918254885Sdumbbell		tmp = RREG32(DC_HPD1_INT_CONTROL);
2919254885Sdumbbell		tmp |= DC_HPDx_INT_ACK;
2920254885Sdumbbell		WREG32(DC_HPD1_INT_CONTROL, tmp);
2921254885Sdumbbell	}
2922254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
2923254885Sdumbbell		tmp = RREG32(DC_HPD2_INT_CONTROL);
2924254885Sdumbbell		tmp |= DC_HPDx_INT_ACK;
2925254885Sdumbbell		WREG32(DC_HPD2_INT_CONTROL, tmp);
2926254885Sdumbbell	}
2927254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
2928254885Sdumbbell		tmp = RREG32(DC_HPD3_INT_CONTROL);
2929254885Sdumbbell		tmp |= DC_HPDx_INT_ACK;
2930254885Sdumbbell		WREG32(DC_HPD3_INT_CONTROL, tmp);
2931254885Sdumbbell	}
2932254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
2933254885Sdumbbell		tmp = RREG32(DC_HPD4_INT_CONTROL);
2934254885Sdumbbell		tmp |= DC_HPDx_INT_ACK;
2935254885Sdumbbell		WREG32(DC_HPD4_INT_CONTROL, tmp);
2936254885Sdumbbell	}
2937254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
2938254885Sdumbbell		tmp = RREG32(DC_HPD5_INT_CONTROL);
2939254885Sdumbbell		tmp |= DC_HPDx_INT_ACK;
2940254885Sdumbbell		WREG32(DC_HPD5_INT_CONTROL, tmp);
2941254885Sdumbbell	}
2942254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
2943254885Sdumbbell		tmp = RREG32(DC_HPD6_INT_CONTROL);
2944254885Sdumbbell		tmp |= DC_HPDx_INT_ACK;
2945254885Sdumbbell		WREG32(DC_HPD6_INT_CONTROL, tmp);
2946254885Sdumbbell	}
2947254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
2948254885Sdumbbell		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
2949254885Sdumbbell		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
2950254885Sdumbbell		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
2951254885Sdumbbell	}
2952254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
2953254885Sdumbbell		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
2954254885Sdumbbell		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
2955254885Sdumbbell		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
2956254885Sdumbbell	}
2957254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
2958254885Sdumbbell		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
2959254885Sdumbbell		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
2960254885Sdumbbell		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
2961254885Sdumbbell	}
2962254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
2963254885Sdumbbell		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
2964254885Sdumbbell		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
2965254885Sdumbbell		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
2966254885Sdumbbell	}
2967254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
2968254885Sdumbbell		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
2969254885Sdumbbell		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
2970254885Sdumbbell		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
2971254885Sdumbbell	}
2972254885Sdumbbell	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
2973254885Sdumbbell		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
2974254885Sdumbbell		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
2975254885Sdumbbell		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
2976254885Sdumbbell	}
2977254885Sdumbbell}
2978254885Sdumbbell
2979254885Sdumbbellstatic void evergreen_irq_disable(struct radeon_device *rdev)
2980254885Sdumbbell{
2981254885Sdumbbell	r600_disable_interrupts(rdev);
2982254885Sdumbbell	/* Wait and acknowledge irq */
2983282199Sdumbbell	mdelay(1);
2984254885Sdumbbell	evergreen_irq_ack(rdev);
2985254885Sdumbbell	evergreen_disable_interrupt_state(rdev);
2986254885Sdumbbell}
2987254885Sdumbbell
2988254885Sdumbbellvoid evergreen_irq_suspend(struct radeon_device *rdev)
2989254885Sdumbbell{
2990254885Sdumbbell	evergreen_irq_disable(rdev);
2991254885Sdumbbell	r600_rlc_stop(rdev);
2992254885Sdumbbell}
2993254885Sdumbbell
2994254885Sdumbbellstatic u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
2995254885Sdumbbell{
2996254885Sdumbbell	u32 wptr, tmp;
2997254885Sdumbbell
2998254885Sdumbbell	if (rdev->wb.enabled)
2999254885Sdumbbell		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
3000254885Sdumbbell	else
3001254885Sdumbbell		wptr = RREG32(IH_RB_WPTR);
3002254885Sdumbbell
3003254885Sdumbbell	if (wptr & RB_OVERFLOW) {
3004254885Sdumbbell		/* When a ring buffer overflow happens, start parsing the
3005254885Sdumbbell		 * interrupts from the last vector that was not overwritten
3006254885Sdumbbell		 * (wptr + 16).  Hopefully this allows us to catch up.
3007254885Sdumbbell		 */
3008254885Sdumbbell		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
3009254885Sdumbbell			wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
3010254885Sdumbbell		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
3011254885Sdumbbell		tmp = RREG32(IH_RB_CNTL);
3012254885Sdumbbell		tmp |= IH_WPTR_OVERFLOW_CLEAR;
3013254885Sdumbbell		WREG32(IH_RB_CNTL, tmp);
3014254885Sdumbbell	}
3015254885Sdumbbell	return (wptr & rdev->ih.ptr_mask);
3016254885Sdumbbell}
3017254885Sdumbbell
3018254885Sdumbbellirqreturn_t evergreen_irq_process(struct radeon_device *rdev)
3019254885Sdumbbell{
3020254885Sdumbbell	u32 wptr;
3021254885Sdumbbell	u32 rptr;
3022254885Sdumbbell	u32 src_id, src_data;
3023254885Sdumbbell	u32 ring_index;
3024254885Sdumbbell	bool queue_hotplug = false;
3025254885Sdumbbell	bool queue_hdmi = false;
3026254885Sdumbbell
3027254885Sdumbbell	if (!rdev->ih.enabled || rdev->shutdown)
3028254885Sdumbbell		return IRQ_NONE;
3029254885Sdumbbell
3030254885Sdumbbell	wptr = evergreen_get_ih_wptr(rdev);
3031254885Sdumbbell
3032254885Sdumbbellrestart_ih:
3033254885Sdumbbell	/* is somebody else already processing irqs? */
3034254885Sdumbbell	if (atomic_xchg(&rdev->ih.lock, 1))
3035254885Sdumbbell		return IRQ_NONE;
3036254885Sdumbbell
3037254885Sdumbbell	rptr = rdev->ih.rptr;
3038254885Sdumbbell	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
3039254885Sdumbbell
3040254885Sdumbbell	/* Order reading of wptr vs. reading of IH ring data */
3041254885Sdumbbell	rmb();
3042254885Sdumbbell
3043254885Sdumbbell	/* display interrupts */
3044254885Sdumbbell	evergreen_irq_ack(rdev);
3045254885Sdumbbell
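	/*
	 * Each 16-byte IH ring entry carries the source id in dword 0 and
	 * the source-specific data in dword 1.
	 */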
3046254885Sdumbbell	while (rptr != wptr) {
3047254885Sdumbbell		/* wptr/rptr are in bytes! */
3048254885Sdumbbell		ring_index = rptr / 4;
3049254885Sdumbbell		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
3050254885Sdumbbell		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
3051254885Sdumbbell
3052254885Sdumbbell		switch (src_id) {
3053254885Sdumbbell		case 1: /* D1 vblank/vline */
3054254885Sdumbbell			switch (src_data) {
3055254885Sdumbbell			case 0: /* D1 vblank */
3056254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
3057254885Sdumbbell					if (rdev->irq.crtc_vblank_int[0]) {
3058254885Sdumbbell						drm_handle_vblank(rdev->ddev, 0);
3059254885Sdumbbell						rdev->pm.vblank_sync = true;
3060254885Sdumbbell						DRM_WAKEUP(&rdev->irq.vblank_queue);
3061254885Sdumbbell					}
3062254885Sdumbbell					if (atomic_read(&rdev->irq.pflip[0]))
3063254885Sdumbbell						radeon_crtc_handle_flip(rdev, 0);
3064254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
3065254885Sdumbbell					DRM_DEBUG("IH: D1 vblank\n");
3066254885Sdumbbell				}
3067254885Sdumbbell				break;
3068254885Sdumbbell			case 1: /* D1 vline */
3069254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
3070254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
3071254885Sdumbbell					DRM_DEBUG("IH: D1 vline\n");
3072254885Sdumbbell				}
3073254885Sdumbbell				break;
3074254885Sdumbbell			default:
3075254885Sdumbbell				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3076254885Sdumbbell				break;
3077254885Sdumbbell			}
3078254885Sdumbbell			break;
3079254885Sdumbbell		case 2: /* D2 vblank/vline */
3080254885Sdumbbell			switch (src_data) {
3081254885Sdumbbell			case 0: /* D2 vblank */
3082254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
3083254885Sdumbbell					if (rdev->irq.crtc_vblank_int[1]) {
3084254885Sdumbbell						drm_handle_vblank(rdev->ddev, 1);
3085254885Sdumbbell						rdev->pm.vblank_sync = true;
3086254885Sdumbbell						DRM_WAKEUP(&rdev->irq.vblank_queue);
3087254885Sdumbbell					}
3088254885Sdumbbell					if (atomic_read(&rdev->irq.pflip[1]))
3089254885Sdumbbell						radeon_crtc_handle_flip(rdev, 1);
3090254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
3091254885Sdumbbell					DRM_DEBUG("IH: D2 vblank\n");
3092254885Sdumbbell				}
3093254885Sdumbbell				break;
3094254885Sdumbbell			case 1: /* D2 vline */
3095254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
3096254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
3097254885Sdumbbell					DRM_DEBUG("IH: D2 vline\n");
3098254885Sdumbbell				}
3099254885Sdumbbell				break;
3100254885Sdumbbell			default:
3101254885Sdumbbell				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3102254885Sdumbbell				break;
3103254885Sdumbbell			}
3104254885Sdumbbell			break;
3105254885Sdumbbell		case 3: /* D3 vblank/vline */
3106254885Sdumbbell			switch (src_data) {
3107254885Sdumbbell			case 0: /* D3 vblank */
3108254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
3109254885Sdumbbell					if (rdev->irq.crtc_vblank_int[2]) {
3110254885Sdumbbell						drm_handle_vblank(rdev->ddev, 2);
3111254885Sdumbbell						rdev->pm.vblank_sync = true;
3112254885Sdumbbell						DRM_WAKEUP(&rdev->irq.vblank_queue);
3113254885Sdumbbell					}
3114254885Sdumbbell					if (atomic_read(&rdev->irq.pflip[2]))
3115254885Sdumbbell						radeon_crtc_handle_flip(rdev, 2);
3116254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
3117254885Sdumbbell					DRM_DEBUG("IH: D3 vblank\n");
3118254885Sdumbbell				}
3119254885Sdumbbell				break;
3120254885Sdumbbell			case 1: /* D3 vline */
3121254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
3122254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
3123254885Sdumbbell					DRM_DEBUG("IH: D3 vline\n");
3124254885Sdumbbell				}
3125254885Sdumbbell				break;
3126254885Sdumbbell			default:
3127254885Sdumbbell				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3128254885Sdumbbell				break;
3129254885Sdumbbell			}
3130254885Sdumbbell			break;
3131254885Sdumbbell		case 4: /* D4 vblank/vline */
3132254885Sdumbbell			switch (src_data) {
3133254885Sdumbbell			case 0: /* D4 vblank */
3134254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
3135254885Sdumbbell					if (rdev->irq.crtc_vblank_int[3]) {
3136254885Sdumbbell						drm_handle_vblank(rdev->ddev, 3);
3137254885Sdumbbell						rdev->pm.vblank_sync = true;
3138254885Sdumbbell						DRM_WAKEUP(&rdev->irq.vblank_queue);
3139254885Sdumbbell					}
3140254885Sdumbbell					if (atomic_read(&rdev->irq.pflip[3]))
3141254885Sdumbbell						radeon_crtc_handle_flip(rdev, 3);
3142254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
3143254885Sdumbbell					DRM_DEBUG("IH: D4 vblank\n");
3144254885Sdumbbell				}
3145254885Sdumbbell				break;
3146254885Sdumbbell			case 1: /* D4 vline */
3147254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
3148254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
3149254885Sdumbbell					DRM_DEBUG("IH: D4 vline\n");
3150254885Sdumbbell				}
3151254885Sdumbbell				break;
3152254885Sdumbbell			default:
3153254885Sdumbbell				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3154254885Sdumbbell				break;
3155254885Sdumbbell			}
3156254885Sdumbbell			break;
3157254885Sdumbbell		case 5: /* D5 vblank/vline */
3158254885Sdumbbell			switch (src_data) {
3159254885Sdumbbell			case 0: /* D5 vblank */
3160254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
3161254885Sdumbbell					if (rdev->irq.crtc_vblank_int[4]) {
3162254885Sdumbbell						drm_handle_vblank(rdev->ddev, 4);
3163254885Sdumbbell						rdev->pm.vblank_sync = true;
3164254885Sdumbbell						DRM_WAKEUP(&rdev->irq.vblank_queue);
3165254885Sdumbbell					}
3166254885Sdumbbell					if (atomic_read(&rdev->irq.pflip[4]))
3167254885Sdumbbell						radeon_crtc_handle_flip(rdev, 4);
3168254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
3169254885Sdumbbell					DRM_DEBUG("IH: D5 vblank\n");
3170254885Sdumbbell				}
3171254885Sdumbbell				break;
3172254885Sdumbbell			case 1: /* D5 vline */
3173254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
3174254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
3175254885Sdumbbell					DRM_DEBUG("IH: D5 vline\n");
3176254885Sdumbbell				}
3177254885Sdumbbell				break;
3178254885Sdumbbell			default:
3179254885Sdumbbell				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3180254885Sdumbbell				break;
3181254885Sdumbbell			}
3182254885Sdumbbell			break;
3183254885Sdumbbell		case 6: /* D6 vblank/vline */
3184254885Sdumbbell			switch (src_data) {
3185254885Sdumbbell			case 0: /* D6 vblank */
3186254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
3187254885Sdumbbell					if (rdev->irq.crtc_vblank_int[5]) {
3188254885Sdumbbell						drm_handle_vblank(rdev->ddev, 5);
3189254885Sdumbbell						rdev->pm.vblank_sync = true;
3190254885Sdumbbell						DRM_WAKEUP(&rdev->irq.vblank_queue);
3191254885Sdumbbell					}
3192254885Sdumbbell					if (atomic_read(&rdev->irq.pflip[5]))
3193254885Sdumbbell						radeon_crtc_handle_flip(rdev, 5);
3194254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
3195254885Sdumbbell					DRM_DEBUG("IH: D6 vblank\n");
3196254885Sdumbbell				}
3197254885Sdumbbell				break;
3198254885Sdumbbell			case 1: /* D6 vline */
3199254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
3200254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
3201254885Sdumbbell					DRM_DEBUG("IH: D6 vline\n");
3202254885Sdumbbell				}
3203254885Sdumbbell				break;
3204254885Sdumbbell			default:
3205254885Sdumbbell				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3206254885Sdumbbell				break;
3207254885Sdumbbell			}
3208254885Sdumbbell			break;
3209254885Sdumbbell		case 42: /* HPD hotplug */
3210254885Sdumbbell			switch (src_data) {
3211254885Sdumbbell			case 0:
3212254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
3213254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
3214254885Sdumbbell					queue_hotplug = true;
3215254885Sdumbbell					DRM_DEBUG("IH: HPD1\n");
3216254885Sdumbbell				}
3217254885Sdumbbell				break;
3218254885Sdumbbell			case 1:
3219254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
3220254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
3221254885Sdumbbell					queue_hotplug = true;
3222254885Sdumbbell					DRM_DEBUG("IH: HPD2\n");
3223254885Sdumbbell				}
3224254885Sdumbbell				break;
3225254885Sdumbbell			case 2:
3226254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
3227254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
3228254885Sdumbbell					queue_hotplug = true;
3229254885Sdumbbell					DRM_DEBUG("IH: HPD3\n");
3230254885Sdumbbell				}
3231254885Sdumbbell				break;
3232254885Sdumbbell			case 3:
3233254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
3234254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
3235254885Sdumbbell					queue_hotplug = true;
3236254885Sdumbbell					DRM_DEBUG("IH: HPD4\n");
3237254885Sdumbbell				}
3238254885Sdumbbell				break;
3239254885Sdumbbell			case 4:
3240254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
3241254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
3242254885Sdumbbell					queue_hotplug = true;
3243254885Sdumbbell					DRM_DEBUG("IH: HPD5\n");
3244254885Sdumbbell				}
3245254885Sdumbbell				break;
3246254885Sdumbbell			case 5:
3247254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
3248254885Sdumbbell					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
3249254885Sdumbbell					queue_hotplug = true;
3250254885Sdumbbell					DRM_DEBUG("IH: HPD6\n");
3251254885Sdumbbell				}
3252254885Sdumbbell				break;
3253254885Sdumbbell			default:
3254254885Sdumbbell				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3255254885Sdumbbell				break;
3256254885Sdumbbell			}
3257254885Sdumbbell			break;
3258254885Sdumbbell		case 44: /* hdmi */
3259254885Sdumbbell			switch (src_data) {
3260254885Sdumbbell			case 0:
3261254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
3262254885Sdumbbell					rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
3263254885Sdumbbell					queue_hdmi = true;
3264254885Sdumbbell					DRM_DEBUG("IH: HDMI0\n");
3265254885Sdumbbell				}
3266254885Sdumbbell				break;
3267254885Sdumbbell			case 1:
3268254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
3269254885Sdumbbell					rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
3270254885Sdumbbell					queue_hdmi = true;
3271254885Sdumbbell					DRM_DEBUG("IH: HDMI1\n");
3272254885Sdumbbell				}
3273254885Sdumbbell				break;
3274254885Sdumbbell			case 2:
3275254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
3276254885Sdumbbell					rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
3277254885Sdumbbell					queue_hdmi = true;
3278254885Sdumbbell					DRM_DEBUG("IH: HDMI2\n");
3279254885Sdumbbell				}
3280254885Sdumbbell				break;
3281254885Sdumbbell			case 3:
3282254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
3283254885Sdumbbell					rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
3284254885Sdumbbell					queue_hdmi = true;
3285254885Sdumbbell					DRM_DEBUG("IH: HDMI3\n");
3286254885Sdumbbell				}
3287254885Sdumbbell				break;
3288254885Sdumbbell			case 4:
3289254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
3290254885Sdumbbell					rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
3291254885Sdumbbell					queue_hdmi = true;
3292254885Sdumbbell					DRM_DEBUG("IH: HDMI4\n");
3293254885Sdumbbell				}
3294254885Sdumbbell				break;
3295254885Sdumbbell			case 5:
3296254885Sdumbbell				if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
3297254885Sdumbbell					rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
3298254885Sdumbbell					queue_hdmi = true;
3299254885Sdumbbell					DRM_DEBUG("IH: HDMI5\n");
3300254885Sdumbbell				}
3301254885Sdumbbell				break;
3302254885Sdumbbell			default:
3303254885Sdumbbell				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
3304254885Sdumbbell				break;
3305254885Sdumbbell			}
3306254885Sdumbbell			break;
3307254885Sdumbbell		case 146:
3308254885Sdumbbell		case 147:
3309254885Sdumbbell			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
3310254885Sdumbbell			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
3311254885Sdumbbell				RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
3312254885Sdumbbell			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
3313254885Sdumbbell				RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
3314254885Sdumbbell			/* reset addr and status */
3315254885Sdumbbell			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
3316254885Sdumbbell			break;
3317254885Sdumbbell		case 176: /* CP_INT in ring buffer */
3318254885Sdumbbell		case 177: /* CP_INT in IB1 */
3319254885Sdumbbell		case 178: /* CP_INT in IB2 */
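			/* CP interrupts signal completed command work; check for
			 * finished fences on the GFX ring.
			 */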
3320254885Sdumbbell			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
3321254885Sdumbbell			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
3322254885Sdumbbell			break;
3323254885Sdumbbell		case 181: /* CP EOP event */
3324254885Sdumbbell			DRM_DEBUG("IH: CP EOP\n");
3325254885Sdumbbell			if (rdev->family >= CHIP_CAYMAN) {
3326254885Sdumbbell				switch (src_data) {
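				/* on cayman+, the EOP source data selects which CP ring completed */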
3327254885Sdumbbell				case 0:
3328254885Sdumbbell					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
3329254885Sdumbbell					break;
3330254885Sdumbbell				case 1:
3331254885Sdumbbell					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
3332254885Sdumbbell					break;
3333254885Sdumbbell				case 2:
3334254885Sdumbbell					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
3335254885Sdumbbell					break;
3336254885Sdumbbell				}
3337254885Sdumbbell			} else
3338254885Sdumbbell				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
3339254885Sdumbbell			break;
3340254885Sdumbbell		case 224: /* DMA trap event */
3341254885Sdumbbell			DRM_DEBUG("IH: DMA trap\n");
3342254885Sdumbbell			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
3343254885Sdumbbell			break;
3344254885Sdumbbell		case 233: /* GUI IDLE */
3345254885Sdumbbell			DRM_DEBUG("IH: GUI idle\n");
3346254885Sdumbbell			break;
3347254885Sdumbbell		case 244: /* DMA1 trap event */
3348254885Sdumbbell			if (rdev->family >= CHIP_CAYMAN) {
3349254885Sdumbbell				DRM_DEBUG("IH: DMA1 trap\n");
3350254885Sdumbbell				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
3351254885Sdumbbell			}
3352254885Sdumbbell			break;
3353254885Sdumbbell		default:
3354254885Sdumbbell			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
3355254885Sdumbbell			break;
3356254885Sdumbbell		}
3357254885Sdumbbell
3358254885Sdumbbell		/* wptr/rptr are in bytes; each IH ring entry is four dwords (16 bytes) */
3359254885Sdumbbell		rptr += 16;
3360254885Sdumbbell		rptr &= rdev->ih.ptr_mask;
3361254885Sdumbbell	}
3362254885Sdumbbell	if (queue_hotplug)
3363254885Sdumbbell		taskqueue_enqueue(rdev->tq, &rdev->hotplug_work);
3364254885Sdumbbell	if (queue_hdmi)
3365254885Sdumbbell		taskqueue_enqueue(rdev->tq, &rdev->audio_work);
3366254885Sdumbbell	rdev->ih.rptr = rptr;
3367254885Sdumbbell	WREG32(IH_RB_RPTR, rdev->ih.rptr);
3368254885Sdumbbell	atomic_set(&rdev->ih.lock, 0);
3369254885Sdumbbell
3370254885Sdumbbell	/* make sure wptr hasn't changed while processing */
3371254885Sdumbbell	wptr = evergreen_get_ih_wptr(rdev);
3372254885Sdumbbell	if (wptr != rptr)
3373254885Sdumbbell		goto restart_ih;
3374254885Sdumbbell
3375254885Sdumbbell	return IRQ_HANDLED;
3376254885Sdumbbell}
3377254885Sdumbbell
3378254885Sdumbbell/**
3379254885Sdumbbell * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
3380254885Sdumbbell *
3381254885Sdumbbell * @rdev: radeon_device pointer
3382254885Sdumbbell * @fence: radeon fence object
3383254885Sdumbbell *
3384254885Sdumbbell * Add a DMA fence packet to the ring to write the fence
3385254885Sdumbbell * sequence number, followed by a DMA trap packet to generate
3386254885Sdumbbell * an interrupt if needed (evergreen-SI).
3387254885Sdumbbell */
3388254885Sdumbbellvoid evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
3389254885Sdumbbell				   struct radeon_fence *fence)
3390254885Sdumbbell{
3391254885Sdumbbell	struct radeon_ring *ring = &rdev->ring[fence->ring];
3392254885Sdumbbell	u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3393254885Sdumbbell	/* write the fence */
3394254885Sdumbbell	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0, 0));
3395254885Sdumbbell	radeon_ring_write(ring, addr & 0xfffffffc);
3396254885Sdumbbell	radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
3397254885Sdumbbell	radeon_ring_write(ring, fence->seq);
3398254885Sdumbbell	/* generate an interrupt */
3399254885Sdumbbell	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0, 0));
3400254885Sdumbbell	/* flush HDP so the fence write reaches memory and is visible to the CPU */
3401254885Sdumbbell	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
3402254885Sdumbbell	radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
3403254885Sdumbbell	radeon_ring_write(ring, 1);
3404254885Sdumbbell}
3405254885Sdumbbell
3406254885Sdumbbell/**
3407254885Sdumbbell * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
3408254885Sdumbbell *
3409254885Sdumbbell * @rdev: radeon_device pointer
3410254885Sdumbbell * @ib: IB object to schedule
3411254885Sdumbbell *
3412254885Sdumbbell * Schedule an IB in the DMA ring (evergreen).
3413254885Sdumbbell */
3414254885Sdumbbellvoid evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
3415254885Sdumbbell				   struct radeon_ib *ib)
3416254885Sdumbbell{
3417254885Sdumbbell	struct radeon_ring *ring = &rdev->ring[ib->ring];
3418254885Sdumbbell
3419254885Sdumbbell	if (rdev->wb.enabled) {
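		/* Predict the ring position just past this IB's INDIRECT_BUFFER
		 * packet (the 4-dword WRITE below, NOP padding up to the
		 * (wptr & 7) == 5 slot, then the 3-dword IB packet) and have the
		 * DMA engine write that value to the next_rptr location.
		 */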
3420254885Sdumbbell		u32 next_rptr = ring->wptr + 4;
3421254885Sdumbbell		while ((next_rptr & 7) != 5)
3422254885Sdumbbell			next_rptr++;
3423254885Sdumbbell		next_rptr += 3;
3424254885Sdumbbell		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
3425254885Sdumbbell		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3426254885Sdumbbell		radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
3427254885Sdumbbell		radeon_ring_write(ring, next_rptr);
3428254885Sdumbbell	}
3429254885Sdumbbell
3430254885Sdumbbell	/* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
3431254885Sdumbbell	 * Pad as necessary with NOPs.
3432254885Sdumbbell	 */
3433254885Sdumbbell	while ((ring->wptr & 7) != 5)
3434254885Sdumbbell		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
3435254885Sdumbbell	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0, 0));
3436254885Sdumbbell	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
3437254885Sdumbbell	radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
3439254885Sdumbbell}
3440254885Sdumbbell
3441254885Sdumbbell/**
3442254885Sdumbbell * evergreen_copy_dma - copy pages using the DMA engine
3443254885Sdumbbell *
3444254885Sdumbbell * @rdev: radeon_device pointer
3445254885Sdumbbell * @src_offset: src GPU address
3446254885Sdumbbell * @dst_offset: dst GPU address
3447254885Sdumbbell * @num_gpu_pages: number of GPU pages to transfer
3448254885Sdumbbell * @fence: radeon fence object
3449254885Sdumbbell *
3450254885Sdumbbell * Copy GPU pages using the DMA engine (evergreen-cayman).
3451254885Sdumbbell * Used by the radeon ttm implementation to move pages if
3452254885Sdumbbell * registered as the asic copy callback.
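 *
 * As a rough example, assuming the usual 4 KiB GPU page size, copying
 * 16 pages is 64 KiB = 16384 dwords, which fits in a single
 * DMA_PACKET_COPY (each packet moves at most 0xFFFFF dwords).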
3453254885Sdumbbell */
3454254885Sdumbbellint evergreen_copy_dma(struct radeon_device *rdev,
3455254885Sdumbbell		       uint64_t src_offset, uint64_t dst_offset,
3456254885Sdumbbell		       unsigned num_gpu_pages,
3457254885Sdumbbell		       struct radeon_fence **fence)
3458254885Sdumbbell{
3459254885Sdumbbell	struct radeon_semaphore *sem = NULL;
3460254885Sdumbbell	int ring_index = rdev->asic->copy.dma_ring_index;
3461254885Sdumbbell	struct radeon_ring *ring = &rdev->ring[ring_index];
3462254885Sdumbbell	u32 size_in_dw, cur_size_in_dw;
3463254885Sdumbbell	int i, num_loops;
3464254885Sdumbbell	int r = 0;
3465254885Sdumbbell
3466254885Sdumbbell	r = radeon_semaphore_create(rdev, &sem);
3467254885Sdumbbell	if (r) {
3468254885Sdumbbell		DRM_ERROR("radeon: moving bo (%d).\n", r);
3469254885Sdumbbell		return r;
3470254885Sdumbbell	}
3471254885Sdumbbell
3472254885Sdumbbell	size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
3473254885Sdumbbell	num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
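	/* 5 dwords per COPY packet, plus room for the optional semaphore
	 * sync and the fence packets
	 */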
3474254885Sdumbbell	r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
3475254885Sdumbbell	if (r) {
3476254885Sdumbbell		DRM_ERROR("radeon: moving bo (%d).\n", r);
3477254885Sdumbbell		radeon_semaphore_free(rdev, &sem, NULL);
3478254885Sdumbbell		return r;
3479254885Sdumbbell	}
3480254885Sdumbbell
3481254885Sdumbbell	if (radeon_fence_need_sync(*fence, ring->idx)) {
3482254885Sdumbbell		radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
3483254885Sdumbbell					    ring->idx);
3484254885Sdumbbell		radeon_fence_note_sync(*fence, ring->idx);
3485254885Sdumbbell	} else {
3486254885Sdumbbell		radeon_semaphore_free(rdev, &sem, NULL);
3487254885Sdumbbell	}
3488254885Sdumbbell
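	/* split the copy into chunks of at most 0xFFFFF dwords each */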
3489254885Sdumbbell	for (i = 0; i < num_loops; i++) {
3490254885Sdumbbell		cur_size_in_dw = size_in_dw;
3491254885Sdumbbell		if (cur_size_in_dw > 0xFFFFF)
3492254885Sdumbbell			cur_size_in_dw = 0xFFFFF;
3493254885Sdumbbell		size_in_dw -= cur_size_in_dw;
3494254885Sdumbbell		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, 0, cur_size_in_dw));
3495254885Sdumbbell		radeon_ring_write(ring, dst_offset & 0xfffffffc);
3496254885Sdumbbell		radeon_ring_write(ring, src_offset & 0xfffffffc);
3497254885Sdumbbell		radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
3498254885Sdumbbell		radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
3499254885Sdumbbell		src_offset += cur_size_in_dw * 4;
3500254885Sdumbbell		dst_offset += cur_size_in_dw * 4;
3501254885Sdumbbell	}
3502254885Sdumbbell
3503254885Sdumbbell	r = radeon_fence_emit(rdev, fence, ring->idx);
3504254885Sdumbbell	if (r) {
3505254885Sdumbbell		radeon_ring_unlock_undo(rdev, ring);
3506254885Sdumbbell		return r;
3507254885Sdumbbell	}
3508254885Sdumbbell
3509254885Sdumbbell	radeon_ring_unlock_commit(rdev, ring);
3510254885Sdumbbell	radeon_semaphore_free(rdev, &sem, *fence);
3511254885Sdumbbell
3512254885Sdumbbell	return r;
3513254885Sdumbbell}
3514254885Sdumbbell
3515254885Sdumbbellstatic int evergreen_startup(struct radeon_device *rdev)
3516254885Sdumbbell{
3517254885Sdumbbell	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3518254885Sdumbbell	int r;
3519254885Sdumbbell
3520254885Sdumbbell	/* enable pcie gen2 link */
3521254885Sdumbbell	evergreen_pcie_gen2_enable(rdev);
3522254885Sdumbbell
3523254885Sdumbbell	if (ASIC_IS_DCE5(rdev)) {
3524254885Sdumbbell		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
3525254885Sdumbbell			r = ni_init_microcode(rdev);
3526254885Sdumbbell			if (r) {
3527254885Sdumbbell				DRM_ERROR("Failed to load firmware!\n");
3528254885Sdumbbell				return r;
3529254885Sdumbbell			}
3530254885Sdumbbell		}
3531254885Sdumbbell		r = ni_mc_load_microcode(rdev);
3532254885Sdumbbell		if (r) {
3533254885Sdumbbell			DRM_ERROR("Failed to load MC firmware!\n");
3534254885Sdumbbell			return r;
3535254885Sdumbbell		}
3536254885Sdumbbell	} else {
3537254885Sdumbbell		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
3538254885Sdumbbell			r = r600_init_microcode(rdev);
3539254885Sdumbbell			if (r) {
3540254885Sdumbbell				DRM_ERROR("Failed to load firmware!\n");
3541254885Sdumbbell				return r;
3542254885Sdumbbell			}
3543254885Sdumbbell		}
3544254885Sdumbbell	}
3545254885Sdumbbell
3546254885Sdumbbell	r = r600_vram_scratch_init(rdev);
3547254885Sdumbbell	if (r)
3548254885Sdumbbell		return r;
3549254885Sdumbbell
3550254885Sdumbbell	evergreen_mc_program(rdev);
3551254885Sdumbbell	if (rdev->flags & RADEON_IS_AGP) {
3552254885Sdumbbell		evergreen_agp_enable(rdev);
3553254885Sdumbbell	} else {
3554254885Sdumbbell		r = evergreen_pcie_gart_enable(rdev);
3555254885Sdumbbell		if (r)
3556254885Sdumbbell			return r;
3557254885Sdumbbell	}
3558254885Sdumbbell	evergreen_gpu_init(rdev);
3559254885Sdumbbell
3560254885Sdumbbell	r = evergreen_blit_init(rdev);
3561254885Sdumbbell	if (r) {
3562254885Sdumbbell		r600_blit_fini(rdev);
3563254885Sdumbbell		rdev->asic->copy.copy = NULL;
3564254885Sdumbbell		dev_warn(rdev->dev, "failed blitter (%d), falling back to memcpy\n", r);
3565254885Sdumbbell	}
3566254885Sdumbbell
3567254885Sdumbbell	/* allocate wb buffer */
3568254885Sdumbbell	r = radeon_wb_init(rdev);
3569254885Sdumbbell	if (r)
3570254885Sdumbbell		return r;
3571254885Sdumbbell
3572254885Sdumbbell	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
3573254885Sdumbbell	if (r) {
3574254885Sdumbbell		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
3575254885Sdumbbell		return r;
3576254885Sdumbbell	}
3577254885Sdumbbell
3578254885Sdumbbell	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
3579254885Sdumbbell	if (r) {
3580254885Sdumbbell		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
3581254885Sdumbbell		return r;
3582254885Sdumbbell	}
3583254885Sdumbbell
3584254885Sdumbbell	/* Enable IRQ */
3585254885Sdumbbell	r = r600_irq_init(rdev);
3586254885Sdumbbell	if (r) {
3587254885Sdumbbell		DRM_ERROR("radeon: IH init failed (%d).\n", r);
3588254885Sdumbbell		radeon_irq_kms_fini(rdev);
3589254885Sdumbbell		return r;
3590254885Sdumbbell	}
3591254885Sdumbbell	evergreen_irq_set(rdev);
3592254885Sdumbbell
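	/* bring up the GFX ring: write-back rptr offset, rptr/wptr registers,
	 * pointer shift/mask and the NOP fill value
	 */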
3593254885Sdumbbell	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
3594254885Sdumbbell			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
3595254885Sdumbbell			     0, 0xfffff, RADEON_CP_PACKET2);
3596254885Sdumbbell	if (r)
3597254885Sdumbbell		return r;
3598254885Sdumbbell
3599254885Sdumbbell	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
3600254885Sdumbbell	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
3601254885Sdumbbell			     DMA_RB_RPTR, DMA_RB_WPTR,
3602254885Sdumbbell			     2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
3603254885Sdumbbell	if (r)
3604254885Sdumbbell		return r;
3605254885Sdumbbell
3606254885Sdumbbell	r = evergreen_cp_load_microcode(rdev);
3607254885Sdumbbell	if (r)
3608254885Sdumbbell		return r;
3609254885Sdumbbell	r = evergreen_cp_resume(rdev);
3610254885Sdumbbell	if (r)
3611254885Sdumbbell		return r;
3612254885Sdumbbell	r = r600_dma_resume(rdev);
3613254885Sdumbbell	if (r)
3614254885Sdumbbell		return r;
3615254885Sdumbbell
3616254885Sdumbbell	r = radeon_ib_pool_init(rdev);
3617254885Sdumbbell	if (r) {
3618254885Sdumbbell		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
3619254885Sdumbbell		return r;
3620254885Sdumbbell	}
3621254885Sdumbbell
3622254885Sdumbbell	r = r600_audio_init(rdev);
3623254885Sdumbbell	if (r) {
3624254885Sdumbbell		DRM_ERROR("radeon: audio init failed\n");
3625254885Sdumbbell		return r;
3626254885Sdumbbell	}
3627254885Sdumbbell
3628254885Sdumbbell	return 0;
3629254885Sdumbbell}
3630254885Sdumbbell
3631254885Sdumbbellint evergreen_resume(struct radeon_device *rdev)
3632254885Sdumbbell{
3633254885Sdumbbell	int r;
3634254885Sdumbbell
3635254885Sdumbbell	/* Reset the ASIC; the GFX blocks are often in a bad state
3636254885Sdumbbell	 * after the driver is unloaded or after a resume.
3637254885Sdumbbell	 */
3638254885Sdumbbell	if (radeon_asic_reset(rdev))
3639254885Sdumbbell		dev_warn(rdev->dev, "GPU reset failed!\n");
3640254885Sdumbbell	/* Do not reset the GPU before posting: on rv770 hardware, unlike
3641254885Sdumbbell	 * r500, posting performs the tasks needed to bring the GPU back
3642254885Sdumbbell	 * into good shape.
3643254885Sdumbbell	 */
3644254885Sdumbbell	/* post card */
3645254885Sdumbbell	atom_asic_init(rdev->mode_info.atom_context);
3646254885Sdumbbell
3647254885Sdumbbell	rdev->accel_working = true;
3648254885Sdumbbell	r = evergreen_startup(rdev);
3649254885Sdumbbell	if (r) {
3650254885Sdumbbell		DRM_ERROR("evergreen startup failed on resume\n");
3651254885Sdumbbell		rdev->accel_working = false;
3652254885Sdumbbell		return r;
3653254885Sdumbbell	}
3654254885Sdumbbell
3655254885Sdumbbell	return r;
3657254885Sdumbbell}
3658254885Sdumbbell
3659254885Sdumbbellint evergreen_suspend(struct radeon_device *rdev)
3660254885Sdumbbell{
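	/* quiesce audio, the CP and DMA engines, interrupts, write-back
	 * and the GART before the device goes down
	 */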
3661254885Sdumbbell	r600_audio_fini(rdev);
3662254885Sdumbbell	r700_cp_stop(rdev);
3663254885Sdumbbell	r600_dma_stop(rdev);
3664254885Sdumbbell	evergreen_irq_suspend(rdev);
3665254885Sdumbbell	radeon_wb_disable(rdev);
3666254885Sdumbbell	evergreen_pcie_gart_disable(rdev);
3667254885Sdumbbell
3668254885Sdumbbell	return 0;
3669254885Sdumbbell}
3670254885Sdumbbell
3671254885Sdumbbell/* The plan is to move initialization into this function and use
3672254885Sdumbbell * helper functions so that radeon_device_init does little more
3673254885Sdumbbell * than call ASIC-specific functions. This should also allow the
3674254885Sdumbbell * removal of a number of callbacks such as vram_info.
3675254885Sdumbbell */
3677254885Sdumbbellint evergreen_init(struct radeon_device *rdev)
3678254885Sdumbbell{
3679254885Sdumbbell	int r;
3680254885Sdumbbell
3681254885Sdumbbell	/* Read BIOS */
3682254885Sdumbbell	if (!radeon_get_bios(rdev)) {
3683254885Sdumbbell		if (ASIC_IS_AVIVO(rdev))
3684254885Sdumbbell			return -EINVAL;
3685254885Sdumbbell	}
3686254885Sdumbbell	/* Must be an ATOMBIOS */
3687254885Sdumbbell	if (!rdev->is_atom_bios) {
3688254885Sdumbbell		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
3689254885Sdumbbell		return -EINVAL;
3690254885Sdumbbell	}
3691254885Sdumbbell	r = radeon_atombios_init(rdev);
3692254885Sdumbbell	if (r)
3693254885Sdumbbell		return r;
3694254885Sdumbbell	/* Reset the ASIC; the GFX blocks are often in a bad state
3695254885Sdumbbell	 * after the driver is unloaded or after a resume.
3696254885Sdumbbell	 */
3697254885Sdumbbell	if (radeon_asic_reset(rdev))
3698254885Sdumbbell		dev_warn(rdev->dev, "GPU reset failed!\n");
3699254885Sdumbbell	/* Post card if necessary */
3700254885Sdumbbell	if (!radeon_card_posted(rdev)) {
3701254885Sdumbbell		if (!rdev->bios) {
3702254885Sdumbbell			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
3703254885Sdumbbell			return -EINVAL;
3704254885Sdumbbell		}
3705254885Sdumbbell		DRM_INFO("GPU not posted. posting now...\n");
3706254885Sdumbbell		atom_asic_init(rdev->mode_info.atom_context);
3707254885Sdumbbell	}
3708254885Sdumbbell	/* Initialize scratch registers */
3709254885Sdumbbell	r600_scratch_init(rdev);
3710254885Sdumbbell	/* Initialize surface registers */
3711254885Sdumbbell	radeon_surface_init(rdev);
3712254885Sdumbbell	/* Initialize clocks */
3713254885Sdumbbell	radeon_get_clock_info(rdev->ddev);
3714254885Sdumbbell	/* Fence driver */
3715254885Sdumbbell	r = radeon_fence_driver_init(rdev);
3716254885Sdumbbell	if (r)
3717254885Sdumbbell		return r;
3718254885Sdumbbell	/* initialize AGP */
3719254885Sdumbbell	if (rdev->flags & RADEON_IS_AGP) {
3720254885Sdumbbell		r = radeon_agp_init(rdev);
3721254885Sdumbbell		if (r)
3722254885Sdumbbell			radeon_agp_disable(rdev);
3723254885Sdumbbell	}
3724254885Sdumbbell	/* initialize memory controller */
3725254885Sdumbbell	r = evergreen_mc_init(rdev);
3726254885Sdumbbell	if (r)
3727254885Sdumbbell		return r;
3728254885Sdumbbell	/* Memory manager */
3729254885Sdumbbell	r = radeon_bo_init(rdev);
3730254885Sdumbbell	if (r)
3731254885Sdumbbell		return r;
3732254885Sdumbbell
3733254885Sdumbbell	r = radeon_irq_kms_init(rdev);
3734254885Sdumbbell	if (r)
3735254885Sdumbbell		return r;
3736254885Sdumbbell
3737254885Sdumbbell	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
3738254885Sdumbbell	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
3739254885Sdumbbell
3740254885Sdumbbell	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
3741254885Sdumbbell	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
3742254885Sdumbbell
3743254885Sdumbbell	rdev->ih.ring_obj = NULL;
3744254885Sdumbbell	r600_ih_ring_init(rdev, 64 * 1024);
3745254885Sdumbbell
3746254885Sdumbbell	r = r600_pcie_gart_init(rdev);
3747254885Sdumbbell	if (r)
3748254885Sdumbbell		return r;
3749254885Sdumbbell
3750254885Sdumbbell	rdev->accel_working = true;
3751254885Sdumbbell	r = evergreen_startup(rdev);
3752254885Sdumbbell	if (r) {
3753254885Sdumbbell		dev_err(rdev->dev, "disabling GPU acceleration\n");
3754254885Sdumbbell		r700_cp_fini(rdev);
3755254885Sdumbbell		r600_dma_fini(rdev);
3756254885Sdumbbell		r600_irq_fini(rdev);
3757254885Sdumbbell		radeon_wb_fini(rdev);
3758254885Sdumbbell		radeon_ib_pool_fini(rdev);
3759254885Sdumbbell		radeon_irq_kms_fini(rdev);
3760254885Sdumbbell		evergreen_pcie_gart_fini(rdev);
3761254885Sdumbbell		rdev->accel_working = false;
3762254885Sdumbbell	}
3763254885Sdumbbell
3764254885Sdumbbell	/* Don't start up if the MC ucode is missing on BTC parts.
3765254885Sdumbbell	 * The default clocks and voltages before the MC ucode
3766254885Sdumbbell	 * is loaded are not sufficient for advanced operations.
3767254885Sdumbbell	 */
3768254885Sdumbbell	if (ASIC_IS_DCE5(rdev)) {
3769254885Sdumbbell		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
3770254885Sdumbbell			DRM_ERROR("radeon: MC ucode required for NI+.\n");
3771254885Sdumbbell			return -EINVAL;
3772254885Sdumbbell		}
3773254885Sdumbbell	}
3774254885Sdumbbell
3775254885Sdumbbell	return 0;
3776254885Sdumbbell}
3777254885Sdumbbell
3778254885Sdumbbellvoid evergreen_fini(struct radeon_device *rdev)
3779254885Sdumbbell{
3780254885Sdumbbell	r600_audio_fini(rdev);
3781254885Sdumbbell	r600_blit_fini(rdev);
3782254885Sdumbbell	r700_cp_fini(rdev);
3783254885Sdumbbell	r600_dma_fini(rdev);
3784254885Sdumbbell	r600_irq_fini(rdev);
3785254885Sdumbbell	radeon_wb_fini(rdev);
3786254885Sdumbbell	radeon_ib_pool_fini(rdev);
3787254885Sdumbbell	radeon_irq_kms_fini(rdev);
3788254885Sdumbbell	evergreen_pcie_gart_fini(rdev);
3789254885Sdumbbell	r600_vram_scratch_fini(rdev);
3790254885Sdumbbell	radeon_gem_fini(rdev);
3791254885Sdumbbell	radeon_fence_driver_fini(rdev);
3792254885Sdumbbell	radeon_agp_fini(rdev);
3793254885Sdumbbell	radeon_bo_fini(rdev);
3794254885Sdumbbell	radeon_atombios_fini(rdev);
3795254885Sdumbbell	if (ASIC_IS_DCE5(rdev))
3796254885Sdumbbell		ni_fini_microcode(rdev);
3797254885Sdumbbell	else
3798254885Sdumbbell		r600_fini_microcode(rdev);
3799254885Sdumbbell	free(rdev->bios, DRM_MEM_DRIVER);
3800254885Sdumbbell	rdev->bios = NULL;
3801254885Sdumbbell}
3802254885Sdumbbell
3803254885Sdumbbellvoid evergreen_pcie_gen2_enable(struct radeon_device *rdev)
3804254885Sdumbbell{
3805254885Sdumbbell	u32 link_width_cntl, speed_cntl, mask;
3806254885Sdumbbell	int ret;
3807254885Sdumbbell
3808254885Sdumbbell	if (radeon_pcie_gen2 == 0)
3809254885Sdumbbell		return;
3810254885Sdumbbell
3811254885Sdumbbell	if (rdev->flags & RADEON_IS_IGP)
3812254885Sdumbbell		return;
3813254885Sdumbbell
3814254885Sdumbbell	if (!(rdev->flags & RADEON_IS_PCIE))
3815254885Sdumbbell		return;
3816254885Sdumbbell
3817254885Sdumbbell	/* x2 cards have a special sequence */
3818254885Sdumbbell	if (ASIC_IS_X2(rdev))
3819254885Sdumbbell		return;
3820254885Sdumbbell
3821254885Sdumbbell	ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
3822254885Sdumbbell	if (ret != 0)
3823254885Sdumbbell		return;
3824254885Sdumbbell
3825254885Sdumbbell	if (!(mask & DRM_PCIE_SPEED_50))
3826254885Sdumbbell		return;
3827254885Sdumbbell
3828254885Sdumbbell	speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3829254885Sdumbbell	if (speed_cntl & LC_CURRENT_DATA_RATE) {
3830254885Sdumbbell		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
3831254885Sdumbbell		return;
3832254885Sdumbbell	}
3833254885Sdumbbell
3834254885Sdumbbell	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
3835254885Sdumbbell
3836254885Sdumbbell	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
3837254885Sdumbbell	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3838254885Sdumbbell
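		/* allow link width upconfiguration, drop any target link speed
		 * override, pulse the failed-speed-change counter clear, then
		 * arm the gen2 enable strap
		 */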
3839254885Sdumbbell		link_width_cntl = RREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL);
3840254885Sdumbbell		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
3841254885Sdumbbell		WREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
3842254885Sdumbbell
3843254885Sdumbbell		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3844254885Sdumbbell		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
3845254885Sdumbbell		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
3846254885Sdumbbell
3847254885Sdumbbell		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3848254885Sdumbbell		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3849254885Sdumbbell		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
3850254885Sdumbbell
3851254885Sdumbbell		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3852254885Sdumbbell		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3853254885Sdumbbell		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
3854254885Sdumbbell
3855254885Sdumbbell		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3856254885Sdumbbell		speed_cntl |= LC_GEN2_EN_STRAP;
3857254885Sdumbbell		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
3858254885Sdumbbell
3859254885Sdumbbell	} else {
3860254885Sdumbbell		link_width_cntl = RREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL);
3861254885Sdumbbell		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
3862254885Sdumbbell		if (1)
3863254885Sdumbbell			link_width_cntl |= LC_UPCONFIGURE_DIS;
3864254885Sdumbbell		else
3865254885Sdumbbell			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
3866254885Sdumbbell		WREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
3867254885Sdumbbell	}
3868254885Sdumbbell}