/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

/* get temperature in millidegrees */
u32 evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
		ASIC_T_SHIFT;
	u32 actual_temp = 0;

	if ((temp >> 10) & 1)
		actual_temp = 0;
	else if ((temp >> 9) & 1)
		actual_temp = 255;
	else
		actual_temp = (temp >> 1) & 0xff;

	return actual_temp * 1000;
}

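/* Switch to the requested power state's software-controlled core
 * voltage (VDDC) if it differs from the one currently programmed. */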
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
		if (voltage->voltage != rdev->pm.current_vddc) {
			radeon_atom_set_voltage(rdev, voltage->voltage);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: v: %d\n", voltage->voltage);
		}
	}
}

void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

void evergreen_pm_finish(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* enable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

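/* Poll the DC_HPDx sense bit to see whether a display is attached to
 * the given hot plug detect pin. */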
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	switch (hpd) {
	case RADEON_HPD_1:
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_2:
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_3:
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_4:
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_5:
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_6:
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	default:
		break;
	}

	return connected;
}

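/* Re-arm the HPD interrupt polarity based on the current sense state,
 * so the next connect or disconnect transition raises an interrupt. */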
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = evergreen_hpd_sense(rdev, hpd);

	switch (hpd) {
	case RADEON_HPD_1:
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_2:
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_3:
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_4:
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_5:
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_6:
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		break;
	default:
		break;
	}
}

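/* Enable the hpd pins used by the connectors on this board, programming
 * the connection and RX interrupt timers, then update the IRQ state. */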
void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			rdev->irq.hpd[0] = true;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			rdev->irq.hpd[1] = true;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			rdev->irq.hpd[2] = true;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			rdev->irq.hpd[3] = true;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			rdev->irq.hpd[4] = true;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			rdev->irq.hpd[5] = true;
			break;
		default:
			break;
		}
	}
	if (rdev->irq.installed)
		evergreen_irq_set(rdev);
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			rdev->irq.hpd[0] = false;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			rdev->irq.hpd[1] = false;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			rdev->irq.hpd[2] = false;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			rdev->irq.hpd[3] = false;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			rdev->irq.hpd[4] = false;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			rdev->irq.hpd[5] = false;
			break;
		default:
			break;
		}
	}
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
}

static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* wait for the MC busy bits in SRBM_STATUS to clear */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the response until the flush is acknowledged */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

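/* Bring up the PCIE GART: pin the page table in VRAM, program the L2
 * cache and L1 TLBs, point VM context 0 at the GTT range, and flush. */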
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}

void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}

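/* Save the VGA/CRTC state and blank all display controllers so the
 * memory controller can be reprogrammed without display access. */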
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
	save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
	save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
	save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
	save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
	save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
	save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

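/* Retarget the CRTC and VGA surface addresses at the (possibly moved)
 * VRAM base, then restore the state saved by evergreen_mc_stop(). */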
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

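/* Program the MC system/FB/AGP apertures with the displays stopped,
 * waiting for the MC to go idle around the update. */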
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

/*
 * CP.
 */

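/* Load the PFP and ME (PM4) microcode into the CP with the CP halted;
 * the firmware images are stored as big-endian words. */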
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));

	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}

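/* Send the ME_INITIALIZE packet, un-halt the micro engine and program
 * a few initial VGT registers. */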
static int evergreen_cp_start(struct radeon_device *rdev)
{
	int r;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(rdev, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(rdev, 0x1);
	radeon_ring_write(rdev, 0x0);
	radeon_ring_write(rdev, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(rdev, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(rdev, 0);
	radeon_ring_write(rdev, 0);
	radeon_ring_unlock_commit(rdev);

	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, 4);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	/* init some VGT regs */
	radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
	radeon_ring_write(rdev, (VGT_VERTEX_REUSE_BLOCK_CNTL - PACKET3_SET_CONTEXT_REG_START) >> 2);
	radeon_ring_write(rdev, 0xe);
	radeon_ring_write(rdev, 0x10);
	radeon_ring_unlock_commit(rdev);

	return 0;
}

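/* Soft reset the CP (and the blocks that must be reset with it),
 * reprogram the ring buffer registers from rdev->cp and restart the CP,
 * verifying it with a ring test. */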
int evergreen_cp_resume(struct radeon_device *rdev)
{
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	rb_bufsz = drm_order(rdev->cp.ring_size / 8);
	tmp = RB_NO_UPDATE | (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x4);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	WREG32(CP_RB_WPTR, 0);
	WREG32(CP_RB_RPTR_ADDR, rdev->cp.gpu_addr & 0xFFFFFFFF);
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->cp.gpu_addr));
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	rdev->cp.rptr = RREG32(CP_RB_RPTR);
	rdev->cp.wptr = RREG32(CP_RB_WPTR);

	evergreen_cp_start(rdev);
	rdev->cp.ready = true;
	r = radeon_ring_test(rdev);
	if (r) {
		rdev->cp.ready = false;
		return r;
	}
	return 0;
}

/*
 * Core functions
 */
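/* Derive the tile pipe to render backend map: walk the tile pipes in a
 * (family dependent) swizzled order and assign each one the next
 * enabled backend. */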
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
						  u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;
	u32 enabled_backends_mask = 0;
	u32 enabled_backends_count = 0;
	u32 cur_pipe;
	u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
	u32 cur_backend = 0;
	u32 i;
	bool force_no_swizzle;

	if (num_tile_pipes > EVERGREEN_MAX_PIPES)
		num_tile_pipes = EVERGREEN_MAX_PIPES;
	if (num_tile_pipes < 1)
		num_tile_pipes = 1;
	if (num_backends > EVERGREEN_MAX_BACKENDS)
		num_backends = EVERGREEN_MAX_BACKENDS;
	if (num_backends < 1)
		num_backends = 1;

	for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
		if (((backend_disable_mask >> i) & 1) == 0) {
			enabled_backends_mask |= (1 << i);
			++enabled_backends_count;
		}
		if (enabled_backends_count == num_backends)
			break;
	}

	if (enabled_backends_count == 0) {
		enabled_backends_mask = 1;
		enabled_backends_count = 1;
	}

	if (enabled_backends_count != num_backends)
		num_backends = enabled_backends_count;

	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_REDWOOD:
		force_no_swizzle = false;
		break;
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	default:
		force_no_swizzle = true;
		break;
	}
	if (force_no_swizzle) {
		bool last_backend_enabled = false;

		force_no_swizzle = false;
		for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
			if (((enabled_backends_mask >> i) & 1) == 1) {
				if (last_backend_enabled)
					force_no_swizzle = true;
				last_backend_enabled = true;
			} else
				last_backend_enabled = false;
		}
	}

	switch (num_tile_pipes) {
	case 1:
	case 3:
	case 5:
	case 7:
		DRM_ERROR("odd number of pipes!\n");
		break;
	case 2:
		swizzle_pipe[0] = 0;
		swizzle_pipe[1] = 1;
		break;
	case 4:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 1;
			swizzle_pipe[3] = 3;
		}
		break;
	case 6:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 1;
			swizzle_pipe[4] = 3;
			swizzle_pipe[5] = 5;
		}
		break;
	case 8:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
			swizzle_pipe[6] = 6;
			swizzle_pipe[7] = 7;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 6;
			swizzle_pipe[4] = 1;
			swizzle_pipe[5] = 3;
			swizzle_pipe[6] = 5;
			swizzle_pipe[7] = 7;
		}
		break;
	}

	for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
		while (((1 << cur_backend) & enabled_backends_mask) == 0)
			cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;

		backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));

		cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
	}

	return backend_map;
}

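/* One-time setup of the gfx engine: per-family limits, tiling/backend
 * configuration, SQ GPR/thread/stack partitioning and various HW
 * defaults. */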
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 cc_rb_backend_disable = 0;
	u32 cc_gc_shader_pipe_config;
	u32 gb_addr_config = 0;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 gb_backend_map;
	u32 grbm_gfx_index;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl;
	int i, j, num_shader_engines, ps_thread_count;

	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;

	cc_gc_shader_pipe_config |=
		INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
				  & EVERGREEN_MAX_PIPES_MASK);
	cc_gc_shader_pipe_config |=
		INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
			       & EVERGREEN_MAX_SIMDS_MASK);

	cc_rb_backend_disable =
		BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
				& EVERGREEN_MAX_BACKENDS_MASK);

	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		gb_addr_config |= NUM_PIPES(0);
		break;
	case 2:
		gb_addr_config |= NUM_PIPES(1);
		break;
	case 4:
		gb_addr_config |= NUM_PIPES(2);
		break;
	case 8:
		gb_addr_config |= NUM_PIPES(3);
		break;
	}

	gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
	gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
	gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
	gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
	gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
	gb_addr_config |= MULTI_GPU_TILE_SIZE(2);

	if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
		gb_addr_config |= ROW_SIZE(2);
	else
		gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);

	if (rdev->ddev->pdev->device == 0x689e) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;
		u8 efuse_box_bit_131_124;

		WREG32(RCU_IND_INDEX, 0x204);
		efuse_straps_4 = RREG32(RCU_IND_DATA);
		WREG32(RCU_IND_INDEX, 0x203);
		efuse_straps_3 = RREG32(RCU_IND_DATA);
		efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));

		switch (efuse_box_bit_131_124) {
		case 0x00:
			gb_backend_map = 0x76543210;
			break;
		case 0x55:
			gb_backend_map = 0x77553311;
			break;
		case 0x56:
			gb_backend_map = 0x77553300;
			break;
		case 0x59:
			gb_backend_map = 0x77552211;
			break;
		case 0x66:
			gb_backend_map = 0x77443300;
			break;
		case 0x99:
			gb_backend_map = 0x66552211;
			break;
		case 0x5a:
			gb_backend_map = 0x77552200;
			break;
		case 0xaa:
			gb_backend_map = 0x66442200;
			break;
		case 0x95:
			gb_backend_map = 0x66553311;
			break;
		default:
			DRM_ERROR("bad backend map, using default\n");
			gb_backend_map =
				evergreen_get_tile_pipe_to_backend_map(rdev,
								       rdev->config.evergreen.max_tile_pipes,
								       rdev->config.evergreen.max_backends,
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
									 rdev->config.evergreen.max_backends) &
									EVERGREEN_MAX_BACKENDS_MASK));
			break;
		}
	} else if (rdev->ddev->pdev->device == 0x68b9) {
		u32 efuse_straps_3;
		u8 efuse_box_bit_127_124;

		WREG32(RCU_IND_INDEX, 0x203);
		efuse_straps_3 = RREG32(RCU_IND_DATA);
		efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);

		switch (efuse_box_bit_127_124) {
		case 0x0:
			gb_backend_map = 0x00003210;
			break;
		case 0x5:
		case 0x6:
		case 0x9:
		case 0xa:
			gb_backend_map = 0x00003311;
			break;
		default:
			DRM_ERROR("bad backend map, using default\n");
			gb_backend_map =
				evergreen_get_tile_pipe_to_backend_map(rdev,
								       rdev->config.evergreen.max_tile_pipes,
								       rdev->config.evergreen.max_backends,
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
									 rdev->config.evergreen.max_backends) &
									EVERGREEN_MAX_BACKENDS_MASK));
			break;
		}
	} else {
		switch (rdev->family) {
		case CHIP_CYPRESS:
		case CHIP_HEMLOCK:
			gb_backend_map = 0x66442200;
			break;
		case CHIP_JUNIPER:
			gb_backend_map = 0x00006420;
			break;
		default:
			gb_backend_map =
				evergreen_get_tile_pipe_to_backend_map(rdev,
								       rdev->config.evergreen.max_tile_pipes,
								       rdev->config.evergreen.max_backends,
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
									 rdev->config.evergreen.max_backends) &
									EVERGREEN_MAX_BACKENDS_MASK));
		}
	}

	rdev->config.evergreen.tile_config = gb_addr_config;
	WREG32(GB_BACKEND_MAP, gb_backend_map);
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);

	num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
	grbm_gfx_index = INSTANCE_BROADCAST_WRITES;

	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
		u32 rb = cc_rb_backend_disable | (0xf0 << 16);
		u32 sp = cc_gc_shader_pipe_config;
		u32 gfx = grbm_gfx_index | SE_INDEX(i);

		if (i == num_shader_engines) {
			rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
			sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
		}

		WREG32(GRBM_GFX_INDEX, gfx);
		WREG32(RLC_GFX_INDEX, gfx);

		WREG32(CC_RB_BACKEND_DISABLE, rb);
		WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
		WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
		WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
	}

	grbm_gfx_index |= SE_BROADCAST_WRITES;
	WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
	WREG32(RLC_GFX_INDEX, grbm_gfx_index);

	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);

	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	if (rdev->family == CHIP_CEDAR)
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	if (rdev->family == CHIP_CEDAR)
		ps_thread_count = 96;
	else
		ps_thread_count = 128;

	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	if (rdev->family == CHIP_CEDAR)
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
	else
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);

	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	/* clear render buffer base addresses */
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR8_BASE, 0);
	WREG32(CB_COLOR9_BASE, 0);
	WREG32(CB_COLOR10_BASE, 0);
	WREG32(CB_COLOR11_BASE, 0);

	/* set the shader const cache sizes to 0 */
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
		WREG32(i, 0);
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
		WREG32(i, 0);

	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	udelay(50);
}

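/* Read the VRAM width and size out of the MC config registers and
 * place the VRAM and GTT apertures. */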
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aperture size report 0? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	rdev->mc.active_vram_size = rdev->mc.visible_vram_size;
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}

bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
{
	return false;
}

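/* Soft reset the gfx blocks through GRBM_SOFT_RESET with the MC
 * stopped, logging the status registers before and after. */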
static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 grbm_reset = 0;

	dev_info(rdev->dev, "GPU softreset\n");
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		RREG32(SRBM_STATUS));
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	/* reset all the gfx blocks */
	grbm_reset = (SOFT_RESET_CP |
		      SOFT_RESET_CB |
		      SOFT_RESET_DB |
		      SOFT_RESET_PA |
		      SOFT_RESET_SC |
		      SOFT_RESET_SPI |
		      SOFT_RESET_SH |
		      SOFT_RESET_SX |
		      SOFT_RESET_TC |
		      SOFT_RESET_TA |
		      SOFT_RESET_VC |
		      SOFT_RESET_VGT);

	dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
	WREG32(GRBM_SOFT_RESET, grbm_reset);
	(void)RREG32(GRBM_SOFT_RESET);
	udelay(50);
	WREG32(GRBM_SOFT_RESET, 0);
	(void)RREG32(GRBM_SOFT_RESET);
	/* Wait a little for things to settle down */
	udelay(50);
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		RREG32(SRBM_STATUS));
	evergreen_mc_resume(rdev, &save);
	return 0;
}

int evergreen_asic_reset(struct radeon_device *rdev)
{
	return evergreen_gpu_soft_reset(rdev);
}

/* Interrupts */

u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
{
	switch (crtc) {
	case 0:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
	case 1:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
	case 2:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
	case 3:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
	case 4:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
	case 5:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
	default:
		return 0;
	}
}

void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);
}

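/* Program the interrupt enable registers (CP, per-crtc vblank, hpd,
 * GUI idle) from the flags set in rdev->irq. */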
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;

	if (rdev->irq.sw_int) {
		DRM_DEBUG("evergreen_irq_set: sw int\n");
		cp_int_cntl |= RB_INT_ENABLE;
	}
	if (rdev->irq.crtc_vblank_int[0]) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1]) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2]) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3]) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4]) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5]) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.gui_idle) {
		DRM_DEBUG("gui idle\n");
		grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
	}

	WREG32(CP_INT_CNTL, cp_int_cntl);
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
	WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);

	return 0;
}
1643
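/* Snapshot the six DISP_INTERRUPT_STATUS* registers for the caller and
 * write back the ack bits for any vblank, vline or HPD interrupt found
 * pending, so those sources can latch again.
 */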
1644static inline void evergreen_irq_ack(struct radeon_device *rdev,
1645				     u32 *disp_int,
1646				     u32 *disp_int_cont,
1647				     u32 *disp_int_cont2,
1648				     u32 *disp_int_cont3,
1649				     u32 *disp_int_cont4,
1650				     u32 *disp_int_cont5)
1651{
1652	u32 tmp;
1653
1654	*disp_int = RREG32(DISP_INTERRUPT_STATUS);
1655	*disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
1656	*disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
1657	*disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
1658	*disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
1659	*disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
1660
1661	if (*disp_int & LB_D1_VBLANK_INTERRUPT)
1662		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
1663	if (*disp_int & LB_D1_VLINE_INTERRUPT)
1664		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
1665
1666	if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
1667		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
1668	if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
1669		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
1670
1671	if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
1672		WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
1673	if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
1674		WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
1675
1676	if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
1677		WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
1678	if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
1679		WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
1680
1681	if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
1682		WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
1683	if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
1684		WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
1685
1686	if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
1687		WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
1688	if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
1689		WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
1690
1691	if (*disp_int & DC_HPD1_INTERRUPT) {
1692		tmp = RREG32(DC_HPD1_INT_CONTROL);
1693		tmp |= DC_HPDx_INT_ACK;
1694		WREG32(DC_HPD1_INT_CONTROL, tmp);
1695	}
1696	if (*disp_int_cont & DC_HPD2_INTERRUPT) {
1697		tmp = RREG32(DC_HPD2_INT_CONTROL);
1698		tmp |= DC_HPDx_INT_ACK;
1699		WREG32(DC_HPD2_INT_CONTROL, tmp);
1700	}
1701	if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
1702		tmp = RREG32(DC_HPD3_INT_CONTROL);
1703		tmp |= DC_HPDx_INT_ACK;
1704		WREG32(DC_HPD3_INT_CONTROL, tmp);
1705	}
1706	if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
1707		tmp = RREG32(DC_HPD4_INT_CONTROL);
1708		tmp |= DC_HPDx_INT_ACK;
1709		WREG32(DC_HPD4_INT_CONTROL, tmp);
1710	}
1711	if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
1712		tmp = RREG32(DC_HPD5_INT_CONTROL);
1713		tmp |= DC_HPDx_INT_ACK;
1714		WREG32(DC_HPD5_INT_CONTROL, tmp);
1715	}
1716	if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
1717		tmp = RREG32(DC_HPD6_INT_CONTROL);
1718		tmp |= DC_HPDx_INT_ACK;
1719		WREG32(DC_HPD6_INT_CONTROL, tmp);
1720	}
1721}
1722
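/* Turn off all interrupt sources, then acknowledge anything that fired
 * while the disable was propagating.
 */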
1723void evergreen_irq_disable(struct radeon_device *rdev)
1724{
1725	u32 disp_int, disp_int_cont, disp_int_cont2;
1726	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1727
1728	r600_disable_interrupts(rdev);
1729	/* Wait and acknowledge irq */
1730	mdelay(1);
1731	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1732			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1733	evergreen_disable_interrupt_state(rdev);
1734}
1735
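/* Suspend path: quiesce interrupts before stopping the RLC */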
1736static void evergreen_irq_suspend(struct radeon_device *rdev)
1737{
1738	evergreen_irq_disable(rdev);
1739	r600_rlc_stop(rdev);
1740}
1741
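/* Fetch the IH ring write pointer and clear a pending overflow; the
 * returned value is masked to stay within the ring.
 */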
1742static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
1743{
1744	u32 wptr, tmp;
1745
1746	wptr = RREG32(IH_RB_WPTR);
1747
1748	if (wptr & RB_OVERFLOW) {
1749	/* When a ring buffer overflow happens, start parsing interrupts
1750	 * from the last vector that was not overwritten (wptr + 16).
1751	 * Hopefully this allows us to catch up.
1752	 */
1753		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
1754		wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
1755		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
1756		tmp = RREG32(IH_RB_CNTL);
1757		tmp |= IH_WPTR_OVERFLOW_CLEAR;
1758		WREG32(IH_RB_CNTL, tmp);
1759	}
1760	return (wptr & rdev->ih.ptr_mask);
1761}
1762
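/* Drain the IH ring between rptr and wptr.  Each IH vector is 16 bytes
 * (four dwords); only the first two dwords are decoded here:
 *
 *   src_id   = ring[rptr / 4]     & 0xff;       source block (1-6 =
 *                                               CRTC, 42 = HPD, ...)
 *   src_data = ring[rptr / 4 + 1] & 0xfffffff;  source-specific data
 */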
1763int evergreen_irq_process(struct radeon_device *rdev)
1764{
1765	u32 wptr = evergreen_get_ih_wptr(rdev);
1766	u32 rptr = rdev->ih.rptr;
1767	u32 src_id, src_data;
1768	u32 ring_index;
1769	u32 disp_int, disp_int_cont, disp_int_cont2;
1770	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1771	unsigned long flags;
1772	bool queue_hotplug = false;
1773
1774	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
1775	if (!rdev->ih.enabled)
1776		return IRQ_NONE;
1777
1778	spin_lock_irqsave(&rdev->ih.lock, flags);
1779
1780	if (rptr == wptr) {
1781		spin_unlock_irqrestore(&rdev->ih.lock, flags);
1782		return IRQ_NONE;
1783	}
1784	if (rdev->shutdown) {
1785		spin_unlock_irqrestore(&rdev->ih.lock, flags);
1786		return IRQ_NONE;
1787	}
1788
1789restart_ih:
1790	/* display interrupts */
1791	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1792			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1793
1794	rdev->ih.wptr = wptr;
1795	while (rptr != wptr) {
1796		/* wptr/rptr are in bytes! */
1797		ring_index = rptr / 4;
1798		src_id = rdev->ih.ring[ring_index] & 0xff;
1799		src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
1800
1801		switch (src_id) {
1802		case 1: /* D1 vblank/vline */
1803			switch (src_data) {
1804			case 0: /* D1 vblank */
1805				if (disp_int & LB_D1_VBLANK_INTERRUPT) {
1806					drm_handle_vblank(rdev->ddev, 0);
1807					wake_up(&rdev->irq.vblank_queue);
1808					disp_int &= ~LB_D1_VBLANK_INTERRUPT;
1809					DRM_DEBUG("IH: D1 vblank\n");
1810				}
1811				break;
1812			case 1: /* D1 vline */
1813				if (disp_int & LB_D1_VLINE_INTERRUPT) {
1814					disp_int &= ~LB_D1_VLINE_INTERRUPT;
1815					DRM_DEBUG("IH: D1 vline\n");
1816				}
1817				break;
1818			default:
1819				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1820				break;
1821			}
1822			break;
1823		case 2: /* D2 vblank/vline */
1824			switch (src_data) {
1825			case 0: /* D2 vblank */
1826				if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
1827					drm_handle_vblank(rdev->ddev, 1);
1828					wake_up(&rdev->irq.vblank_queue);
1829					disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
1830					DRM_DEBUG("IH: D2 vblank\n");
1831				}
1832				break;
1833			case 1: /* D2 vline */
1834				if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
1835					disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
1836					DRM_DEBUG("IH: D2 vline\n");
1837				}
1838				break;
1839			default:
1840				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1841				break;
1842			}
1843			break;
1844		case 3: /* D3 vblank/vline */
1845			switch (src_data) {
1846			case 0: /* D3 vblank */
1847				if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
1848					drm_handle_vblank(rdev->ddev, 2);
1849					wake_up(&rdev->irq.vblank_queue);
1850					disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
1851					DRM_DEBUG("IH: D3 vblank\n");
1852				}
1853				break;
1854			case 1: /* D3 vline */
1855				if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
1856					disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
1857					DRM_DEBUG("IH: D3 vline\n");
1858				}
1859				break;
1860			default:
1861				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1862				break;
1863			}
1864			break;
1865		case 4: /* D4 vblank/vline */
1866			switch (src_data) {
1867			case 0: /* D4 vblank */
1868				if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
1869					drm_handle_vblank(rdev->ddev, 3);
1870					wake_up(&rdev->irq.vblank_queue);
1871					disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
1872					DRM_DEBUG("IH: D4 vblank\n");
1873				}
1874				break;
1875			case 1: /* D4 vline */
1876				if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
1877					disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
1878					DRM_DEBUG("IH: D4 vline\n");
1879				}
1880				break;
1881			default:
1882				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1883				break;
1884			}
1885			break;
1886		case 5: /* D5 vblank/vline */
1887			switch (src_data) {
1888			case 0: /* D5 vblank */
1889				if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
1890					drm_handle_vblank(rdev->ddev, 4);
1891					wake_up(&rdev->irq.vblank_queue);
1892					disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
1893					DRM_DEBUG("IH: D5 vblank\n");
1894				}
1895				break;
1896			case 1: /* D5 vline */
1897				if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
1898					disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
1899					DRM_DEBUG("IH: D5 vline\n");
1900				}
1901				break;
1902			default:
1903				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1904				break;
1905			}
1906			break;
1907		case 6: /* D6 vblank/vline */
1908			switch (src_data) {
1909			case 0: /* D6 vblank */
1910				if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
1911					drm_handle_vblank(rdev->ddev, 5);
1912					wake_up(&rdev->irq.vblank_queue);
1913					disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
1914					DRM_DEBUG("IH: D6 vblank\n");
1915				}
1916				break;
1917			case 1: /* D6 vline */
1918				if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
1919					disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
1920					DRM_DEBUG("IH: D6 vline\n");
1921				}
1922				break;
1923			default:
1924				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1925				break;
1926			}
1927			break;
1928		case 42: /* HPD hotplug */
1929			switch (src_data) {
1930			case 0:
1931				if (disp_int & DC_HPD1_INTERRUPT) {
1932					disp_int &= ~DC_HPD1_INTERRUPT;
1933					queue_hotplug = true;
1934					DRM_DEBUG("IH: HPD1\n");
1935				}
1936				break;
1937			case 1:
1938				if (disp_int_cont & DC_HPD2_INTERRUPT) {
1939					disp_int_cont &= ~DC_HPD2_INTERRUPT;
1940					queue_hotplug = true;
1941					DRM_DEBUG("IH: HPD2\n");
1942				}
1943				break;
1944			case 2:
1945				if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
1946					disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
1947					queue_hotplug = true;
1948					DRM_DEBUG("IH: HPD3\n");
1949				}
1950				break;
1951			case 3:
1952				if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
1953					disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
1954					queue_hotplug = true;
1955					DRM_DEBUG("IH: HPD4\n");
1956				}
1957				break;
1958			case 4:
1959				if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
1960					disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
1961					queue_hotplug = true;
1962					DRM_DEBUG("IH: HPD5\n");
1963				}
1964				break;
1965			case 5:
1966				if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
1967					disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
1968					queue_hotplug = true;
1969					DRM_DEBUG("IH: HPD6\n");
1970				}
1971				break;
1972			default:
1973				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1974				break;
1975			}
1976			break;
1977		case 176: /* CP_INT in ring buffer */
1978		case 177: /* CP_INT in IB1 */
1979		case 178: /* CP_INT in IB2 */
1980			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
1981			radeon_fence_process(rdev);
1982			break;
1983		case 181: /* CP EOP event */
1984			DRM_DEBUG("IH: CP EOP\n");
1985			break;
1986		case 233: /* GUI IDLE */
1987			DRM_DEBUG("IH: GUI idle\n");
1988			rdev->pm.gui_idle = true;
1989			wake_up(&rdev->irq.idle_queue);
1990			break;
1991		default:
1992			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1993			break;
1994		}
1995
1996		/* wptr/rptr are in bytes! */
1997		rptr += 16;
1998		rptr &= rdev->ih.ptr_mask;
1999	}
2000	/* make sure wptr hasn't changed while processing */
2001	wptr = evergreen_get_ih_wptr(rdev);
2002	if (wptr != rdev->ih.wptr)
2003		goto restart_ih;
2004	if (queue_hotplug)
2005		queue_work(rdev->wq, &rdev->hotplug_work);
2006	rdev->ih.rptr = rptr;
2007	WREG32(IH_RB_RPTR, rdev->ih.rptr);
2008	spin_unlock_irqrestore(&rdev->ih.lock, flags);
2009	return IRQ_HANDLED;
2010}
2011
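/* Common hw bring-up shared by init and resume: load microcode, program
 * the MC, enable GART (or AGP), init the GPU, hook up interrupts, then
 * start the CP ring and the write-back buffers.
 */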
2012static int evergreen_startup(struct radeon_device *rdev)
2013{
2014	int r;
2015
2016	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
2017		r = r600_init_microcode(rdev);
2018		if (r) {
2019			DRM_ERROR("Failed to load firmware!\n");
2020			return r;
2021		}
2022	}
2023
2024	evergreen_mc_program(rdev);
2025	if (rdev->flags & RADEON_IS_AGP) {
2026		evergreen_agp_enable(rdev);
2027	} else {
2028		r = evergreen_pcie_gart_enable(rdev);
2029		if (r)
2030			return r;
2031	}
2032	evergreen_gpu_init(rdev);
2033
2034	/* Enable IRQ */
2035	r = r600_irq_init(rdev);
2036	if (r) {
2037		DRM_ERROR("radeon: IH init failed (%d).\n", r);
2038		radeon_irq_kms_fini(rdev);
2039		return r;
2040	}
2041	evergreen_irq_set(rdev);
2042
2043	r = radeon_ring_init(rdev, rdev->cp.ring_size);
2044	if (r)
2045		return r;
2046	r = evergreen_cp_load_microcode(rdev);
2047	if (r)
2048		return r;
2049	r = evergreen_cp_resume(rdev);
2050	if (r)
2051		return r;
2052	/* write back buffers are not vital, so don't worry about failure */
2053	r600_wb_enable(rdev);
2054
2055	return 0;
2056}
2057
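/* Resume entry point: reset the asic, re-post it via atombios and run
 * the common startup sequence.
 */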
2058int evergreen_resume(struct radeon_device *rdev)
2059{
2060	int r;
2061
2062	/* reset the asic; the gfx blocks are often in a bad state
2063	 * after the driver is unloaded or after a resume
2064	 */
2065	if (radeon_asic_reset(rdev))
2066		dev_warn(rdev->dev, "GPU reset failed!\n");
2067	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
2068	 * posting performs the necessary tasks to bring the GPU back into
2069	 * good shape.
2070	 */
2071	/* post card */
2072	atom_asic_init(rdev->mode_info.atom_context);
2073
2074	r = evergreen_startup(rdev);
2075	if (r) {
2076		DRM_ERROR("evergreen startup failed on resume\n");
2077		return r;
2078	}
2079
2080	r = r600_ib_test(rdev);
2081	if (r) {
2082		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2083		return r;
2084	}
2085
2086	return r;
2087
2088}
2089
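/* Suspend entry point: stop the CP, then interrupts, write-back and GART */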
2090int evergreen_suspend(struct radeon_device *rdev)
2091{
2092	r700_cp_stop(rdev);
2093	rdev->cp.ready = false;
2094	evergreen_irq_suspend(rdev);
2095	r600_wb_disable(rdev);
2096	evergreen_pcie_gart_disable(rdev);
2097	return 0;
2098}
2099
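/* Heuristic check for whether the card has already been posted: any
 * CRTC with its master enable set, or a non-zero CONFIG_MEMSIZE, means
 * the BIOS has run.
 */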
2100static bool evergreen_card_posted(struct radeon_device *rdev)
2101{
2102	u32 reg;
2103
2104	/* first check CRTCs */
2105	reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2106		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
2107		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
2108		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
2109		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
2110		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
2111	if (reg & EVERGREEN_CRTC_MASTER_EN)
2112		return true;
2113
2114	/* then check MEM_SIZE, in case the crtcs are off */
2115	if (RREG32(CONFIG_MEMSIZE))
2116		return true;
2117
2118	return false;
2119}
2120
2121	/* The plan is to move initialization into this function and to use
2122 * helper functions so that radeon_device_init does little more than
2123 * call asic-specific functions. This should also allow us to remove
2124 * a bunch of callback functions like vram_info.
2125 */
2127int evergreen_init(struct radeon_device *rdev)
2128{
2129	int r;
2130
2131	r = radeon_dummy_page_init(rdev);
2132	if (r)
2133		return r;
2134	/* This doesn't do much */
2135	r = radeon_gem_init(rdev);
2136	if (r)
2137		return r;
2138	/* Read BIOS */
2139	if (!radeon_get_bios(rdev)) {
2140		if (ASIC_IS_AVIVO(rdev))
2141			return -EINVAL;
2142	}
2143	/* Must be an ATOMBIOS */
2144	if (!rdev->is_atom_bios) {
2145		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
2146		return -EINVAL;
2147	}
2148	r = radeon_atombios_init(rdev);
2149	if (r)
2150		return r;
2151	/* reset the asic; the gfx blocks are often in a bad state
2152	 * after the driver is unloaded or after a resume
2153	 */
2154	if (radeon_asic_reset(rdev))
2155		dev_warn(rdev->dev, "GPU reset failed!\n");
2156	/* Post card if necessary */
2157	if (!evergreen_card_posted(rdev)) {
2158		if (!rdev->bios) {
2159			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2160			return -EINVAL;
2161		}
2162		DRM_INFO("GPU not posted. Posting now...\n");
2163		atom_asic_init(rdev->mode_info.atom_context);
2164	}
2165	/* Initialize scratch registers */
2166	r600_scratch_init(rdev);
2167	/* Initialize surface registers */
2168	radeon_surface_init(rdev);
2169	/* Initialize clocks */
2170	radeon_get_clock_info(rdev->ddev);
2171	/* Fence driver */
2172	r = radeon_fence_driver_init(rdev);
2173	if (r)
2174		return r;
2175	/* initialize AGP */
2176	if (rdev->flags & RADEON_IS_AGP) {
2177		r = radeon_agp_init(rdev);
2178		if (r)
2179			radeon_agp_disable(rdev);
2180	}
2181	/* initialize memory controller */
2182	r = evergreen_mc_init(rdev);
2183	if (r)
2184		return r;
2185	/* Memory manager */
2186	r = radeon_bo_init(rdev);
2187	if (r)
2188		return r;
2189
2190	r = radeon_irq_kms_init(rdev);
2191	if (r)
2192		return r;
2193
2194	rdev->cp.ring_obj = NULL;
2195	r600_ring_init(rdev, 1024 * 1024);
2196
2197	rdev->ih.ring_obj = NULL;
2198	r600_ih_ring_init(rdev, 64 * 1024);
2199
2200	r = r600_pcie_gart_init(rdev);
2201	if (r)
2202		return r;
2203
2204	rdev->accel_working = true;
2205	r = evergreen_startup(rdev);
2206	if (r) {
2207		dev_err(rdev->dev, "disabling GPU acceleration\n");
2208		r700_cp_fini(rdev);
2209		r600_wb_fini(rdev);
2210		r600_irq_fini(rdev);
2211		radeon_irq_kms_fini(rdev);
2212		evergreen_pcie_gart_fini(rdev);
2213		rdev->accel_working = false;
2214	}
2215	if (rdev->accel_working) {
2216		r = radeon_ib_pool_init(rdev);
2217		if (r) {
2218			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
2219			rdev->accel_working = false;
2220		}
2221		r = r600_ib_test(rdev);
2222		if (r) {
2223			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2224			rdev->accel_working = false;
2225		}
2226	}
2227	return 0;
2228}
2229
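/* Tear down everything evergreen_init set up, in roughly reverse order */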
2230void evergreen_fini(struct radeon_device *rdev)
2231{
2232	/*r600_blit_fini(rdev);*/
2233	r700_cp_fini(rdev);
2234	r600_wb_fini(rdev);
2235	r600_irq_fini(rdev);
2236	radeon_irq_kms_fini(rdev);
2237	evergreen_pcie_gart_fini(rdev);
2238	radeon_gem_fini(rdev);
2239	radeon_fence_driver_fini(rdev);
2240	radeon_agp_fini(rdev);
2241	radeon_bo_fini(rdev);
2242	radeon_atombios_fini(rdev);
2243	kfree(rdev->bios);
2244	rdev->bios = NULL;
2245	radeon_dummy_page_fini(rdev);
2246}
2247