/*
 * Copyright 2007-11 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */

#include <linux/pci.h>

#include <acpi/video.h>

#include <drm/drm_edid.h>
#include <drm/amdgpu_drm.h>
#include "amdgpu.h"
#include "amdgpu_connectors.h"
#include "amdgpu_display.h"
#include "atom.h"
#include "atombios_encoders.h"
#include "atombios_dp.h"
#include <linux/backlight.h>
#include "bif/bif_4_1_d.h"

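/* Read the current backlight level from the BIOS scratch register. */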
42u8
43amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
44{
45	u8 backlight_level;
46	u32 bios_2_scratch;
47
48	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
49
50	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
51			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
52
53	return backlight_level;
54}
55
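/* Store the backlight level in the BIOS scratch register. */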
56void
57amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
58					    u8 backlight_level)
59{
60	u32 bios_2_scratch;
61
62	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
63
64	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
65	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
66			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
67
68	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
69}
70
71u8
72amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
73{
74	struct drm_device *dev = amdgpu_encoder->base.dev;
75	struct amdgpu_device *adev = drm_to_adev(dev);
76
77	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
78		return 0;
79
80	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
81}
82
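/* Program a new backlight level and, for LCD outputs, switch the panel
 * backlight on or off through the DIG transmitter table.
 */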
83void
84amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
85				     u8 level)
86{
87	struct drm_encoder *encoder = &amdgpu_encoder->base;
88	struct drm_device *dev = amdgpu_encoder->base.dev;
89	struct amdgpu_device *adev = drm_to_adev(dev);
90	struct amdgpu_encoder_atom_dig *dig;
91
92	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
93		return;
94
95	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
96	    amdgpu_encoder->enc_priv) {
97		dig = amdgpu_encoder->enc_priv;
98		dig->backlight_level = level;
99		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
100
101		switch (amdgpu_encoder->encoder_id) {
102		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
103		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
104		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
105		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
106		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
107			if (dig->backlight_level == 0)
108				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
109								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
110			else {
111				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
112								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
113				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
114								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
115			}
116			break;
117		default:
118			break;
119		}
120	}
121}
122
123static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
124{
125	u8 level;
126
127	/* Convert brightness to hardware level */
128	if (bd->props.brightness < 0)
129		level = 0;
130	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
131		level = AMDGPU_MAX_BL_LEVEL;
132	else
133		level = bd->props.brightness;
134
135	return level;
136}
137
138static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
139{
140	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
141	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
142
143	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
144					     amdgpu_atombios_encoder_backlight_level(bd));
145
146	return 0;
147}
148
149static int
150amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
151{
152	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
153	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
154	struct drm_device *dev = amdgpu_encoder->base.dev;
155	struct amdgpu_device *adev = drm_to_adev(dev);
156
157	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
158}
159
160static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
161	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
162	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
163};
164
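/* Register a backlight device for panels whose backlight is controlled
 * by the GPU.  Falls back to the ACPI video backlight when the GPU does
 * not control the backlight or a native backlight should not be used.
 */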
165void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
166				     struct drm_connector *drm_connector)
167{
168	struct drm_device *dev = amdgpu_encoder->base.dev;
169	struct amdgpu_device *adev = drm_to_adev(dev);
170	struct backlight_device *bd;
171	struct backlight_properties props;
172	struct amdgpu_backlight_privdata *pdata;
173	struct amdgpu_encoder_atom_dig *dig;
174	char bl_name[16];
175
176	/* Mac laptops with multiple GPUs use the gmux driver for backlight
177	 * so don't register a backlight device
178	 */
179	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
180	    (adev->pdev->device == 0x6741))
181		return;
182
183	if (!amdgpu_encoder->enc_priv)
184		return;
185
186	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
187		goto register_acpi_backlight;
188
189	if (!acpi_video_backlight_use_native()) {
190		drm_info(dev, "Skipping amdgpu atom DIG backlight registration\n");
191		goto register_acpi_backlight;
192	}
193
194	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
195	if (!pdata) {
196		DRM_ERROR("Memory allocation failed\n");
197		goto error;
198	}
199
200	memset(&props, 0, sizeof(props));
201	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
202	props.type = BACKLIGHT_RAW;
203	snprintf(bl_name, sizeof(bl_name),
204		 "amdgpu_bl%d", dev->primary->index);
205	bd = backlight_device_register(bl_name, drm_connector->kdev,
206				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
207	if (IS_ERR(bd)) {
208		DRM_ERROR("Backlight registration failed\n");
209		goto error;
210	}
211
212	pdata->encoder = amdgpu_encoder;
213
214	dig = amdgpu_encoder->enc_priv;
215	dig->bl_dev = bd;
216
217	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
218	bd->props.power = FB_BLANK_UNBLANK;
219	backlight_update_status(bd);
220
221	DRM_INFO("amdgpu atom DIG backlight initialized\n");
222
223	return;
224
225error:
226	kfree(pdata);
227	return;
228
229register_acpi_backlight:
230	/* Try registering an ACPI video backlight device instead. */
231	acpi_video_register_backlight();
232}
233
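/* Unregister the backlight device registered by
 * amdgpu_atombios_encoder_init_backlight().
 */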
234void
235amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
236{
237	struct drm_device *dev = amdgpu_encoder->base.dev;
238	struct amdgpu_device *adev = drm_to_adev(dev);
239	struct backlight_device *bd = NULL;
240	struct amdgpu_encoder_atom_dig *dig;
241
242	if (!amdgpu_encoder->enc_priv)
243		return;
244
245	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
246		return;
247
248	dig = amdgpu_encoder->enc_priv;
249	bd = dig->bl_dev;
250	dig->bl_dev = NULL;
251
252	if (bd) {
		struct amdgpu_backlight_privdata *pdata;
254
255		pdata = bl_get_data(bd);
256		backlight_device_unregister(bd);
257		kfree(pdata);
258
		DRM_INFO("amdgpu atom DIG backlight unloaded\n");
260	}
261}
262
263bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
264{
265	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
266	switch (amdgpu_encoder->encoder_id) {
267	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
268	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
269	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
270	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
271	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
272		return true;
273	default:
274		return false;
275	}
276}
277
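/* Fix up the adjusted mode before programming: set the encoder to
 * connector routing, apply the interlace and vertical front porch
 * workarounds, use the panel's native mode for scaling and configure
 * the DP link where needed.
 */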
278bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
279				 const struct drm_display_mode *mode,
280				 struct drm_display_mode *adjusted_mode)
281{
282	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
283
284	/* set the active encoder to connector routing */
285	amdgpu_encoder_set_active_device(encoder);
286	drm_mode_set_crtcinfo(adjusted_mode, 0);
287
288	/* hw bug */
289	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
290	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
291		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
292
293	/* vertical FP must be at least 1 */
294	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
295		adjusted_mode->crtc_vsync_start++;
296
297	/* get the native mode for scaling */
298	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
299		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
300	else if (amdgpu_encoder->rmx_type != RMX_OFF)
301		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
302
303	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
304	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
305		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
306		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
307	}
308
309	return true;
310}
311
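/* Enable or disable the internal DAC via the DACxEncoderControl table. */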
312static void
313amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
314{
315	struct drm_device *dev = encoder->dev;
316	struct amdgpu_device *adev = drm_to_adev(dev);
317	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
318	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
319	int index = 0;
320
321	memset(&args, 0, sizeof(args));
322
323	switch (amdgpu_encoder->encoder_id) {
324	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
325	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
326		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
327		break;
328	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
329	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
330		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
331		break;
332	}
333
334	args.ucAction = action;
335	args.ucDacStandard = ATOM_DAC1_PS2;
336	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
337
338	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
339
340}
341
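/* Translate the CRTC bpc into the ATOM PANEL_*BIT_PER_COLOR encoding. */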
342static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
343{
344	int bpc = 8;
345
346	if (encoder->crtc) {
347		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
348		bpc = amdgpu_crtc->bpc;
349	}
350
351	switch (bpc) {
352	case 0:
353		return PANEL_BPC_UNDEFINE;
354	case 6:
355		return PANEL_6BIT_PER_COLOR;
356	case 8:
357	default:
358		return PANEL_8BIT_PER_COLOR;
359	case 10:
360		return PANEL_10BIT_PER_COLOR;
361	case 12:
362		return PANEL_12BIT_PER_COLOR;
363	case 16:
364		return PANEL_16BIT_PER_COLOR;
365	}
366}
367
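/* The DVOEncoderControl table takes different parameter layouts
 * depending on the table revision; the union below covers all of them.
 */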
368union dvo_encoder_control {
369	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
370	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
371	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
372	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
373};
374
375static void
376amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
377{
378	struct drm_device *dev = encoder->dev;
379	struct amdgpu_device *adev = drm_to_adev(dev);
380	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
381	union dvo_encoder_control args;
382	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
383	uint8_t frev, crev;
384
385	memset(&args, 0, sizeof(args));
386
387	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
388		return;
389
390	switch (frev) {
391	case 1:
392		switch (crev) {
393		case 1:
394			/* R4xx, R5xx */
395			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
396
397			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
398				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
399
400			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
401			break;
402		case 2:
403			/* RS600/690/740 */
404			args.dvo.sDVOEncoder.ucAction = action;
405			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
406			/* DFP1, CRT1, TV1 depending on the type of port */
407			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
408
409			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
410				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
411			break;
412		case 3:
413			/* R6xx */
414			args.dvo_v3.ucAction = action;
415			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
416			args.dvo_v3.ucDVOConfig = 0; /* XXX */
417			break;
418		case 4:
419			/* DCE8 */
420			args.dvo_v4.ucAction = action;
421			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
422			args.dvo_v4.ucDVOConfig = 0; /* XXX */
423			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
424			break;
425		default:
426			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
427			break;
428		}
429		break;
430	default:
431		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
432		break;
433	}
434
435	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
436}
437
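/* Pick the ATOM encoder mode (DP, HDMI, DVI, LVDS, CRT, TV) based on
 * the connector type, the DP sink type and the audio setting.
 */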
438int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
439{
440	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
441	struct drm_connector *connector;
442	struct amdgpu_connector *amdgpu_connector;
443	struct amdgpu_connector_atom_dig *dig_connector;
444
445	/* dp bridges are always DP */
446	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
447		return ATOM_ENCODER_MODE_DP;
448
449	/* DVO is always DVO */
450	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
451	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
452		return ATOM_ENCODER_MODE_DVO;
453
454	connector = amdgpu_get_connector_for_encoder(encoder);
455	/* if we don't have an active device yet, just use one of
456	 * the connectors tied to the encoder.
457	 */
458	if (!connector)
459		connector = amdgpu_get_connector_for_encoder_init(encoder);
460	amdgpu_connector = to_amdgpu_connector(connector);
461
462	switch (connector->connector_type) {
463	case DRM_MODE_CONNECTOR_DVII:
464	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
465		if (amdgpu_audio != 0) {
466			if (amdgpu_connector->use_digital &&
467			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
468				return ATOM_ENCODER_MODE_HDMI;
469			else if (connector->display_info.is_hdmi &&
470				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
471				return ATOM_ENCODER_MODE_HDMI;
472			else if (amdgpu_connector->use_digital)
473				return ATOM_ENCODER_MODE_DVI;
474			else
475				return ATOM_ENCODER_MODE_CRT;
476		} else if (amdgpu_connector->use_digital) {
477			return ATOM_ENCODER_MODE_DVI;
478		} else {
479			return ATOM_ENCODER_MODE_CRT;
480		}
481		break;
482	case DRM_MODE_CONNECTOR_DVID:
483	case DRM_MODE_CONNECTOR_HDMIA:
484	default:
485		if (amdgpu_audio != 0) {
486			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
487				return ATOM_ENCODER_MODE_HDMI;
488			else if (connector->display_info.is_hdmi &&
489				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
490				return ATOM_ENCODER_MODE_HDMI;
491			else
492				return ATOM_ENCODER_MODE_DVI;
493		} else {
494			return ATOM_ENCODER_MODE_DVI;
495		}
496	case DRM_MODE_CONNECTOR_LVDS:
497		return ATOM_ENCODER_MODE_LVDS;
498	case DRM_MODE_CONNECTOR_DisplayPort:
499		dig_connector = amdgpu_connector->con_priv;
500		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
501		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
502			return ATOM_ENCODER_MODE_DP;
503		} else if (amdgpu_audio != 0) {
504			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
505				return ATOM_ENCODER_MODE_HDMI;
506			else if (connector->display_info.is_hdmi &&
507				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
508				return ATOM_ENCODER_MODE_HDMI;
509			else
510				return ATOM_ENCODER_MODE_DVI;
511		} else {
512			return ATOM_ENCODER_MODE_DVI;
513		}
514	case DRM_MODE_CONNECTOR_eDP:
515		return ATOM_ENCODER_MODE_DP;
516	case DRM_MODE_CONNECTOR_DVIA:
517	case DRM_MODE_CONNECTOR_VGA:
518		return ATOM_ENCODER_MODE_CRT;
519	case DRM_MODE_CONNECTOR_Composite:
520	case DRM_MODE_CONNECTOR_SVIDEO:
521	case DRM_MODE_CONNECTOR_9PinDIN:
522		/* fix me */
523		return ATOM_ENCODER_MODE_TV;
524	}
525}
526
/*
 * DIG Encoder/Transmitter Setup
 *
 * DCE 6.0
 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 * Supports up to 6 digital outputs
 * - 6 DIG encoder blocks.
 * - DIG to PHY mapping is hardcoded
 * DIG1 drives UNIPHY0 link A, A+B
 * DIG2 drives UNIPHY0 link B
 * DIG3 drives UNIPHY1 link A, A+B
 * DIG4 drives UNIPHY1 link B
 * DIG5 drives UNIPHY2 link A, A+B
 * DIG6 drives UNIPHY2 link B
 *
 * Routing
 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 * Examples:
 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 */

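/* Parameter layouts for the different DIGxEncoderControl table revisions. */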
551union dig_encoder_control {
552	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
553	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
554	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
555	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
556	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
557};
558
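/* Program the DIGxEncoderControl table for the DIG encoder assigned to
 * this encoder: panel mode or encoder mode, lane count and DP link rate.
 */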
559void
560amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
561				   int action, int panel_mode)
562{
563	struct drm_device *dev = encoder->dev;
564	struct amdgpu_device *adev = drm_to_adev(dev);
565	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
566	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
567	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
568	union dig_encoder_control args;
569	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
570	uint8_t frev, crev;
571	int dp_clock = 0;
572	int dp_lane_count = 0;
573	int hpd_id = AMDGPU_HPD_NONE;
574
575	if (connector) {
576		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
577		struct amdgpu_connector_atom_dig *dig_connector =
578			amdgpu_connector->con_priv;
579
580		dp_clock = dig_connector->dp_clock;
581		dp_lane_count = dig_connector->dp_lane_count;
582		hpd_id = amdgpu_connector->hpd.hpd;
583	}
584
585	/* no dig encoder assigned */
586	if (dig->dig_encoder == -1)
587		return;
588
589	memset(&args, 0, sizeof(args));
590
591	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
592		return;
593
594	switch (frev) {
595	case 1:
596		switch (crev) {
597		case 1:
598			args.v1.ucAction = action;
599			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
600			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
601				args.v3.ucPanelMode = panel_mode;
602			else
603				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
604
605			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
606				args.v1.ucLaneNum = dp_lane_count;
607			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
608				args.v1.ucLaneNum = 8;
609			else
610				args.v1.ucLaneNum = 4;
611
612			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
613				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
614			switch (amdgpu_encoder->encoder_id) {
615			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
616				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
617				break;
618			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
619			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
620				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
621				break;
622			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
623				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
624				break;
625			}
626			if (dig->linkb)
627				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
628			else
629				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
630			break;
631		case 2:
632		case 3:
633			args.v3.ucAction = action;
634			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
635			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
636				args.v3.ucPanelMode = panel_mode;
637			else
638				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
639
640			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
641				args.v3.ucLaneNum = dp_lane_count;
642			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
643				args.v3.ucLaneNum = 8;
644			else
645				args.v3.ucLaneNum = 4;
646
647			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
648				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
649			args.v3.acConfig.ucDigSel = dig->dig_encoder;
650			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
651			break;
652		case 4:
653			args.v4.ucAction = action;
654			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
655			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
656				args.v4.ucPanelMode = panel_mode;
657			else
658				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
659
660			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
661				args.v4.ucLaneNum = dp_lane_count;
662			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
663				args.v4.ucLaneNum = 8;
664			else
665				args.v4.ucLaneNum = 4;
666
667			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
668				if (dp_clock == 540000)
669					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
670				else if (dp_clock == 324000)
671					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
672				else if (dp_clock == 270000)
673					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
674				else
675					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
676			}
677			args.v4.acConfig.ucDigSel = dig->dig_encoder;
678			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
679			if (hpd_id == AMDGPU_HPD_NONE)
680				args.v4.ucHPD_ID = 0;
681			else
682				args.v4.ucHPD_ID = hpd_id + 1;
683			break;
684		case 5:
685			switch (action) {
686			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
687				args.v5.asDPPanelModeParam.ucAction = action;
688				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
689				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
690				break;
691			case ATOM_ENCODER_CMD_STREAM_SETUP:
692				args.v5.asStreamParam.ucAction = action;
693				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
694				args.v5.asStreamParam.ucDigMode =
695					amdgpu_atombios_encoder_get_encoder_mode(encoder);
696				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
697					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
698				else if (amdgpu_dig_monitor_is_duallink(encoder,
699									amdgpu_encoder->pixel_clock))
700					args.v5.asStreamParam.ucLaneNum = 8;
701				else
702					args.v5.asStreamParam.ucLaneNum = 4;
703				args.v5.asStreamParam.ulPixelClock =
704					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
705				args.v5.asStreamParam.ucBitPerColor =
706					amdgpu_atombios_encoder_get_bpc(encoder);
707				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
708				break;
709			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
710			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
711			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
712			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
713			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
714			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
715			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
716			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
717				args.v5.asCmdParam.ucAction = action;
718				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
719				break;
720			default:
721				DRM_ERROR("Unsupported action 0x%x\n", action);
722				break;
723			}
724			break;
725		default:
726			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
727			break;
728		}
729		break;
730	default:
731		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
732		break;
733	}
734
735	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
736
737}
738
739union dig_transmitter_control {
740	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
741	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
742	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
743	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
744	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
745	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
746};
747
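/* Program the UNIPHY/LVTMA transmitter control table.  lane_num and
 * lane_set select the lanes and the voltage swing/pre-emphasis setting
 * for ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH during DP link training.
 */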
748void
749amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
750					      uint8_t lane_num, uint8_t lane_set)
751{
752	struct drm_device *dev = encoder->dev;
753	struct amdgpu_device *adev = drm_to_adev(dev);
754	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
755	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
756	struct drm_connector *connector;
757	union dig_transmitter_control args;
758	int index = 0;
759	uint8_t frev, crev;
760	bool is_dp = false;
761	int pll_id = 0;
762	int dp_clock = 0;
763	int dp_lane_count = 0;
764	int connector_object_id = 0;
765	int dig_encoder = dig->dig_encoder;
766	int hpd_id = AMDGPU_HPD_NONE;
767
768	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
769		connector = amdgpu_get_connector_for_encoder_init(encoder);
770		/* just needed to avoid bailing in the encoder check.  the encoder
771		 * isn't used for init
772		 */
773		dig_encoder = 0;
774	} else
775		connector = amdgpu_get_connector_for_encoder(encoder);
776
777	if (connector) {
778		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
779		struct amdgpu_connector_atom_dig *dig_connector =
780			amdgpu_connector->con_priv;
781
782		hpd_id = amdgpu_connector->hpd.hpd;
783		dp_clock = dig_connector->dp_clock;
784		dp_lane_count = dig_connector->dp_lane_count;
785		connector_object_id =
786			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
787	}
788
789	if (encoder->crtc) {
790		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
791		pll_id = amdgpu_crtc->pll_id;
792	}
793
794	/* no dig encoder assigned */
795	if (dig_encoder == -1)
796		return;
797
798	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
799		is_dp = true;
800
801	memset(&args, 0, sizeof(args));
802
803	switch (amdgpu_encoder->encoder_id) {
804	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
805		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
806		break;
807	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
808	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
809	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
810	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
811		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
812		break;
813	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
814		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
815		break;
816	}
817
818	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
819		return;
820
821	switch (frev) {
822	case 1:
823		switch (crev) {
824		case 1:
825			args.v1.ucAction = action;
826			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
827				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
828			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
829				args.v1.asMode.ucLaneSel = lane_num;
830				args.v1.asMode.ucLaneSet = lane_set;
831			} else {
832				if (is_dp)
833					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
834				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
835					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
836				else
837					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
838			}
839
840			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
841
842			if (dig_encoder)
843				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
844			else
845				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
846
847			if (dig->linkb)
848				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
849			else
850				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
851
852			if (is_dp)
853				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
854			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
855				if (dig->coherent_mode)
856					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
857				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
858					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
859			}
860			break;
861		case 2:
862			args.v2.ucAction = action;
863			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
864				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
865			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
866				args.v2.asMode.ucLaneSel = lane_num;
867				args.v2.asMode.ucLaneSet = lane_set;
868			} else {
869				if (is_dp)
870					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
871				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
872					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
873				else
874					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
875			}
876
877			args.v2.acConfig.ucEncoderSel = dig_encoder;
878			if (dig->linkb)
879				args.v2.acConfig.ucLinkSel = 1;
880
881			switch (amdgpu_encoder->encoder_id) {
882			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
883				args.v2.acConfig.ucTransmitterSel = 0;
884				break;
885			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
886				args.v2.acConfig.ucTransmitterSel = 1;
887				break;
888			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
889				args.v2.acConfig.ucTransmitterSel = 2;
890				break;
891			}
892
893			if (is_dp) {
894				args.v2.acConfig.fCoherentMode = 1;
895				args.v2.acConfig.fDPConnector = 1;
896			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
897				if (dig->coherent_mode)
898					args.v2.acConfig.fCoherentMode = 1;
899				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
900					args.v2.acConfig.fDualLinkConnector = 1;
901			}
902			break;
903		case 3:
904			args.v3.ucAction = action;
905			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
906				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
907			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
908				args.v3.asMode.ucLaneSel = lane_num;
909				args.v3.asMode.ucLaneSet = lane_set;
910			} else {
911				if (is_dp)
912					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
913				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
914					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
915				else
916					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
917			}
918
919			if (is_dp)
920				args.v3.ucLaneNum = dp_lane_count;
921			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
922				args.v3.ucLaneNum = 8;
923			else
924				args.v3.ucLaneNum = 4;
925
926			if (dig->linkb)
927				args.v3.acConfig.ucLinkSel = 1;
928			if (dig_encoder & 1)
929				args.v3.acConfig.ucEncoderSel = 1;
930
931			/* Select the PLL for the PHY
932			 * DP PHY should be clocked from external src if there is
933			 * one.
934			 */
935			/* On DCE4, if there is an external clock, it generates the DP ref clock */
936			if (is_dp && adev->clock.dp_extclk)
937				args.v3.acConfig.ucRefClkSource = 2; /* external src */
938			else
939				args.v3.acConfig.ucRefClkSource = pll_id;
940
941			switch (amdgpu_encoder->encoder_id) {
942			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
943				args.v3.acConfig.ucTransmitterSel = 0;
944				break;
945			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
946				args.v3.acConfig.ucTransmitterSel = 1;
947				break;
948			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
949				args.v3.acConfig.ucTransmitterSel = 2;
950				break;
951			}
952
953			if (is_dp)
954				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
955			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
956				if (dig->coherent_mode)
957					args.v3.acConfig.fCoherentMode = 1;
958				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
959					args.v3.acConfig.fDualLinkConnector = 1;
960			}
961			break;
962		case 4:
963			args.v4.ucAction = action;
964			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
965				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
966			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
967				args.v4.asMode.ucLaneSel = lane_num;
968				args.v4.asMode.ucLaneSet = lane_set;
969			} else {
970				if (is_dp)
971					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
972				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
973					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
974				else
975					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
976			}
977
978			if (is_dp)
979				args.v4.ucLaneNum = dp_lane_count;
980			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
981				args.v4.ucLaneNum = 8;
982			else
983				args.v4.ucLaneNum = 4;
984
985			if (dig->linkb)
986				args.v4.acConfig.ucLinkSel = 1;
987			if (dig_encoder & 1)
988				args.v4.acConfig.ucEncoderSel = 1;
989
990			/* Select the PLL for the PHY
991			 * DP PHY should be clocked from external src if there is
992			 * one.
993			 */
994			/* On DCE5 DCPLL usually generates the DP ref clock */
995			if (is_dp) {
996				if (adev->clock.dp_extclk)
997					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
998				else
999					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1000			} else
1001				args.v4.acConfig.ucRefClkSource = pll_id;
1002
1003			switch (amdgpu_encoder->encoder_id) {
1004			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1005				args.v4.acConfig.ucTransmitterSel = 0;
1006				break;
1007			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1008				args.v4.acConfig.ucTransmitterSel = 1;
1009				break;
1010			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1011				args.v4.acConfig.ucTransmitterSel = 2;
1012				break;
1013			}
1014
1015			if (is_dp)
1016				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1017			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1018				if (dig->coherent_mode)
1019					args.v4.acConfig.fCoherentMode = 1;
1020				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1021					args.v4.acConfig.fDualLinkConnector = 1;
1022			}
1023			break;
1024		case 5:
1025			args.v5.ucAction = action;
1026			if (is_dp)
1027				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1028			else
1029				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1030
1031			switch (amdgpu_encoder->encoder_id) {
1032			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1033				if (dig->linkb)
1034					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1035				else
1036					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1037				break;
1038			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1039				if (dig->linkb)
1040					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1041				else
1042					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1043				break;
1044			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1045				if (dig->linkb)
1046					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1047				else
1048					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1049				break;
1050			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1051				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1052				break;
1053			}
1054			if (is_dp)
1055				args.v5.ucLaneNum = dp_lane_count;
1056			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1057				args.v5.ucLaneNum = 8;
1058			else
1059				args.v5.ucLaneNum = 4;
1060			args.v5.ucConnObjId = connector_object_id;
1061			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1062
1063			if (is_dp && adev->clock.dp_extclk)
1064				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1065			else
1066				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1067
1068			if (is_dp)
1069				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1070			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1071				if (dig->coherent_mode)
1072					args.v5.asConfig.ucCoherentMode = 1;
1073			}
1074			if (hpd_id == AMDGPU_HPD_NONE)
1075				args.v5.asConfig.ucHPDSel = 0;
1076			else
1077				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1078			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1079			args.v5.ucDPLaneSet = lane_set;
1080			break;
1081		case 6:
1082			args.v6.ucAction = action;
1083			if (is_dp)
1084				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1085			else
1086				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1087
1088			switch (amdgpu_encoder->encoder_id) {
1089			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1090				if (dig->linkb)
1091					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1092				else
1093					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1094				break;
1095			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1096				if (dig->linkb)
1097					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1098				else
1099					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1100				break;
1101			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1102				if (dig->linkb)
1103					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1104				else
1105					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1106				break;
1107			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1108				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1109				break;
1110			}
1111			if (is_dp)
1112				args.v6.ucLaneNum = dp_lane_count;
1113			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1114				args.v6.ucLaneNum = 8;
1115			else
1116				args.v6.ucLaneNum = 4;
1117			args.v6.ucConnObjId = connector_object_id;
1118			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1119				args.v6.ucDPLaneSet = lane_set;
1120			else
1121				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1122
1123			if (hpd_id == AMDGPU_HPD_NONE)
1124				args.v6.ucHPDSel = 0;
1125			else
1126				args.v6.ucHPDSel = hpd_id + 1;
1127			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1128			break;
1129		default:
1130			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1131			break;
1132		}
1133		break;
1134	default:
1135		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1136		break;
1137	}
1138
1139	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1140}
1141
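/* Switch eDP panel power on or off.  After powering the panel on, wait
 * up to 300 ms for it to assert HPD.
 */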
1142bool
1143amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1144				     int action)
1145{
1146	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1147	struct drm_device *dev = amdgpu_connector->base.dev;
1148	struct amdgpu_device *adev = drm_to_adev(dev);
1149	union dig_transmitter_control args;
1150	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1151	uint8_t frev, crev;
1152
1153	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1154		goto done;
1155
1156	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1157	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1158		goto done;
1159
1160	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1161		goto done;
1162
1163	memset(&args, 0, sizeof(args));
1164
1165	args.v1.ucAction = action;
1166
1167	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1168
1169	/* wait for the panel to power up */
1170	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1171		int i;
1172
1173		for (i = 0; i < 300; i++) {
1174			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1175				return true;
1176			mdelay(1);
1177		}
1178		return false;
1179	}
1180done:
1181	return true;
1182}
1183
1184union external_encoder_control {
1185	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1186	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1187};
1188
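/* Program an external encoder (e.g. a DP bridge) hanging off this
 * encoder via the ExternalEncoderControl table.
 */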
1189static void
1190amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1191					struct drm_encoder *ext_encoder,
1192					int action)
1193{
1194	struct drm_device *dev = encoder->dev;
1195	struct amdgpu_device *adev = drm_to_adev(dev);
1196	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1197	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1198	union external_encoder_control args;
1199	struct drm_connector *connector;
1200	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1201	u8 frev, crev;
1202	int dp_clock = 0;
1203	int dp_lane_count = 0;
1204	int connector_object_id = 0;
1205	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1206
1207	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1208		connector = amdgpu_get_connector_for_encoder_init(encoder);
1209	else
1210		connector = amdgpu_get_connector_for_encoder(encoder);
1211
1212	if (connector) {
1213		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1214		struct amdgpu_connector_atom_dig *dig_connector =
1215			amdgpu_connector->con_priv;
1216
1217		dp_clock = dig_connector->dp_clock;
1218		dp_lane_count = dig_connector->dp_lane_count;
1219		connector_object_id =
1220			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1221	}
1222
1223	memset(&args, 0, sizeof(args));
1224
1225	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1226		return;
1227
1228	switch (frev) {
1229	case 1:
1230		/* no params on frev 1 */
1231		break;
1232	case 2:
1233		switch (crev) {
1234		case 1:
1235		case 2:
1236			args.v1.sDigEncoder.ucAction = action;
1237			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1238			args.v1.sDigEncoder.ucEncoderMode =
1239				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1240
1241			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1242				if (dp_clock == 270000)
1243					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1244				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1245			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1246				args.v1.sDigEncoder.ucLaneNum = 8;
1247			else
1248				args.v1.sDigEncoder.ucLaneNum = 4;
1249			break;
1250		case 3:
1251			args.v3.sExtEncoder.ucAction = action;
1252			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1253				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1254			else
1255				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1256			args.v3.sExtEncoder.ucEncoderMode =
1257				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1258
1259			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1260				if (dp_clock == 270000)
1261					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1262				else if (dp_clock == 540000)
1263					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1264				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1265			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1266				args.v3.sExtEncoder.ucLaneNum = 8;
1267			else
1268				args.v3.sExtEncoder.ucLaneNum = 4;
1269			switch (ext_enum) {
1270			case GRAPH_OBJECT_ENUM_ID1:
1271				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1272				break;
1273			case GRAPH_OBJECT_ENUM_ID2:
1274				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1275				break;
1276			case GRAPH_OBJECT_ENUM_ID3:
1277				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1278				break;
1279			}
1280			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1281			break;
1282		default:
1283			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1284			return;
1285		}
1286		break;
1287	default:
1288		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1289		return;
1290	}
1291	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1292}
1293
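/* Full enable/disable sequence for a DIG encoder/transmitter pair:
 * panel mode and encoder setup, optional external encoder and eDP panel
 * power, transmitter enable/disable, DP link training and backlight.
 */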
1294static void
1295amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1296{
1297	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1298	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1299	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1300	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1301	struct amdgpu_connector *amdgpu_connector = NULL;
1302	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1303
1304	if (connector) {
1305		amdgpu_connector = to_amdgpu_connector(connector);
1306		amdgpu_dig_connector = amdgpu_connector->con_priv;
1307	}
1308
1309	if (action == ATOM_ENABLE) {
1310		if (!connector)
1311			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1312		else
1313			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1314
1315		/* setup and enable the encoder */
1316		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1317		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1318						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1319						   dig->panel_mode);
1320		if (ext_encoder)
1321			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1322								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1323		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1324		    connector) {
1325			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1326				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1327								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1328				amdgpu_dig_connector->edp_on = true;
1329			}
1330		}
1331		/* enable the transmitter */
1332		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1333						       ATOM_TRANSMITTER_ACTION_ENABLE,
1334						       0, 0);
1335		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1336		    connector) {
1337			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1338			amdgpu_atombios_dp_link_train(encoder, connector);
1339			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1340		}
1341		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1342			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1343		if (ext_encoder)
1344			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1345	} else {
1346		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1347		    connector)
1348			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1349							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1350		if (ext_encoder)
1351			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1352		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1353			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1354							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1355
1356		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1357		    connector)
1358			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1359		/* disable the transmitter */
1360		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1361						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1362		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1363		    connector) {
1364			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1365				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1366								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1367				amdgpu_dig_connector->edp_on = false;
1368			}
1369		}
1370	}
1371}
1372
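/* Map DRM DPMS states onto ATOM enable/disable for the DIG, DVO and DAC
 * encoders.
 */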
1373void
1374amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1375{
1376	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1377
1378	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1379		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1380		  amdgpu_encoder->active_device);
1381	switch (amdgpu_encoder->encoder_id) {
1382	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1383	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1384	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1385	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1386		switch (mode) {
1387		case DRM_MODE_DPMS_ON:
1388			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1389			break;
1390		case DRM_MODE_DPMS_STANDBY:
1391		case DRM_MODE_DPMS_SUSPEND:
1392		case DRM_MODE_DPMS_OFF:
1393			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1394			break;
1395		}
1396		break;
1397	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1398		switch (mode) {
1399		case DRM_MODE_DPMS_ON:
1400			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1401			break;
1402		case DRM_MODE_DPMS_STANDBY:
1403		case DRM_MODE_DPMS_SUSPEND:
1404		case DRM_MODE_DPMS_OFF:
1405			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1406			break;
1407		}
1408		break;
1409	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1410		switch (mode) {
1411		case DRM_MODE_DPMS_ON:
1412			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1413			break;
1414		case DRM_MODE_DPMS_STANDBY:
1415		case DRM_MODE_DPMS_SUSPEND:
1416		case DRM_MODE_DPMS_OFF:
1417			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1418			break;
1419		}
1420		break;
1421	default:
1422		return;
1423	}
1424}
1425
1426union crtc_source_param {
1427	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1428	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1429	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1430};
1431
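/* Tell the BIOS which CRTC feeds this encoder via the SelectCRTC_Source
 * table.
 */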
1432void
1433amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1434{
1435	struct drm_device *dev = encoder->dev;
1436	struct amdgpu_device *adev = drm_to_adev(dev);
1437	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1438	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1439	union crtc_source_param args;
1440	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1441	uint8_t frev, crev;
1442	struct amdgpu_encoder_atom_dig *dig;
1443
1444	memset(&args, 0, sizeof(args));
1445
1446	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1447		return;
1448
1449	switch (frev) {
1450	case 1:
1451		switch (crev) {
1452		case 1:
1453		default:
1454			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1455			switch (amdgpu_encoder->encoder_id) {
1456			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1457			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1458				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1459				break;
1460			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1461			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1462				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1463					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1464				else
1465					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1466				break;
1467			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1468			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1469			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1470				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1471				break;
1472			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1473			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1474				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1475					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1476				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1477					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1478				else
1479					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1480				break;
1481			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1482			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1483				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1484					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1485				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1486					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1487				else
1488					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1489				break;
1490			}
1491			break;
1492		case 2:
1493			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1494			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1495				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1496
1497				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1498					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1499				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1500					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1501				else
1502					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1503			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1504				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1505			} else {
1506				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1507			}
1508			switch (amdgpu_encoder->encoder_id) {
1509			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1510			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1511			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1512			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1513			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1514				dig = amdgpu_encoder->enc_priv;
1515				switch (dig->dig_encoder) {
1516				case 0:
1517					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1518					break;
1519				case 1:
1520					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1521					break;
1522				case 2:
1523					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1524					break;
1525				case 3:
1526					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1527					break;
1528				case 4:
1529					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1530					break;
1531				case 5:
1532					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1533					break;
1534				case 6:
1535					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1536					break;
1537				}
1538				break;
1539			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1540				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1541				break;
1542			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1543				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1544					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1545				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1546					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1547				else
1548					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1549				break;
1550			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1551				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1552					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1553				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1554					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1555				else
1556					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1557				break;
1558			}
1559			break;
1560		case 3:
1561			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1562			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1563				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1564
1565				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1566					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1567				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1568					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1569				else
1570					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1571			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1572				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1573			} else {
1574				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1575			}
1576			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1577			switch (amdgpu_encoder->encoder_id) {
1578			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1579			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1580			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1581			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1582			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1583				dig = amdgpu_encoder->enc_priv;
1584				switch (dig->dig_encoder) {
1585				case 0:
1586					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1587					break;
1588				case 1:
1589					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1590					break;
1591				case 2:
1592					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1593					break;
1594				case 3:
1595					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1596					break;
1597				case 4:
1598					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1599					break;
1600				case 5:
1601					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1602					break;
1603				case 6:
1604					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1605					break;
1606				}
1607				break;
1608			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1609				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1610				break;
1611			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1612				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1613					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1614				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1615					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1616				else
1617					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1618				break;
1619			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1620				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1621					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1622				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1623					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1624				else
1625					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1626				break;
1627			}
1628			break;
1629		}
1630		break;
1631	default:
1632		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1633		return;
1634	}
1635
1636	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1637}
1638
/*
 * Initialize the DIG transmitters and any external encoders.
 * This only needs to be called once at startup.
 */
1640void
1641amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1642{
1643	struct drm_device *dev = adev_to_drm(adev);
1644	struct drm_encoder *encoder;
1645
1646	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1647		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1648		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1649
1650		switch (amdgpu_encoder->encoder_id) {
1651		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1652		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1653		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1654		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1655			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1656							       0, 0);
1657			break;
1658		}
1659
1660		if (ext_encoder)
1661			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1662								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1663	}
1664}
1665
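/*
 * Execute the DAC_LoadDetection command table so the BIOS runs a
 * load-detect cycle on the analog (CRT/TV/CV) output.  Returns true if
 * the table was executed; the result is latched in the BIOS scratch
 * registers.
 */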
1666static bool
1667amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1668				 struct drm_connector *connector)
1669{
1670	struct drm_device *dev = encoder->dev;
1671	struct amdgpu_device *adev = drm_to_adev(dev);
1672	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1673	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1674
1675	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1676				       ATOM_DEVICE_CV_SUPPORT |
1677				       ATOM_DEVICE_CRT_SUPPORT)) {
1678		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1679		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1680		uint8_t frev, crev;
1681
1682		memset(&args, 0, sizeof(args));
1683
1684		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1685			return false;
1686
1687		args.sDacload.ucMisc = 0;
1688
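		/* DAC1 encoders drive DAC A; everything else drives DAC B */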
1689		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1690		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1691			args.sDacload.ucDacType = ATOM_DAC_A;
1692		else
1693			args.sDacload.ucDacType = ATOM_DAC_B;
1694
1695		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1696			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1697		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1698			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1699		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1700			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1701			if (crev >= 3)
1702				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1703		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1704			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1705			if (crev >= 3)
1706				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1707		}
1708
1709		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1710
1711		return true;
	}

	return false;
1714}
1715
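/*
 * Detect an analog monitor on a DAC: run load detection, then check the
 * per-device connected bits the BIOS leaves in BIOS_SCRATCH_0.
 */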
1716enum drm_connector_status
1717amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1718			    struct drm_connector *connector)
1719{
1720	struct drm_device *dev = encoder->dev;
1721	struct amdgpu_device *adev = drm_to_adev(dev);
1722	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1723	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1724	uint32_t bios_0_scratch;
1725
1726	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
		DRM_DEBUG_KMS("detect returned false\n");
1728		return connector_status_unknown;
1729	}
1730
1731	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1732
1733	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1734	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1735		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1736			return connector_status_connected;
1737	}
1738	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1739		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1740			return connector_status_connected;
1741	}
1742	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1743		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1744			return connector_status_connected;
1745	}
1746	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1747		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1748			return connector_status_connected; /* CTV */
1749		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1750			return connector_status_connected; /* STV */
1751	}
1752	return connector_status_disconnected;
1753}
1754
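/*
 * Analog detection for connectors routed through an external encoder
 * (typically a DP to VGA bridge): the external encoder performs the load
 * detect and reports the result in BIOS_SCRATCH_0.
 */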
1755enum drm_connector_status
1756amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1757			    struct drm_connector *connector)
1758{
1759	struct drm_device *dev = encoder->dev;
1760	struct amdgpu_device *adev = drm_to_adev(dev);
1761	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1762	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1763	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1764	u32 bios_0_scratch;
1765
1766	if (!ext_encoder)
1767		return connector_status_unknown;
1768
1769	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1770		return connector_status_unknown;
1771
1772	/* load detect on the dp bridge */
1773	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1774						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1775
1776	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1777
1778	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1779	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1780		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1781			return connector_status_connected;
1782	}
1783	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1784		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1785			return connector_status_connected;
1786	}
1787	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1788		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1789			return connector_status_connected;
1790	}
1791	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1792		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1793			return connector_status_connected; /* CTV */
1794		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1795			return connector_status_connected; /* STV */
1796	}
1797	return connector_status_disconnected;
1798}
1799
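/* Configure DDC routing on the external DP bridge, if one is present */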
1800void
1801amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1802{
1803	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1804
1805	if (ext_encoder)
1806		/* ddc_setup on the dp bridge */
1807		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1808							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
}
1811
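/*
 * Mirror the connector's connected/disconnected state into the BIOS
 * scratch registers so the VBIOS and the driver agree on which outputs
 * are present and active.
 */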
1812void
1813amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1814				       struct drm_encoder *encoder,
1815				       bool connected)
1816{
1817	struct drm_device *dev = connector->dev;
1818	struct amdgpu_device *adev = drm_to_adev(dev);
1819	struct amdgpu_connector *amdgpu_connector =
1820	    to_amdgpu_connector(connector);
1821	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1822	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1823
1824	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1825	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1826	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1827
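	/*
	 * For every device owned by both the encoder and the connector,
	 * update the matching connected (S0), active (S3) and S6 bits.
	 */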
1828	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1829	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1830		if (connected) {
1831			DRM_DEBUG_KMS("LCD1 connected\n");
1832			bios_0_scratch |= ATOM_S0_LCD1;
1833			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1834			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1835		} else {
1836			DRM_DEBUG_KMS("LCD1 disconnected\n");
1837			bios_0_scratch &= ~ATOM_S0_LCD1;
1838			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1839			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1840		}
1841	}
1842	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1843	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1844		if (connected) {
1845			DRM_DEBUG_KMS("CRT1 connected\n");
1846			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1847			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1848			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1849		} else {
1850			DRM_DEBUG_KMS("CRT1 disconnected\n");
1851			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1852			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1853			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1854		}
1855	}
1856	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1857	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1858		if (connected) {
1859			DRM_DEBUG_KMS("CRT2 connected\n");
1860			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1861			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1862			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1863		} else {
1864			DRM_DEBUG_KMS("CRT2 disconnected\n");
1865			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1866			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1867			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1868		}
1869	}
1870	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1871	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1872		if (connected) {
1873			DRM_DEBUG_KMS("DFP1 connected\n");
1874			bios_0_scratch |= ATOM_S0_DFP1;
1875			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1876			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1877		} else {
1878			DRM_DEBUG_KMS("DFP1 disconnected\n");
1879			bios_0_scratch &= ~ATOM_S0_DFP1;
1880			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1881			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1882		}
1883	}
1884	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1885	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1886		if (connected) {
1887			DRM_DEBUG_KMS("DFP2 connected\n");
1888			bios_0_scratch |= ATOM_S0_DFP2;
1889			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1890			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1891		} else {
1892			DRM_DEBUG_KMS("DFP2 disconnected\n");
1893			bios_0_scratch &= ~ATOM_S0_DFP2;
1894			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1895			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1896		}
1897	}
1898	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1899	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1900		if (connected) {
1901			DRM_DEBUG_KMS("DFP3 connected\n");
1902			bios_0_scratch |= ATOM_S0_DFP3;
1903			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1904			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1905		} else {
1906			DRM_DEBUG_KMS("DFP3 disconnected\n");
1907			bios_0_scratch &= ~ATOM_S0_DFP3;
1908			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1909			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1910		}
1911	}
1912	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1913	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1914		if (connected) {
1915			DRM_DEBUG_KMS("DFP4 connected\n");
1916			bios_0_scratch |= ATOM_S0_DFP4;
1917			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1918			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1919		} else {
1920			DRM_DEBUG_KMS("DFP4 disconnected\n");
1921			bios_0_scratch &= ~ATOM_S0_DFP4;
1922			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1923			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1924		}
1925	}
1926	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1927	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1928		if (connected) {
1929			DRM_DEBUG_KMS("DFP5 connected\n");
1930			bios_0_scratch |= ATOM_S0_DFP5;
1931			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1932			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1933		} else {
1934			DRM_DEBUG_KMS("DFP5 disconnected\n");
1935			bios_0_scratch &= ~ATOM_S0_DFP5;
1936			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1937			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1938		}
1939	}
1940	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1941	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1942		if (connected) {
1943			DRM_DEBUG_KMS("DFP6 connected\n");
1944			bios_0_scratch |= ATOM_S0_DFP6;
1945			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1946			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1947		} else {
1948			DRM_DEBUG_KMS("DFP6 disconnected\n");
1949			bios_0_scratch &= ~ATOM_S0_DFP6;
1950			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1951			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1952		}
1953	}
1954
1955	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1956	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1957	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1958}
1959
1960union lvds_info {
1961	struct _ATOM_LVDS_INFO info;
1962	struct _ATOM_LVDS_INFO_V12 info_12;
1963};
1964
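/*
 * Parse the VBIOS LVDS_Info data table for an LCD panel: native timing,
 * panel power sequencing delay, misc flags and any LCD patch records
 * (fake EDID, panel resolution override).
 */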
1965struct amdgpu_encoder_atom_dig *
1966amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1967{
1968	struct drm_device *dev = encoder->base.dev;
1969	struct amdgpu_device *adev = drm_to_adev(dev);
1970	struct amdgpu_mode_info *mode_info = &adev->mode_info;
1971	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
1972	uint16_t data_offset, misc;
1973	union lvds_info *lvds_info;
1974	uint8_t frev, crev;
1975	struct amdgpu_encoder_atom_dig *lvds = NULL;
1976	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1977
1978	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
1979				   &frev, &crev, &data_offset)) {
1980		lvds_info =
1981			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
1982		lvds =
1983		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
1984
1985		if (!lvds)
1986			return NULL;
1987
1988		lvds->native_mode.clock =
1989		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
1990		lvds->native_mode.hdisplay =
1991		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
1992		lvds->native_mode.vdisplay =
1993		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
1994		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
1995			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
1996		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
1997			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
1998		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
1999			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2000		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2001			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2002		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2003			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2004		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2005			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2006		lvds->panel_pwr_delay =
2007		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2008		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2009
2010		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2011		if (misc & ATOM_VSYNC_POLARITY)
2012			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2013		if (misc & ATOM_HSYNC_POLARITY)
2014			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2015		if (misc & ATOM_COMPOSITESYNC)
2016			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2017		if (misc & ATOM_INTERLACE)
2018			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2019		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2020			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2021
2022		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2023		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2024
2025		/* set crtc values */
2026		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2027
2028		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2029
2030		encoder->native_mode = lvds->native_mode;
2031
		/* ENUM_ID 2 selects the second link (link B) of the encoder */
		lvds->linkb = (encoder_enum == 2);
2036
2037		/* parse the lcd record table */
2038		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2039			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2040			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2041			bool bad_record = false;
2042			u8 *record;
2043
2044			if ((frev == 1) && (crev < 2))
2045				/* absolute */
2046				record = (u8 *)(mode_info->atom_context->bios +
2047						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2048			else
2049				/* relative */
2050				record = (u8 *)(mode_info->atom_context->bios +
2051						data_offset +
2052						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2053			while (*record != ATOM_RECORD_END_TYPE) {
2054				switch (*record) {
2055				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2056					record += sizeof(ATOM_PATCH_RECORD_MODE);
2057					break;
2058				case LCD_RTS_RECORD_TYPE:
2059					record += sizeof(ATOM_LCD_RTS_RECORD);
2060					break;
2061				case LCD_CAP_RECORD_TYPE:
2062					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2063					break;
2064				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2065					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2066					if (fake_edid_record->ucFakeEDIDLength) {
2067						struct edid *edid;
2068						int edid_size =
2069							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2070						edid = kmalloc(edid_size, GFP_KERNEL);
2071						if (edid) {
2072							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2073							       fake_edid_record->ucFakeEDIDLength);
2074
2075							if (drm_edid_is_valid(edid)) {
2076								adev->mode_info.bios_hardcoded_edid = edid;
2077								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2078							} else
2079								kfree(edid);
2080						}
2081					}
2082					record += fake_edid_record->ucFakeEDIDLength ?
2083						  struct_size(fake_edid_record,
2084							      ucFakeEDIDString,
2085							      fake_edid_record->ucFakeEDIDLength) :
2086						  /* empty fake edid record must be 3 bytes long */
2087						  sizeof(ATOM_FAKE_EDID_PATCH_RECORD) + 1;
2088					break;
2089				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2090					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					lvds->native_mode.width_mm = le16_to_cpu(panel_res_record->usHSize);
					lvds->native_mode.height_mm = le16_to_cpu(panel_res_record->usVSize);
2093					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2094					break;
2095				default:
2096					DRM_ERROR("Bad LCD record %d\n", *record);
2097					bad_record = true;
2098					break;
2099				}
2100				if (bad_record)
2101					break;
2102			}
2103		}
2104	}
2105	return lvds;
2106}
2107
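/*
 * Allocate the per-encoder digital state used for DIG encoder/transmitter
 * setup; coherent mode is enabled by default and no DIG block is assigned
 * yet.
 */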
2108struct amdgpu_encoder_atom_dig *
2109amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2110{
2111	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2112	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2113
2114	if (!dig)
2115		return NULL;
2116
2117	/* coherent mode by default */
2118	dig->coherent_mode = true;
2119	dig->dig_encoder = -1;
2120
	dig->linkb = (encoder_enum == 2);
2125
2126	return dig;
2127}