1// SPDX-License-Identifier: GPL-2.0
2/*
3 * Copyright (C) 2021 - 2022, Xilinx Inc.
4 * Copyright (C) 2022 - 2023, Advanced Micro Devices, Inc.
5 *
 * Xilinx DisplayPort (DP) Tx Subsystem driver
7 */
8
9#include <common.h>
10#include <clk.h>
11#include <cpu_func.h>
12#include <dm.h>
13#include <errno.h>
14#include <generic-phy.h>
15#include <stdlib.h>
16#include <video.h>
17#include <wait_bit.h>
18#include <dm/device_compat.h>
19#include <asm/io.h>
20#include <linux/delay.h>
21#include <linux/ioport.h>
22#include <dm/device_compat.h>
23#include <asm/global_data.h>
24
25#include "zynqmp_dpsub.h"
26
27DECLARE_GLOBAL_DATA_PTR;
28
29/* Maximum supported resolution */
30#define WIDTH				1024
31#define HEIGHT				768
32
33static struct dp_dma dp_dma;
34static struct dp_dma_descriptor cur_desc __aligned(256);
35
/* Build the single, statically allocated DPDMA descriptor for the graphics
 * channel. The descriptor's next pointer refers back to itself and
 * LAST_FRAME is set, so the engine replays this one frame buffer forever.
 */
static void dma_init_video_descriptor(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	struct dp_dma_frame_buffer *frame_buffer = &dp_sub->frame_buffer;

	cur_desc.control = DPDMA_DESC_PREAMBLE | DPDMA_DESC_IGNR_DONE |
			   DPDMA_DESC_LAST_FRAME;
	cur_desc.dscr_id = 0;
	cur_desc.xfer_size = frame_buffer->size;
	/* Stride is encoded in 16-byte units in the upper half-word. */
	cur_desc.line_size_stride = ((frame_buffer->stride >> 4) <<
				     DPDMA_DESCRIPTOR_LINE_SIZE_STRIDE_SHIFT) |
				     (frame_buffer->line_size);
	/*
	 * Upper address bits: source-address extension from the frame
	 * buffer, next-descriptor address extension from &cur_desc itself.
	 */
	cur_desc.addr_ext = (((u32)(frame_buffer->address >>
			     DPDMA_DESCRIPTOR_SRC_ADDR_WIDTH) <<
			     DPDMA_DESCRIPTOR_ADDR_EXT_SRC_ADDR_EXT_SHIFT) |
			     (upper_32_bits((u64)&cur_desc)));
	cur_desc.next_desr = lower_32_bits((u64)&cur_desc);
	/*
	 * NOTE(review): the low source-address bits come from gd->fb_base
	 * while the extension bits above come from frame_buffer->address —
	 * presumably both refer to the same buffer; confirm they always match.
	 */
	cur_desc.src_addr = lower_32_bits((u64)gd->fb_base);
}
55
/* Hand the descriptor's physical address to DPDMA channel 3. */
static void dma_set_descriptor_address(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);

	/*
	 * The DPDMA engine fetches the descriptor from memory, so the CPU's
	 * cached copy must be written back before the address is programmed.
	 */
	flush_dcache_range((u64)&cur_desc,
			   ALIGN(((u64)&cur_desc + sizeof(cur_desc)),
				 CONFIG_SYS_CACHELINE_SIZE));
	/* Program the start address, extension (upper) word first. */
	writel(upper_32_bits((u64)&cur_desc), dp_sub->dp_dma->base_addr +
	       DPDMA_CH3_DSCR_STRT_ADDRE);
	writel(lower_32_bits((u64)&cur_desc), dp_sub->dp_dma->base_addr +
	       DPDMA_CH3_DSCR_STRT_ADDR);
}
68
/* Build the graphics descriptor, then point the DPDMA engine at it. */
static void dma_setup_channel(struct udevice *dev)
{
	dma_init_video_descriptor(dev);
	dma_set_descriptor_address(dev);
}
74
75static void dma_set_channel_state(struct udevice *dev)
76{
77	u32 mask = 0, regval = 0;
78	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
79
80	mask = DPDMA_CH_CNTL_EN_MASK | DPDMA_CH_CNTL_PAUSE_MASK;
81	regval = DPDMA_CH_CNTL_EN_MASK;
82
83	clrsetbits_le32(dp_sub->dp_dma->base_addr + DPDMA_CH3_CNTL,
84			mask, regval);
85}
86
87static void dma_trigger(struct udevice *dev)
88{
89	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
90	u32 trigger;
91
92	trigger = DPDMA_GBL_TRG_CH3_MASK;
93	dp_sub->dp_dma->gfx.trigger_status = DPDMA_TRIGGER_DONE;
94	writel(trigger, dp_sub->dp_dma->base_addr + DPDMA_GBL);
95}
96
/* VSync handler: re-arm the graphics channel for the next frame, then
 * acknowledge the interrupt.
 */
static void dma_vsync_intr_handler(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);

	/* Rebuild the descriptor, re-enable the channel, retrigger it. */
	dma_setup_channel(dev);
	dma_set_channel_state(dev);
	dma_trigger(dev);

	/* Clear VSync Interrupt */
	writel(DPDMA_ISR_VSYNC_INT_MASK, dp_sub->dp_dma->base_addr + DPDMA_ISR);
}
108
109/**
110 * wait_phy_ready() - Wait for the DisplayPort PHY to come out of reset
111 * @dev:  The DP device
112 *
113 * Return: 0 if wait succeeded, -ve if error occurred
114 */
115static int wait_phy_ready(struct udevice *dev)
116{
117	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
118	u32 timeout = 100, phy_status;
119	u8 phy_ready_mask =  DP_PHY_STATUS_RESET_LANE_0_DONE_MASK |
120			     DP_PHY_STATUS_GT_PLL_LOCK_MASK;
121
122	/* Wait until the PHY is ready. */
123	do {
124		udelay(20);
125		phy_status = readl(dp_sub->base_addr + DP_PHY_STATUS);
126		phy_status &= phy_ready_mask;
127		/* Protect against an infinite loop. */
128		if (!timeout--)
129			return -ETIMEDOUT;
130	} while (phy_status != phy_ready_mask);
131
132	return 0;
133}
134
135static int init_dp_tx(struct udevice *dev)
136{
137	u32 status, phyval, regval, rate;
138	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
139
140	phyval = readl(dp_sub->base_addr + DP_PHY_CONFIG);
141	writel(DP_SOFT_RESET_EN, dp_sub->base_addr + DP_SOFT_RESET);
142	status = readl(dp_sub->base_addr + DP_SOFT_RESET);
143	writel(DP_DISABLE, dp_sub->base_addr + DP_ENABLE);
144
145	regval = (readl(dp_sub->base_addr + DP_AUX_CLK_DIVIDER) &
146		  ~DP_AUX_CLK_DIVIDER_VAL_MASK) |
147		  (60 << 8) |
148		  (dp_sub->clock / 1000000);
149	writel(regval, dp_sub->base_addr + DP_AUX_CLK_DIVIDER);
150
151	writel(DP_PHY_CLOCK_SELECT_540GBPS, dp_sub->base_addr + DP_PHY_CLOCK_SELECT);
152
153	regval = phyval & ~DP_PHY_CONFIG_GT_ALL_RESET_MASK;
154	writel(regval, dp_sub->base_addr + DP_PHY_CONFIG);
155	status = wait_phy_ready(dev);
156	if (status)
157		return -EINVAL;
158
159	writel(DP_ENABLE, dp_sub->base_addr + DP_ENABLE);
160
161	rate = ~DP_INTR_HPD_PULSE_DETECTED_MASK & ~DP_INTR_HPD_EVENT_MASK
162		& ~DP_INTR_HPD_IRQ_MASK;
163	writel(rate, dp_sub->base_addr + DP_INTR_MASK);
164	return 0;
165}
166
167static int set_nonlive_gfx_format(struct udevice *dev, enum av_buf_video_format format)
168{
169	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
170	struct av_buf_vid_attribute *ptr = (struct av_buf_vid_attribute *)avbuf_supported_formats;
171
172	while (1) {
173		dev_dbg(dev, "Format %d\n", ptr->video_format);
174
175		if (!ptr->video_format)
176			return -EINVAL;
177
178		if (ptr->video_format == format) {
179			dp_sub->non_live_graphics = ptr;
180			break;
181		}
182		ptr++;
183	}
184	dev_dbg(dev, "Video format found. BPP %d\n", dp_sub->non_live_graphics->bpp);
185	return 0;
186}
187
188/* DP dma setup */
189static void set_qos(struct udevice *dev, u8 qos)
190{
191	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
192	u8 index;
193	u32 regval = 0, mask;
194
195	regval = (((u32)qos << DPDMA_CH_CNTL_QOS_DATA_RD_SHIFT) |
196		 ((u32)qos << DPDMA_CH_CNTL_QOS_DSCR_RD_SHIFT) |
197		 ((u32)qos << DPDMA_CH_CNTL_QOS_DSCR_WR_SHIFT));
198
199	mask = DPDMA_CH_CNTL_QOS_DATA_RD_MASK |
200	       DPDMA_CH_CNTL_QOS_DSCR_RD_MASK |
201	       DPDMA_CH_CNTL_QOS_DSCR_WR_MASK;
202	for (index = 0; index <= DPDMA_AUDIO_CHANNEL1; index++) {
203		clrsetbits_le32(dp_sub->dp_dma->base_addr +
204				DPDMA_CH0_CNTL +
205				(DPDMA_CH_OFFSET * (u32)index),
206				mask, regval);
207	}
208}
209
/* Flush the graphics channel buffer and, when @enable is set, re-enable it
 * with the maximum burst length.
 */
static void enable_gfx_buffers(struct udevice *dev, u8 enable)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 regval = 0;

	/* Always flush first so the channel starts from a clean buffer. */
	regval = (0xF << AVBUF_CHBUF3_BURST_LEN_SHIFT) |
			 AVBUF_CHBUF3_FLUSH_MASK;
	writel(regval, dp_sub->base_addr + AVBUF_CHBUF3);
	if (enable) {
		/*
		 * NOTE(review): the enable bit used here is the CHBUF0 mask
		 * although the write targets CHBUF3 — presumably the enable
		 * bit sits at the same position in every CHBUFx register;
		 * confirm against the register map (should likely be an
		 * AVBUF_CHBUF3_EN_MASK).
		 */
		regval = (0xF << AVBUF_CHBUF3_BURST_LEN_SHIFT) |
				 AVBUF_CHBUF0_EN_MASK;
		writel(regval, dp_sub->base_addr + AVBUF_CHBUF3);
	}
}
224
225static void avbuf_video_select(struct udevice *dev, enum av_buf_video_stream vid_stream,
226			       enum av_buf_gfx_stream gfx_stream)
227{
228	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
229
230	dp_sub->av_mode.video_src = vid_stream;
231	dp_sub->av_mode.gfx_src = gfx_stream;
232
233	clrsetbits_le32(dp_sub->base_addr +
234			AVBUF_BUF_OUTPUT_AUD_VID_SELECT,
235			AVBUF_BUF_OUTPUT_AUD_VID_SELECT_VID_STREAM2_SEL_MASK |
236			AVBUF_BUF_OUTPUT_AUD_VID_SELECT_VID_STREAM1_SEL_MASK,
237			vid_stream | gfx_stream);
238}
239
240static void config_gfx_pipeline(struct udevice *dev)
241{
242	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
243	u16 *csc_matrix, *offset_matrix;
244	u32 regval = 0, index = 0, *scaling_factors = NULL;
245	u16 rgb_coeffs[] = { 0x1000, 0x0000, 0x0000,
246			     0x0000, 0x1000, 0x0000,
247			     0x0000, 0x0000, 0x1000 };
248	u16 rgb_offset[] = { 0x0000, 0x0000, 0x0000 };
249	struct av_buf_vid_attribute *video = dp_sub->non_live_graphics;
250
251	scaling_factors = video->sf;
252
253	clrsetbits_le32(dp_sub->base_addr + AVBUF_BUF_FORMAT,
254			AVBUF_BUF_FORMAT_NL_GRAPHX_FORMAT_MASK,
255			(video->value) << AVBUF_BUF_FORMAT_NL_GRAPHX_FORMAT_SHIFT);
256
257	for (index = 0; index < 3; index++) {
258		writel(scaling_factors[index], dp_sub->base_addr +
259		       AVBUF_BUF_GRAPHICS_COMP0_SCALE_FACTOR + (index * 4));
260	}
261	regval = (video->is_rgb << AVBUF_V_BLEND_LAYER0_CONTROL_RGB_MODE_SHIFT) |
262								video->sampling_en;
263	writel(regval, dp_sub->base_addr + AVBUF_V_BLEND_LAYER1_CONTROL);
264
265	if (video->is_rgb) {
266		csc_matrix = rgb_coeffs;
267		offset_matrix = rgb_offset;
268	}
269	/* Program Colorspace conversion coefficients */
270	for (index = 9; index < 12; index++) {
271		writel(offset_matrix[index - 9], dp_sub->base_addr +
272		       AVBUF_V_BLEND_IN2CSC_COEFF0 + (index * 4));
273	}
274
275	/* Program Colorspace conversion matrix */
276	for (index = 0; index < 9; index++) {
277		writel(csc_matrix[index], dp_sub->base_addr +
278		       AVBUF_V_BLEND_IN2CSC_COEFF0 + (index * 4));
279	}
280}
281
282static void set_blender_alpha(struct udevice *dev, u8 alpha, u8 enable)
283{
284	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
285	u32 regval;
286
287	regval = enable;
288	regval |= alpha << AVBUF_V_BLEND_SET_GLOBAL_ALPHA_REG_VALUE_SHIFT;
289	writel(regval, dp_sub->base_addr +
290	       AVBUF_V_BLEND_SET_GLOBAL_ALPHA_REG);
291}
292
293static void config_output_video(struct udevice *dev)
294{
295	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
296	u32 regval = 0, index;
297	u16 rgb_coeffs[] = { 0x1000, 0x0000, 0x0000,
298			     0x0000, 0x1000, 0x0000,
299			     0x0000, 0x0000, 0x1000 };
300	u16 rgb_offset[] = { 0x0000, 0x0000, 0x0000 };
301	u16 *matrix_coeff = rgb_coeffs, *matrix_offset = rgb_offset;
302
303	struct av_buf_vid_attribute *output_video = dp_sub->non_live_graphics;
304
305	regval |= output_video->sampling_en <<
306		  AVBUF_V_BLEND_OUTPUT_VID_FORMAT_EN_DOWNSAMPLE_SHIFT;
307	regval |= output_video->value;
308	writel(regval, dp_sub->base_addr + AVBUF_V_BLEND_OUTPUT_VID_FORMAT);
309
310	for (index = 0; index < 9; index++) {
311		writel(matrix_coeff[index], dp_sub->base_addr +
312		       AVBUF_V_BLEND_RGB2YCBCR_COEFF0 + (index * 4));
313	}
314
315	for (index = 0; index < 3; index++) {
316		writel((matrix_offset[index] <<
317			AVBUF_V_BLEND_LUMA_IN1CSC_OFFSET_POST_OFFSET_SHIFT),
318			dp_sub->base_addr +
319			AVBUF_V_BLEND_LUMA_OUTCSC_OFFSET
320			+ (index * 4));
321	}
322
323	set_blender_alpha(dev, 0, 0);
324}
325
326static void config_msa_sync_clk_mode(struct udevice *dev, u8 enable)
327{
328	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
329	struct main_stream_attributes *msa_config;
330
331	msa_config = &dp_sub->msa_config;
332	msa_config->synchronous_clock_mode = enable;
333
334	if (enable == 1) {
335		msa_config->misc0 |= (1 <<
336				     DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_SHIFT);
337	} else {
338		msa_config->misc0 &= ~(1 <<
339				      DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_SHIFT);
340	}
341}
342
343static void av_buf_soft_reset(struct udevice *dev)
344{
345	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
346
347	writel(AVBUF_BUF_SRST_REG_VID_RST_MASK,
348	       dp_sub->base_addr + AVBUF_BUF_SRST_REG);
349	writel(0, dp_sub->base_addr + AVBUF_BUF_SRST_REG);
350}
351
/* Select the video/audio clock sources and apply a soft reset afterwards. */
static void set_video_clk_source(struct udevice *dev, u8 video_clk, u8 audio_clk)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 regval = 0;

	/*
	 * Non-live on both streams: use the internal video timing source.
	 * Live on either stream: force the PL clock as the video clock
	 * (the timing-source bit is left clear in that case).
	 */
	if (dp_sub->av_mode.video_src != AVBUF_VIDSTREAM1_LIVE &&
	    dp_sub->av_mode.gfx_src != AVBUF_VIDSTREAM2_LIVE_GFX) {
		regval = 1 << AVBUF_BUF_AUD_VID_CLK_SOURCE_VID_TIMING_SRC_SHIFT;
	} else if (dp_sub->av_mode.video_src == AVBUF_VIDSTREAM1_LIVE ||
		   dp_sub->av_mode.gfx_src == AVBUF_VIDSTREAM2_LIVE_GFX) {
		video_clk = AVBUF_PL_CLK;
	}

	regval |= (video_clk << AVBUF_BUF_AUD_VID_CLK_SOURCE_VID_CLK_SRC_SHIFT) |
		  (audio_clk << AVBUF_BUF_AUD_VID_CLK_SOURCE_AUD_CLK_SRC_SHIFT);
	writel(regval, dp_sub->base_addr + AVBUF_BUF_AUD_VID_CLK_SOURCE);

	/* Reset the AV buffer manager after changing the clock source. */
	av_buf_soft_reset(dev);
}
371
372static int init_dpdma_subsys(struct udevice *dev)
373{
374	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
375
376	dp_sub->dp_dma->base_addr = DPDMA_BASE_ADDRESS;
377	dp_sub->dp_dma->gfx.channel.cur = NULL;
378	dp_sub->dp_dma->gfx.trigger_status = DPDMA_TRIGGER_DONE;
379
380	set_qos(dev, 11);
381	return 0;
382}
383
384/**
385 * is_dp_connected() - Check if there is a connected RX device
386 * @dev: The DP device
387 *
388 *
389 * Return: true if a connected RX device was detected, false otherwise
390 */
391static bool is_dp_connected(struct udevice *dev)
392{
393	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
394	u32 status;
395	u8 retries = 0;
396
397	do {
398		status = readl(dp_sub->base_addr +
399				DP_INTERRUPT_SIG_STATE)
400				& DP_INTERRUPT_SIG_STATE_HPD_STATE_MASK;
401
402		if (retries > DP_IS_CONNECTED_MAX_TIMEOUT_COUNT)
403			return 0;
404
405		retries++;
406		udelay(1000);
407	} while (status == 0);
408
409	return 1;
410}
411
/**
 * aux_wait_ready() -  Wait until another request is no longer in progress
 * @dev: The DP device
 *
 * Return: 0 if wait succeeded, -ve if error occurred
 */
static int aux_wait_ready(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 status, timeout = 100;

	/* Poll until the in-progress flag clears; ~100 x 20 us budget. */
	do {
		status = readl(dp_sub->base_addr +
			       DP_INTERRUPT_SIG_STATE);
		if (!timeout--)
			return -ETIMEDOUT;

		udelay(20);
		/*
		 * NOTE(review): a DP_REPLY_STATUS_* mask is tested against
		 * the DP_INTERRUPT_SIG_STATE register — presumably the
		 * request-pending bit occupies the same position in both;
		 * confirm against the register definitions.
		 */
	} while (status & DP_REPLY_STATUS_REPLY_IN_PROGRESS_MASK);

	return 0;
}
434
435/**
436 * aux_wait_reply() - Wait for reply on AUX channel
437 * @dev: The DP device
438 *
439 * Wait for a reply indicating that the most recent AUX request
440 * has been received by the RX device.
441 *
442 * Return: 0 if wait succeeded, -ve if error occurred
443 */
444static int aux_wait_reply(struct udevice *dev)
445{
446	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
447	u32 timeout = DP_AUX_MAX_WAIT, status;
448
449	while (timeout > 0) {
450		status = readl(dp_sub->base_addr + DP_REPLY_STATUS);
451		if (status & DP_REPLY_STATUS_REPLY_ERROR_MASK)
452			return -ETIMEDOUT;
453
454		if ((status & DP_REPLY_STATUS_REPLY_RECEIVED_MASK) &&
455		    !(status & DP_REPLY_STATUS_REQUEST_IN_PROGRESS_MASK) &&
456		    !(status & DP_REPLY_STATUS_REPLY_IN_PROGRESS_MASK)) {
457			return 0;
458		}
459		timeout--;
460		udelay(20);
461	}
462	return -ETIMEDOUT;
463}
464
465/**
466 * aux_request_send() - Send request on the AUX channel
467 * @dev:     The DP device
468 * @request: The request to send
469 *
470 * Submit the supplied AUX request to the RX device over the AUX
471 * channel by writing the command, the destination address, (the write buffer
472 * for write commands), and the data size to the DisplayPort TX core.
473 *
474 * This is the lower-level sending routine, which is called by aux_request().
475 *
476 * Return: 0 if request was sent successfully, -ve on error
477 */
478static int aux_request_send(struct udevice *dev, struct aux_transaction *request)
479{
480	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
481	u32 timeout_count = 0, status;
482	u8 index;
483
484	do {
485		status = readl(dp_sub->base_addr +
486			       DP_REPLY_STATUS);
487
488		udelay(20);
489		timeout_count++;
490		if (timeout_count >= DP_AUX_MAX_TIMEOUT_COUNT)
491			return -ETIMEDOUT;
492
493	} while ((status & DP_REPLY_STATUS_REQUEST_IN_PROGRESS_MASK) ||
494		(status & DP_REPLY_STATUS_REPLY_IN_PROGRESS_MASK));
495	/* Set the address for the request. */
496	writel(request->address, dp_sub->base_addr + DP_AUX_ADDRESS);
497
498	if (request->cmd_code == DP_AUX_CMD_WRITE ||
499	    request->cmd_code == DP_AUX_CMD_I2C_WRITE ||
500	    request->cmd_code == DP_AUX_CMD_I2C_WRITE_MOT) {
501		/* Feed write data into the DisplayPort TX core's write FIFO. */
502		for (index = 0; index < request->num_bytes; index++) {
503			writel(request->data[index],
504			       dp_sub->base_addr +
505			       DP_AUX_WRITE_FIFO);
506		}
507	}
508
509	status = ((request->cmd_code << DP_AUX_CMD_SHIFT) |
510		 ((request->num_bytes - 1) &
511		 DP_AUX_CMD_NBYTES_TRANSFER_MASK));
512
513	/* Submit the command and the data size. */
514	writel(((request->cmd_code << DP_AUX_CMD_SHIFT) |
515		((request->num_bytes - 1) & DP_AUX_CMD_NBYTES_TRANSFER_MASK)),
516		dp_sub->base_addr + DP_AUX_CMD);
517
518	/* Check for a reply from the RX device to the submitted request. */
519	status = aux_wait_reply(dev);
520	if (status)
521		/* Waiting for a reply timed out. */
522		return -ETIMEDOUT;
523
524	/* Analyze the reply. */
525	status = readl(dp_sub->base_addr + DP_AUX_REPLY_CODE);
526	if (status == DP_AUX_REPLY_CODE_DEFER ||
527	    status == DP_AUX_REPLY_CODE_I2C_DEFER) {
528		/* The request was deferred. */
529		return -EAGAIN;
530	} else if (status == DP_AUX_REPLY_CODE_NACK ||
531		   status == DP_AUX_REPLY_CODE_I2C_NACK) {
532		/* The request was not acknowledged. */
533		return -EIO;
534	}
535
536	/* The request was acknowledged. */
537	if (request->cmd_code == DP_AUX_CMD_READ ||
538	    request->cmd_code == DP_AUX_CMD_I2C_READ ||
539	    request->cmd_code == DP_AUX_CMD_I2C_READ_MOT) {
540		/* Wait until all data has been received. */
541		timeout_count = 0;
542		do {
543			status = readl(dp_sub->base_addr +
544				       DP_REPLY_DATA_COUNT);
545			udelay(100);
546			timeout_count++;
547			if (timeout_count >= DP_AUX_MAX_TIMEOUT_COUNT)
548				return -ETIMEDOUT;
549		} while (status != request->num_bytes);
550
551		/* Obtain the read data from the reply FIFO. */
552		for (index = 0; index < request->num_bytes; index++) {
553			request->data[index] = readl(dp_sub->base_addr +
554						     DP_AUX_REPLY_DATA);
555		}
556	}
557	return 0;
558}
559
/**
 * aux_request() - Submit request on the AUX channel
 * @dev:     The DP device
 * @request: The request to submit
 *
 * Submit the supplied AUX request to the RX device over the AUX
 * channel. If waiting for a reply times out, or if the DisplayPort TX core
 * indicates that the request was deferred, the request is sent again (up to a
 * maximum specified by DP_AUX_MAX_DEFER_COUNT|DP_AUX_MAX_TIMEOUT_COUNT).
 *
 * Return: 0 if request was submitted successfully, -ve on error
 */
static int aux_request(struct udevice *dev, struct aux_transaction *request)
{
	u32 status, defer_count = 0, timeout_count = 0;

	do {
		status = aux_wait_ready(dev);
		if (status) {
			/*
			 * The RX device isn't ready yet. Note: continue
			 * skips the udelay(100) below and re-checks the
			 * loop condition immediately.
			 */
			timeout_count++;
			continue;
		}
		/* Send the request. */
		status = aux_request_send(dev, request);
		if (status == -EAGAIN) {
			/* The request was deferred. */
			defer_count++;
		} else if (status == -ETIMEDOUT) {
			/* Waiting for a reply timed out. */
			timeout_count++;
		} else {
			/* Success (0) or a non-retryable error: stop here. */
			return status;
		}

		udelay(100);
	} while ((defer_count < DP_AUX_MAX_DEFER_COUNT) &&
		(timeout_count < DP_AUX_MAX_TIMEOUT_COUNT));

	/* The request was not successfully received by the RX device. */
	return -ETIMEDOUT;
}
602
/**
 * aux_common() - Common (read/write) AUX communication transmission
 * @dev:       The DP device
 * @cmd_type:  Command code of the transaction
 * @address:   The DPCD address of the transaction
 * @num_bytes: Number of bytes in the payload data
 * @data:      The payload data of the AUX command
 *
 * Common sequence of submitting an AUX command for AUX read, AUX write,
 * I2C-over-AUX read, and I2C-over-AUX write transactions. If required, the
 * reads and writes are split into multiple requests, each acting on a maximum
 * of 16 bytes.
 *
 * Return: 0 if OK, -ve on error
 */
static int aux_common(struct udevice *dev, u32 cmd_type, u32 address,
		      u32 num_bytes, u8 *data)
{
	u32 status, bytes_left;
	struct aux_transaction request;

	/* No point issuing AUX traffic without a sink present. */
	if (!is_dp_connected(dev))
		return -ENODEV;

	/*
	 * Set the start address for AUX transactions. For I2C transactions,
	 * this is the address of the I2C bus.
	 */
	request.address = address;
	bytes_left = num_bytes;
	while (bytes_left > 0) {
		request.cmd_code = cmd_type;

		if (cmd_type == DP_AUX_CMD_READ ||
		    cmd_type == DP_AUX_CMD_WRITE) {
			/* Increment address for normal AUX transactions. */
			request.address = address + (num_bytes - bytes_left);
		}

		/* Increment the pointer to the supplied data buffer. */
		request.data = &data[num_bytes - bytes_left];

		/* Each request carries at most 16 bytes. */
		if (bytes_left > 16)
			request.num_bytes = 16;
		else
			request.num_bytes = bytes_left;

		bytes_left -= request.num_bytes;

		if (cmd_type == DP_AUX_CMD_I2C_READ && bytes_left > 0) {
			/*
			 * Middle of a transaction I2C read request. Override
			 * the command code that was set to CmdType.
			 */
			request.cmd_code = DP_AUX_CMD_I2C_READ_MOT;
		} else if (cmd_type == DP_AUX_CMD_I2C_WRITE && bytes_left > 0) {
			/*
			 * Middle of a transaction I2C write request. Override
			 * the command code that was set to CmdType.
			 */
			request.cmd_code = DP_AUX_CMD_I2C_WRITE_MOT;
		}

		status = aux_request(dev, &request);
		if (status)
			return status;
	}
	return 0;
}
672
/**
 * aux_write() - Issue AUX write request
 * @dev:            The DP device
 * @dpcd_address:   The DPCD address to write to
 * @bytes_to_write: Number of bytes to write
 * @write_data:     Buffer containing data to be written
 *
 * Issue a write request over the AUX channel that will write to
 * the RX device's DisplayPort Configuration data (DPCD) address space. The
 * write message will be divided into multiple transactions which write a
 * maximum of 16 bytes each.
 *
 * Return: 0 if write operation was successful, -ve on error
 */
static int aux_write(struct udevice *dev, u32 dpcd_address, u32 bytes_to_write,
		     void *write_data)
{
	return aux_common(dev, DP_AUX_CMD_WRITE, dpcd_address,
			  bytes_to_write, (u8 *)write_data);
}
693
694/**
695 * aux_read() - Issue AUX read request
696 * @dev:           The DP device
697 * @dpcd_address:  The DPCD address to read from
698 * @bytes_to_read: Number of bytes to read
699 * @read_data:     Buffer to receive the read data
700 *
701 * Issue a read request over the AUX channel that will read from the RX
702 * device's DisplayPort Configuration data (DPCD) address space. The read
703 * message will be divided into multiple transactions which read a maximum of
704 * 16 bytes each.
705 *
706 * Return: 0 if read operation was successful, -ve on error
707 */
708static int aux_read(struct udevice *dev, u32 dpcd_address, u32 bytes_to_read, void *read_data)
709{
710	return aux_common(dev, DP_AUX_CMD_READ, dpcd_address,
711			  bytes_to_read, (u8 *)read_data);
712}
713
714static int dp_tx_wakeup(struct udevice *dev)
715{
716	u32 status;
717	u8 aux_data;
718
719	aux_data = 0x1;
720	status = aux_write(dev, DP_DPCD_SET_POWER_DP_PWR_VOLTAGE, 1, &aux_data);
721	if (status)
722		debug("! 1st power wake-up - AUX write failed.\n");
723	status = aux_write(dev, DP_DPCD_SET_POWER_DP_PWR_VOLTAGE, 1, &aux_data);
724	if (status)
725		debug("! 2nd power wake-up - AUX write failed.\n");
726
727	return status;
728}
729
/**
 * enable_main_link() - Switch on main link for a device
 * @dev:    The DP device
 * @enable: 1 to enable the main stream, 0 to disable it
 */
static void enable_main_link(struct udevice *dev, u8 enable)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);

	/* Reset the scrambler. */
	writel(1, dp_sub->base_addr + DP_FORCE_SCRAMBLER_RESET);
	/* Enable the main stream. */
	writel(enable, dp_sub->base_addr + DP_ENABLE_MAIN_STREAM);
}
743
744/**
745 * get_rx_capabilities() - Check if capabilities of RX device are valid for TX
746 *                         device
747 * @dev: The DP device
748 *
749 * Return: 0 if the capabilities of the RX device are valid for the TX device,
750 *         -ve if not, of an error occurred during capability determination
751 */
752static int get_rx_capabilities(struct udevice *dev)
753{
754	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
755	u8 rx_max_link_rate, rx_max_lane_count, *dpcd = NULL;
756	u32 status;
757	struct link_config *link_config = NULL;
758
759	dpcd = dp_sub->dpcd_rx_caps;
760	link_config = &dp_sub->link_config;
761
762	status = aux_read(dev, DP_DPCD_RECEIVER_CAP_FIELD_START, 16, dpcd);
763	if (status)
764		return status;
765
766	rx_max_link_rate = dpcd[DP_DPCD_MAX_LINK_RATE];
767	rx_max_lane_count = dpcd[DP_DPCD_MAX_LANE_COUNT] & DP_DPCD_MAX_LANE_COUNT_MASK;
768	link_config->max_link_rate = (rx_max_link_rate > DP_0_LINK_RATE) ?
769				      DP_0_LINK_RATE : rx_max_link_rate;
770	link_config->max_lane_count = (rx_max_lane_count > DP_0_LANE_COUNT) ?
771				       DP_0_LANE_COUNT : rx_max_lane_count;
772	link_config->support_enhanced_framing_mode = dpcd[DP_DPCD_MAX_LANE_COUNT] &
773						     DP_DPCD_ENHANCED_FRAME_SUPPORT_MASK;
774	link_config->support_downspread_control = dpcd[DP_DPCD_MAX_DOWNSPREAD] &
775						  DP_DPCD_MAX_DOWNSPREAD_MASK;
776
777	return 0;
778}
779
780/**
781 * set_enhanced_frame_mode() - Enable/Disable enhanced frame mode
782 * @dev:    The DP device
783 * @enable: Flag to determine whether to enable (1) or disable (0) the enhanced
784 *          frame mode
785 *
786 * Enable or disable the enhanced framing symbol sequence for
787 * both the DisplayPort TX core and the RX device.
788 *
789 * Return: 0 if enabling/disabling the enhanced frame mode was successful, -ve
790 *         on error
791 */
792static int set_enhanced_frame_mode(struct udevice *dev, u8 enable)
793{
794	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
795	u32 status;
796	u8 regval;
797
798	dp_sub->link_config.enhanced_framing_mode = enable;
799	/* Write enhanced frame mode enable to the DisplayPort TX core. */
800	writel(dp_sub->link_config.enhanced_framing_mode,
801	       dp_sub->base_addr + DP_ENHANCED_FRAME_EN);
802
803	/* Preserve the current RX device settings. */
804	status = aux_read(dev, DP_DPCD_LANE_COUNT_SET, 0x1, &regval);
805	if (status)
806		return status;
807
808	if (dp_sub->link_config.enhanced_framing_mode)
809		regval |= DP_DPCD_ENHANCED_FRAME_EN_MASK;
810	else
811		regval &= ~DP_DPCD_ENHANCED_FRAME_EN_MASK;
812
813	/* Write enhanced frame mode enable to the RX device. */
814	return aux_write(dev, DP_DPCD_LANE_COUNT_SET, 0x1, &regval);
815}
816
817/**
818 * set_lane_count() - Set the lane count
819 * @dev:        The DP device
820 * @lane_count: Lane count to set
821 *
822 * Set the number of lanes to be used by the main link for both
823 * the DisplayPort TX core and the RX device.
824 *
825 * Return: 0 if setting the lane count was successful, -ve on error
826 */
827static int set_lane_count(struct udevice *dev, u8 lane_count)
828{
829	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
830	u32 status;
831	u8 regval;
832
833	dp_sub->link_config.lane_count = lane_count;
834	/* Write the new lane count to the DisplayPort TX core. */
835	writel(dp_sub->link_config.lane_count,
836	       dp_sub->base_addr + DP_LANE_COUNT_SET);
837
838	/* Preserve the current RX device settings. */
839	status = aux_read(dev, DP_DPCD_LANE_COUNT_SET, 0x1, &regval);
840	if (status)
841		return status;
842
843	regval &= ~DP_DPCD_LANE_COUNT_SET_MASK;
844	regval |= dp_sub->link_config.lane_count;
845
846	/* Write the new lane count to the RX device. */
847	return aux_write(dev, DP_DPCD_LANE_COUNT_SET, 0x1, &regval);
848}
849
850/**
851 * set_clk_speed() - Set DP phy clock speed
852 * @dev:   The DP device
853 * @speed: The clock frquency to set (one of PHY_CLOCK_SELECT_*)
854 *
855 * Set the clock frequency for the DisplayPort PHY corresponding to a desired
856 * data rate.
857 *
858 * Return: 0 if setting the DP phy clock speed was successful, -ve on error
859 */
860static int set_clk_speed(struct udevice *dev, u32 speed)
861{
862	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
863	u32 regval;
864
865	/* Disable the DisplayPort TX core first. */
866	regval = readl(dp_sub->base_addr + DP_ENABLE);
867	writel(0, dp_sub->base_addr + DP_ENABLE);
868
869	/* Change speed of the feedback clock. */
870	writel(speed, dp_sub->base_addr + DP_PHY_CLOCK_SELECT);
871
872	/* Re-enable the DisplayPort TX core if it was previously enabled. */
873	if (regval)
874		writel(regval, dp_sub->base_addr + DP_ENABLE);
875
876	/* Wait until the PHY is ready. */
877	return wait_phy_ready(dev);
878}
879
880/**
881 * set_link_rate() - Set the link rate
882 * @dev:       The DP device
883 * @link_rate: The link rate to set (one of LINK_BW_SET_*)
884 *
885 * Set the data rate to be used by the main link for both the DisplayPort TX
886 * core and the RX device.
887 *
888 * Return: 0 if setting the link rate was successful, -ve on error
889 */
890static int set_link_rate(struct udevice *dev, u8 link_rate)
891{
892	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
893	u32 status;
894
895	/* Write a corresponding clock frequency to the DisplayPort TX core. */
896	switch (link_rate) {
897	case DP_LINK_BW_SET_162GBPS:
898		status = set_clk_speed(dev, DP_PHY_CLOCK_SELECT_162GBPS);
899		break;
900	case DP_LINK_BW_SET_270GBPS:
901		status = set_clk_speed(dev, DP_PHY_CLOCK_SELECT_270GBPS);
902		break;
903	case DP_LINK_BW_SET_540GBPS:
904		status = set_clk_speed(dev, DP_PHY_CLOCK_SELECT_540GBPS);
905		break;
906	default:
907		status = -EINVAL;
908		break;
909	}
910	if (status)
911		return status;
912
913	dp_sub->link_config.link_rate = link_rate;
914	/* Write new link rate to the DisplayPort TX core. */
915	writel(dp_sub->link_config.link_rate,
916	       dp_sub->base_addr +
917	       DP_LINK_BW_SET);
918
919	/* Write new link rate to the RX device. */
920	return aux_write(dev, DP_DPCD_LINK_BW_SET, 0x1,
921			 &dp_sub->link_config.link_rate);
922}
923
924static int set_downspread(struct udevice *dev, u8 enable)
925{
926	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
927	u32 status;
928	u8 regval;
929
930	dp_sub->link_config.support_downspread_control = enable;
931	/* Write downspread enable to the DisplayPort TX core. */
932	writel(dp_sub->link_config.support_downspread_control,
933	       dp_sub->base_addr + DP_DOWNSPREAD_CTRL);
934
935	/* Preserve the current RX device settings. */
936	status = aux_read(dev, DP_DPCD_DOWNSPREAD_CTRL, 0x1, &regval);
937	if (status)
938		return status;
939
940	if (dp_sub->link_config.support_downspread_control)
941		regval |= DP_DPCD_SPREAD_AMP_MASK;
942	else
943		regval &= ~DP_DPCD_SPREAD_AMP_MASK;
944
945	/* Write downspread enable to the RX device. */
946	return aux_write(dev, DP_DPCD_DOWNSPREAD_CTRL, 0x1, &regval);
947}
948
/* Program the SERDES margining (voltage swing) and de-emphasis registers
 * for every active lane from the current link configuration.
 */
static void set_serdes_vswing_preemp(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u8  index;
	u8  vs_level_rx = dp_sub->link_config.vs_level;
	u8  pe_level_rx = dp_sub->link_config.pe_level;

	/*
	 * NOTE(review): both lookup tables are indexed [pe_level][vs_level];
	 * confirm the vs[][]/pe[][] table layout matches this ordering.
	 */
	for (index = 0; index < dp_sub->link_config.lane_count; index++) {
		/* Write new voltage swing levels to the TX registers. */
		writel(vs[pe_level_rx][vs_level_rx], (ulong)SERDES_BASEADDR +
			SERDES_L0_TX_MARGININGF + index * SERDES_LANE_OFFSET);
		/* Write new pre-emphasis levels to the TX registers. */
		writel(pe[pe_level_rx][vs_level_rx], (ulong)SERDES_BASEADDR +
			SERDES_L0_TX_DEEMPHASIS + index * SERDES_LANE_OFFSET);
	}
}
965
966/**
967 * set_vswing_preemp() - Build AUX data to set voltage swing and pre-emphasis
968 * @dev:      The DP device
969 * @aux_data: Buffer to receive the built AUX data
970 *
971 * Build AUX data to set current voltage swing and pre-emphasis level settings;
972 * the necessary data is taken from the link_config structure.
973 */
974static void set_vswing_preemp(struct udevice *dev, u8 *aux_data)
975{
976	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
977	u8 data = 0;
978	u8 vs_level_rx = dp_sub->link_config.vs_level;
979	u8 pe_level_rx = dp_sub->link_config.pe_level;
980
981	if (vs_level_rx >= DP_MAXIMUM_VS_LEVEL)
982		data |= DP_DPCD_TRAINING_LANEX_SET_MAX_VS_MASK;
983
984	/* The maximum pre-emphasis level has been reached. */
985	if (pe_level_rx >= DP_MAXIMUM_PE_LEVEL)
986		data |= DP_DPCD_TRAINING_LANEX_SET_MAX_PE_MASK;
987
988	/* Set up the data buffer for writing to the RX device. */
989	data |= (pe_level_rx << DP_DPCD_TRAINING_LANEX_SET_PE_SHIFT) |
990		 vs_level_rx;
991	memset(aux_data, data, 4);
992
993	set_serdes_vswing_preemp(dev);
994}
995
996static int set_training_pattern(struct udevice *dev, u32 pattern)
997{
998	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
999	u8 aux_data[5];
1000
1001	writel(pattern, dp_sub->base_addr + TRAINING_PATTERN_SET);
1002
1003	aux_data[0] = pattern;
1004	switch (pattern) {
1005	case TRAINING_PATTERN_SET_OFF:
1006		writel(0, dp_sub->base_addr + SCRAMBLING_DISABLE);
1007		dp_sub->link_config.scrambler_en = 1;
1008		break;
1009	case TRAINING_PATTERN_SET_TP1:
1010	case TRAINING_PATTERN_SET_TP2:
1011	case TRAINING_PATTERN_SET_TP3:
1012		aux_data[0] |= DP_DPCD_TP_SET_SCRAMB_DIS_MASK;
1013		writel(1, dp_sub->base_addr + SCRAMBLING_DISABLE);
1014		dp_sub->link_config.scrambler_en = 0;
1015		break;
1016	default:
1017		break;
1018	}
1019	/*
1020	 * Make the adjustments to both the DisplayPort TX core and the RX
1021	 * device.
1022	 */
1023	set_vswing_preemp(dev, &aux_data[1]);
1024	/*
1025	 * Write the voltage swing and pre-emphasis levels for each lane to the
1026	 * RX device.
1027	 */
1028	if (pattern == TRAINING_PATTERN_SET_OFF)
1029		return aux_write(dev, DP_DPCD_TP_SET, 1, aux_data);
1030	else
1031		return aux_write(dev, DP_DPCD_TP_SET, 5, aux_data);
1032}
1033
1034static int get_lane_status_adj_reqs(struct udevice *dev)
1035{
1036	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1037	u32 status;
1038	u8 aux_data[8];
1039
1040	status = aux_read(dev, DP_DPCD_SINK_COUNT, 8, aux_data);
1041	if (status)
1042		return status;
1043
1044	/* Save XDPPSU_DPCD_SINK_COUNT contents. */
1045	dp_sub->sink_count =
1046		((aux_data[0] & DP_DPCD_SINK_COUNT_HIGH_MASK) >>
1047		DP_DPCD_SINK_COUNT_HIGH_LOW_SHIFT) |
1048		(aux_data[0] & DP_DPCD_SINK_COUNT_LOW_MASK);
1049	memcpy(dp_sub->lane_status_ajd_reqs, &aux_data[2], 6);
1050	return 0;
1051}
1052
1053/**
1054 * check_clock_recovery() - Check clock recovery success
1055 * @dev:        The LogiCore DP TX device in question
1056 * @lane_count: The number of lanes for which to check clock recovery success
1057 *
1058 * Check if the RX device's DisplayPort Configuration data (DPCD) indicates
1059 * that the clock recovery sequence during link training was successful - the
1060 * RX device's link clock and data recovery unit has realized and maintained
1061 * the frequency lock for all lanes currently in use.
1062 *
1063 * Return: 0 if clock recovery was successful on all lanes in question, -ve if
1064 *         not
1065 */
1066static int check_clock_recovery(struct udevice *dev, u8 lane_count)
1067{
1068	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1069	u8 *lane_status = dp_sub->lane_status_ajd_reqs;
1070
1071	switch (lane_count) {
1072	case DP_LANE_COUNT_SET_2:
1073		if (!(lane_status[0] & DP_DPCD_STATUS_LANE_1_CR_DONE_MASK))
1074			return -EINVAL;
1075	case DP_LANE_COUNT_SET_1:
1076		if (!(lane_status[0] & DP_DPCD_STATUS_LANE_0_CR_DONE_MASK))
1077			return -EINVAL;
1078	default:
1079		/* All (LaneCount) lanes have achieved clock recovery. */
1080		break;
1081	}
1082	return 0;
1083}
1084
1085/**
1086 * adj_vswing_preemp() - Adjust voltage swing and pre-emphasis
1087 * @dev: The DP device
1088 *
1089 * Set new voltage swing and pre-emphasis levels using the
1090 * adjustment requests obtained from the RX device.
1091 *
1092 * Return: 0 if voltage swing and pre-emphasis could be adjusted successfully,
1093 *         -ve on error
1094 */
1095static int adj_vswing_preemp(struct udevice *dev)
1096{
1097	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1098	u8 index, vs_level_adj_req[4], pe_level_adj_req[4];
1099	u8 aux_data[4];
1100	u8 *adj_reqs = &dp_sub->lane_status_ajd_reqs[4];
1101
1102	/*
1103	 * Analyze the adjustment requests for changes in voltage swing and
1104	 * pre-emphasis levels.
1105	 */
1106	vs_level_adj_req[0] = adj_reqs[0] & DP_DPCD_ADJ_REQ_LANE_0_2_VS_MASK;
1107	vs_level_adj_req[1] = (adj_reqs[0] & DP_DPCD_ADJ_REQ_LANE_1_3_VS_MASK) >>
1108			      DP_DPCD_ADJ_REQ_LANE_1_3_VS_SHIFT;
1109	pe_level_adj_req[0] = (adj_reqs[0] & DP_DPCD_ADJ_REQ_LANE_0_2_PE_MASK) >>
1110			      DP_DPCD_ADJ_REQ_LANE_0_2_PE_SHIFT;
1111	pe_level_adj_req[1] = (adj_reqs[0] & DP_DPCD_ADJ_REQ_LANE_1_3_PE_MASK) >>
1112			      DP_DPCD_ADJ_REQ_LANE_1_3_PE_SHIFT;
1113
1114	/*
1115	 * Change the drive settings to match the adjustment requests. Use the
1116	 * greatest level requested.
1117	 */
1118	dp_sub->link_config.vs_level = 0;
1119	dp_sub->link_config.pe_level = 0;
1120	for (index = 0; index < dp_sub->link_config.lane_count; index++) {
1121		if (vs_level_adj_req[index] > dp_sub->link_config.vs_level)
1122			dp_sub->link_config.vs_level = vs_level_adj_req[index];
1123
1124		if (pe_level_adj_req[index] > dp_sub->link_config.pe_level)
1125			dp_sub->link_config.pe_level = pe_level_adj_req[index];
1126	}
1127
1128	if (dp_sub->link_config.pe_level > DP_MAXIMUM_PE_LEVEL)
1129		dp_sub->link_config.pe_level = DP_MAXIMUM_PE_LEVEL;
1130
1131	if (dp_sub->link_config.vs_level > DP_MAXIMUM_VS_LEVEL)
1132		dp_sub->link_config.vs_level = DP_MAXIMUM_VS_LEVEL;
1133
1134	if (dp_sub->link_config.pe_level >
1135				(4 - dp_sub->link_config.vs_level)) {
1136		dp_sub->link_config.pe_level =
1137				4 - dp_sub->link_config.vs_level;
1138	}
1139	/*
1140	 * Make the adjustments to both the DisplayPort TX core and the RX
1141	 * device.
1142	 */
1143	set_vswing_preemp(dev, aux_data);
1144	/*
1145	 * Write the voltage swing and pre-emphasis levels for each lane to the
1146	 * RX device.
1147	 */
1148	return aux_write(dev, DP_DPCD_TRAINING_LANE0_SET, 2, aux_data);
1149}
1150
1151/**
1152 * get_training_delay() - Get training delay
1153 * @dev:            The DP device
1154 * @training_state: The training state for which the required training delay
1155 *                  should be queried
1156 *
1157 * Determine what the RX device's required training delay is for
1158 * link training.
1159 *
1160 * Return: The training delay in us
1161 */
1162static u32 get_training_delay(struct udevice *dev)
1163{
1164	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1165	u8 *dpcd = dp_sub->dpcd_rx_caps;
1166
1167	if (dpcd[DP_DPCD_TRAIN_AUX_RD_INTERVAL])
1168		return 400 * dpcd[DP_DPCD_TRAIN_AUX_RD_INTERVAL] * 10;
1169
1170	return 400;
1171}
1172
1173/**
1174 * training_state_clock_recovery() - Run clock recovery part of link training
1175 * @dev: The DP device
1176 *
1177 * Run the clock recovery sequence as part of link training. The
1178 * sequence is as follows:
1179 *
1180 *      0) Start signaling at the minimum voltage swing, pre-emphasis, and
1181 *         post- cursor levels.
1182 *      1) Transmit training pattern 1 over the main link with symbol
1183 *         scrambling disabled.
1184 *      2) The clock recovery loop. If clock recovery is unsuccessful after
1185 *         MaxIterations loop iterations, return.
1186 *      2a) Wait for at least the period of time specified in the RX device's
1187 *          DisplayPort Configuration data (DPCD) register,
1188 *          TRAINING_AUX_RD_INTERVAL.
1189 *      2b) Check if all lanes have achieved clock recovery lock. If so,
1190 *          return.
1191 *      2c) Check if the same voltage swing level has been used 5 consecutive
1192 *          times or if the maximum level has been reached. If so, return.
1193 *      2d) Adjust the voltage swing, pre-emphasis, and post-cursor levels as
1194 *          requested by the RX device.
1195 *      2e) Loop back to 2a.
1196 *
1197 * For a more detailed description of the clock recovery sequence, see section
1198 * 3.5.1.2.1 of the DisplayPort 1.2a specification document.
1199 *
1200 * Return: The next state machine state to advance to
1201 */
1202static enum link_training_states training_state_clock_recovery(struct udevice *dev)
1203{
1204	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1205	u32 status, delay_us;
1206	u8 prev_vs_level = 0, same_vs_level_count  = 0;
1207	struct link_config *link_config = &dp_sub->link_config;
1208
1209	delay_us = get_training_delay(dev);
1210	/* Start CRLock. */
1211	/* Start from minimal voltage swing and pre-emphasis levels. */
1212	dp_sub->link_config.vs_level = 0;
1213	dp_sub->link_config.pe_level = 0;
1214	/* Transmit training pattern 1. */
1215	status = set_training_pattern(dev, TRAINING_PATTERN_SET_TP1);
1216	if (status)
1217		return TS_FAILURE;
1218
1219	while (1) {
1220		/* Wait delay specified in TRAINING_AUX_RD_INTERVAL. */
1221		udelay(delay_us);
1222		/* Get lane and adjustment requests. */
1223		status = get_lane_status_adj_reqs(dev);
1224		if (status)
1225			/* The AUX read failed. */
1226			return TS_FAILURE;
1227
1228		/*
1229		 * Check if all lanes have realized and maintained the frequency
1230		 * lock and get adjustment requests.
1231		 */
1232		status = check_clock_recovery(dev, dp_sub->link_config.lane_count);
1233		if (status == 0)
1234			return TS_CHANNEL_EQUALIZATION;
1235		/*
1236		 * Check if the same voltage swing for each lane has been used 5
1237		 * consecutive times.
1238		 */
1239		if (prev_vs_level == link_config->vs_level) {
1240			same_vs_level_count++;
1241		} else {
1242			same_vs_level_count = 0;
1243			prev_vs_level = link_config->vs_level;
1244		}
1245		if (same_vs_level_count >= 5)
1246			break;
1247
1248		/* Only try maximum voltage swing once. */
1249		if (link_config->vs_level == DP_MAXIMUM_VS_LEVEL)
1250			break;
1251
1252		/* Adjust the drive settings as requested by the RX device. */
1253		status = adj_vswing_preemp(dev);
1254		if (status)
1255			/* The AUX write failed. */
1256			return TS_FAILURE;
1257	}
1258	return TS_ADJUST_LINK_RATE;
1259}
1260
1261/**
1262 * check_channel_equalization() - Check channel equalization success
1263 * @dev:        The DP device
1264 * @lane_count: The number of lanes for which to check channel equalization
1265 *              success
1266 *
1267 * Check if the RX device's DisplayPort Configuration data (DPCD) indicates
1268 * that the channel equalization sequence during link training was successful -
1269 * the RX device has achieved channel equalization, symbol lock, and interlane
1270 * alignment for all lanes currently in use.
1271 *
1272 * Return: 0 if channel equalization was successful on all lanes in question,
1273 *         -ve if not
1274 */
1275static int check_channel_equalization(struct udevice *dev, u8 lane_count)
1276{
1277	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1278	u8 *lane_status = dp_sub->lane_status_ajd_reqs;
1279
1280	/* Check that all LANEx_CHANNEL_EQ_DONE bits are set. */
1281	switch (lane_count) {
1282	case DP_LANE_COUNT_SET_2:
1283		if (!(lane_status[0] & DP_DPCD_STATUS_LANE_1_CE_DONE_MASK))
1284			return -EINVAL;
1285	case DP_LANE_COUNT_SET_1:
1286		if (!(lane_status[0] & DP_DPCD_STATUS_LANE_0_CE_DONE_MASK))
1287			return -EINVAL;
1288	default:
1289		/* All (LaneCount) lanes have achieved channel equalization. */
1290		break;
1291	}
1292
1293	/* Check that all LANEx_SYMBOL_LOCKED bits are set. */
1294	switch (lane_count) {
1295	case DP_LANE_COUNT_SET_2:
1296		if (!(lane_status[0] & DP_DPCD_STATUS_LANE_1_SL_DONE_MASK))
1297			return -EINVAL;
1298	case DP_LANE_COUNT_SET_1:
1299		if (!(lane_status[0] & DP_DPCD_STATUS_LANE_0_SL_DONE_MASK))
1300			return -EINVAL;
1301	default:
1302		/* All (LaneCount) lanes have achieved symbol lock. */
1303		break;
1304	}
1305
1306	/* Check that interlane alignment is done. */
1307	if (!(lane_status[2] & DP_DPCD_LANE_ALIGN_STATUS_UPDATED_IA_DONE_MASK))
1308		return -EINVAL;
1309	return 0;
1310}
1311
1312/**
1313 * training_state_channel_equalization() - Run channel equalization part of
1314 *                                         link training
1315 * @dev: The DP device
1316 *
1317 * Run the channel equalization sequence as part of link
1318 * training. The sequence is as follows:
1319 *
1320 *      0) Start signaling with the same drive settings used at the end of the
1321 *         clock recovery sequence.
1322 *      1) Transmit training pattern 2 (or 3) over the main link with symbol
1323 *         scrambling disabled.
1324 *      2) The channel equalization loop. If channel equalization is
1325 *         unsuccessful after 5 loop iterations, return.
1326 *      2a) Wait for at least the period of time specified in the RX device's
1327 *          DisplayPort Configuration data (DPCD) register,
1328 *          TRAINING_AUX_RD_INTERVAL.
1329 *      2b) Check if all lanes have achieved channel equalization, symbol lock,
1330 *          and interlane alignment. If so, return.
1331 *      2c) Check if the same voltage swing level has been used 5 consecutive
1332 *          times or if the maximum level has been reached. If so, return.
1333 *      2d) Adjust the voltage swing, pre-emphasis, and post-cursor levels as
1334 *          requested by the RX device.
1335 *      2e) Loop back to 2a.
1336 *
1337 * For a more detailed description of the channel equalization sequence, see
1338 * section 3.5.1.2.2 of the DisplayPort 1.2a specification document.
1339 *
1340 * Return: The next state machine state to advance to
1341 */
1342static enum link_training_states training_state_channel_equalization(struct udevice *dev)
1343{
1344	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1345	u32 status, delay_us = 400, iteration_count = 0;
1346
1347	/* Write the current drive settings. */
1348	/* Transmit training pattern 2/3. */
1349	if (dp_sub->dpcd_rx_caps[DP_DPCD_MAX_LANE_COUNT] &
1350						  DP_DPCD_TPS3_SUPPORT_MASK)
1351		status = set_training_pattern(dev, TRAINING_PATTERN_SET_TP3);
1352	else
1353		status = set_training_pattern(dev, TRAINING_PATTERN_SET_TP2);
1354
1355	if (status)
1356		return TS_FAILURE;
1357
1358	while (iteration_count < 5) {
1359		/* Wait delay specified in TRAINING_AUX_RD_INTERVAL. */
1360		udelay(delay_us);
1361
1362		/* Get lane and adjustment requests. */
1363		status = get_lane_status_adj_reqs(dev);
1364		if (status)
1365			/* The AUX read failed. */
1366			return TS_FAILURE;
1367
1368		/* Adjust the drive settings as requested by the RX device. */
1369		status = adj_vswing_preemp(dev);
1370		if (status)
1371			/* The AUX write failed. */
1372			return TS_FAILURE;
1373
1374		/* Check that all lanes still have their clocks locked. */
1375		status = check_clock_recovery(dev, dp_sub->link_config.lane_count);
1376		if (status)
1377			break;
1378		/*
1379		 * Check that all lanes have accomplished channel
1380		 * equalization, symbol lock, and interlane alignment.
1381		 */
1382		status = check_channel_equalization(dev, dp_sub->link_config.lane_count);
1383		if (status == 0)
1384			return TS_SUCCESS;
1385		iteration_count++;
1386	}
1387
1388	/*
1389	 * Tried 5 times with no success. Try a reduced bitrate first, then
1390	 * reduce the number of lanes.
1391	 */
1392	return TS_ADJUST_LINK_RATE;
1393}
1394
1395static int check_lane_align(struct udevice *dev)
1396{
1397	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1398	u8 *lane_status = dp_sub->lane_status_ajd_reqs;
1399
1400	if (!(lane_status[2] & DP_DPCD_LANE_ALIGN_STATUS_UPDATED_IA_DONE_MASK))
1401		return -EINVAL;
1402	return 0;
1403}
1404
1405/**
1406 * check_link_status() - Check status of link
1407 * @dev:        The DP device
1408 * @lane_count: The lane count to use for the check
1409 *
1410 * Check if the receiver's DisplayPort Configuration data (DPCD) indicates the
1411 * receiver has achieved and maintained clock recovery, channel equalization,
1412 * symbol lock, and interlane alignment for all lanes currently in use.
1413 *
1414 * Return: 0 if the link status is OK, -ve if a error occurred during checking
1415 */
1416static int check_link_status(struct udevice *dev, u8 lane_count)
1417{
1418	u32 status;
1419
1420	status = get_lane_status_adj_reqs(dev);
1421	if (status)
1422		/* The AUX read failed. */
1423		return status;
1424
1425	/* Check if the link needs training. */
1426	if ((check_clock_recovery(dev, lane_count) == 0) &&
1427	    (check_channel_equalization(dev, lane_count) == 0) &&
1428	    (check_lane_align(dev) == 0)) {
1429		return 0;
1430	}
1431	return -EINVAL;
1432}
1433
1434/**
1435 * run_training() - Run link training
1436 * @dev: The DP device
1437 *
1438 * Run the link training process. It is implemented as a state machine, with
1439 * each state returning the next state. First, the clock recovery sequence will
1440 * be run; if successful, the channel equalization sequence will run. If either
1441 * the clock recovery or channel equalization sequence failed, the link rate or
1442 * the number of lanes used will be reduced and training will be re-attempted.
1443 * If training fails at the minimal data rate, 1.62 Gbps with a single lane,
1444 * training will no longer re-attempt and fail.
1445 *
1446 * There are undocumented timeout constraints in the link training process. In
1447 * DP v1.2a spec, Chapter 3.5.1.2.2 a 10ms limit for the complete training
1448 * process is mentioned. Which individual timeouts are derived and implemented
1449 * by sink manufacturers is unknown. So each step should be as short as
1450 * possible and link training should start as soon as possible after HPD.
1451 *
1452 * Return: 0 if the training sequence ran successfully, -ve if a error occurred
1453 *         or the training failed
1454 */
1455static int run_training(struct udevice *dev)
1456{
1457	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1458	u32 status;
1459	enum link_training_states training_state = TS_CLOCK_RECOVERY;
1460
1461	while (1) {
1462		switch (training_state) {
1463		case TS_CLOCK_RECOVERY:
1464				training_state = training_state_clock_recovery(dev);
1465			break;
1466		case TS_CHANNEL_EQUALIZATION:
1467			training_state = training_state_channel_equalization(dev);
1468			break;
1469		default:
1470			break;
1471		}
1472
1473		if (training_state == TS_SUCCESS)
1474			break;
1475		else if (training_state == TS_FAILURE)
1476			return -EINVAL;
1477
1478		if (training_state == TS_ADJUST_LANE_COUNT ||
1479		    training_state == TS_ADJUST_LINK_RATE) {
1480			status = set_training_pattern(dev, TRAINING_PATTERN_SET_OFF);
1481			if (status)
1482				return -EINVAL;
1483		}
1484	}
1485
1486	/* Final status check. */
1487	return check_link_status(dev, dp_sub->link_config.lane_count);
1488}
1489
/**
 * reset_dp_phy() - Pulse a reset on the DisplayPort PHY
 * @dev:   The DP device
 * @reset: Mask of DP_PHY_CONFIG reset bits to assert and then release
 *
 * The TX core is disabled for the duration of the reset pulse and re-enabled
 * once the PHY reports ready.
 */
void reset_dp_phy(struct udevice *dev, u32 reset)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 phyval, regval;

	/* Disable the TX core while the PHY is in reset. */
	writel(0, dp_sub->base_addr + DP_ENABLE);
	/* Assert the requested reset bits on top of the current config. */
	phyval = readl(dp_sub->base_addr + DP_PHY_CONFIG);
	regval = phyval | reset;
	writel(regval, dp_sub->base_addr + DP_PHY_CONFIG);
	/* Remove the reset. */
	writel(phyval, dp_sub->base_addr + DP_PHY_CONFIG);
	/* Wait for the PHY to be ready. */
	wait_phy_ready(dev);

	writel(1, dp_sub->base_addr + DP_ENABLE);
}
1506
1507/**
1508 * establish_link() - Establish a link
1509 * @dev: The DP device
1510 *
1511 * Check if the link needs training and run the training sequence if training
1512 * is required.
1513 *
1514 * Return: 0 if the link was established successfully, -ve on error
1515 */
1516static int establish_link(struct udevice *dev)
1517{
1518	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1519	u32 status, re_enable_main_link;
1520
1521	reset_dp_phy(dev, DP_PHY_CONFIG_TX_PHY_8B10BEN_MASK |
1522			  DP_PHY_CONFIG_PHY_RESET_MASK);
1523
1524	re_enable_main_link = readl(dp_sub->base_addr + DP_ENABLE_MAIN_STREAM);
1525	if (re_enable_main_link)
1526		enable_main_link(dev, 0);
1527
1528	status = run_training(dev);
1529	if (status)
1530		return status;
1531
1532	status = set_training_pattern(dev, TRAINING_PATTERN_SET_OFF);
1533	if (status)
1534		return status;
1535
1536	if (re_enable_main_link)
1537		enable_main_link(dev, 1);
1538
1539	return check_link_status(dev, dp_sub->link_config.lane_count);
1540}
1541
/**
 * dp_hpd_train() - Configure the link from RX capabilities and train it
 * @dev: The DP device
 *
 * Read the sink's DPCD capabilities, program enhanced framing, lane count,
 * link rate and downspread from them (or from the driver defaults), then run
 * the full link training sequence.
 *
 * Return: 0 if training succeeded, -ve on error
 */
static int dp_hpd_train(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	struct link_config *link_config = &dp_sub->link_config;
	u32 status;

	status = get_rx_capabilities(dev);
	if (status) {
		debug("! Error getting RX caps.\n");
		return status;
	}

	status = set_enhanced_frame_mode(dev, link_config->support_enhanced_framing_mode ? 1 : 0);
	if (status) {
		debug("! EFM set failed.\n");
		return status;
	}

	/* Use the sink's maximum lane count unless a fixed one is requested. */
	status = set_lane_count(dev, (dp_sub->use_max_lane_count) ?
				link_config->max_lane_count : dp_sub->lane_count);
	if (status) {
		debug("! Lane count set failed.\n");
		return status;
	}

	/* Use the sink's maximum link rate unless a fixed one is requested. */
	status = set_link_rate(dev, (dp_sub->use_max_link_rate) ?
			       link_config->max_link_rate : dp_sub->link_rate);
	if (status) {
		debug("! Link rate set failed.\n");
		return status;
	}

	status = set_downspread(dev, link_config->support_downspread_control);
	if (status) {
		debug("! Setting downspread failed.\n");
		return status;
	}

	debug("Lane count =%d\n", dp_sub->link_config.lane_count);
	debug("Link rate =%d\n",  dp_sub->link_config.link_rate);

	debug("Starting Training...\n");
	status = establish_link(dev);
	if (status == 0)
		debug("! Training succeeded.\n");
	else
		debug("! Training failed.\n");

	return status;
}
1592
/* Arm the graphics DPDMA channel trigger when no descriptor is active. */
static void display_gfx_frame_buffer(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);

	/*
	 * NOTE(review): this only sets the trigger flag; presumably the
	 * actual DMA kick-off happens elsewhere - confirm against the DPDMA
	 * channel setup code.
	 */
	if (!dp_sub->dp_dma->gfx.channel.cur)
		dp_sub->dp_dma->gfx.trigger_status = DPDMA_TRIGGER_EN;
}
1600
1601static void set_color_encode(struct udevice *dev)
1602{
1603	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1604	struct main_stream_attributes *msa_config = &dp_sub->msa_config;
1605
1606	msa_config->y_cb_cr_colorimetry = 0;
1607	msa_config->dynamic_range       = 0;
1608	msa_config->component_format    = 0;
1609	msa_config->misc0               = 0;
1610	msa_config->misc1               = 0;
1611	msa_config->component_format    = DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_RGB;
1612}
1613
/**
 * config_msa_recalculate() - Recompute derived main stream attributes
 * @dev: The DP device
 *
 * Derive the remaining MSA values (N_VID, start positions, MISC0, data per
 * lane, transfer unit sizing and initial wait) from the currently configured
 * video timing, color depth and link configuration.
 */
static void config_msa_recalculate(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 video_bw, link_bw, words_per_line;
	u8 bits_per_pixel;
	struct main_stream_attributes *msa_config;
	struct link_config *link_config;

	msa_config = &dp_sub->msa_config;
	link_config = &dp_sub->link_config;

	msa_config->user_pixel_width = 1;

	/* Compute the rest of the MSA values. */
	msa_config->n_vid = 27 * 1000 * link_config->link_rate;
	/* Active video starts after the sync width plus back porch. */
	msa_config->h_start = msa_config->vid_timing_mode.video_timing.h_sync_width +
			      msa_config->vid_timing_mode.video_timing.h_back_porch;
	msa_config->v_start = msa_config->vid_timing_mode.video_timing.f0_pv_sync_width +
			      msa_config->vid_timing_mode.video_timing.f0_pv_back_porch;

	/* Miscellaneous attributes: encode the bit depth per color. */
	if (msa_config->bits_per_color == 6)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_6BPC;
	else if (msa_config->bits_per_color == 8)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_8BPC;
	else if (msa_config->bits_per_color == 10)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_10BPC;
	else if (msa_config->bits_per_color == 12)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_12BPC;
	else if (msa_config->bits_per_color == 16)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_16BPC;

	msa_config->misc0 <<= DP_MAIN_STREAM_MISC0_BDC_SHIFT;

	/* Need to set this. */
	msa_config->misc0 |= msa_config->component_format <<
			     DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_SHIFT;

	msa_config->misc0 |= msa_config->dynamic_range <<
			     DP_MAIN_STREAM_MISC0_DYNAMIC_RANGE_SHIFT;

	msa_config->misc0 |= msa_config->y_cb_cr_colorimetry <<
			     DP_MAIN_STREAM_MISC0_YCBCR_COLORIMETRY_SHIFT;

	msa_config->misc0 |= msa_config->synchronous_clock_mode;
	/*
	 * Determine the number of bits per pixel for the specified color
	 * component format.
	 */
	if (msa_config->misc1 == DP_MAIN_STREAM_MISC1_Y_ONLY_EN_MASK)
		bits_per_pixel = msa_config->bits_per_color;
	else if (msa_config->component_format ==
			DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_YCBCR422)
		/* YCbCr422 color component format. */
		bits_per_pixel = msa_config->bits_per_color * 2;
	else
		/* RGB or YCbCr 4:4:4 color component format. */
		bits_per_pixel = msa_config->bits_per_color * 3;

	/* Calculate the data per lane (rounded up to 16-bit words). */
	words_per_line = msa_config->vid_timing_mode.video_timing.h_active * bits_per_pixel;
	if (words_per_line % 16)
		words_per_line += 16;

	words_per_line /= 16;
	/* NOTE(review): assumes lane_count is non-zero - confirm callers. */
	msa_config->data_per_lane = words_per_line - link_config->lane_count;
	if (words_per_line % link_config->lane_count)
		msa_config->data_per_lane += (words_per_line % link_config->lane_count);

	/* Allocate a fixed size for single-stream transport (SST) operation. */
	msa_config->transfer_unit_size = 64;

	/*
	 * Calculate the average number of bytes per transfer unit.
	 * Note: Both the integer and the fractional part is stored in
	 * AvgBytesPerTU.
	 */
	video_bw = ((msa_config->pixel_clock_hz / 1000) * bits_per_pixel) / 8;
	link_bw = (link_config->lane_count * link_config->link_rate * 27);
	msa_config->avg_bytes_per_tu = ((10 *
					(video_bw * msa_config->transfer_unit_size)
					/ link_bw) + 5) / 10;
	/*
	 * The number of initial wait cycles at the start of a new line by the
	 * framing logic. This allows enough data to be buffered in the input
	 * FIFO before video is sent.
	 */
	if ((msa_config->avg_bytes_per_tu / 1000) <= 4)
		msa_config->init_wait = 64;
	else
		msa_config->init_wait = msa_config->transfer_unit_size -
					(msa_config->avg_bytes_per_tu / 1000);
}
1707
1708static void set_msa_bpc(struct udevice *dev, u8 bits_per_color)
1709{
1710	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1711
1712	dp_sub->msa_config.bits_per_color = bits_per_color;
1713	/* Calculate the rest of the MSA values. */
1714	config_msa_recalculate(dev);
1715}
1716
1717const struct video_timing_mode *get_video_mode_data(enum video_mode vm_id)
1718{
1719	if (vm_id < VIDC_VM_NUM_SUPPORTED)
1720		return &vidc_video_timing_modes[vm_id];
1721
1722	return NULL;
1723}
1724
1725static u64 get_pixelclk_by_vmid(enum video_mode vm_id)
1726{
1727	const struct video_timing_mode *vm;
1728	u64 clk_hz;
1729
1730	vm = get_video_mode_data(vm_id);
1731	/* For progressive mode, use only frame 0 vertical total. */
1732	clk_hz = vm->video_timing.f0_pv_total;
1733	/* Multiply the number of pixels by the frame rate. */
1734	clk_hz *= vm->frame_rate;
1735
1736	/*
1737	 * Multiply the vertical total by the horizontal total for number of
1738	 * pixels.
1739	 */
1740	clk_hz *= vm->video_timing.h_total;
1741
1742	return clk_hz;
1743}
1744
1745/**
1746 * config_msa_video_mode() - Enable video output
1747 * @dev: The DP device
1748 * @msa: The MSA values to set for the device
1749 *
1750 * Return: 0 if the video was enabled successfully, -ve on error
1751 */
1752static void config_msa_video_mode(struct udevice *dev, enum video_mode videomode)
1753{
1754	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1755	struct main_stream_attributes *msa_config;
1756
1757	msa_config = &dp_sub->msa_config;
1758
1759	/* Configure the MSA values from the display monitor DMT table. */
1760	msa_config->vid_timing_mode.vid_mode = vidc_video_timing_modes[videomode].vid_mode;
1761	msa_config->vid_timing_mode.frame_rate = vidc_video_timing_modes[videomode].frame_rate;
1762	msa_config->vid_timing_mode.video_timing.h_active =
1763				vidc_video_timing_modes[videomode].video_timing.h_active;
1764	msa_config->vid_timing_mode.video_timing.h_front_porch =
1765				vidc_video_timing_modes[videomode].video_timing.h_front_porch;
1766	msa_config->vid_timing_mode.video_timing.h_sync_width =
1767				vidc_video_timing_modes[videomode].video_timing.h_sync_width;
1768	msa_config->vid_timing_mode.video_timing.h_back_porch =
1769				vidc_video_timing_modes[videomode].video_timing.h_back_porch;
1770	msa_config->vid_timing_mode.video_timing.h_total =
1771				vidc_video_timing_modes[videomode].video_timing.h_total;
1772	msa_config->vid_timing_mode.video_timing.h_sync_polarity =
1773			vidc_video_timing_modes[videomode].video_timing.h_sync_polarity;
1774	msa_config->vid_timing_mode.video_timing.v_active =
1775			vidc_video_timing_modes[videomode].video_timing.v_active;
1776	msa_config->vid_timing_mode.video_timing.f0_pv_front_porch =
1777			vidc_video_timing_modes[videomode].video_timing.f0_pv_front_porch;
1778	msa_config->vid_timing_mode.video_timing.f0_pv_sync_width =
1779			vidc_video_timing_modes[videomode].video_timing.f0_pv_sync_width;
1780	msa_config->vid_timing_mode.video_timing.f0_pv_back_porch =
1781			vidc_video_timing_modes[videomode].video_timing.f0_pv_back_porch;
1782	msa_config->vid_timing_mode.video_timing.f0_pv_total =
1783			vidc_video_timing_modes[videomode].video_timing.f0_pv_total;
1784	msa_config->vid_timing_mode.video_timing.f1_v_front_porch =
1785			vidc_video_timing_modes[videomode].video_timing.f1_v_front_porch;
1786	msa_config->vid_timing_mode.video_timing.f1_v_sync_width =
1787			vidc_video_timing_modes[videomode].video_timing.f1_v_sync_width;
1788	msa_config->vid_timing_mode.video_timing.f1_v_back_porch =
1789			vidc_video_timing_modes[videomode].video_timing.f1_v_back_porch;
1790	msa_config->vid_timing_mode.video_timing.f1_v_total =
1791			vidc_video_timing_modes[videomode].video_timing.f1_v_total;
1792	msa_config->vid_timing_mode.video_timing.v_sync_polarity =
1793			vidc_video_timing_modes[videomode].video_timing.v_sync_polarity;
1794	msa_config->pixel_clock_hz = get_pixelclk_by_vmid(msa_config->vid_timing_mode.vid_mode);
1795
1796	/* Calculate the rest of the MSA values. */
1797	config_msa_recalculate(dev);
1798}
1799
/**
 * set_pixel_clock() - Program the VPLL to generate the pixel clock
 * @freq_hz: Desired pixel clock frequency in Hz
 *
 * Compute integer/fractional feedback divider values for the VPLL from the
 * reference clock, program the PLL control/config/fraction registers, pulse
 * the PLL reset, wait for lock, and route the output through the video
 * reference clock dividers.
 */
static void set_pixel_clock(u64 freq_hz)
{
	u64 ext_divider, vco, vco_int_frac;
	u32 pll_assigned, frac_int_fb_div, fraction, regpll = 0;
	/* NOTE(review): 'pll' is written below but never read - dead code? */
	u8 pll;

	pll_assigned = readl(CLK_FPD_BASEADDR + VIDEO_REF_CTRL) & VIDEO_REF_CTRL_SRCSEL_MASK;
	if (pll_assigned)
		pll = VPLL;

	/* VCO runs at twice the externally divided output frequency. */
	ext_divider = PLL_OUT_FREQ / freq_hz;
	vco = freq_hz * ext_divider * 2;
	/* Split the feedback divider into integer and fractional parts. */
	vco_int_frac = (vco * INPUT_FREQ_PRECISION * SHIFT_DECIMAL) /
			AVBUF_INPUT_REF_CLK;
	frac_int_fb_div = vco_int_frac >> PRECISION;
	fraction = vco_int_frac &  AVBUF_DECIMAL;

	/* Program the PLL in bypass while reconfiguring. */
	regpll |= ENABLE_BIT << PLL_CTRL_BYPASS_SHIFT;
	regpll |= frac_int_fb_div << PLL_CTRL_FBDIV_SHIFT;
	regpll |= (1 << PLL_CTRL_DIV2_SHIFT);
	regpll |= (PSS_REF_CLK << PLL_CTRL_PRE_SRC_SHIFT);
	writel(regpll, CLK_FPD_BASEADDR + VPLL_CTRL);

	regpll = 0;
	regpll |= VPLL_CFG_CP << PLL_CFG_CP_SHIFT;
	regpll |= VPLL_CFG_RES << PLL_CFG_RES_SHIFT;
	regpll |= VPLL_CFG_LFHF << PLL_CFG_LFHF_SHIFT;
	regpll |= VPLL_CFG_LOCK_DLY << PLL_CFG_LOCK_DLY_SHIFT;
	regpll |= VPLL_CFG_LOCK_CNT << PLL_CFG_LOCK_CNT_SHIFT;
	writel(regpll, CLK_FPD_BASEADDR + VPLL_CFG);

	/* Enable fractional mode with the computed fraction. */
	regpll = (1U << PLL_FRAC_CFG_ENABLED_SHIFT) |
		 (fraction << PLL_FRAC_CFG_DATA_SHIFT);
	writel(regpll, CLK_FPD_BASEADDR + VPLL_FRAC_CFG);

	clrsetbits_le32(CLK_FPD_BASEADDR + VPLL_CTRL,
			PLL_CTRL_RESET_MASK,
			(ENABLE_BIT << PLL_CTRL_RESET_SHIFT));

	/* Deassert reset to the PLL. */
	clrsetbits_le32(CLK_FPD_BASEADDR + VPLL_CTRL,
			PLL_CTRL_RESET_MASK,
			(DISABLE_BIT << PLL_CTRL_RESET_SHIFT));

	/*
	 * NOTE(review): this lock wait is unbounded - it will hang if the
	 * PLL never locks; consider a timeout.
	 */
	while (!(readl(CLK_FPD_BASEADDR + PLL_STATUS) &
		(1 << PLL_STATUS_VPLL_LOCK)))
		;

	/* Deassert Bypass. */
	clrsetbits_le32(CLK_FPD_BASEADDR + VPLL_CTRL,
			PLL_CTRL_BYPASS_MASK,
			(DISABLE_BIT << PLL_CTRL_BYPASS_SHIFT));
	udelay(1);

	/* Gate the video reference clock while changing the dividers. */
	clrsetbits_le32(CLK_FPD_BASEADDR + VIDEO_REF_CTRL,
			VIDEO_REF_CTRL_CLKACT_MASK,
			(DISABLE_BIT << VIDEO_REF_CTRL_CLKACT_SHIFT));

	clrsetbits_le32(CLK_FPD_BASEADDR + VIDEO_REF_CTRL,
			VIDEO_REF_CTRL_DIVISOR1_MASK,
			(ENABLE_BIT << VIDEO_REF_CTRL_DIVISOR1_SHIFT));

	clrsetbits_le32(CLK_FPD_BASEADDR + VIDEO_REF_CTRL,
			VIDEO_REF_CTRL_DIVISOR0_MASK,
			(ext_divider << VIDEO_REF_CTRL_DIVISOR0_SHIFT));

	/* Re-enable the video reference clock output. */
	clrsetbits_le32(CLK_FPD_BASEADDR + VIDEO_REF_CTRL,
			VIDEO_REF_CTRL_CLKACT_MASK,
			(ENABLE_BIT << VIDEO_REF_CTRL_CLKACT_SHIFT));
}
1870
1871/**
1872 * set_msa_values() - Set MSA values
1873 * @dev: The DP device
1874 *
1875 * Set the main stream attributes registers of the DisplayPort TX
1876 * core with the values specified in the main stream attributes configuration
1877 * structure.
1878 */
1879static void set_msa_values(struct udevice *dev)
1880{
1881	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1882	struct main_stream_attributes *msa_config;
1883
1884	msa_config = &dp_sub->msa_config;
1885
1886	/*
1887	 * Set the main stream attributes to the associated DisplayPort TX core
1888	 * registers.
1889	 */
1890	writel(msa_config->vid_timing_mode.video_timing.h_total,
1891	       dp_sub->base_addr + DP_MAIN_STREAM_HTOTAL);
1892	writel(msa_config->vid_timing_mode.video_timing.f0_pv_total,
1893	       dp_sub->base_addr + DP_MAIN_STREAM_VTOTAL);
1894	writel(msa_config->vid_timing_mode.video_timing.h_sync_polarity |
1895	       (msa_config->vid_timing_mode.video_timing.v_sync_polarity
1896		<< DP_MAIN_STREAM_POLARITY_VSYNC_POL_SHIFT),
1897		dp_sub->base_addr + DP_MAIN_STREAM_POLARITY);
1898	writel(msa_config->vid_timing_mode.video_timing.h_sync_width,
1899	       dp_sub->base_addr + DP_MAIN_STREAM_HSWIDTH);
1900	writel(msa_config->vid_timing_mode.video_timing.f0_pv_sync_width,
1901	       dp_sub->base_addr + DP_MAIN_STREAM_VSWIDTH);
1902	writel(msa_config->vid_timing_mode.video_timing.h_active,
1903	       dp_sub->base_addr + DP_MAIN_STREAM_HRES);
1904	writel(msa_config->vid_timing_mode.video_timing.v_active,
1905	       dp_sub->base_addr + DP_MAIN_STREAM_VRES);
1906	writel(msa_config->h_start, dp_sub->base_addr + DP_MAIN_STREAM_HSTART);
1907	writel(msa_config->v_start, dp_sub->base_addr + DP_MAIN_STREAM_VSTART);
1908	writel(msa_config->misc0, dp_sub->base_addr + DP_MAIN_STREAM_MISC0);
1909	writel(msa_config->misc1, dp_sub->base_addr + DP_MAIN_STREAM_MISC1);
1910	writel(msa_config->pixel_clock_hz / 1000, dp_sub->base_addr + DP_M_VID);
1911	writel(msa_config->n_vid, dp_sub->base_addr + DP_N_VID);
1912	writel(msa_config->user_pixel_width, dp_sub->base_addr + DP_USER_PIXEL_WIDTH);
1913	writel(msa_config->data_per_lane, dp_sub->base_addr + DP_USER_DATA_COUNT_PER_LANE);
1914	/*
1915	 * Set the transfer unit values to the associated DisplayPort TX core
1916	 * registers.
1917	 */
1918	writel(msa_config->transfer_unit_size, dp_sub->base_addr + DP_TU_SIZE);
1919	writel(msa_config->avg_bytes_per_tu / 1000,
1920	       dp_sub->base_addr + DP_MIN_BYTES_PER_TU);
1921	writel((msa_config->avg_bytes_per_tu % 1000) * 1000,
1922	       dp_sub->base_addr + DP_FRAC_BYTES_PER_TU);
1923	writel(msa_config->init_wait, dp_sub->base_addr + DP_INIT_WAIT);
1924}
1925
/*
 * setup_video_stream() - Configure and start the DP TX video stream
 *
 * Programs the color encoding, bits per color and MSA timing for the
 * currently selected video mode, sets the pixel clock accordingly,
 * soft-resets the DP transmitter and the AV buffer manager, writes the
 * MSA registers and finally enables the main link.
 */
static void setup_video_stream(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	struct main_stream_attributes *msa_config = &dp_sub->msa_config;

	set_color_encode(dev);
	set_msa_bpc(dev, dp_sub->bpc);
	config_msa_video_mode(dev, dp_sub->video_mode);

	/* Set pixel clock. */
	dp_sub->pix_clk = msa_config->pixel_clock_hz;
	set_pixel_clock(dp_sub->pix_clk);

	/* Reset the transmitter. */
	writel(1, dp_sub->base_addr + DP_SOFT_RESET);
	udelay(10);
	writel(0, dp_sub->base_addr + DP_SOFT_RESET);

	set_msa_values(dev);

	/* Issuing a soft-reset (AV_BUF_SRST_REG). */
	writel(3, dp_sub->base_addr + AVBUF_BUF_SRST_REG); // Assert reset.
	udelay(10);
	writel(0, dp_sub->base_addr + AVBUF_BUF_SRST_REG); // Deassert reset.

	enable_main_link(dev, 1);

	debug("DONE!\n");
}
1955
1956static int dp_tx_start_link_training(struct udevice *dev)
1957{
1958	u32 status;
1959	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1960
1961	enable_main_link(dev, 0);
1962
1963	if (!is_dp_connected(dev)) {
1964		debug("! Disconnected.\n");
1965		return -ENODEV;
1966	}
1967
1968	status = dp_tx_wakeup(dev);
1969	if (status) {
1970		debug("! Wakeup failed.\n");
1971		return -EIO;
1972	}
1973
1974	do {
1975		mdelay(100);
1976		status = dp_hpd_train(dev);
1977		if (status == -EINVAL) {
1978			debug("Lost connection\n\r");
1979			return -EIO;
1980		} else if (status) {
1981			continue;
1982		}
1983		display_gfx_frame_buffer(dev);
1984		setup_video_stream(dev);
1985		status = check_link_status(dev, dp_sub->link_config.lane_count);
1986		if (status == -EINVAL)
1987			return -EIO;
1988	} while (status != 0);
1989
1990	return 0;
1991}
1992
1993static void init_run_config(struct udevice *dev)
1994{
1995	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1996
1997	dp_sub->dp_dma               = &dp_dma;
1998	dp_sub->video_mode           = VIDC_VM_1024x768_60_P;
1999	dp_sub->bpc                  = VIDC_BPC_8;
2000	dp_sub->color_encode         = DP_CENC_RGB;
2001	dp_sub->use_max_cfg_caps     = 1;
2002	dp_sub->lane_count           = LANE_COUNT_1;
2003	dp_sub->link_rate            = LINK_RATE_540GBPS;
2004	dp_sub->en_sync_clk_mode     = 0;
2005	dp_sub->use_max_lane_count   = 1;
2006	dp_sub->use_max_link_rate    = 1;
2007}
2008
2009static int dpdma_setup(struct udevice *dev)
2010{
2011	int status;
2012	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
2013
2014	writel(DPDMA_ISR_VSYNC_INT_MASK, dp_sub->dp_dma->base_addr + DPDMA_IEN);
2015	status = wait_for_bit_le32((u32 *)dp_sub->dp_dma->base_addr + DPDMA_ISR,
2016				   DPDMA_ISR_VSYNC_INT_MASK, false, 1000, false);
2017	if (status) {
2018		debug("%s: INTR TIMEDOUT\n", __func__);
2019		return status;
2020	}
2021	debug("INTR dma_vsync_intr_handler called...\n");
2022	dma_vsync_intr_handler(dev);
2023
2024	return 0;
2025}
2026
2027static int zynqmp_dpsub_init(struct udevice *dev)
2028{
2029	int status;
2030	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
2031
2032	/* Initialize the dpdma configuration */
2033	status = init_dpdma_subsys(dev);
2034	if (status)
2035		return -EINVAL;
2036
2037	config_msa_sync_clk_mode(dev, dp_sub->en_sync_clk_mode);
2038	set_video_clk_source(dev, AVBUF_PS_CLK, AVBUF_PS_CLK);
2039
2040	return 0;
2041}
2042
2043static int dp_tx_run(struct udevice *dev)
2044{
2045	u32 interrupt_signal_state, interrupt_status, hpd_state, hpd_event;
2046	u32 hpd_pulse_detected, hpd_duration, status;
2047	int attempts = 0;
2048	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
2049
2050	/* Continuously poll for HPD events. */
2051	while (attempts < 5) {
2052		/* Read interrupt registers. */
2053		interrupt_signal_state = readl(dp_sub->base_addr + DP_INTERRUPT_SIG_STATE);
2054		interrupt_status = readl(dp_sub->base_addr + DP_INTR_STATUS);
2055		/* Check for HPD events. */
2056		hpd_state = interrupt_signal_state & DP_INTERRUPT_SIG_STATE_HPD_STATE_MASK;
2057		hpd_event = interrupt_status & DP_INTR_HPD_EVENT_MASK;
2058		hpd_pulse_detected = interrupt_status & DP_INTR_HPD_PULSE_DETECTED_MASK;
2059		if (hpd_pulse_detected)
2060			hpd_duration = readl(dp_sub->base_addr + DP_HPD_DURATION);
2061		else
2062			attempts++;
2063
2064		/* HPD event handling. */
2065		if (hpd_state && hpd_event) {
2066			debug("+===> HPD connection event detected.\n");
2067			/* Initiate link training. */
2068			status = dp_tx_start_link_training(dev);
2069			if (status) {
2070				debug("Link training failed\n");
2071				return status;
2072			}
2073			return 0;
2074		} else if (hpd_state && hpd_pulse_detected && (hpd_duration >= 250)) {
2075			debug("===> HPD pulse detected.\n");
2076			/* Re-train if needed. */
2077			status = dp_tx_start_link_training(dev);
2078			if (status) {
2079				debug("HPD pulse detection failed\n");
2080				return status;
2081			}
2082			return 0;
2083		} else if (!hpd_state && hpd_event) {
2084			debug("+===> HPD disconnection event detected.\n\n");
2085			/* Disable main link. */
2086			enable_main_link(dev, 0);
2087			break;
2088		}
2089	}
2090	return -EINVAL;
2091}
2092
/*
 * zynqmp_dpsub_probe() - Bring up the whole DP subsystem video pipeline
 *
 * Enables the APB clock, initializes the DP TX core and the runtime
 * configuration, programs the non-live graphics pipeline and the video
 * uclass state, trains the link and finally starts the DPDMA transfer.
 * The steps are strictly ordered; do not reorder the calls below.
 */
static int zynqmp_dpsub_probe(struct udevice *dev)
{
	struct video_priv *uc_priv = dev_get_uclass_priv(dev);
	struct zynqmp_dpsub_priv *priv = dev_get_priv(dev);
	struct clk clk;
	int ret;
	int mode = RGBA8888;	/* only 32bpp RGBA is handled below */

	/* The APB interface clock must be running before any register access. */
	ret = clk_get_by_name(dev, "dp_apb_clk", &clk);
	if (ret < 0) {
		dev_err(dev, "failed to get clock\n");
		return ret;
	}

	priv->clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(priv->clock)) {
		dev_err(dev, "failed to get rate\n");
		return priv->clock;
	}

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(dev, "failed to enable clock\n");
		return ret;
	}

	dev_dbg(dev, "Base addr 0x%x, clock %d\n", (u32)priv->base_addr,
		priv->clock);

	/* Initialize the DisplayPort TX core. */
	ret = init_dp_tx(dev);
	if (ret)
		return -EINVAL;

	/* Initialize the runtime configuration */
	init_run_config(dev);
	/* Set the format graphics frame for Video Pipeline */
	ret = set_nonlive_gfx_format(dev, mode);
	if (ret)
		return ret;

	/* bpix = log2(bpp); assumes bpp is a power of two — TODO confirm. */
	uc_priv->bpix = ffs(priv->non_live_graphics->bpp) - 1;
	dev_dbg(dev, "BPP in bits %d, bpix %d\n",
		priv->non_live_graphics->bpp, uc_priv->bpix);

	/* Frame geometry comes from the timing table of the fixed mode. */
	uc_priv->fb = (void *)gd->fb_base;
	uc_priv->xsize = vidc_video_timing_modes[priv->video_mode].video_timing.h_active;
	uc_priv->ysize = vidc_video_timing_modes[priv->video_mode].video_timing.v_active;
	/* Calculated by core but need it for my own setup */
	uc_priv->line_length = uc_priv->xsize * VNBYTES(uc_priv->bpix);
	/* Will be calculated again in video_post_probe() but I need that value now */
	uc_priv->fb_size = uc_priv->line_length * uc_priv->ysize;

	switch (mode) {
	case RGBA8888:
		uc_priv->format = VIDEO_RGBA8888;
		break;
	default:
		debug("Unsupported mode\n");
		return -EINVAL;
	}

	video_set_flush_dcache(dev, true);
	debug("Video: WIDTH[%d]xHEIGHT[%d]xBPP[%d/%d] -- line length %d\n", uc_priv->xsize,
	      uc_priv->ysize, uc_priv->bpix, VNBYTES(uc_priv->bpix), uc_priv->line_length);

	/* Route the non-live graphics stream through the AV buffer manager. */
	enable_gfx_buffers(dev, 1);
	avbuf_video_select(dev, AVBUF_VIDSTREAM1_NONE, AVBUF_VIDSTREAM2_NONLIVE_GFX);
	config_gfx_pipeline(dev);
	config_output_video(dev);

	ret = zynqmp_dpsub_init(dev);
	if (ret)
		return ret;

	/* Populate the FrameBuffer structure with the frame attributes */
	priv->frame_buffer.stride = uc_priv->line_length;
	priv->frame_buffer.line_size = priv->frame_buffer.stride;
	priv->frame_buffer.size = priv->frame_buffer.line_size * uc_priv->ysize;

	/* Train the link, then start the DMA that feeds the frame buffer. */
	ret = dp_tx_run(dev);
	if (ret)
		return ret;

	return dpdma_setup(dev);
}
2179
2180static int zynqmp_dpsub_bind(struct udevice *dev)
2181{
2182	struct video_uc_plat *plat = dev_get_uclass_plat(dev);
2183
2184	/* This is maximum size to allocate - it depends on BPP setting */
2185	plat->size = WIDTH * HEIGHT * 4;
2186	/* plat->align is not defined that's why 1MB alignment is used */
2187
2188	/*
2189	 * plat->base can be used for allocating own location for FB
2190	 * if not defined then it is allocated by u-boot itself
2191	 */
2192
2193	return 0;
2194}
2195
2196static int zynqmp_dpsub_of_to_plat(struct udevice *dev)
2197{
2198	struct zynqmp_dpsub_priv *priv = dev_get_priv(dev);
2199	struct resource res;
2200	int ret;
2201
2202	ret = dev_read_resource_byname(dev, "dp", &res);
2203	if (ret)
2204		return ret;
2205
2206	priv->base_addr = res.start;
2207
2208	return 0;
2209}
2210
/* Devicetree match table: ZynqMP DisplayPort subsystem v1.7. */
static const struct udevice_id zynqmp_dpsub_ids[] = {
	{ .compatible = "xlnx,zynqmp-dpsub-1.7" },
	{ }
};
2215
2216U_BOOT_DRIVER(zynqmp_dpsub_video) = {
2217	.name = "zynqmp_dpsub_video",
2218	.id = UCLASS_VIDEO,
2219	.of_match = zynqmp_dpsub_ids,
2220	.plat_auto = sizeof(struct video_uc_plat),
2221	.bind = zynqmp_dpsub_bind,
2222	.probe = zynqmp_dpsub_probe,
2223	.priv_auto = sizeof(struct zynqmp_dpsub_priv),
2224	.of_to_plat = zynqmp_dpsub_of_to_plat,
2225};
2226