185199Sdfr// SPDX-License-Identifier: GPL-2.0
2121415Smarcel// Copyright (C) 2018 Spreadtrum Communications Inc.
385199Sdfr
485199Sdfr#include <linux/clk.h>
585199Sdfr#include <linux/dmaengine.h>
685199Sdfr#include <linux/dma-mapping.h>
785199Sdfr#include <linux/dma/sprd-dma.h>
885199Sdfr#include <linux/interrupt.h>
985199Sdfr#include <linux/io.h>
1085199Sdfr#include <linux/iopoll.h>
1185199Sdfr#include <linux/kernel.h>
1285199Sdfr#include <linux/module.h>
1385199Sdfr#include <linux/of.h>
1485199Sdfr#include <linux/of_dma.h>
1585199Sdfr#include <linux/platform_device.h>
1685199Sdfr#include <linux/pm_runtime.h>
1785199Sdfr#include <linux/spi/spi.h>
1885199Sdfr
1985199Sdfr#define SPRD_SPI_TXD			0x0
2085199Sdfr#define SPRD_SPI_CLKD			0x4
2185199Sdfr#define SPRD_SPI_CTL0			0x8
2285199Sdfr#define SPRD_SPI_CTL1			0xc
2385199Sdfr#define SPRD_SPI_CTL2			0x10
2485199Sdfr#define SPRD_SPI_CTL3			0x14
2585199Sdfr#define SPRD_SPI_CTL4			0x18
2685199Sdfr#define SPRD_SPI_CTL5			0x1c
2785199Sdfr#define SPRD_SPI_INT_EN			0x20
2885199Sdfr#define SPRD_SPI_INT_CLR		0x24
2985199Sdfr#define SPRD_SPI_INT_RAW_STS		0x28
3085199Sdfr#define SPRD_SPI_INT_MASK_STS		0x2c
3185199Sdfr#define SPRD_SPI_STS1			0x30
3285199Sdfr#define SPRD_SPI_STS2			0x34
3385199Sdfr#define SPRD_SPI_DSP_WAIT		0x38
34115378Smarcel#define SPRD_SPI_STS3			0x3c
3585199Sdfr#define SPRD_SPI_CTL6			0x40
3685199Sdfr#define SPRD_SPI_STS4			0x44
3785199Sdfr#define SPRD_SPI_FIFO_RST		0x48
38121415Smarcel#define SPRD_SPI_CTL7			0x4c
39121415Smarcel#define SPRD_SPI_STS5			0x50
4085199Sdfr#define SPRD_SPI_CTL8			0x54
41139554Smarcel#define SPRD_SPI_CTL9			0x58
42139554Smarcel#define SPRD_SPI_CTL10			0x5c
43115378Smarcel#define SPRD_SPI_CTL11			0x60
44115378Smarcel#define SPRD_SPI_CTL12			0x64
45139554Smarcel#define SPRD_SPI_STS6			0x68
46139554Smarcel#define SPRD_SPI_STS7			0x6c
47139554Smarcel#define SPRD_SPI_STS8			0x70
48115378Smarcel#define SPRD_SPI_STS9			0x74
49121415Smarcel
50121415Smarcel/* Bits & mask definition for register CTL0 */
5185199Sdfr#define SPRD_SPI_SCK_REV		BIT(13)
52121415Smarcel#define SPRD_SPI_NG_TX			BIT(1)
5385199Sdfr#define SPRD_SPI_NG_RX			BIT(0)
54121415Smarcel#define SPRD_SPI_CHNL_LEN_MASK		GENMASK(4, 0)
55121415Smarcel#define SPRD_SPI_CSN_MASK		GENMASK(11, 8)
56121415Smarcel#define SPRD_SPI_CS0_VALID		BIT(8)
57121415Smarcel
58121415Smarcel/* Bits & mask definition for register SPI_INT_EN */
59121415Smarcel#define SPRD_SPI_TX_END_INT_EN		BIT(8)
60121415Smarcel#define SPRD_SPI_RX_END_INT_EN		BIT(9)
61121415Smarcel
62121415Smarcel/* Bits & mask definition for register SPI_INT_RAW_STS */
63121415Smarcel#define SPRD_SPI_TX_END_RAW		BIT(8)
64121415Smarcel#define SPRD_SPI_RX_END_RAW		BIT(9)
65121415Smarcel
66121415Smarcel/* Bits & mask definition for register SPI_INT_CLR */
67121415Smarcel#define SPRD_SPI_TX_END_CLR		BIT(8)
68121415Smarcel#define SPRD_SPI_RX_END_CLR		BIT(9)
69121415Smarcel
7085199Sdfr/* Bits & mask definition for register INT_MASK_STS */
7185199Sdfr#define SPRD_SPI_MASK_RX_END		BIT(9)
72121415Smarcel#define SPRD_SPI_MASK_TX_END		BIT(8)
73121415Smarcel
7485199Sdfr/* Bits & mask definition for register STS2 */
75121415Smarcel#define SPRD_SPI_TX_BUSY		BIT(8)
76133888Sarun
7785199Sdfr/* Bits & mask definition for register CTL1 */
78121415Smarcel#define SPRD_SPI_RX_MODE		BIT(12)
79121415Smarcel#define SPRD_SPI_TX_MODE		BIT(13)
80121415Smarcel#define SPRD_SPI_RTX_MD_MASK		GENMASK(13, 12)
81133888Sarun
82133888Sarun/* Bits & mask definition for register CTL2 */
83121415Smarcel#define SPRD_SPI_DMA_EN			BIT(6)
84121415Smarcel
85121415Smarcel/* Bits & mask definition for register CTL4 */
86121415Smarcel#define SPRD_SPI_START_RX		BIT(9)
87121415Smarcel#define SPRD_SPI_ONLY_RECV_MASK		GENMASK(8, 0)
88121415Smarcel
89121415Smarcel/* Bits & mask definition for register SPI_INT_CLR */
90121415Smarcel#define SPRD_SPI_RX_END_INT_CLR		BIT(9)
91121415Smarcel#define SPRD_SPI_TX_END_INT_CLR		BIT(8)
92121415Smarcel
93121415Smarcel/* Bits & mask definition for register SPI_INT_RAW */
94121415Smarcel#define SPRD_SPI_RX_END_IRQ		BIT(9)
95121415Smarcel#define SPRD_SPI_TX_END_IRQ		BIT(8)
96121415Smarcel
97121415Smarcel/* Bits & mask definition for register CTL12 */
98121415Smarcel#define SPRD_SPI_SW_RX_REQ		BIT(0)
99121415Smarcel#define SPRD_SPI_SW_TX_REQ		BIT(1)
100121415Smarcel
101121415Smarcel/* Bits & mask definition for register CTL7 */
102121415Smarcel#define SPRD_SPI_DATA_LINE2_EN		BIT(15)
103121415Smarcel#define SPRD_SPI_MODE_MASK		GENMASK(5, 3)
104121415Smarcel#define SPRD_SPI_MODE_OFFSET		3
105121415Smarcel#define SPRD_SPI_3WIRE_MODE		4
106121415Smarcel#define SPRD_SPI_4WIRE_MODE		0
10785199Sdfr
10885199Sdfr/* Bits & mask definition for register CTL8 */
109121449Smarcel#define SPRD_SPI_TX_MAX_LEN_MASK	GENMASK(19, 0)
110121449Smarcel#define SPRD_SPI_TX_LEN_H_MASK		GENMASK(3, 0)
111121449Smarcel#define SPRD_SPI_TX_LEN_H_OFFSET	16
112121449Smarcel
113121449Smarcel/* Bits & mask definition for register CTL9 */
114121449Smarcel#define SPRD_SPI_TX_LEN_L_MASK		GENMASK(15, 0)
115121449Smarcel
116121449Smarcel/* Bits & mask definition for register CTL10 */
117121449Smarcel#define SPRD_SPI_RX_MAX_LEN_MASK	GENMASK(19, 0)
118121449Smarcel#define SPRD_SPI_RX_LEN_H_MASK		GENMASK(3, 0)
119121449Smarcel#define SPRD_SPI_RX_LEN_H_OFFSET	16
120121449Smarcel
121121449Smarcel/* Bits & mask definition for register CTL11 */
122121449Smarcel#define SPRD_SPI_RX_LEN_L_MASK		GENMASK(15, 0)
123121449Smarcel
124121449Smarcel/* Default & maximum word delay cycles */
125121449Smarcel#define SPRD_SPI_MIN_DELAY_CYCLE	14
12685199Sdfr#define SPRD_SPI_MAX_DELAY_CYCLE	130
127121415Smarcel
12885199Sdfr#define SPRD_SPI_FIFO_SIZE		32
129121415Smarcel#define SPRD_SPI_CHIP_CS_NUM		0x4
130121415Smarcel#define SPRD_SPI_CHNL_LEN		2
131139554Smarcel#define SPRD_SPI_DEFAULT_SOURCE		26000000
132121415Smarcel#define SPRD_SPI_MAX_SPEED_HZ		48000000
133139554Smarcel#define SPRD_SPI_AUTOSUSPEND_DELAY	100
134121415Smarcel#define SPRD_SPI_DMA_STEP		8
135121415Smarcel
136121415Smarcelenum sprd_spi_dma_channel {
13785199Sdfr	SPRD_SPI_RX,
138121415Smarcel	SPRD_SPI_TX,
139122162Smarcel	SPRD_SPI_MAX,
140122162Smarcel};
141122162Smarcel
142122162Smarcelstruct sprd_spi_dma {
143122162Smarcel	bool enable;
144122162Smarcel	struct dma_chan *dma_chan[SPRD_SPI_MAX];
145122162Smarcel	enum dma_slave_buswidth width;
146121933Smarcel	u32 fragmens_len;
147121933Smarcel	u32 rx_len;
148121933Smarcel};
149121933Smarcel
150121933Smarcelstruct sprd_spi {
151121933Smarcel	void __iomem *base;
152121933Smarcel	phys_addr_t phy_base;
153121415Smarcel	struct device *dev;
154121415Smarcel	struct clk *clk;
155121415Smarcel	int irq;
156121415Smarcel	u32 src_clk;
157115084Smarcel	u32 hw_mode;
158121449Smarcel	u32 trans_len;
159121415Smarcel	u32 trans_mode;
160121415Smarcel	u32 word_delay;
161139554Smarcel	u32 hw_speed_hz;
162121415Smarcel	u32 len;
163121415Smarcel	int status;
164121415Smarcel	struct sprd_spi_dma dma;
165139554Smarcel	struct completion xfer_completion;
166139554Smarcel	const void *tx_buf;
167121415Smarcel	void *rx_buf;
168139554Smarcel	int (*read_bufs)(struct sprd_spi *ss, u32 len);
169139554Smarcel	int (*write_bufs)(struct sprd_spi *ss, u32 len);
170139554Smarcel};
171139554Smarcel
172139554Smarcelstatic u32 sprd_spi_transfer_max_timeout(struct sprd_spi *ss,
173139554Smarcel					 struct spi_transfer *t)
174139554Smarcel{
175139554Smarcel	/*
176139554Smarcel	 * The time spent on transmission of the full FIFO data is the maximum
177139554Smarcel	 * SPI transmission time.
178139554Smarcel	 */
179139554Smarcel	u32 size = t->bits_per_word * SPRD_SPI_FIFO_SIZE;
180139554Smarcel	u32 bit_time_us = DIV_ROUND_UP(USEC_PER_SEC, ss->hw_speed_hz);
181139554Smarcel	u32 total_time_us = size * bit_time_us;
182139554Smarcel	/*
183139554Smarcel	 * There is an interval between data and the data in our SPI hardware,
184122162Smarcel	 * so the total transmission time need add the interval time.
185122162Smarcel	 */
186122162Smarcel	u32 interval_cycle = SPRD_SPI_FIFO_SIZE * ss->word_delay;
187122162Smarcel	u32 interval_time_us = DIV_ROUND_UP(interval_cycle * USEC_PER_SEC,
188122162Smarcel					    ss->src_clk);
189122162Smarcel
190122162Smarcel	return total_time_us + interval_time_us;
191122162Smarcel}
192122162Smarcel
193122162Smarcelstatic int sprd_spi_wait_for_tx_end(struct sprd_spi *ss, struct spi_transfer *t)
194122162Smarcel{
195122162Smarcel	u32 val, us;
196122162Smarcel	int ret;
197122162Smarcel
198122162Smarcel	us = sprd_spi_transfer_max_timeout(ss, t);
199122162Smarcel	ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_INT_RAW_STS, val,
200122162Smarcel					 val & SPRD_SPI_TX_END_IRQ, 0, us);
201122162Smarcel	if (ret) {
202122162Smarcel		dev_err(ss->dev, "SPI error, spi send timeout!\n");
203122162Smarcel		return ret;
204122162Smarcel	}
205139554Smarcel
206139554Smarcel	ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_STS2, val,
207139554Smarcel					 !(val & SPRD_SPI_TX_BUSY), 0, us);
208139554Smarcel	if (ret) {
209139554Smarcel		dev_err(ss->dev, "SPI error, spi busy timeout!\n");
210139554Smarcel		return ret;
211139554Smarcel	}
212139554Smarcel
213139554Smarcel	writel_relaxed(SPRD_SPI_TX_END_INT_CLR, ss->base + SPRD_SPI_INT_CLR);
214139554Smarcel
215139554Smarcel	return 0;
216139554Smarcel}
217139554Smarcel
218139554Smarcelstatic int sprd_spi_wait_for_rx_end(struct sprd_spi *ss, struct spi_transfer *t)
219139554Smarcel{
220139554Smarcel	u32 val, us;
221139554Smarcel	int ret;
222139554Smarcel
223139554Smarcel	us = sprd_spi_transfer_max_timeout(ss, t);
224139554Smarcel	ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_INT_RAW_STS, val,
225139554Smarcel					 val & SPRD_SPI_RX_END_IRQ, 0, us);
226139554Smarcel	if (ret) {
227139554Smarcel		dev_err(ss->dev, "SPI error, spi rx timeout!\n");
228139554Smarcel		return ret;
229121415Smarcel	}
230121415Smarcel
23185199Sdfr	writel_relaxed(SPRD_SPI_RX_END_INT_CLR, ss->base + SPRD_SPI_INT_CLR);
23285199Sdfr
233121415Smarcel	return 0;
234121415Smarcel}
235121415Smarcel
236121415Smarcelstatic void sprd_spi_tx_req(struct sprd_spi *ss)
237115084Smarcel{
238121449Smarcel	writel_relaxed(SPRD_SPI_SW_TX_REQ, ss->base + SPRD_SPI_CTL12);
239121415Smarcel}
240121415Smarcel
241121415Smarcelstatic void sprd_spi_rx_req(struct sprd_spi *ss)
242121415Smarcel{
243140891Smarcel	writel_relaxed(SPRD_SPI_SW_RX_REQ, ss->base + SPRD_SPI_CTL12);
244140891Smarcel}
245140891Smarcel
246140891Smarcelstatic void sprd_spi_enter_idle(struct sprd_spi *ss)
247121415Smarcel{
248121415Smarcel	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL1);
249121449Smarcel
250121449Smarcel	val &= ~SPRD_SPI_RTX_MD_MASK;
25185199Sdfr	writel_relaxed(val, ss->base + SPRD_SPI_CTL1);
252115084Smarcel}
25385199Sdfr
25485199Sdfrstatic void sprd_spi_set_transfer_bits(struct sprd_spi *ss, u32 bits)
25585199Sdfr{
256121415Smarcel	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
25785199Sdfr
258121415Smarcel	/* Set the valid bits for every transaction */
259121415Smarcel	val &= ~(SPRD_SPI_CHNL_LEN_MASK << SPRD_SPI_CHNL_LEN);
260121415Smarcel	val |= bits << SPRD_SPI_CHNL_LEN;
26185199Sdfr	writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
262121415Smarcel}
263121415Smarcel
26485199Sdfrstatic void sprd_spi_set_tx_length(struct sprd_spi *ss, u32 length)
265121415Smarcel{
266121415Smarcel	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL8);
267121415Smarcel
268121415Smarcel	length &= SPRD_SPI_TX_MAX_LEN_MASK;
26985199Sdfr	val &= ~SPRD_SPI_TX_LEN_H_MASK;
27085199Sdfr	val |= length >> SPRD_SPI_TX_LEN_H_OFFSET;
27185199Sdfr	writel_relaxed(val, ss->base + SPRD_SPI_CTL8);
272139554Smarcel
273139554Smarcel	val = length & SPRD_SPI_TX_LEN_L_MASK;
274139554Smarcel	writel_relaxed(val, ss->base + SPRD_SPI_CTL9);
275139554Smarcel}
276139554Smarcel
277139554Smarcelstatic void sprd_spi_set_rx_length(struct sprd_spi *ss, u32 length)
278139554Smarcel{
279139554Smarcel	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL10);
28085199Sdfr
281139554Smarcel	length &= SPRD_SPI_RX_MAX_LEN_MASK;
282121415Smarcel	val &= ~SPRD_SPI_RX_LEN_H_MASK;
28385199Sdfr	val |= length >> SPRD_SPI_RX_LEN_H_OFFSET;
284121415Smarcel	writel_relaxed(val, ss->base + SPRD_SPI_CTL10);
285121415Smarcel
28685199Sdfr	val = length & SPRD_SPI_RX_LEN_L_MASK;
287121415Smarcel	writel_relaxed(val, ss->base + SPRD_SPI_CTL11);
28885199Sdfr}
289121415Smarcel
290121415Smarcelstatic void sprd_spi_chipselect(struct spi_device *sdev, bool cs)
291121415Smarcel{
292121415Smarcel	struct spi_controller *sctlr = sdev->controller;
293121415Smarcel	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
294121415Smarcel	u32 val;
295121415Smarcel
29685199Sdfr	val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
297121415Smarcel	/*  The SPI controller will pull down CS pin if cs is 0 */
29885199Sdfr	if (!cs) {
299121415Smarcel		val &= ~SPRD_SPI_CS0_VALID;
300121415Smarcel		writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
301121415Smarcel	} else {
302121415Smarcel		val |= SPRD_SPI_CSN_MASK;
303121415Smarcel		writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
304121415Smarcel	}
30585685Sdfr}
306121415Smarcel
30785199Sdfrstatic int sprd_spi_write_only_receive(struct sprd_spi *ss, u32 len)
308{
309	u32 val;
310
311	/* Clear the start receive bit and reset receive data number */
312	val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
313	val &= ~(SPRD_SPI_START_RX | SPRD_SPI_ONLY_RECV_MASK);
314	writel_relaxed(val, ss->base + SPRD_SPI_CTL4);
315
316	/* Set the receive data length */
317	val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
318	val |= len & SPRD_SPI_ONLY_RECV_MASK;
319	writel_relaxed(val, ss->base + SPRD_SPI_CTL4);
320
321	/* Trigger to receive data */
322	val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
323	val |= SPRD_SPI_START_RX;
324	writel_relaxed(val, ss->base + SPRD_SPI_CTL4);
325
326	return len;
327}
328
329static int sprd_spi_write_bufs_u8(struct sprd_spi *ss, u32 len)
330{
331	u8 *tx_p = (u8 *)ss->tx_buf;
332	int i;
333
334	for (i = 0; i < len; i++)
335		writeb_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
336
337	ss->tx_buf += i;
338	return i;
339}
340
341static int sprd_spi_write_bufs_u16(struct sprd_spi *ss, u32 len)
342{
343	u16 *tx_p = (u16 *)ss->tx_buf;
344	int i;
345
346	for (i = 0; i < len; i++)
347		writew_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
348
349	ss->tx_buf += i << 1;
350	return i << 1;
351}
352
353static int sprd_spi_write_bufs_u32(struct sprd_spi *ss, u32 len)
354{
355	u32 *tx_p = (u32 *)ss->tx_buf;
356	int i;
357
358	for (i = 0; i < len; i++)
359		writel_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
360
361	ss->tx_buf += i << 2;
362	return i << 2;
363}
364
365static int sprd_spi_read_bufs_u8(struct sprd_spi *ss, u32 len)
366{
367	u8 *rx_p = (u8 *)ss->rx_buf;
368	int i;
369
370	for (i = 0; i < len; i++)
371		rx_p[i] = readb_relaxed(ss->base + SPRD_SPI_TXD);
372
373	ss->rx_buf += i;
374	return i;
375}
376
377static int sprd_spi_read_bufs_u16(struct sprd_spi *ss, u32 len)
378{
379	u16 *rx_p = (u16 *)ss->rx_buf;
380	int i;
381
382	for (i = 0; i < len; i++)
383		rx_p[i] = readw_relaxed(ss->base + SPRD_SPI_TXD);
384
385	ss->rx_buf += i << 1;
386	return i << 1;
387}
388
389static int sprd_spi_read_bufs_u32(struct sprd_spi *ss, u32 len)
390{
391	u32 *rx_p = (u32 *)ss->rx_buf;
392	int i;
393
394	for (i = 0; i < len; i++)
395		rx_p[i] = readl_relaxed(ss->base + SPRD_SPI_TXD);
396
397	ss->rx_buf += i << 2;
398	return i << 2;
399}
400
/*
 * PIO transfer path: move the transfer in chunks of at most one FIFO
 * (SPRD_SPI_FIFO_SIZE words) at a time.  Returns the number of bytes
 * written (TX mode) or read (RX-only), or a negative errno on timeout.
 */
static int sprd_spi_txrx_bufs(struct spi_device *sdev, struct spi_transfer *t)
{
	struct sprd_spi *ss = spi_controller_get_devdata(sdev->controller);
	u32 trans_len = ss->trans_len, len;
	int ret, write_size = 0, read_size = 0;

	while (trans_len) {
		/* Cap each chunk at the FIFO depth */
		len = trans_len > SPRD_SPI_FIFO_SIZE ? SPRD_SPI_FIFO_SIZE :
			trans_len;
		if (ss->trans_mode & SPRD_SPI_TX_MODE) {
			sprd_spi_set_tx_length(ss, len);
			write_size += ss->write_bufs(ss, len);

			/*
			 * For our 3 wires mode or dual TX line mode, we need
			 * to request the controller to transfer.
			 */
			if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
				sprd_spi_tx_req(ss);

			ret = sprd_spi_wait_for_tx_end(ss, t);
		} else {
			sprd_spi_set_rx_length(ss, len);

			/*
			 * For our 3 wires mode or dual TX line mode, we need
			 * to request the controller to read.
			 */
			if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
				sprd_spi_rx_req(ss);
			else
				/* RX-only: ->write_bufs is the only-receive trigger */
				write_size += ss->write_bufs(ss, len);

			ret = sprd_spi_wait_for_rx_end(ss, t);
		}

		if (ret)
			goto complete;

		if (ss->trans_mode & SPRD_SPI_RX_MODE)
			read_size += ss->read_bufs(ss, len);

		trans_len -= len;
	}

	if (ss->trans_mode & SPRD_SPI_TX_MODE)
		ret = write_size;
	else
		ret = read_size;
complete:
	sprd_spi_enter_idle(ss);

	return ret;
}
455
456static void sprd_spi_irq_enable(struct sprd_spi *ss)
457{
458	u32 val;
459
460	/* Clear interrupt status before enabling interrupt. */
461	writel_relaxed(SPRD_SPI_TX_END_CLR | SPRD_SPI_RX_END_CLR,
462		ss->base + SPRD_SPI_INT_CLR);
463	/* Enable SPI interrupt only in DMA mode. */
464	val = readl_relaxed(ss->base + SPRD_SPI_INT_EN);
465	writel_relaxed(val | SPRD_SPI_TX_END_INT_EN |
466		       SPRD_SPI_RX_END_INT_EN,
467		       ss->base + SPRD_SPI_INT_EN);
468}
469
/* Mask every SPI interrupt source */
static void sprd_spi_irq_disable(struct sprd_spi *ss)
{
	writel_relaxed(0, ss->base + SPRD_SPI_INT_EN);
}
474
/* Gate the controller's DMA request generation via CTL2.DMA_EN */
static void sprd_spi_dma_enable(struct sprd_spi *ss, bool enable)
{
	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL2);

	if (enable)
		val |= SPRD_SPI_DMA_EN;
	else
		val &= ~SPRD_SPI_DMA_EN;

	writel_relaxed(val, ss->base + SPRD_SPI_CTL2);
}
486
487static int sprd_spi_dma_submit(struct dma_chan *dma_chan,
488			       struct dma_slave_config *c,
489			       struct sg_table *sg,
490			       enum dma_transfer_direction dir)
491{
492	struct dma_async_tx_descriptor *desc;
493	dma_cookie_t cookie;
494	unsigned long flags;
495	int ret;
496
497	ret = dmaengine_slave_config(dma_chan, c);
498	if (ret < 0)
499		return ret;
500
501	flags = SPRD_DMA_FLAGS(SPRD_DMA_CHN_MODE_NONE, SPRD_DMA_NO_TRG,
502			       SPRD_DMA_FRAG_REQ, SPRD_DMA_TRANS_INT);
503	desc = dmaengine_prep_slave_sg(dma_chan, sg->sgl, sg->nents, dir, flags);
504	if (!desc)
505		return  -ENODEV;
506
507	cookie = dmaengine_submit(desc);
508	if (dma_submit_error(cookie))
509		return dma_submit_error(cookie);
510
511	dma_async_issue_pending(dma_chan);
512
513	return 0;
514}
515
/*
 * Submit the RX DMA (device-to-memory from the SPI data register),
 * bursting dma.fragmens_len units.  On success returns ss->dma.rx_len,
 * which the caller set to the DMA-covered byte count beforehand.
 */
static int sprd_spi_dma_rx_config(struct sprd_spi *ss, struct spi_transfer *t)
{
	struct dma_chan *dma_chan = ss->dma.dma_chan[SPRD_SPI_RX];
	struct dma_slave_config config = {
		.src_addr = ss->phy_base,
		.src_addr_width = ss->dma.width,
		.dst_addr_width = ss->dma.width,
		.dst_maxburst = ss->dma.fragmens_len,
	};
	int ret;

	ret = sprd_spi_dma_submit(dma_chan, &config, &t->rx_sg, DMA_DEV_TO_MEM);
	if (ret)
		return ret;

	return ss->dma.rx_len;
}
533
/*
 * Submit the TX DMA (memory-to-device into the SPI data register),
 * bursting dma.fragmens_len units.  Returns t->len on success.
 */
static int sprd_spi_dma_tx_config(struct sprd_spi *ss, struct spi_transfer *t)
{
	struct dma_chan *dma_chan = ss->dma.dma_chan[SPRD_SPI_TX];
	struct dma_slave_config config = {
		.dst_addr = ss->phy_base,
		.src_addr_width = ss->dma.width,
		.dst_addr_width = ss->dma.width,
		.src_maxburst = ss->dma.fragmens_len,
	};
	int ret;

	ret = sprd_spi_dma_submit(dma_chan, &config, &t->tx_sg, DMA_MEM_TO_DEV);
	if (ret)
		return ret;

	return t->len;
}
551
/*
 * Acquire the "rx_chn" and "tx_chn" DMA channels.  If the TX request
 * fails the already-acquired RX channel is released; errors go through
 * dev_err_probe() so -EPROBE_DEFER stays quiet.
 */
static int sprd_spi_dma_request(struct sprd_spi *ss)
{
	ss->dma.dma_chan[SPRD_SPI_RX] = dma_request_chan(ss->dev, "rx_chn");
	if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPRD_SPI_RX]))
		return dev_err_probe(ss->dev, PTR_ERR(ss->dma.dma_chan[SPRD_SPI_RX]),
				     "request RX DMA channel failed!\n");

	ss->dma.dma_chan[SPRD_SPI_TX]  = dma_request_chan(ss->dev, "tx_chn");
	if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPRD_SPI_TX])) {
		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_RX]);
		return dev_err_probe(ss->dev, PTR_ERR(ss->dma.dma_chan[SPRD_SPI_TX]),
				     "request TX DMA channel failed!\n");
	}

	return 0;
}
568
569static void sprd_spi_dma_release(struct sprd_spi *ss)
570{
571	if (ss->dma.dma_chan[SPRD_SPI_RX])
572		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_RX]);
573
574	if (ss->dma.dma_chan[SPRD_SPI_TX])
575		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_TX]);
576}
577
/*
 * DMA transfer path: arm the end-of-transfer interrupts, queue the TX
 * and/or RX DMA descriptors, enable the controller's DMA requests and
 * sleep until sprd_spi_handle_irq() signals xfer_completion.  Returns
 * the number of bytes moved or a negative errno.
 */
static int sprd_spi_dma_txrx_bufs(struct spi_device *sdev,
				  struct spi_transfer *t)
{
	struct sprd_spi *ss = spi_controller_get_devdata(sdev->controller);
	u32 trans_len = ss->trans_len;
	int ret, write_size = 0;

	reinit_completion(&ss->xfer_completion);
	sprd_spi_irq_enable(ss);
	if (ss->trans_mode & SPRD_SPI_TX_MODE) {
		write_size = sprd_spi_dma_tx_config(ss, t);
		sprd_spi_set_tx_length(ss, trans_len);

		/*
		 * For our 3 wires mode or dual TX line mode, we need
		 * to request the controller to transfer.
		 */
		if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
			sprd_spi_tx_req(ss);
	} else {
		sprd_spi_set_rx_length(ss, trans_len);

		/*
		 * For our 3 wires mode or dual TX line mode, we need
		 * to request the controller to read.
		 */
		if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
			sprd_spi_rx_req(ss);
		else
			/* RX-only: ->write_bufs is the only-receive trigger */
			write_size = ss->write_bufs(ss, trans_len);
	}

	if (write_size < 0) {
		ret = write_size;
		dev_err(ss->dev, "failed to write, ret = %d\n", ret);
		goto trans_complete;
	}

	if (ss->trans_mode & SPRD_SPI_RX_MODE) {
		/*
		 * Set up the DMA receive data length, which must be an
		 * integral multiple of fragment length. But when the length
		 * of received data is less than fragment length, DMA can be
		 * configured to receive data according to the actual length
		 * of received data.
		 */
		ss->dma.rx_len = t->len > ss->dma.fragmens_len ?
			(t->len - t->len % ss->dma.fragmens_len) :
			 t->len;
		ret = sprd_spi_dma_rx_config(ss, t);
		if (ret < 0) {
			dev_err(&sdev->dev,
				"failed to configure rx DMA, ret = %d\n", ret);
			goto trans_complete;
		}
	}

	sprd_spi_dma_enable(ss, true);
	wait_for_completion(&(ss->xfer_completion));

	if (ss->trans_mode & SPRD_SPI_TX_MODE)
		ret = write_size;
	else
		ret = ss->dma.rx_len;

trans_complete:
	sprd_spi_dma_enable(ss, false);
	sprd_spi_enter_idle(ss);
	sprd_spi_irq_disable(ss);

	return ret;
}
650
651static void sprd_spi_set_speed(struct sprd_spi *ss, u32 speed_hz)
652{
653	/*
654	 * From SPI datasheet, the prescale calculation formula:
655	 * prescale = SPI source clock / (2 * SPI_freq) - 1;
656	 */
657	u32 clk_div = DIV_ROUND_UP(ss->src_clk, speed_hz << 1) - 1;
658
659	/* Save the real hardware speed */
660	ss->hw_speed_hz = (ss->src_clk >> 1) / (clk_div + 1);
661	writel_relaxed(clk_div, ss->base + SPRD_SPI_CLKD);
662}
663
/*
 * Program per-transfer controller state: clock phase/polarity,
 * inter-word interval, FIFO reset and wire mode.  Only word delays in
 * SPI_DELAY_UNIT_SCK are supported; anything else returns -EINVAL.
 */
static int sprd_spi_init_hw(struct sprd_spi *ss, struct spi_transfer *t)
{
	struct spi_delay *d = &t->word_delay;
	u16 word_delay, interval;
	u32 val;

	if (d->unit != SPI_DELAY_UNIT_SCK)
		return -EINVAL;

	val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
	val &= ~(SPRD_SPI_SCK_REV | SPRD_SPI_NG_TX | SPRD_SPI_NG_RX);
	/* Set default chip selection, clock phase and clock polarity */
	val |= ss->hw_mode & SPI_CPHA ? SPRD_SPI_NG_RX : SPRD_SPI_NG_TX;
	val |= ss->hw_mode & SPI_CPOL ? SPRD_SPI_SCK_REV : 0;
	writel_relaxed(val, ss->base + SPRD_SPI_CTL0);

	/*
	 * Set the intervals of two SPI frames, and the inteval calculation
	 * formula as below per datasheet:
	 * interval time (source clock cycles) = interval * 4 + 10.
	 */
	word_delay = clamp_t(u16, d->value, SPRD_SPI_MIN_DELAY_CYCLE,
			     SPRD_SPI_MAX_DELAY_CYCLE);
	interval = DIV_ROUND_UP(word_delay - 10, 4);
	/* Keep the effective cycle count for timeout calculations */
	ss->word_delay = interval * 4 + 10;
	writel_relaxed(interval, ss->base + SPRD_SPI_CTL5);

	/* Reset SPI fifo */
	writel_relaxed(1, ss->base + SPRD_SPI_FIFO_RST);
	writel_relaxed(0, ss->base + SPRD_SPI_FIFO_RST);

	/* Set SPI work mode */
	val = readl_relaxed(ss->base + SPRD_SPI_CTL7);
	val &= ~SPRD_SPI_MODE_MASK;

	if (ss->hw_mode & SPI_3WIRE)
		val |= SPRD_SPI_3WIRE_MODE << SPRD_SPI_MODE_OFFSET;
	else
		val |= SPRD_SPI_4WIRE_MODE << SPRD_SPI_MODE_OFFSET;

	if (ss->hw_mode & SPI_TX_DUAL)
		val |= SPRD_SPI_DATA_LINE2_EN;
	else
		val &= ~SPRD_SPI_DATA_LINE2_EN;

	writel_relaxed(val, ss->base + SPRD_SPI_CTL7);

	return 0;
}
713
/*
 * Per-transfer setup: capture the buffers, program the hardware for the
 * transfer's mode/speed/word size, and select the PIO accessors and DMA
 * parameters matching the (rounded-up) bits_per_word.
 * Returns 0 or a negative errno.
 */
static int sprd_spi_setup_transfer(struct spi_device *sdev,
				   struct spi_transfer *t)
{
	struct sprd_spi *ss = spi_controller_get_devdata(sdev->controller);
	u8 bits_per_word = t->bits_per_word;
	u32 val, mode = 0;
	int ret;

	ss->len = t->len;
	ss->tx_buf = t->tx_buf;
	ss->rx_buf = t->rx_buf;

	ss->hw_mode = sdev->mode;
	ret = sprd_spi_init_hw(ss, t);
	if (ret)
		return ret;

	/* Set tansfer speed and valid bits */
	sprd_spi_set_speed(ss, t->speed_hz);
	sprd_spi_set_transfer_bits(ss, bits_per_word);

	/* Round word size up to a supported register access width */
	if (bits_per_word > 16)
		bits_per_word = round_up(bits_per_word, 16);
	else
		bits_per_word = round_up(bits_per_word, 8);

	switch (bits_per_word) {
	case 8:
		/* trans_len is counted in words of this size */
		ss->trans_len = t->len;
		ss->read_bufs = sprd_spi_read_bufs_u8;
		ss->write_bufs = sprd_spi_write_bufs_u8;
		ss->dma.width = DMA_SLAVE_BUSWIDTH_1_BYTE;
		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP;
		break;
	case 16:
		ss->trans_len = t->len >> 1;
		ss->read_bufs = sprd_spi_read_bufs_u16;
		ss->write_bufs = sprd_spi_write_bufs_u16;
		ss->dma.width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 1;
		break;
	case 32:
		ss->trans_len = t->len >> 2;
		ss->read_bufs = sprd_spi_read_bufs_u32;
		ss->write_bufs = sprd_spi_write_bufs_u32;
		ss->dma.width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 2;
		break;
	default:
		return -EINVAL;
	}

	/* Set transfer read or write mode */
	val = readl_relaxed(ss->base + SPRD_SPI_CTL1);
	val &= ~SPRD_SPI_RTX_MD_MASK;
	if (t->tx_buf)
		mode |= SPRD_SPI_TX_MODE;
	if (t->rx_buf)
		mode |= SPRD_SPI_RX_MODE;

	writel_relaxed(val | mode, ss->base + SPRD_SPI_CTL1);

	ss->trans_mode = mode;

	/*
	 * If in only receive mode, we need to trigger the SPI controller to
	 * receive data automatically.
	 */
	if (ss->trans_mode == SPRD_SPI_RX_MODE)
		ss->write_bufs = sprd_spi_write_only_receive;

	return 0;
}
787
/*
 * ->transfer_one callback: set up the transfer, run it via DMA or PIO,
 * and map the returned byte count to 0 (complete) or -EREMOTEIO (short)
 * for the SPI core.
 */
static int sprd_spi_transfer_one(struct spi_controller *sctlr,
				 struct spi_device *sdev,
				 struct spi_transfer *t)
{
	int ret;

	ret = sprd_spi_setup_transfer(sdev, t);
	if (ret)
		goto setup_err;

	if (sctlr->can_dma(sctlr, sdev, t))
		ret = sprd_spi_dma_txrx_bufs(sdev, t);
	else
		ret = sprd_spi_txrx_bufs(sdev, t);

	/* A positive return is the byte count actually transferred */
	if (ret == t->len)
		ret = 0;
	else if (ret >= 0)
		ret = -EREMOTEIO;

setup_err:
	spi_finalize_current_transfer(sctlr);

	return ret;
}
813
/*
 * Interrupt handler used by the DMA path (interrupts are enabled only
 * there).  TX end completes TX transfers; RX end first drains by PIO
 * any tail bytes that the DMA rx_len round-down left behind, then
 * completes the transfer.
 */
static irqreturn_t sprd_spi_handle_irq(int irq, void *data)
{
	struct sprd_spi *ss = (struct sprd_spi *)data;
	u32 val = readl_relaxed(ss->base + SPRD_SPI_INT_MASK_STS);

	if (val & SPRD_SPI_MASK_TX_END) {
		writel_relaxed(SPRD_SPI_TX_END_CLR, ss->base + SPRD_SPI_INT_CLR);
		/* In TX+RX mode, wait for the RX end instead */
		if (!(ss->trans_mode & SPRD_SPI_RX_MODE))
			complete(&ss->xfer_completion);

		return IRQ_HANDLED;
	}

	if (val & SPRD_SPI_MASK_RX_END) {
		writel_relaxed(SPRD_SPI_RX_END_CLR, ss->base + SPRD_SPI_INT_CLR);
		/* Read out by PIO whatever DMA did not cover */
		if (ss->dma.rx_len < ss->len) {
			ss->rx_buf += ss->dma.rx_len;
			ss->dma.rx_len +=
				ss->read_bufs(ss, ss->len - ss->dma.rx_len);
		}
		complete(&ss->xfer_completion);

		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
841
/* Look up the platform IRQ and install sprd_spi_handle_irq() for it */
static int sprd_spi_irq_init(struct platform_device *pdev, struct sprd_spi *ss)
{
	int ret;

	ss->irq = platform_get_irq(pdev, 0);
	if (ss->irq < 0)
		return ss->irq;

	ret = devm_request_irq(&pdev->dev, ss->irq, sprd_spi_handle_irq,
				0, pdev->name, ss);
	if (ret)
		dev_err(&pdev->dev, "failed to request spi irq %d, ret = %d\n",
			ss->irq, ret);

	return ret;
}
858
/*
 * Resolve the "spi" (mux), "source" (parent) and "enable" (gate)
 * clocks.  Only "enable" is mandatory; if the mux/parent are missing or
 * reparenting fails, src_clk falls back to the 26 MHz default.
 */
static int sprd_spi_clk_init(struct platform_device *pdev, struct sprd_spi *ss)
{
	struct clk *clk_spi, *clk_parent;

	clk_spi = devm_clk_get(&pdev->dev, "spi");
	if (IS_ERR(clk_spi)) {
		dev_warn(&pdev->dev, "can't get the spi clock\n");
		clk_spi = NULL;
	}

	clk_parent = devm_clk_get(&pdev->dev, "source");
	if (IS_ERR(clk_parent)) {
		dev_warn(&pdev->dev, "can't get the source clock\n");
		clk_parent = NULL;
	}

	ss->clk = devm_clk_get(&pdev->dev, "enable");
	if (IS_ERR(ss->clk)) {
		dev_err(&pdev->dev, "can't get the enable clock\n");
		return PTR_ERR(ss->clk);
	}

	if (!clk_set_parent(clk_spi, clk_parent))
		ss->src_clk = clk_get_rate(clk_spi);
	else
		ss->src_clk = SPRD_SPI_DEFAULT_SOURCE;

	return 0;
}
888
889static bool sprd_spi_can_dma(struct spi_controller *sctlr,
890			     struct spi_device *spi, struct spi_transfer *t)
891{
892	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
893
894	return ss->dma.enable && (t->len > SPRD_SPI_FIFO_SIZE);
895}
896
/*
 * Try to acquire the DMA channels.  -EPROBE_DEFER is propagated; any
 * other failure just disables DMA and the driver falls back to PIO.
 */
static int sprd_spi_dma_init(struct platform_device *pdev, struct sprd_spi *ss)
{
	int ret;

	ret = sprd_spi_dma_request(ss);
	if (ret) {
		if (ret == -EPROBE_DEFER)
			return ret;

		dev_warn(&pdev->dev,
			 "failed to request dma, enter no dma mode, ret = %d\n",
			 ret);

		return 0;
	}

	ss->dma.enable = true;

	return 0;
}
917
918static int sprd_spi_probe(struct platform_device *pdev)
919{
920	struct spi_controller *sctlr;
921	struct resource *res;
922	struct sprd_spi *ss;
923	int ret;
924
925	pdev->id = of_alias_get_id(pdev->dev.of_node, "spi");
926	sctlr = spi_alloc_host(&pdev->dev, sizeof(*ss));
927	if (!sctlr)
928		return -ENOMEM;
929
930	ss = spi_controller_get_devdata(sctlr);
931	ss->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
932	if (IS_ERR(ss->base)) {
933		ret = PTR_ERR(ss->base);
934		goto free_controller;
935	}
936
937	ss->phy_base = res->start;
938	ss->dev = &pdev->dev;
939	sctlr->dev.of_node = pdev->dev.of_node;
940	sctlr->mode_bits = SPI_CPOL | SPI_CPHA | SPI_3WIRE | SPI_TX_DUAL;
941	sctlr->bus_num = pdev->id;
942	sctlr->set_cs = sprd_spi_chipselect;
943	sctlr->transfer_one = sprd_spi_transfer_one;
944	sctlr->can_dma = sprd_spi_can_dma;
945	sctlr->auto_runtime_pm = true;
946	sctlr->max_speed_hz = min_t(u32, ss->src_clk >> 1,
947				    SPRD_SPI_MAX_SPEED_HZ);
948
949	init_completion(&ss->xfer_completion);
950	platform_set_drvdata(pdev, sctlr);
951	ret = sprd_spi_clk_init(pdev, ss);
952	if (ret)
953		goto free_controller;
954
955	ret = sprd_spi_irq_init(pdev, ss);
956	if (ret)
957		goto free_controller;
958
959	ret = sprd_spi_dma_init(pdev, ss);
960	if (ret)
961		goto free_controller;
962
963	ret = clk_prepare_enable(ss->clk);
964	if (ret)
965		goto release_dma;
966
967	ret = pm_runtime_set_active(&pdev->dev);
968	if (ret < 0)
969		goto disable_clk;
970
971	pm_runtime_set_autosuspend_delay(&pdev->dev,
972					 SPRD_SPI_AUTOSUSPEND_DELAY);
973	pm_runtime_use_autosuspend(&pdev->dev);
974	pm_runtime_enable(&pdev->dev);
975	ret = pm_runtime_get_sync(&pdev->dev);
976	if (ret < 0) {
977		dev_err(&pdev->dev, "failed to resume SPI controller\n");
978		goto err_rpm_put;
979	}
980
981	ret = devm_spi_register_controller(&pdev->dev, sctlr);
982	if (ret)
983		goto err_rpm_put;
984
985	pm_runtime_mark_last_busy(&pdev->dev);
986	pm_runtime_put_autosuspend(&pdev->dev);
987
988	return 0;
989
990err_rpm_put:
991	pm_runtime_put_noidle(&pdev->dev);
992	pm_runtime_disable(&pdev->dev);
993disable_clk:
994	clk_disable_unprepare(ss->clk);
995release_dma:
996	sprd_spi_dma_release(ss);
997free_controller:
998	spi_controller_put(sctlr);
999
1000	return ret;
1001}
1002
/*
 * Teardown: resume the controller so the hardware can be quiesced,
 * suspend the SPI core queue, release DMA channels and the clock, then
 * disable runtime PM.
 */
static void sprd_spi_remove(struct platform_device *pdev)
{
	struct spi_controller *sctlr = platform_get_drvdata(pdev);
	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
	int ret;

	ret = pm_runtime_get_sync(ss->dev);
	if (ret < 0)
		dev_err(ss->dev, "failed to resume SPI controller\n");

	spi_controller_suspend(sctlr);

	/* Only touch DMA and the clock if the resume above succeeded */
	if (ret >= 0) {
		if (ss->dma.enable)
			sprd_spi_dma_release(ss);
		clk_disable_unprepare(ss->clk);
	}
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
}
1023
1024static int __maybe_unused sprd_spi_runtime_suspend(struct device *dev)
1025{
1026	struct spi_controller *sctlr = dev_get_drvdata(dev);
1027	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
1028
1029	if (ss->dma.enable)
1030		sprd_spi_dma_release(ss);
1031
1032	clk_disable_unprepare(ss->clk);
1033
1034	return 0;
1035}
1036
/*
 * Runtime resume: re-enable the clock and, when DMA mode is in use,
 * re-acquire the DMA channels (gating the clock again on failure).
 */
static int __maybe_unused sprd_spi_runtime_resume(struct device *dev)
{
	struct spi_controller *sctlr = dev_get_drvdata(dev);
	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
	int ret;

	ret = clk_prepare_enable(ss->clk);
	if (ret)
		return ret;

	if (!ss->dma.enable)
		return 0;

	ret = sprd_spi_dma_request(ss);
	if (ret)
		clk_disable_unprepare(ss->clk);

	return ret;
}
1056
/* Runtime PM callbacks only; no dedicated system sleep handlers */
static const struct dev_pm_ops sprd_spi_pm_ops = {
	SET_RUNTIME_PM_OPS(sprd_spi_runtime_suspend,
			   sprd_spi_runtime_resume, NULL)
};
1061
/* Device-tree compatibles this driver binds to */
static const struct of_device_id sprd_spi_of_match[] = {
	{ .compatible = "sprd,sc9860-spi", },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, sprd_spi_of_match);
1067
/* Platform driver glue */
static struct platform_driver sprd_spi_driver = {
	.driver = {
		.name = "sprd-spi",
		.of_match_table = sprd_spi_of_match,
		.pm = &sprd_spi_pm_ops,
	},
	.probe = sprd_spi_probe,
	.remove_new = sprd_spi_remove,
};

module_platform_driver(sprd_spi_driver);

MODULE_DESCRIPTION("Spreadtrum SPI Controller driver");
MODULE_AUTHOR("Lanqing Liu <lanqing.liu@spreadtrum.com>");
MODULE_LICENSE("GPL v2");
1083