/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2007-2010 Freescale Semiconductor, Inc. All rights reserved.
 *
 * Author:
 *   Zhang Wei <wei.zhang@freescale.com>, Jul 2007
 *   Ebony Zhu <ebony.zhu@freescale.com>, May 2007
 */
#ifndef __DMA_FSLDMA_H
#define __DMA_FSLDMA_H

#include <linux/device.h>
#include <linux/dmapool.h>
#include <linux/dmaengine.h>

/*
 * Register and data structure definitions for the Freescale
 * MPC8540 and MPC8349 DMA controllers.
 */
#define FSL_DMA_MR_CS		0x00000001
#define FSL_DMA_MR_CC		0x00000002
#define FSL_DMA_MR_CA		0x00000008
#define FSL_DMA_MR_EIE		0x00000040
#define FSL_DMA_MR_XFE		0x00000020
#define FSL_DMA_MR_EOLNIE	0x00000100
#define FSL_DMA_MR_EOLSIE	0x00000080
#define FSL_DMA_MR_EOSIE	0x00000200
#define FSL_DMA_MR_CDSM		0x00000010
#define FSL_DMA_MR_CTM		0x00000004
#define FSL_DMA_MR_EMP_EN	0x00200000
#define FSL_DMA_MR_EMS_EN	0x00040000
#define FSL_DMA_MR_DAHE		0x00002000
#define FSL_DMA_MR_SAHE		0x00001000

#define FSL_DMA_MR_SAHTS_MASK	0x0000C000
#define FSL_DMA_MR_DAHTS_MASK	0x00030000
#define FSL_DMA_MR_BWC_MASK	0x0f000000

/*
 * Bandwidth/pause control determines how many bytes a given
 * channel is allowed to transfer before the DMA engine pauses
 * the current channel and switches to the next channel
 */
#define FSL_DMA_MR_BWC         0x0A000000
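
/*
 * Hypothetical helper (illustrative sketch only, not part of the original
 * driver): replace the bandwidth/pause control field of a mode-register
 * value with the default FSL_DMA_MR_BWC setting.
 */
static inline u32 fsl_dma_mr_set_bwc(u32 mr)
{
	/* Clear the existing BWC field, then OR in the default value */
	return (mr & ~FSL_DMA_MR_BWC_MASK) | FSL_DMA_MR_BWC;
}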

/* Special MR definition for MPC8349 */
#define FSL_DMA_MR_EOTIE	0x00000080
#define FSL_DMA_MR_PRC_RM	0x00000800

#define FSL_DMA_SR_CH		0x00000020
#define FSL_DMA_SR_PE		0x00000010
#define FSL_DMA_SR_CB		0x00000004
#define FSL_DMA_SR_TE		0x00000080
#define FSL_DMA_SR_EOSI		0x00000002
#define FSL_DMA_SR_EOLSI	0x00000001
#define FSL_DMA_SR_EOCDI	0x00000001
#define FSL_DMA_SR_EOLNI	0x00000008

#define FSL_DMA_SATR_SBPATMU			0x20000000
#define FSL_DMA_SATR_STRANSINT_RIO		0x00c00000
#define FSL_DMA_SATR_SREADTYPE_SNOOP_READ	0x00050000
#define FSL_DMA_SATR_SREADTYPE_BP_IORH		0x00020000
#define FSL_DMA_SATR_SREADTYPE_BP_NREAD		0x00040000
#define FSL_DMA_SATR_SREADTYPE_BP_MREAD		0x00070000

#define FSL_DMA_DATR_DBPATMU			0x20000000
#define FSL_DMA_DATR_DTRANSINT_RIO		0x00c00000
#define FSL_DMA_DATR_DWRITETYPE_SNOOP_WRITE	0x00050000
#define FSL_DMA_DATR_DWRITETYPE_BP_FLUSH	0x00010000

#define FSL_DMA_EOL		((u64)0x1)
#define FSL_DMA_SNEN		((u64)0x10)
#define FSL_DMA_EOSIE		0x8
#define FSL_DMA_NLDA_MASK	(~(u64)0x1f)

#define FSL_DMA_BCR_MAX_CNT	0x03ffffffu

#define FSL_DMA_DGSR_TE		0x80
#define FSL_DMA_DGSR_CH		0x20
#define FSL_DMA_DGSR_PE		0x10
#define FSL_DMA_DGSR_EOLNI	0x08
#define FSL_DMA_DGSR_CB		0x04
#define FSL_DMA_DGSR_EOSI	0x02
#define FSL_DMA_DGSR_EOLSI	0x01

#define FSL_DMA_BUSWIDTHS	(BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | \
				BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) | \
				BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) | \
				BIT(DMA_SLAVE_BUSWIDTH_8_BYTES))
typedef u64 __bitwise v64;
typedef u32 __bitwise v32;

struct fsl_dma_ld_hw {
	v64 src_addr;
	v64 dst_addr;
	v64 next_ln_addr;
	v32 count;
	v32 reserve;
} __attribute__((aligned(32)));

struct fsl_desc_sw {
	struct fsl_dma_ld_hw hw;
	struct list_head node;
	struct list_head tx_list;
	struct dma_async_tx_descriptor async_tx;
} __attribute__((aligned(32)));

struct fsldma_chan_regs {
	u32 mr;		/* 0x00 - Mode Register */
	u32 sr;		/* 0x04 - Status Register */
	u64 cdar;	/* 0x08 - Current descriptor address register */
	u64 sar;	/* 0x10 - Source Address Register */
	u64 dar;	/* 0x18 - Destination Address Register */
	u32 bcr;	/* 0x20 - Byte Count Register */
	u64 ndar;	/* 0x24 - Next Descriptor Address Register */
};

struct fsldma_chan;
#define FSL_DMA_MAX_CHANS_PER_DEVICE 8

struct fsldma_device {
	void __iomem *regs;	/* DGSR register base */
	struct device *dev;
	struct dma_device common;
	struct fsldma_chan *chan[FSL_DMA_MAX_CHANS_PER_DEVICE];
	u32 feature;		/* Feature flags, the same as its channels' */
	int irq;		/* Channel IRQ */
};

/* Define macros for fsldma_chan->feature property */
#define FSL_DMA_LITTLE_ENDIAN	0x00000000
#define FSL_DMA_BIG_ENDIAN	0x00000001

#define FSL_DMA_IP_MASK		0x00000ff0
#define FSL_DMA_IP_85XX		0x00000010
#define FSL_DMA_IP_83XX		0x00000020

#define FSL_DMA_CHAN_PAUSE_EXT	0x00001000
#define FSL_DMA_CHAN_START_EXT	0x00002000

#ifdef CONFIG_PM
struct fsldma_chan_regs_save {
	u32 mr;
};

enum fsldma_pm_state {
	RUNNING = 0,
	SUSPENDED,
};
#endif

struct fsldma_chan {
	char name[8];			/* Channel name */
	struct fsldma_chan_regs __iomem *regs;
	spinlock_t desc_lock;		/* Descriptor operation lock */
	/*
	 * Descriptors which are queued to run, but have not yet been
	 * submitted to the hardware for execution
	 */
	struct list_head ld_pending;
	/*
	 * Descriptors which are currently being executed by the hardware
	 */
	struct list_head ld_running;
	/*
	 * Descriptors which have finished execution by the hardware. These
	 * descriptors have already had their cleanup actions run. They are
	 * waiting for the ACK bit to be set by the async_tx API; see the
	 * lifecycle sketch below this struct.
	 */
	struct list_head ld_completed;	/* Link descriptors queue */
	struct dma_chan common;		/* DMA common channel */
	struct dma_pool *desc_pool;	/* Descriptors pool */
	struct device *dev;		/* Channel device */
	int irq;			/* Channel IRQ */
	int id;				/* Raw id of this channel */
	struct tasklet_struct tasklet;
	u32 feature;
	bool idle;			/* DMA controller is idle */
#ifdef CONFIG_PM
	struct fsldma_chan_regs_save regs_save;
	enum fsldma_pm_state pm_state;
#endif

	void (*toggle_ext_pause)(struct fsldma_chan *fsl_chan, int enable);
	void (*toggle_ext_start)(struct fsldma_chan *fsl_chan, int enable);
	void (*set_src_loop_size)(struct fsldma_chan *fsl_chan, int size);
	void (*set_dst_loop_size)(struct fsldma_chan *fsl_chan, int size);
	void (*set_request_count)(struct fsldma_chan *fsl_chan, int size);
};
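
/*
 * Descriptor lifecycle sketch (illustrative summary only, not literal
 * driver code; the transitions are driven by descriptor submission,
 * hardware start and the completion tasklet respectively):
 *
 *	submit            start              complete/cleanup
 *	------->  ld_pending  ------->  ld_running  ------->  ld_completed
 *
 * Entries leave ld_completed once the async_tx client has ACKed them,
 * at which point they can be returned to the descriptor pool.
 */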

#define to_fsl_chan(chan) container_of(chan, struct fsldma_chan, common)
#define to_fsl_desc(lh) container_of(lh, struct fsl_desc_sw, node)
#define tx_to_fsl_desc(tx) container_of(tx, struct fsl_desc_sw, async_tx)

#ifdef	CONFIG_PPC
#define fsl_ioread32(p)		in_le32(p)
#define fsl_ioread32be(p)	in_be32(p)
#define fsl_iowrite32(v, p)	out_le32(p, v)
#define fsl_iowrite32be(v, p)	out_be32(p, v)

#ifdef __powerpc64__
#define fsl_ioread64(p)		in_le64(p)
#define fsl_ioread64be(p)	in_be64(p)
#define fsl_iowrite64(v, p)	out_le64(p, v)
#define fsl_iowrite64be(v, p)	out_be64(p, v)
#else
/*
 * 32-bit PowerPC has no native 64-bit MMIO accessors, so each 64-bit
 * register access is split into two 32-bit accesses.
 */
static u64 fsl_ioread64(const u64 __iomem *addr)
{
	u32 val_lo = in_le32((u32 __iomem *)addr);
	u32 val_hi = in_le32((u32 __iomem *)addr + 1);

	return ((u64)val_hi << 32) + val_lo;
}

static void fsl_iowrite64(u64 val, u64 __iomem *addr)
{
	out_le32((u32 __iomem *)addr + 1, val >> 32);
	out_le32((u32 __iomem *)addr, (u32)val);
}

static u64 fsl_ioread64be(const u64 __iomem *addr)
{
	u32 val_hi = in_be32((u32 __iomem *)addr);
	u32 val_lo = in_be32((u32 __iomem *)addr + 1);

	return ((u64)val_hi << 32) + val_lo;
}

static void fsl_iowrite64be(u64 val, u64 __iomem *addr)
{
	out_be32((u32 __iomem *)addr, val >> 32);
	out_be32((u32 __iomem *)addr + 1, (u32)val);
}
#endif
#endif

#if defined(CONFIG_ARM64) || defined(CONFIG_ARM)
#define fsl_ioread32(p)		ioread32(p)
#define fsl_ioread32be(p)	ioread32be(p)
#define fsl_iowrite32(v, p)	iowrite32(v, p)
#define fsl_iowrite32be(v, p)	iowrite32be(v, p)
#define fsl_ioread64(p)		ioread64(p)
#define fsl_ioread64be(p)	ioread64be(p)
#define fsl_iowrite64(v, p)	iowrite64(v, p)
#define fsl_iowrite64be(v, p)	iowrite64be(v, p)
#endif

#define FSL_DMA_IN(fsl_dma, addr, width)			\
		(((fsl_dma)->feature & FSL_DMA_BIG_ENDIAN) ?	\
			fsl_ioread##width##be(addr) : fsl_ioread##width(addr))

#define FSL_DMA_OUT(fsl_dma, addr, val, width)			\
		(((fsl_dma)->feature & FSL_DMA_BIG_ENDIAN) ?	\
			fsl_iowrite##width##be(val, addr) :	\
			fsl_iowrite##width(val, addr))

#define DMA_TO_CPU(fsl_chan, d, width)					\
		(((fsl_chan)->feature & FSL_DMA_BIG_ENDIAN) ?		\
			be##width##_to_cpu((__force __be##width)(v##width)d) : \
			le##width##_to_cpu((__force __le##width)(v##width)d))
#define CPU_TO_DMA(fsl_chan, c, width)					\
		(((fsl_chan)->feature & FSL_DMA_BIG_ENDIAN) ?		\
			(__force v##width)cpu_to_be##width(c) :		\
			(__force v##width)cpu_to_le##width(c))
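
/*
 * Hypothetical helpers (illustrative sketch only, not part of the original
 * driver): typical use of the endian-aware accessors above to read and
 * update a channel's mode register in the byte order selected by the
 * channel's feature flags.
 */
static inline u32 fsl_chan_read_mr(struct fsldma_chan *chan)
{
	/* FSL_DMA_IN picks the big- or little-endian 32-bit accessor */
	return FSL_DMA_IN(chan, &chan->regs->mr, 32);
}

static inline void fsl_chan_write_mr(struct fsldma_chan *chan, u32 val)
{
	FSL_DMA_OUT(chan, &chan->regs->mr, val, 32);
}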

#endif	/* __DMA_FSLDMA_H */