/* Wrapper for DMA channel allocator that starts clocks etc. */

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <asm/dma.h>
#include <asm/arch/hwregs/reg_map.h>
#include <asm/arch/hwregs/reg_rdwr.h>
#include <asm/arch/hwregs/marb_defs.h>
#include <asm/arch/hwregs/config_defs.h>
#include <asm/arch/hwregs/strmux_defs.h>
#include <linux/errno.h>
#include <asm/system.h>
#include <asm/arch/arbiter.h>

static char used_dma_channels[MAX_DMA_CHANNELS];
static const char *used_dma_channels_users[MAX_DMA_CHANNELS];

static DEFINE_SPINLOCK(dma_lock);
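/*
 * Reserve DMA channel dmanr for device_id: allocate arbiter bandwidth for
 * it (in internal or external memory depending on DMA_INT_MEM), mark the
 * channel busy, enable the clock for its channel pair and route the stream
 * multiplexer to the requesting owner. Returns 0 on success, -ENOMEM if no
 * bandwidth could be allocated, -EBUSY if the channel is already taken and
 * -EINVAL for an out-of-range channel number.
 */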
int crisv32_request_dma(unsigned int dmanr, const char *device_id,
			unsigned options, unsigned int bandwidth,
			enum dma_owner owner)
{
	unsigned long flags;
	reg_config_rw_clk_ctrl clk_ctrl;
	reg_strmux_rw_cfg strmux_cfg;

	if (crisv32_arbiter_allocate_bandwidth(dmanr,
					       options & DMA_INT_MEM ?
					       INT_REGION : EXT_REGION,
					       bandwidth))
		return -ENOMEM;

	spin_lock_irqsave(&dma_lock, flags);

	if (used_dma_channels[dmanr]) {
		spin_unlock_irqrestore(&dma_lock, flags);
		if (options & DMA_VERBOSE_ON_ERROR) {
			printk("Failed to request DMA %i for %s, already allocated by %s\n",
			       dmanr, device_id,
			       used_dma_channels_users[dmanr]);
		}
		if (options & DMA_PANIC_ON_ERROR)
			panic("request_dma error!");
		return -EBUSY;
	}
	clk_ctrl = REG_RD(config, regi_config, rw_clk_ctrl);
	strmux_cfg = REG_RD(strmux, regi_strmux, rw_cfg);

	switch (dmanr) {
	case 0:
	case 1:
		clk_ctrl.dma01_eth0 = 1;
		break;
	case 2:
	case 3:
		clk_ctrl.dma23 = 1;
		break;
	case 4:
	case 5:
		clk_ctrl.dma45 = 1;
		break;
	case 6:
	case 7:
		clk_ctrl.dma67 = 1;
		break;
	case 8:
	case 9:
		clk_ctrl.dma89_strcop = 1;
		break;
#if MAX_DMA_CHANNELS - 1 != 9
#error Check dma.c
#endif
	default:
		spin_unlock_irqrestore(&dma_lock, flags);
		if (options & DMA_VERBOSE_ON_ERROR) {
			printk("Failed to request DMA %i for %s, only 0-%i valid\n",
			       dmanr, device_id, MAX_DMA_CHANNELS - 1);
		}

		if (options & DMA_PANIC_ON_ERROR)
			panic("request_dma error!");
		return -EINVAL;
	}

	switch (owner) {
	case dma_eth0:
		if (dmanr == 0)
			strmux_cfg.dma0 = regk_strmux_eth0;
		else if (dmanr == 1)
			strmux_cfg.dma1 = regk_strmux_eth0;
		else
			panic("Invalid DMA channel for eth0\n");
		break;
	case dma_eth1:
		if (dmanr == 6)
			strmux_cfg.dma6 = regk_strmux_eth1;
		else if (dmanr == 7)
			strmux_cfg.dma7 = regk_strmux_eth1;
		else
			panic("Invalid DMA channel for eth1\n");
		break;
	case dma_iop0:
		if (dmanr == 2)
			strmux_cfg.dma2 = regk_strmux_iop0;
		else if (dmanr == 3)
			strmux_cfg.dma3 = regk_strmux_iop0;
		else
			panic("Invalid DMA channel for iop0\n");
		break;
	case dma_iop1:
		if (dmanr == 4)
			strmux_cfg.dma4 = regk_strmux_iop1;
		else if (dmanr == 5)
			strmux_cfg.dma5 = regk_strmux_iop1;
		else
			panic("Invalid DMA channel for iop1\n");
		break;
	case dma_ser0:
		if (dmanr == 6)
			strmux_cfg.dma6 = regk_strmux_ser0;
		else if (dmanr == 7)
			strmux_cfg.dma7 = regk_strmux_ser0;
		else
			panic("Invalid DMA channel for ser0\n");
		break;
	case dma_ser1:
		if (dmanr == 4)
			strmux_cfg.dma4 = regk_strmux_ser1;
		else if (dmanr == 5)
			strmux_cfg.dma5 = regk_strmux_ser1;
		else
			panic("Invalid DMA channel for ser1\n");
		break;
	case dma_ser2:
		if (dmanr == 2)
			strmux_cfg.dma2 = regk_strmux_ser2;
		else if (dmanr == 3)
			strmux_cfg.dma3 = regk_strmux_ser2;
		else
			panic("Invalid DMA channel for ser2\n");
		break;
	case dma_ser3:
		if (dmanr == 8)
			strmux_cfg.dma8 = regk_strmux_ser3;
		else if (dmanr == 9)
			strmux_cfg.dma9 = regk_strmux_ser3;
		else
			panic("Invalid DMA channel for ser3\n");
		break;
	case dma_sser0:
		if (dmanr == 4)
			strmux_cfg.dma4 = regk_strmux_sser0;
		else if (dmanr == 5)
			strmux_cfg.dma5 = regk_strmux_sser0;
		else
			panic("Invalid DMA channel for sser0\n");
		break;
	case dma_sser1:
		if (dmanr == 6)
			strmux_cfg.dma6 = regk_strmux_sser1;
		else if (dmanr == 7)
			strmux_cfg.dma7 = regk_strmux_sser1;
		else
			panic("Invalid DMA channel for sser1\n");
		break;
	case dma_ata:
		if (dmanr == 2)
			strmux_cfg.dma2 = regk_strmux_ata;
		else if (dmanr == 3)
			strmux_cfg.dma3 = regk_strmux_ata;
		else
			panic("Invalid DMA channel for ata\n");
		break;
	case dma_strp:
		if (dmanr == 8)
			strmux_cfg.dma8 = regk_strmux_strcop;
		else if (dmanr == 9)
			strmux_cfg.dma9 = regk_strmux_strcop;
		else
			panic("Invalid DMA channel for strp\n");
		break;
	case dma_ext0:
		if (dmanr == 6)
			strmux_cfg.dma6 = regk_strmux_ext0;
		else
			panic("Invalid DMA channel for ext0\n");
		break;
	case dma_ext1:
		if (dmanr == 7)
			strmux_cfg.dma7 = regk_strmux_ext1;
		else
			panic("Invalid DMA channel for ext1\n");
		break;
	case dma_ext2:
		if (dmanr == 2)
			strmux_cfg.dma2 = regk_strmux_ext2;
		else if (dmanr == 8)
			strmux_cfg.dma8 = regk_strmux_ext2;
		else
			panic("Invalid DMA channel for ext2\n");
		break;
	case dma_ext3:
		if (dmanr == 3)
			strmux_cfg.dma3 = regk_strmux_ext3;
		else if (dmanr == 9)
			strmux_cfg.dma9 = regk_strmux_ext3;
		else
			panic("Invalid DMA channel for ext3\n");
		break;
	}

	used_dma_channels[dmanr] = 1;
	used_dma_channels_users[dmanr] = device_id;
	REG_WR(config, regi_config, rw_clk_ctrl, clk_ctrl);
	REG_WR(strmux, regi_strmux, rw_cfg, strmux_cfg);
	spin_unlock_irqrestore(&dma_lock, flags);
	return 0;
}
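/*
 * Mark DMA channel dmanr as free again. Only the in-use flag is cleared;
 * the clock enable, stream multiplexer routing and arbiter bandwidth set
 * up by crisv32_request_dma() are left untouched.
 */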
void crisv32_free_dma(unsigned int dmanr)
{
	spin_lock(&dma_lock);
	used_dma_channels[dmanr] = 0;
	spin_unlock(&dma_lock);
}
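/*
 * Usage sketch (illustrative only; the channel number, bandwidth value and
 * the "ser1" device_id string are assumptions, not taken from a real
 * driver). A driver claiming a channel for ser1 might do roughly:
 *
 *	if (crisv32_request_dma(4, "ser1", DMA_VERBOSE_ON_ERROR, 0, dma_ser1))
 *		return -EBUSY;
 *	...
 *	crisv32_free_dma(4);
 */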