/*-
 * Copyright (c) 2017 Emmanuel Vadot <manu@freebsd.org>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/bus.h>

#include <dev/clk/clk.h>

#include <dev/clk/allwinner/aw_clk.h>
#include <dev/clk/allwinner/aw_clk_nm.h>

#include "clkdev_if.h"

/*
 * clknode for clocks matching the formula:
 *
 * clk = clkin / n / m
 *
 */
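/*
 * Worked example (numbers are illustrative only): with a 24 MHz parent,
 * n = 3 and m = 2 give 24000000 / 3 / 2 = 4000000 Hz (4 MHz).  Clocks
 * flagged AW_CLK_HAS_PREDIV additionally divide by a pre-divider before
 * n and m when their frequency is recalculated.
 */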

struct aw_clk_nm_sc {
	uint32_t	offset;

	struct aw_clk_factor	m;
	struct aw_clk_factor	n;
	struct aw_clk_factor	prediv;

	uint32_t	mux_shift;
	uint32_t	mux_mask;
	uint32_t	gate_shift;
	uint32_t	lock_shift;
	uint32_t	lock_retries;

	uint32_t	flags;
};

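/*
 * Helper macros wrapping the clkdev interface of the device that exposes
 * this clock, used for register access and for serializing
 * read-modify-write sequences.
 */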
#define	WRITE4(_clk, off, val)						\
	CLKDEV_WRITE_4(clknode_get_device(_clk), off, val)
#define	READ4(_clk, off, val)						\
	CLKDEV_READ_4(clknode_get_device(_clk), off, val)
#define	DEVICE_LOCK(_clk)						\
	CLKDEV_DEVICE_LOCK(clknode_get_device(_clk))
#define	DEVICE_UNLOCK(_clk)						\
	CLKDEV_DEVICE_UNLOCK(clknode_get_device(_clk))

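/*
 * Determine the initial parent of the clock: if the clock has a mux
 * (AW_CLK_HAS_MUX), read the currently selected index from the register,
 * otherwise parent index 0 is used.
 */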
static int
aw_clk_nm_init(struct clknode *clk, device_t dev)
{
	struct aw_clk_nm_sc *sc;
	uint32_t val, idx;

	sc = clknode_get_softc(clk);

	idx = 0;
	if ((sc->flags & AW_CLK_HAS_MUX) != 0) {
		DEVICE_LOCK(clk);
		READ4(clk, sc->offset, &val);
		DEVICE_UNLOCK(clk);

		idx = (val & sc->mux_mask) >> sc->mux_shift;
	}

	clknode_init_parent_idx(clk, idx);
	return (0);
}

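/*
 * Gate the clock on or off.  Clocks without a gate bit (no
 * AW_CLK_HAS_GATE flag) accept the request and return success without
 * touching the hardware.
 */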
static int
aw_clk_nm_set_gate(struct clknode *clk, bool enable)
{
	struct aw_clk_nm_sc *sc;
	uint32_t val;

	sc = clknode_get_softc(clk);

	if ((sc->flags & AW_CLK_HAS_GATE) == 0)
		return (0);

	DEVICE_LOCK(clk);
	READ4(clk, sc->offset, &val);
	if (enable)
		val |= (1 << sc->gate_shift);
	else
		val &= ~(1 << sc->gate_shift);
	WRITE4(clk, sc->offset, val);
	DEVICE_UNLOCK(clk);

	return (0);
}

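/*
 * Switch the clock to the parent selected by the framework.  Clocks
 * without a mux (no AW_CLK_HAS_MUX flag) ignore the request and return
 * success.
 */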
static int
aw_clk_nm_set_mux(struct clknode *clk, int index)
{
	struct aw_clk_nm_sc *sc;
	uint32_t val;

	sc = clknode_get_softc(clk);

	if ((sc->flags & AW_CLK_HAS_MUX) == 0)
		return (0);

	DEVICE_LOCK(clk);
	READ4(clk, sc->offset, &val);
	val &= ~sc->mux_mask;
	val |= index << sc->mux_shift;
	WRITE4(clk, sc->offset, val);
	DEVICE_UNLOCK(clk);

	return (0);
}

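/*
 * Walk every valid (n, m) pair between each factor's minimum and maximum
 * value (stepping in powers of two for factors flagged
 * AW_CLK_FACTOR_POWER_OF_TWO) and return the reachable frequency closest
 * to *fout, together with the factors that produce it.
 */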
static uint64_t
aw_clk_nm_find_best(struct aw_clk_nm_sc *sc, uint64_t fparent, uint64_t *fout,
    uint32_t *factor_n, uint32_t *factor_m)
{
	uint64_t cur, best = 0;
	uint32_t m, n, max_m, max_n, min_m, min_n;

	*factor_n = *factor_m = 0;

	max_m = aw_clk_factor_get_max(&sc->m);
	max_n = aw_clk_factor_get_max(&sc->n);
	min_m = aw_clk_factor_get_min(&sc->m);
	min_n = aw_clk_factor_get_min(&sc->n);

	for (m = min_m; m <= max_m; ) {
		for (n = min_n; n <= max_n; ) {
			cur = fparent / n / m;
			if (clk_freq_diff(*fout, cur) <
			    clk_freq_diff(*fout, best)) {
				best = cur;
				*factor_n = n;
				*factor_m = m;
			}

			if ((sc->n.flags & AW_CLK_FACTOR_POWER_OF_TWO) != 0)
				n <<= 1;
			else
				n++;
		}
		if ((sc->m.flags & AW_CLK_FACTOR_POWER_OF_TWO) != 0)
			m <<= 1;
		else
			m++;
	}

	return (best);
}

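/*
 * Change the clock frequency.  If AW_CLK_REPARENT is set, every possible
 * parent is evaluated and the one giving the closest frequency is
 * selected; otherwise only the current parent is considered.
 * CLK_SET_DRYRUN only reports the frequency that would be programmed.
 * Unless the caller allows rounding (CLK_SET_ROUND_DOWN/CLK_SET_ROUND_UP),
 * a best match below or above the requested frequency is rejected with
 * ERANGE.  The n and m factors are then written and, for clocks with a
 * lock bit (AW_CLK_HAS_LOCK), the register is polled until the clock
 * reports itself stable or the retry count is exhausted.
 */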
static int
aw_clk_nm_set_freq(struct clknode *clk, uint64_t fparent, uint64_t *fout,
    int flags, int *stop)
{
	struct aw_clk_nm_sc *sc;
	struct clknode *p_clk;
	const char **p_names;
	uint64_t cur, best;
	uint32_t val, m, n, best_m, best_n;
	int p_idx, best_parent, retry;

	sc = clknode_get_softc(clk);

	best = cur = 0;
	best_parent = 0;

	if ((sc->flags & AW_CLK_REPARENT) != 0) {
		p_names = clknode_get_parent_names(clk);
		for (p_idx = 0; p_idx != clknode_get_parents_num(clk); p_idx++) {
			p_clk = clknode_find_by_name(p_names[p_idx]);
			clknode_get_freq(p_clk, &fparent);

			cur = aw_clk_nm_find_best(sc, fparent, fout, &n, &m);
			if (clk_freq_diff(*fout, cur) <
			    clk_freq_diff(*fout, best)) {
				best = cur;
				best_parent = p_idx;
				best_n = n;
				best_m = m;
			}
		}

		p_idx = clknode_get_parent_idx(clk);
		p_clk = clknode_get_parent(clk);
		clknode_get_freq(p_clk, &fparent);
	} else {
		best = aw_clk_nm_find_best(sc, fparent, fout,
		    &best_n, &best_m);
	}

	if ((flags & CLK_SET_DRYRUN) != 0) {
		*fout = best;
		*stop = 1;
		return (0);
	}

	if ((best < *fout) &&
	    ((flags & CLK_SET_ROUND_DOWN) == 0)) {
		*stop = 1;
		printf("best freq (%ju) < requested freq (%ju)\n",
		    (uintmax_t)best, (uintmax_t)*fout);
		return (ERANGE);
	}
	if ((best > *fout) &&
	    ((flags & CLK_SET_ROUND_UP) == 0)) {
		*stop = 1;
		printf("best freq (%ju) > requested freq (%ju)\n",
		    (uintmax_t)best, (uintmax_t)*fout);
		return (ERANGE);
	}

	if ((sc->flags & AW_CLK_REPARENT) != 0 && p_idx != best_parent)
		clknode_set_parent_by_idx(clk, best_parent);

	DEVICE_LOCK(clk);
	READ4(clk, sc->offset, &val);

	n = aw_clk_factor_get_value(&sc->n, best_n);
	m = aw_clk_factor_get_value(&sc->m, best_m);
	val &= ~sc->n.mask;
	val &= ~sc->m.mask;
	val |= n << sc->n.shift;
	val |= m << sc->m.shift;

	WRITE4(clk, sc->offset, val);
	DEVICE_UNLOCK(clk);

	if ((sc->flags & AW_CLK_HAS_LOCK) != 0) {
		for (retry = 0; retry < sc->lock_retries; retry++) {
			READ4(clk, sc->offset, &val);
			if ((val & (1 << sc->lock_shift)) != 0)
				break;
			DELAY(1000);
		}
	}

	*fout = best;
	*stop = 1;

	return (0);
}

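/*
 * Compute the output frequency from the parent frequency passed in *freq:
 * read back the current n and m factors (and the pre-divider when
 * AW_CLK_HAS_PREDIV is set) and apply clk = clkin / prediv / n / m.
 */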
static int
aw_clk_nm_recalc(struct clknode *clk, uint64_t *freq)
{
	struct aw_clk_nm_sc *sc;
	uint32_t val, m, n, prediv;

	sc = clknode_get_softc(clk);

	DEVICE_LOCK(clk);
	READ4(clk, sc->offset, &val);
	DEVICE_UNLOCK(clk);

	m = aw_clk_get_factor(val, &sc->m);
	n = aw_clk_get_factor(val, &sc->n);
	if (sc->flags & AW_CLK_HAS_PREDIV)
		prediv = aw_clk_get_factor(val, &sc->prediv);
	else
		prediv = 1;

	*freq = *freq / prediv / n / m;

	return (0);
}

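/*
 * clknode method table and class for NM clocks, derived from the generic
 * clknode_class.
 */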
static clknode_method_t aw_nm_clknode_methods[] = {
	/* Device interface */
	CLKNODEMETHOD(clknode_init,		aw_clk_nm_init),
	CLKNODEMETHOD(clknode_set_gate,		aw_clk_nm_set_gate),
	CLKNODEMETHOD(clknode_set_mux,		aw_clk_nm_set_mux),
	CLKNODEMETHOD(clknode_recalc_freq,	aw_clk_nm_recalc),
	CLKNODEMETHOD(clknode_set_freq,		aw_clk_nm_set_freq),
	CLKNODEMETHOD_END
};

DEFINE_CLASS_1(aw_nm_clknode, aw_nm_clknode_class, aw_nm_clknode_methods,
    sizeof(struct aw_clk_nm_sc), clknode_class);

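/*
 * Create an NM clock from its static description and register it with the
 * clock domain.  The factor masks are derived here from the shift/width
 * pairs so the rest of the driver can mask and shift without recomputing
 * them.
 *
 * A hypothetical description (register offset, field positions and names
 * below are invented for illustration; real ones are provided by the
 * per-SoC clock driver) might look like:
 *
 *	static const char *mod_parents[] = { "osc24M", "pll_periph" };
 *
 *	static struct aw_clk_nm_def mod0_clk = {
 *		.clkdef = {
 *			.name = "mod0",
 *			.parent_names = mod_parents,
 *			.parent_cnt = nitems(mod_parents),
 *		},
 *		.offset = 0x80,
 *		.n = { .shift = 16, .width = 2,
 *		    .flags = AW_CLK_FACTOR_POWER_OF_TWO },
 *		.m = { .shift = 0, .width = 4 },
 *		.mux_shift = 24, .mux_width = 2,
 *		.gate_shift = 31,
 *		.flags = AW_CLK_HAS_GATE | AW_CLK_HAS_MUX,
 *	};
 *
 *	aw_clk_nm_register(clkdom, &mod0_clk);
 */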
int
aw_clk_nm_register(struct clkdom *clkdom, struct aw_clk_nm_def *clkdef)
{
	struct clknode *clk;
	struct aw_clk_nm_sc *sc;

	clk = clknode_create(clkdom, &aw_nm_clknode_class, &clkdef->clkdef);
	if (clk == NULL)
		return (1);

	sc = clknode_get_softc(clk);

	sc->offset = clkdef->offset;

	sc->m.shift = clkdef->m.shift;
	sc->m.width = clkdef->m.width;
	sc->m.mask = ((1 << sc->m.width) - 1) << sc->m.shift;
	sc->m.value = clkdef->m.value;
	sc->m.flags = clkdef->m.flags;

	sc->n.shift = clkdef->n.shift;
	sc->n.width = clkdef->n.width;
	sc->n.mask = ((1 << sc->n.width) - 1) << sc->n.shift;
	sc->n.value = clkdef->n.value;
	sc->n.flags = clkdef->n.flags;

	sc->prediv.shift = clkdef->prediv.shift;
	sc->prediv.width = clkdef->prediv.width;
	sc->prediv.mask = ((1 << sc->prediv.width) - 1) << sc->prediv.shift;
	sc->prediv.value = clkdef->prediv.value;
	sc->prediv.flags = clkdef->prediv.flags;
	sc->prediv.cond_shift = clkdef->prediv.cond_shift;
	if (clkdef->prediv.cond_width != 0)
		sc->prediv.cond_mask =
		    ((1 << clkdef->prediv.cond_width) - 1) <<
		    sc->prediv.cond_shift;
	else
		sc->prediv.cond_mask = clkdef->prediv.cond_mask;
	sc->prediv.cond_value = clkdef->prediv.cond_value;

	sc->mux_shift = clkdef->mux_shift;
	sc->mux_mask = ((1 << clkdef->mux_width) - 1) << sc->mux_shift;

	sc->gate_shift = clkdef->gate_shift;

	sc->lock_shift = clkdef->lock_shift;
	sc->lock_retries = clkdef->lock_retries;

	sc->flags = clkdef->flags;

	clknode_register(clkdom, clk);

	return (0);
}