// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2016 Maxime Ripard
 * Maxime Ripard <maxime.ripard@free-electrons.com>
 */

#include <linux/clk-provider.h>
#include <linux/io.h>

#include "ccu_frac.h"
#include "ccu_gate.h"
#include "ccu_nm.h"
#include "ccu_sdm.h"

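/*
 * Scratch structure for the factor search: the caller fills in the
 * min/max ranges, and the best matching (n, m) pair is written back
 * into the n and m fields.
 */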
struct _ccu_nm {
	unsigned long	n, min_n, max_n;
	unsigned long	m, min_m, max_m;
};

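/* Compute parent * N / M in 64 bits to avoid overflowing unsigned long. */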
static unsigned long ccu_nm_calc_rate(unsigned long parent,
				      unsigned long n, unsigned long m)
{
	u64 rate = parent;

	rate *= n;
	do_div(rate, m);

	return rate;
}

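/*
 * Brute-force search over all allowed N and M values, keeping the pair
 * whose output rate ccu_is_better_rate() considers closest to the request.
 */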
static unsigned long ccu_nm_find_best(struct ccu_common *common, unsigned long parent,
				      unsigned long rate, struct _ccu_nm *nm)
{
	unsigned long best_rate = 0;
	unsigned long best_n = 0, best_m = 0;
	unsigned long _n, _m;

	for (_n = nm->min_n; _n <= nm->max_n; _n++) {
		for (_m = nm->min_m; _m <= nm->max_m; _m++) {
			unsigned long tmp_rate = ccu_nm_calc_rate(parent,
								  _n, _m);

			if (ccu_is_better_rate(common, rate, tmp_rate, best_rate)) {
				best_rate = tmp_rate;
				best_n = _n;
				best_m = _m;
			}
		}
	}

	nm->n = best_n;
	nm->m = best_m;

	return best_rate;
}

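/* Gating of the clock is delegated to the shared gate helpers. */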
static void ccu_nm_disable(struct clk_hw *hw)
{
	struct ccu_nm *nm = hw_to_ccu_nm(hw);

	return ccu_gate_helper_disable(&nm->common, nm->enable);
}

static int ccu_nm_enable(struct clk_hw *hw)
{
	struct ccu_nm *nm = hw_to_ccu_nm(hw);

	return ccu_gate_helper_enable(&nm->common, nm->enable);
}

static int ccu_nm_is_enabled(struct clk_hw *hw)
{
	struct ccu_nm *nm = hw_to_ccu_nm(hw);

	return ccu_gate_helper_is_enabled(&nm->common, nm->enable);
}

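/*
 * Read the current factors back from the hardware. Fractional and
 * sigma-delta modes override the plain N/M calculation, and the fixed
 * post-divider, when present, is applied last.
 */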
static unsigned long ccu_nm_recalc_rate(struct clk_hw *hw,
					unsigned long parent_rate)
{
	struct ccu_nm *nm = hw_to_ccu_nm(hw);
	unsigned long rate;
	unsigned long n, m;
	u32 reg;

	if (ccu_frac_helper_is_enabled(&nm->common, &nm->frac)) {
		rate = ccu_frac_helper_read_rate(&nm->common, &nm->frac);

		if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
			rate /= nm->fixed_post_div;

		return rate;
	}

	reg = readl(nm->common.base + nm->common.reg);

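	/* The register field plus its offset gives the factor; never use 0. */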
	n = reg >> nm->n.shift;
	n &= (1 << nm->n.width) - 1;
	n += nm->n.offset;
	if (!n)
		n++;

	m = reg >> nm->m.shift;
	m &= (1 << nm->m.width) - 1;
	m += nm->m.offset;
	if (!m)
		m++;

	if (ccu_sdm_helper_is_enabled(&nm->common, &nm->sdm))
		rate = ccu_sdm_helper_read_rate(&nm->common, &nm->sdm, m, n);
	else
		rate = ccu_nm_calc_rate(parent_rate, n, m);

	if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate /= nm->fixed_post_div;

	return rate;
}

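/*
 * Pick the closest achievable rate: clamp to the hardware limits first,
 * then prefer an exact fractional or sigma-delta match, and finally fall
 * back to the best integer N/M pair.
 */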
static long ccu_nm_round_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long *parent_rate)
{
	struct ccu_nm *nm = hw_to_ccu_nm(hw);
	struct _ccu_nm _nm;

	if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate *= nm->fixed_post_div;

	if (rate < nm->min_rate) {
		rate = nm->min_rate;
		if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
			rate /= nm->fixed_post_div;
		return rate;
	}

	if (nm->max_rate && rate > nm->max_rate) {
		rate = nm->max_rate;
		if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
			rate /= nm->fixed_post_div;
		return rate;
	}

	if (ccu_frac_helper_has_rate(&nm->common, &nm->frac, rate)) {
		if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
			rate /= nm->fixed_post_div;
		return rate;
	}

	if (ccu_sdm_helper_has_rate(&nm->common, &nm->sdm, rate)) {
		if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
			rate /= nm->fixed_post_div;
		return rate;
	}

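	/* No exact fractional/SDM match: search the integer N/M factors. */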
	_nm.min_n = nm->n.min ?: 1;
	_nm.max_n = nm->n.max ?: 1 << nm->n.width;
	_nm.min_m = 1;
	_nm.max_m = nm->m.max ?: 1 << nm->m.width;

	rate = ccu_nm_find_best(&nm->common, *parent_rate, rate, &_nm);

	if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate /= nm->fixed_post_div;

	return rate;
}

static int ccu_nm_set_rate(struct clk_hw *hw, unsigned long rate,
			   unsigned long parent_rate)
{
	struct ccu_nm *nm = hw_to_ccu_nm(hw);
	struct _ccu_nm _nm;
	unsigned long flags;
	u32 reg;

	/* Adjust target rate according to post-dividers */
	if (nm->common.features & CCU_FEATURE_FIXED_POSTDIV)
		rate = rate * nm->fixed_post_div;

	if (ccu_frac_helper_has_rate(&nm->common, &nm->frac, rate)) {
		spin_lock_irqsave(nm->common.lock, flags);

		/* most SoCs require M to be 0 if fractional mode is used */
		reg = readl(nm->common.base + nm->common.reg);
		reg &= ~GENMASK(nm->m.width + nm->m.shift - 1, nm->m.shift);
		writel(reg, nm->common.base + nm->common.reg);

		spin_unlock_irqrestore(nm->common.lock, flags);

		ccu_frac_helper_enable(&nm->common, &nm->frac);

		return ccu_frac_helper_set_rate(&nm->common, &nm->frac,
						rate, nm->lock);
	} else {
		ccu_frac_helper_disable(&nm->common, &nm->frac);
	}

	_nm.min_n = nm->n.min ?: 1;
	_nm.max_n = nm->n.max ?: 1 << nm->n.width;
	_nm.min_m = 1;
	_nm.max_m = nm->m.max ?: 1 << nm->m.width;

	if (ccu_sdm_helper_has_rate(&nm->common, &nm->sdm, rate)) {
		ccu_sdm_helper_enable(&nm->common, &nm->sdm, rate);

		/* Sigma delta modulation requires specific N and M factors */
		ccu_sdm_helper_get_factors(&nm->common, &nm->sdm, rate,
					   &_nm.m, &_nm.n);
	} else {
		ccu_sdm_helper_disable(&nm->common, &nm->sdm);
		ccu_nm_find_best(&nm->common, parent_rate, rate, &_nm);
	}

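	/* Program the new N and M factors under the CCU register lock. */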
	spin_lock_irqsave(nm->common.lock, flags);

	reg = readl(nm->common.base + nm->common.reg);
	reg &= ~GENMASK(nm->n.width + nm->n.shift - 1, nm->n.shift);
	reg &= ~GENMASK(nm->m.width + nm->m.shift - 1, nm->m.shift);

	reg |= (_nm.n - nm->n.offset) << nm->n.shift;
	reg |= (_nm.m - nm->m.offset) << nm->m.shift;
	writel(reg, nm->common.base + nm->common.reg);

	spin_unlock_irqrestore(nm->common.lock, flags);

	ccu_helper_wait_for_lock(&nm->common, nm->lock);

	return 0;
}

const struct clk_ops ccu_nm_ops = {
	.disable	= ccu_nm_disable,
	.enable		= ccu_nm_enable,
	.is_enabled	= ccu_nm_is_enabled,

	.recalc_rate	= ccu_nm_recalc_rate,
	.round_rate	= ccu_nm_round_rate,
	.set_rate	= ccu_nm_set_rate,
};
EXPORT_SYMBOL_NS_GPL(ccu_nm_ops, SUNXI_CCU);