// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/rational.h>
#include <linux/regmap.h>
#include <linux/math64.h>
#include <linux/slab.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)
#define CFG_HW_CLK_CTRL_MASK	BIT(20)

#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10

#define RCG_CFG_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
#define RCG_M_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
#define RCG_N_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
#define RCG_D_OFFSET(rcg)	((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)

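/* Dynamic Frequency Scaling */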
#define MAX_PERF_LEVEL		8
#define SE_CMD_DFSR_OFFSET	0x14
#define SE_CMD_DFS_EN		BIT(0)
#define SE_PERF_DFSR(level)	(0x1c + 0x4 * (level))
#define SE_PERF_M_DFSR(level)	(0x5c + 0x4 * (level))
#define SE_PERF_N_DFSR(level)	(0x9c + 0x4 * (level))

enum freq_policy {
	FLOOR,
	CEIL,
};

static int clk_rcg2_is_enabled(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cmd;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
	if (ret)
		return ret;

	return (cmd & CMD_ROOT_OFF) == 0;
}

static u8 clk_rcg2_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 cfg;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);
	if (ret)
		goto err;

	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = clk_hw_get_name(hw);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

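	/* Wait for update to take effect */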
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
		if (ret)
			return ret;
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return -EBUSY;
}

static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;
	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	ret = regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
				 CFG_SRC_SEL_MASK, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

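/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            hid_div       n
 */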
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
	if (hid_div) {
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}

static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &n);
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}

static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
				    struct clk_rate_request *req,
				    enum freq_policy policy)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int index;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(f, rate);
		break;
	case CEIL:
		f = qcom_find_freq(f, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (!p)
		return -EINVAL;

	if (clk_flags & CLK_SET_RATE_PARENT) {
		rate = f->freq;
		if (f->pre_div) {
			if (!rate)
				rate = req->rate;
			rate /= 2;
			rate *= f->pre_div + 1;
		}

		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
}

static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
}

static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
					 RCG_M_OFFSET(rcg), mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 RCG_N_OFFSET(rcg), mask, ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
					 RCG_D_OFFSET(rcg), mask, ~f->n);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;
	return regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
				  mask, cfg);
}

static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	int ret;

	ret = __clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			       enum freq_policy policy)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	switch (policy) {
	case FLOOR:
		f = qcom_find_freq_floor(rcg->freq_tbl, rate);
		break;
	case CEIL:
		f = qcom_find_freq(rcg->freq_tbl, rate);
		break;
	default:
		return -EINVAL;
	}

	if (!f)
		return -EINVAL;

	return clk_rcg2_configure(rcg, f);
}

static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}

static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}

static int clk_rcg2_get_duty_cycle(struct clk_hw *hw, struct clk_duty *duty)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 notn_m, n, m, d, not2d, mask;

	if (!rcg->mnd_width) {
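		/* 50 % duty-cycle for non-MND RCGs */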
		duty->num = 1;
		duty->den = 2;
		return 0;
	}

	regmap_read(rcg->clkr.regmap, RCG_D_OFFSET(rcg), &not2d);
	regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
	regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &notn_m);

	if (!not2d && !m && !notn_m) {
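		/* 50 % duty-cycle always */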
		duty->num = 1;
		duty->den = 2;
		return 0;
	}

	mask = BIT(rcg->mnd_width) - 1;

	d = ~(not2d) & mask;
	d = DIV_ROUND_CLOSEST(d, 2);

	n = (~(notn_m) + m) & mask;

	duty->num = d;
	duty->den = n;

	return 0;
}

static int clk_rcg2_set_duty_cycle(struct clk_hw *hw, struct clk_duty *duty)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 notn_m, n, m, d, not2d, mask, duty_per;
	int ret;

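	/* Duty-cycle cannot be modified for non-MND RCGs */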
	if (!rcg->mnd_width)
		return -EINVAL;

	mask = BIT(rcg->mnd_width) - 1;

	regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &notn_m);
	regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);

	n = (~(notn_m) + m) & mask;

	duty_per = (duty->num * 100) / duty->den;

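	/* Calculate 2d value */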
	d = DIV_ROUND_CLOSEST(n * duty_per * 2, 100);

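	/*
	 * Check bit widths of 2d. If D is too big reduce duty cycle.
	 * Also make sure it is never zero.
	 */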
	if (d > mask)
		d = mask;

	if ((d / 2) > (n - m))
		d = (n - m) * 2;
	else if ((d / 2) < (m / 2))
		d = m;

	not2d = ~d & mask;

	ret = regmap_update_bits(rcg->clkr.regmap, RCG_D_OFFSET(rcg), mask,
				 not2d);
	if (ret)
		return ret;

	return update_config(rcg);
}

const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
	.get_duty_cycle = clk_rcg2_get_duty_cycle,
	.set_duty_cycle = clk_rcg2_set_duty_cycle,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);

const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
	.get_duty_cycle = clk_rcg2_get_duty_cycle,
	.set_duty_cycle = clk_rcg2_set_duty_cycle,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);

struct frac_entry {
	int num;
	int den;
};

static const struct frac_entry frac_table_675m[] = {
	{ 52, 295 },
	{ 11, 57 },
	{ 63, 307 },
	{ 11, 50 },
	{ 47, 206 },
	{ 31, 100 },
	{ 107, 269 },
	{ },
};

static const struct frac_entry frac_table_810m[] = {
	{ 31, 211 },
	{ 32, 199 },
	{ 63, 307 },
	{ 11, 60 },
	{ 50, 263 },
	{ 31, 120 },
	{ 119, 359 },
	{ },
};

static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
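	/* Parent index is set statically in frequency table */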
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}

static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

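	/* Force the correct parent */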
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}

const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);

static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;

	if (req->rate == 0)
		return -EINVAL;

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	unsigned long div;
	u32 mask = BIT(rcg->hid_width) - 1;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	return clk_rcg2_configure(rcg, &f);
}

static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
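	/* Parent index is set statically in frequency table */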
	return clk_byte_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);

static int clk_byte2_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;
	unsigned long rate = req->rate;

	if (rate == 0)
		return -EINVAL;

	p = req->best_parent_hw;
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;
}

static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
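	/* Read the hardware to determine parent during set_rate */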
	return clk_byte2_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);

static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};

static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	unsigned long request, src_rate;
	int delta = 100000;
	const struct frac_entry *frac = frac_table_pixel;

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}

static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);

static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { };
	struct clk_rcg2_gfx3d *cgfx = to_clk_rcg2_gfx3d(hw);
	struct clk_hw *xo, *p0, *p1, *p2;
	unsigned long p0_rate;
	u8 mux_div = cgfx->div;
	int ret;

	p0 = cgfx->hws[0];
	p1 = cgfx->hws[1];
	p2 = cgfx->hws[2];

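	/*
	 * This function does ping-pong the RCG between PLLs: if we don't
	 * have a new path, take other one
	 */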
	if (WARN_ON(!p0 || !p1 || !p2))
		return -EINVAL;

	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	if (mux_div == 0)
		mux_div = 1;

	parent_req.rate = req->rate * mux_div;

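	/* This has to be a fixed rate PLL */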
	p0_rate = clk_hw_get_rate(p0);

	if (parent_req.rate == p0_rate) {
		req->rate = req->best_parent_rate = p0_rate;
		req->best_parent_hw = p0;
		return 0;
	}

	if (req->best_parent_hw == p0) {
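		/* Are we going back to a previously used rate? */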
		if (clk_hw_get_rate(p2) == parent_req.rate)
			req->best_parent_hw = p2;
		else
			req->best_parent_hw = p1;
	} else if (req->best_parent_hw == p2) {
		req->best_parent_hw = p1;
	} else {
		req->best_parent_hw = p2;
	}

	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;
	req->rate /= mux_div;

	return 0;
}

static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	struct clk_rcg2_gfx3d *cgfx = to_clk_rcg2_gfx3d(hw);
	struct clk_rcg2 *rcg = &cgfx->rcg;
	u32 cfg;
	int ret;

	cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	if (cgfx->div > 1)
		cfg |= ((2 * cgfx->div) - 1) << CFG_SRC_DIV_SHIFT;

	ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
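	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */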
	return 0;
}

const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);

static int clk_rcg2_set_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const char *name = clk_hw_get_name(hw);
	int ret, count;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_ROOT_EN, CMD_ROOT_EN);
	if (ret)
		return ret;

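	/* wait for RCG to turn ON */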
	for (count = 500; count > 0; count--) {
		if (clk_rcg2_is_enabled(hw))
			return 0;

		udelay(1);
	}

	pr_err("%s: RCG did not turn on\n", name);
	return -ETIMEDOUT;
}

static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  CMD_ROOT_EN, 0);
}

static int
clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = clk_rcg2_configure(rcg, f);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}

static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

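	/*
	 * In case clock is disabled, update the CFG, M, N and D registers
	 * and don't hit the update bit of CMD register.
	 */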
	if (!__clk_is_enabled(hw->clk))
		return __clk_rcg2_configure(rcg, f);

	return clk_rcg2_shared_force_enable_clear(hw, f);
}

static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
}

static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

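	/*
	 * Set the update bit because required configuration has already
	 * been written in clk_rcg2_shared_set_rate()
	 */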
	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = update_config(rcg);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}

static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;

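	/*
	 * Store current configuration as switching to safe source would clear
	 * the SRC and DIV of CFG register
	 */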
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

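	/*
	 * Park the RCG at a safe configuration - sourced off of safe source.
	 * Force enable and disable the RCG while configuring it to safeguard
	 * against any update signal coming from the downstream clock.
	 * The current parent is still prepared and enabled at this point, and
	 * the safe source is always on while application processor subsystem
	 * is online. Therefore, the RCG can safely switch its parent.
	 */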
	clk_rcg2_set_force_enable(hw);

	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
		     rcg->safe_src_index << CFG_SRC_SEL_SHIFT);

	update_config(rcg);

	clk_rcg2_clear_force_enable(hw);

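	/* Write back the stored configuration corresponding to current rate */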
	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
}

const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);

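/* Common APIs to be used for DFS based RCGR */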
static void clk_rcg2_dfs_populate_freq(struct clk_hw *hw, unsigned int l,
				       struct freq_tbl *f)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct clk_hw *p;
	unsigned long prate = 0;
	u32 val, mask, cfg, mode, src;
	int i, num_parents;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(l), &cfg);

	mask = BIT(rcg->hid_width) - 1;
	f->pre_div = 1;
	if (cfg & mask)
		f->pre_div = cfg & mask;

	src = cfg & CFG_SRC_SEL_MASK;
	src >>= CFG_SRC_SEL_SHIFT;

	num_parents = clk_hw_get_num_parents(hw);
	for (i = 0; i < num_parents; i++) {
		if (src == rcg->parent_map[i].cfg) {
			f->src = rcg->parent_map[i].src;
			p = clk_hw_get_parent_by_index(&rcg->clkr.hw, i);
			prate = clk_hw_get_rate(p);
		}
	}

	mode = cfg & CFG_MODE_MASK;
	mode >>= CFG_MODE_SHIFT;
	if (mode) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_M_DFSR(l),
			    &val);
		val &= mask;
		f->m = val;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_N_DFSR(l),
			    &val);
		val = ~val;
		val &= mask;
		val += f->m;
		f->n = val;
	}

	f->freq = calc_rate(prate, f->m, f->n, mode, f->pre_div);
}

static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2 *rcg)
{
	struct freq_tbl *freq_tbl;
	int i;

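	/* Allocate space for 1 extra since table is NULL terminated */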
	freq_tbl = kcalloc(MAX_PERF_LEVEL + 1, sizeof(*freq_tbl), GFP_KERNEL);
	if (!freq_tbl)
		return -ENOMEM;
	rcg->freq_tbl = freq_tbl;

	for (i = 0; i < MAX_PERF_LEVEL; i++)
		clk_rcg2_dfs_populate_freq(&rcg->clkr.hw, i, freq_tbl + i);

	return 0;
}

static int clk_rcg2_dfs_determine_rate(struct clk_hw *hw,
				       struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	if (!rcg->freq_tbl) {
		ret = clk_rcg2_dfs_populate_freq_table(rcg);
		if (ret) {
			pr_err("Failed to update DFS tables for %s\n",
			       clk_hw_get_name(hw));
			return ret;
		}
	}

	return clk_rcg2_determine_rate(hw, req);
}

static unsigned long
clk_rcg2_dfs_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 level, mask, cfg, m = 0, n = 0, mode, pre_div;

	regmap_read(rcg->clkr.regmap,
		    rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &level);
	level &= GENMASK(4, 1);
	level >>= 1;

	if (rcg->freq_tbl)
		return rcg->freq_tbl[level].freq;

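	/*
	 * Assume that parent_rate is actually the parent because
	 * we can't do any better at figuring it out when the table
	 * is NULL.
	 */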
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(level),
		    &cfg);

	mask = BIT(rcg->hid_width) - 1;
	pre_div = 1;
	if (cfg & mask)
		pre_div = cfg & mask;

	mode = cfg & CFG_MODE_MASK;
	mode >>= CFG_MODE_SHIFT;
	if (mode) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap,
			    rcg->cmd_rcgr + SE_PERF_M_DFSR(level), &m);
		m &= mask;

		regmap_read(rcg->clkr.regmap,
			    rcg->cmd_rcgr + SE_PERF_N_DFSR(level), &n);
		n = ~n;
		n &= mask;
		n += m;
	}

	return calc_rate(parent_rate, m, n, mode, pre_div);
}

static const struct clk_ops clk_rcg2_dfs_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.determine_rate = clk_rcg2_dfs_determine_rate,
	.recalc_rate = clk_rcg2_dfs_recalc_rate,
};

static int clk_rcg2_enable_dfs(const struct clk_rcg_dfs_data *data,
			       struct regmap *regmap)
{
	struct clk_rcg2 *rcg = data->rcg;
	struct clk_init_data *init = data->init;
	u32 val;
	int ret;

	ret = regmap_read(regmap, rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &val);
	if (ret)
		return -EINVAL;

	if (!(val & SE_CMD_DFS_EN))
		return 0;

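	/*
	 * Rate changes with consumer writing a register in
	 * their own I/O region
	 */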
	init->flags |= CLK_GET_RATE_NOCACHE;
	init->ops = &clk_rcg2_dfs_ops;

	rcg->freq_tbl = NULL;

	return 0;
}

int qcom_cc_register_rcg_dfs(struct regmap *regmap,
			     const struct clk_rcg_dfs_data *rcgs, size_t len)
{
	int i, ret;

	for (i = 0; i < len; i++) {
		ret = clk_rcg2_enable_dfs(&rcgs[i], regmap);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(qcom_cc_register_rcg_dfs);

static int clk_rcg2_dp_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);
	unsigned long num, den;

	rational_best_approximation(parent_rate, rate,
				    GENMASK(rcg->mnd_width - 1, 0),
				    GENMASK(rcg->mnd_width - 1, 0), &den, &num);

	if (!num || !den)
		return -EINVAL;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	hid_div = cfg;
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}
	}

	f.pre_div = hid_div;
	f.pre_div >>= CFG_SRC_DIV_SHIFT;
	f.pre_div &= mask;

	if (num != den) {
		f.m = num;
		f.n = den;
	} else {
		f.m = 0;
		f.n = 0;
	}

	return clk_rcg2_configure(rcg, &f);
}

static int clk_rcg2_dp_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg2_dp_set_rate(hw, rate, parent_rate);
}

static int clk_rcg2_dp_determine_rate(struct clk_hw *hw,
				      struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long num, den;
	u64 tmp;

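	/* Parent rate is a fixed phy link rate */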
	rational_best_approximation(req->best_parent_rate, req->rate,
				    GENMASK(rcg->mnd_width - 1, 0),
				    GENMASK(rcg->mnd_width - 1, 0), &den, &num);

	if (!num || !den)
		return -EINVAL;

	tmp = req->best_parent_rate * num;
	do_div(tmp, den);
	req->rate = tmp;

	return 0;
}

const struct clk_ops clk_dp_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_rcg2_dp_set_rate,
	.set_rate_and_parent = clk_rcg2_dp_set_rate_and_parent,
	.determine_rate = clk_rcg2_dp_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_dp_ops);