1/*
2 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
3 *
4 * This software is licensed under the terms of the GNU General Public
5 * License version 2, as published by the Free Software Foundation, and
6 * may be copied, distributed, and modified under those terms.
7 *
8 * This program is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11 * GNU General Public License for more details.
12 */
13
14#include <linux/kernel.h>
15#include <linux/bitops.h>
16#include <linux/err.h>
17#include <linux/bug.h>
18#include <linux/export.h>
19#include <linux/clk-provider.h>
20#include <linux/delay.h>
21#include <linux/regmap.h>
22#include <linux/math64.h>
23
24#include <asm/div64.h>
25
26#include "clk-rcg.h"
27#include "common.h"
28
/*
 * Register offsets below are relative to rcg->cmd_rcgr.
 *
 * CMD: update/enable control and dirty status.
 */
#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)	/* latch CFG/M/N/D into the RCG */
#define CMD_ROOT_EN		BIT(1)	/* force the root clock on */
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)	/* status: root clock is off */

/* CFG: source select, half-integer divider and MND mode */
#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)	/* MND counter active */

/* M/N/D counter value registers */
#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10
49
50static int clk_rcg2_is_enabled(struct clk_hw *hw)
51{
52	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
53	u32 cmd;
54	int ret;
55
56	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
57	if (ret)
58		return ret;
59
60	return (cmd & CMD_ROOT_OFF) == 0;
61}
62
63static u8 clk_rcg2_get_parent(struct clk_hw *hw)
64{
65	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
66	int num_parents = clk_hw_get_num_parents(hw);
67	u32 cfg;
68	int i, ret;
69
70	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
71	if (ret)
72		goto err;
73
74	cfg &= CFG_SRC_SEL_MASK;
75	cfg >>= CFG_SRC_SEL_SHIFT;
76
77	for (i = 0; i < num_parents; i++)
78		if (cfg == rcg->parent_map[i].cfg)
79			return i;
80
81err:
82	pr_debug("%s: Clock %s has invalid parent, using default.\n",
83		 __func__, clk_hw_get_name(hw));
84	return 0;
85}
86
87static int update_config(struct clk_rcg2 *rcg)
88{
89	int count, ret;
90	u32 cmd;
91	struct clk_hw *hw = &rcg->clkr.hw;
92	const char *name = clk_hw_get_name(hw);
93
94	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
95				 CMD_UPDATE, CMD_UPDATE);
96	if (ret)
97		return ret;
98
99	/* Wait for update to take effect */
100	for (count = 500; count > 0; count--) {
101		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
102		if (ret)
103			return ret;
104		if (!(cmd & CMD_UPDATE))
105			return 0;
106		udelay(1);
107	}
108
109	WARN(1, "%s: rcg didn't update its configuration.", name);
110	return 0;
111}
112
113static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
114{
115	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
116	int ret;
117	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
118
119	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
120				 CFG_SRC_SEL_MASK, cfg);
121	if (ret)
122		return ret;
123
124	return update_config(rcg);
125}
126
127/*
128 * Calculate m/n:d rate
129 *
130 *          parent_rate     m
131 *   rate = ----------- x  ---
132 *            hid_div       n
133 */
134static unsigned long
135calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
136{
137	if (hid_div) {
138		rate *= 2;
139		rate /= hid_div + 1;
140	}
141
142	if (mode) {
143		u64 tmp = rate;
144		tmp *= m;
145		do_div(tmp, n);
146		rate = tmp;
147	}
148
149	return rate;
150}
151
152static unsigned long
153clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
154{
155	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
156	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;
157
158	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
159
160	if (rcg->mnd_width) {
161		mask = BIT(rcg->mnd_width) - 1;
162		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
163		m &= mask;
164		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
165		n =  ~n;
166		n &= mask;
167		n += m;
168		mode = cfg & CFG_MODE_MASK;
169		mode >>= CFG_MODE_SHIFT;
170	}
171
172	mask = BIT(rcg->hid_width) - 1;
173	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
174	hid_div &= mask;
175
176	return calc_rate(parent_rate, m, n, mode, hid_div);
177}
178
/*
 * Pick the best frequency-table entry for req->rate and fill in the
 * rate request: the parent to use, the parent rate required, and the
 * output rate that will result.
 *
 * Returns 0 on success, -EINVAL if no table entry fits, or a negative
 * errno if the entry's source has no mapping in rcg->parent_map.
 */
static int _freq_tbl_determine_rate(struct clk_hw *hw,
		const struct freq_tbl *f, struct clk_rate_request *req)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int index;

	f = qcom_find_freq(f, rate);
	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		/*
		 * We may adjust the parent: invert the divider and m/n
		 * scaling to compute the parent rate that yields f->freq.
		 */
		if (f->pre_div) {
			/* pre_div encodes 2 * div - 1 */
			rate /= 2;
			rate *= f->pre_div + 1;
		}

		if (f->n) {
			/* parent = rate * n / m (inverse of rate * m / n) */
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		/* Parent is fixed; just report its current rate. */
		rate =  clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}
218
219static int clk_rcg2_determine_rate(struct clk_hw *hw,
220				   struct clk_rate_request *req)
221{
222	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
223
224	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req);
225}
226
/*
 * Program the RCG for the given frequency-table entry and latch the new
 * configuration with update_config().
 *
 * Register encodings (see the IP's MND counter scheme):
 *   M_REG = m, N_REG = ~(n - m), D_REG = ~n, and CFG's divider field
 *   holds (2 * div - 1) in f->pre_div. Dual-edge mode is required
 *   whenever the m/n counter actually divides (m != n).
 *
 * Returns 0 on success or a negative errno (including a failed source
 * lookup in rcg->parent_map).
 */
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	/* Only touch M/N/D when this RCG has an MND counter and it's used */
	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	/* Write divider, source select and mode in a single CFG update */
	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;
	ret = regmap_update_bits(rcg->clkr.regmap,
			rcg->cmd_rcgr + CFG_REG, mask, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}
267
268static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate)
269{
270	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
271	const struct freq_tbl *f;
272
273	f = qcom_find_freq(rcg->freq_tbl, rate);
274	if (!f)
275		return -EINVAL;
276
277	return clk_rcg2_configure(rcg, f);
278}
279
/* .set_rate hook; the parent rate is not needed since the freq table
 * entry fully determines the configuration. */
static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate);
}
285
/* .set_rate_and_parent hook; clk_rcg2_configure() programs both the
 * source select and the divider in one update, so @index is unused. */
static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate);
}
291
/* Standard ops for root clock generators driven by a frequency table. */
const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);
302
303static int clk_rcg2_shared_force_enable(struct clk_hw *hw, unsigned long rate)
304{
305	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
306	const char *name = clk_hw_get_name(hw);
307	int ret, count;
308
309	/* force enable RCG */
310	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
311				 CMD_ROOT_EN, CMD_ROOT_EN);
312	if (ret)
313		return ret;
314
315	/* wait for RCG to turn ON */
316	for (count = 500; count > 0; count--) {
317		ret = clk_rcg2_is_enabled(hw);
318		if (ret)
319			break;
320		udelay(1);
321	}
322	if (!count)
323		pr_err("%s: RCG did not turn on\n", name);
324
325	/* set clock rate */
326	ret = __clk_rcg2_set_rate(hw, rate);
327	if (ret)
328		return ret;
329
330	/* clear force enable RCG */
331	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
332				 CMD_ROOT_EN, 0);
333}
334
335static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
336				    unsigned long parent_rate)
337{
338	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
339
340	/* cache the rate */
341	rcg->current_freq = rate;
342
343	if (!__clk_is_enabled(hw->clk))
344		return 0;
345
346	return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
347}
348
349static unsigned long
350clk_rcg2_shared_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
351{
352	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
353
354	return rcg->current_freq = clk_rcg2_recalc_rate(hw, parent_rate);
355}
356
/* On enable, reprogram the RCG with the last cached frequency. */
static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
}
363
/* On disable, park the RCG at its slowest supported rate. */
static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	/* switch to XO, which is the lowest entry in the freq table */
	clk_rcg2_shared_set_rate(hw, rcg->freq_tbl[0].freq, 0);
}
371
/*
 * Ops for RCGs shared with other processors/owners: rate changes are
 * cached while the clock is off and applied via force-enable cycles.
 */
const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.recalc_rate = clk_rcg2_shared_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);
381
/* One m/n fraction candidate for pixel/eDP clocks; terminated by {0,0}. */
struct frac_entry {
	int num;	/* numerator (m) */
	int den;	/* denominator (n) */
};
386
/* Fractions of a 675 MHz source for eDP pixel rates */
static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};
397
398static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
399	{ 31, 211 },	/* 119 M */
400	{ 32, 199 },	/* 130.25 M */
401	{ 63, 307 },	/* 138.50 M */
402	{ 11, 60 },	/* 148.50 M */
403	{ 50, 263 },	/* 154 M */
404	{ 31, 120 },	/* 205.25 M */
405	{ 119, 359 },	/* 268.50 M */
406	{ },
407};
408
/*
 * Set an eDP pixel clock rate by searching the fraction table that
 * matches the fixed parent rate (810 MHz or 675 MHz) for an m/n pair
 * such that parent_rate ~= rate * den / num within +/-100 kHz.
 * The current divider is read back from CFG and preserved; only m/n
 * change. Returns -EINVAL when no fraction fits.
 */
static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* match tolerance in Hz */
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* source rate this fraction would need: rate * den / num */
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		/* keep the divider currently programmed in hardware */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}
447
/* .set_rate_and_parent hook; @index is ignored (see comment below). */
static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}
454
/*
 * Round an eDP pixel rate request. The parent is forced to the source
 * named by the first frequency-table entry; the achievable rate is then
 * the closest fraction of that fixed parent rate (within +/-100 kHz),
 * computed with the divider currently programmed in hardware.
 * Returns -EINVAL when no table fraction fits.
 */
static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* match tolerance in Hz */
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* source rate this fraction would need: rate * den / num */
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}
497
/* Ops for the eDP pixel clock (fractional m/n against a fixed parent). */
const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
508
509static int clk_byte_determine_rate(struct clk_hw *hw,
510				   struct clk_rate_request *req)
511{
512	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
513	const struct freq_tbl *f = rcg->freq_tbl;
514	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
515	unsigned long parent_rate, div;
516	u32 mask = BIT(rcg->hid_width) - 1;
517	struct clk_hw *p;
518
519	if (req->rate == 0)
520		return -EINVAL;
521
522	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
523	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);
524
525	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
526	div = min_t(u32, div, mask);
527
528	req->rate = calc_rate(parent_rate, 0, 0, 0, div);
529
530	return 0;
531}
532
533static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
534			 unsigned long parent_rate)
535{
536	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
537	struct freq_tbl f = *rcg->freq_tbl;
538	unsigned long div;
539	u32 mask = BIT(rcg->hid_width) - 1;
540
541	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
542	div = min_t(u32, div, mask);
543
544	f.pre_div = div;
545
546	return clk_rcg2_configure(rcg, &f);
547}
548
/* .set_rate_and_parent hook; @index is ignored (see comment below). */
static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}
555
/* Ops for the DSI byte clock (divider-only against a fixed parent). */
const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);
566
567static int clk_byte2_determine_rate(struct clk_hw *hw,
568				    struct clk_rate_request *req)
569{
570	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
571	unsigned long parent_rate, div;
572	u32 mask = BIT(rcg->hid_width) - 1;
573	struct clk_hw *p;
574	unsigned long rate = req->rate;
575
576	if (rate == 0)
577		return -EINVAL;
578
579	p = req->best_parent_hw;
580	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);
581
582	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
583	div = min_t(u32, div, mask);
584
585	req->rate = calc_rate(parent_rate, 0, 0, 0, div);
586
587	return 0;
588}
589
590static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
591			 unsigned long parent_rate)
592{
593	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
594	struct freq_tbl f = { 0 };
595	unsigned long div;
596	int i, num_parents = clk_hw_get_num_parents(hw);
597	u32 mask = BIT(rcg->hid_width) - 1;
598	u32 cfg;
599
600	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
601	div = min_t(u32, div, mask);
602
603	f.pre_div = div;
604
605	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
606	cfg &= CFG_SRC_SEL_MASK;
607	cfg >>= CFG_SRC_SEL_SHIFT;
608
609	for (i = 0; i < num_parents; i++) {
610		if (cfg == rcg->parent_map[i].cfg) {
611			f.src = rcg->parent_map[i].src;
612			return clk_rcg2_configure(rcg, &f);
613		}
614	}
615
616	return -EINVAL;
617}
618
/* .set_rate_and_parent hook; @index is ignored (see comment below). */
static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_byte2_set_rate(hw, rate, parent_rate);
}
625
/* Ops for byte clocks whose source is read back from hardware. */
const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);
636
/* m/n fraction candidates tried in order for pixel clocks */
static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};
644
645static int clk_pixel_determine_rate(struct clk_hw *hw,
646				    struct clk_rate_request *req)
647{
648	unsigned long request, src_rate;
649	int delta = 100000;
650	const struct frac_entry *frac = frac_table_pixel;
651
652	for (; frac->num; frac++) {
653		request = (req->rate * frac->den) / frac->num;
654
655		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
656		if ((src_rate < (request - delta)) ||
657			(src_rate > (request + delta)))
658			continue;
659
660		req->best_parent_rate = src_rate;
661		req->rate = (src_rate * frac->num) / frac->den;
662		return 0;
663	}
664
665	return -EINVAL;
666}
667
/*
 * Program the pixel clock: keep the source currently selected in
 * hardware, then find an m/n fraction such that parent_rate matches
 * rate * den / num within +/-100 kHz, preserving the existing divider.
 * Returns -EINVAL when no fraction fits.
 */
static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;	/* match tolerance in Hz */
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	/* reuse the source already programmed in CFG */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		/* source rate this fraction would need */
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
			(parent_rate > (request + delta)))
			continue;

		/* keep the divider currently programmed in hardware */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}
709
/* .set_rate_and_parent hook; the source is read back from hardware in
 * clk_pixel_set_rate(), so @index is unused. */
static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}
715
/* Ops for display pixel clocks (fractional m/n, source kept as-is). */
const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);
726