This source file includes the following definitions:
- clkgen_pll_is_locked
- clkgen_pll_is_enabled
- __clkgen_pll_enable
- clkgen_pll_enable
- __clkgen_pll_disable
- clkgen_pll_disable
- clk_pll3200c32_get_params
- clk_pll3200c32_get_rate
- recalc_stm_pll3200c32
- round_rate_stm_pll3200c32
- set_rate_stm_pll3200c32
- clk_pll4600c28_get_params
- clk_pll4600c28_get_rate
- recalc_stm_pll4600c28
- round_rate_stm_pll4600c28
- set_rate_stm_pll4600c28
- clkgen_pll_register
- clkgen_get_register_base
- clkgen_odf_register
- clkgen_c32_pll_setup
- clkgen_c32_pll0_setup
- clkgen_c32_pll1_setup
- clkgen_c32_plla9_setup
- clkgen_c28_plla9_setup
#include <linux/slab.h>
#include <linux/of_address.h>
#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/iopoll.h>

#include "clkgen.h"

static DEFINE_SPINLOCK(clkgena_c32_odf_lock);
DEFINE_SPINLOCK(clkgen_a9_lock);

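/* Bit masks for the PLL3200 C32 register fields (NDIV, IDF, ODF, LDF, CP). */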
#define C32_NDIV_MASK (0xff)
#define C32_IDF_MASK (0x7)
#define C32_ODF_MASK (0x3f)
#define C32_LDF_MASK (0x7f)
#define C32_CP_MASK (0x1f)

#define C32_MAX_ODFS (4)

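/* Bit masks for the PLL4600 C28 register fields (NDIV, IDF, ODF). */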
#define C28_NDIV_MASK (0xff)
#define C28_IDF_MASK (0x7)
#define C28_ODF_MASK (0x3f)

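/*
 * Static description of one PLL instance: where each control/status field
 * sits in the clockgen register bank (as clkgen_field offset/mask/shift
 * triplets), how many output dividers (ODFs) it exposes, and which clk_ops
 * drive it.
 */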
struct clkgen_pll_data {
	struct clkgen_field pdn_status;
	struct clkgen_field pdn_ctrl;
	struct clkgen_field locked_status;
	struct clkgen_field mdiv;
	struct clkgen_field ndiv;
	struct clkgen_field pdiv;
	struct clkgen_field idf;
	struct clkgen_field ldf;
	struct clkgen_field cp;
	unsigned int num_odfs;
	struct clkgen_field odf[C32_MAX_ODFS];
	struct clkgen_field odf_gate[C32_MAX_ODFS];
	bool switch2pll_en;
	struct clkgen_field switch2pll;
	spinlock_t *lock;
	const struct clk_ops *ops;
};

static const struct clk_ops stm_pll3200c32_ops;
static const struct clk_ops stm_pll3200c32_a9_ops;
static const struct clk_ops stm_pll4600c28_ops;

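/*
 * Per-SoC register layouts for the PLLs handled by this driver.  The A9
 * variants additionally drive a "switch2pll" bit and share clkgen_a9_lock;
 * the STiH407 A9 table also carries a charge-pump (CP) field.
 */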
static const struct clkgen_pll_data st_pll3200c32_cx_0 = {
	.pdn_status = CLKGEN_FIELD(0x2a0, 0x1, 8),
	.pdn_ctrl = CLKGEN_FIELD(0x2a0, 0x1, 8),
	.locked_status = CLKGEN_FIELD(0x2a0, 0x1, 24),
	.ndiv = CLKGEN_FIELD(0x2a4, C32_NDIV_MASK, 16),
	.idf = CLKGEN_FIELD(0x2a4, C32_IDF_MASK, 0x0),
	.num_odfs = 1,
	.odf = { CLKGEN_FIELD(0x2b4, C32_ODF_MASK, 0) },
	.odf_gate = { CLKGEN_FIELD(0x2b4, 0x1, 6) },
	.ops = &stm_pll3200c32_ops,
};

static const struct clkgen_pll_data st_pll3200c32_cx_1 = {
	.pdn_status = CLKGEN_FIELD(0x2c8, 0x1, 8),
	.pdn_ctrl = CLKGEN_FIELD(0x2c8, 0x1, 8),
	.locked_status = CLKGEN_FIELD(0x2c8, 0x1, 24),
	.ndiv = CLKGEN_FIELD(0x2cc, C32_NDIV_MASK, 16),
	.idf = CLKGEN_FIELD(0x2cc, C32_IDF_MASK, 0x0),
	.num_odfs = 1,
	.odf = { CLKGEN_FIELD(0x2dc, C32_ODF_MASK, 0) },
	.odf_gate = { CLKGEN_FIELD(0x2dc, 0x1, 6) },
	.ops = &stm_pll3200c32_ops,
};

static const struct clkgen_pll_data st_pll3200c32_407_a9 = {
	.pdn_status = CLKGEN_FIELD(0x1a8, 0x1, 0),
	.pdn_ctrl = CLKGEN_FIELD(0x1a8, 0x1, 0),
	.locked_status = CLKGEN_FIELD(0x87c, 0x1, 0),
	.ndiv = CLKGEN_FIELD(0x1b0, C32_NDIV_MASK, 0),
	.idf = CLKGEN_FIELD(0x1a8, C32_IDF_MASK, 25),
	.num_odfs = 1,
	.odf = { CLKGEN_FIELD(0x1b0, C32_ODF_MASK, 8) },
	.odf_gate = { CLKGEN_FIELD(0x1ac, 0x1, 28) },
	.switch2pll_en = true,
	.cp = CLKGEN_FIELD(0x1a8, C32_CP_MASK, 1),
	.switch2pll = CLKGEN_FIELD(0x1a4, 0x1, 1),
	.lock = &clkgen_a9_lock,
	.ops = &stm_pll3200c32_a9_ops,
};

static struct clkgen_pll_data st_pll4600c28_418_a9 = {
	.pdn_status = CLKGEN_FIELD(0x1a8, 0x1, 0),
	.pdn_ctrl = CLKGEN_FIELD(0x1a8, 0x1, 0),
	.locked_status = CLKGEN_FIELD(0x87c, 0x1, 0),
	.ndiv = CLKGEN_FIELD(0x1b0, C28_NDIV_MASK, 0),
	.idf = CLKGEN_FIELD(0x1a8, C28_IDF_MASK, 25),
	.num_odfs = 1,
	.odf = { CLKGEN_FIELD(0x1b0, C28_ODF_MASK, 8) },
	.odf_gate = { CLKGEN_FIELD(0x1ac, 0x1, 28) },
	.switch2pll_en = true,
	.switch2pll = CLKGEN_FIELD(0x1a4, 0x1, 1),
	.lock = &clkgen_a9_lock,
	.ops = &stm_pll4600c28_ops,
};
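/*
 * Runtime state of a registered PLL: the clk_hw handle, the static
 * descriptor above, the mapped register base and an optional spinlock shared
 * with the other clocks of the generator.  ndiv/idf/odf/cp cache divider
 * settings used by the set_rate paths.  struct stm_pll holds one candidate
 * parameter set while the get_params/get_rate helpers search for the best
 * match.
 */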
struct clkgen_pll {
	struct clk_hw hw;
	struct clkgen_pll_data *data;
	void __iomem *regs_base;
	spinlock_t *lock;

	u32 ndiv;
	u32 idf;
	u32 odf;
	u32 cp;
};

#define to_clkgen_pll(_hw) container_of(_hw, struct clkgen_pll, hw)

struct stm_pll {
	unsigned long mdiv;
	unsigned long ndiv;
	unsigned long pdiv;
	unsigned long odf;
	unsigned long idf;
	unsigned long ldf;
	unsigned long cp;
};

static int clkgen_pll_is_locked(struct clk_hw *hw)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	u32 locked = CLKGEN_READ(pll, locked_status);

	return !!locked;
}

static int clkgen_pll_is_enabled(struct clk_hw *hw)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	u32 poweroff = CLKGEN_READ(pll, pdn_status);

	return !poweroff;
}

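/*
 * Power the PLL up and poll the lock bit for up to 10ms before reporting
 * success.  On variants with switch2pll_en the switch2pll bit is then
 * cleared, which (judging by the disable path) routes the output from the
 * bypass back to the PLL.
 */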
static int __clkgen_pll_enable(struct clk_hw *hw)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	void __iomem *base = pll->regs_base;
	struct clkgen_field *field = &pll->data->locked_status;
	int ret = 0;
	u32 reg;

	if (clkgen_pll_is_enabled(hw))
		return 0;

	CLKGEN_WRITE(pll, pdn_ctrl, 0);

	ret = readl_relaxed_poll_timeout(base + field->offset, reg,
			!!((reg >> field->shift) & field->mask), 0, 10000);

	if (!ret) {
		if (pll->data->switch2pll_en)
			CLKGEN_WRITE(pll, switch2pll, 0);

		pr_debug("%s:%s enabled\n", __clk_get_name(hw->clk), __func__);
	}

	return ret;
}

static int clkgen_pll_enable(struct clk_hw *hw)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	unsigned long flags = 0;
	int ret = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	ret = __clkgen_pll_enable(hw);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static void __clkgen_pll_disable(struct clk_hw *hw)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);

	if (!clkgen_pll_is_enabled(hw))
		return;

	if (pll->data->switch2pll_en)
		CLKGEN_WRITE(pll, switch2pll, 1);

	CLKGEN_WRITE(pll, pdn_ctrl, 1);

	pr_debug("%s:%s disabled\n", __clk_get_name(hw->clk), __func__);
}

static void clkgen_pll_disable(struct clk_hw *hw)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	unsigned long flags = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	__clkgen_pll_disable(hw);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}

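/*
 * Pick IDF/NDIV for the 3200 C32 PLL so that Fout = 2 * Fin * NDIV / IDF is
 * as close as possible to the requested output, scanning IDF over 1..7 and
 * keeping NDIV within 8..200.  Only outputs between 800MHz and 1.6GHz are
 * accepted.  The charge-pump code is then taken from cp_table: cp starts at
 * 6 and increases while NDIV exceeds the corresponding table entry.
 */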
static int clk_pll3200c32_get_params(unsigned long input, unsigned long output,
		struct stm_pll *pll)
{
	unsigned long i, n;
	unsigned long deviation = ~0;
	unsigned long new_freq;
	long new_deviation;

	static const unsigned char cp_table[] = {
		48, 56, 64, 72, 80, 88, 96, 104, 112, 120,
		128, 136, 144, 152, 160, 168, 176, 184, 192
	};

	if (output < 800000000 || output > 1600000000)
		return -EINVAL;

	input /= 1000;
	output /= 1000;

	for (i = 1; i <= 7 && deviation; i++) {
		n = i * output / (2 * input);

		if (n < 8)
			continue;
		if (n > 200)
			break;

		new_freq = (input * 2 * n) / i;

		new_deviation = abs(new_freq - output);

		if (!new_deviation || new_deviation < deviation) {
			pll->idf = i;
			pll->ndiv = n;
			deviation = new_deviation;
		}
	}

	if (deviation == ~0)
		return -EINVAL;

	for (pll->cp = 6; pll->ndiv > cp_table[pll->cp-6]; (pll->cp)++)
		;

	return 0;
}

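/*
 * Output rate for a given 3200 C32 IDF/NDIV pair: rate = 2 * Fin * NDIV / IDF
 * (an IDF of 0 is treated as 1).  Illustrative example (values assumed, not
 * taken from a datasheet): Fin = 30MHz, NDIV = 100, IDF = 5 gives
 * 2 * 30MHz * 100 / 5 = 1.2GHz.
 */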
static int clk_pll3200c32_get_rate(unsigned long input, struct stm_pll *pll,
		unsigned long *rate)
{
	if (!pll->idf)
		pll->idf = 1;

	*rate = ((2 * (input / 1000) * pll->ndiv) / pll->idf) * 1000;

	return 0;
}

static unsigned long recalc_stm_pll3200c32(struct clk_hw *hw,
		unsigned long parent_rate)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	unsigned long ndiv, idf;
	unsigned long rate = 0;

	if (!clkgen_pll_is_enabled(hw) || !clkgen_pll_is_locked(hw))
		return 0;

	ndiv = CLKGEN_READ(pll, ndiv);
	idf = CLKGEN_READ(pll, idf);

	if (idf)
		rate = ((2 * (parent_rate / 1000) * ndiv) / idf) * 1000;

	pr_debug("%s:%s rate %lu\n", clk_hw_get_name(hw), __func__, rate);

	return rate;
}

static long round_rate_stm_pll3200c32(struct clk_hw *hw, unsigned long rate,
		unsigned long *prate)
{
	struct stm_pll params;

	if (!clk_pll3200c32_get_params(*prate, rate, &params))
		clk_pll3200c32_get_rate(*prate, &params, &rate);
	else {
		pr_debug("%s: %s rate %ld Invalid\n", __func__,
			 __clk_get_name(hw->clk), rate);
		return 0;
	}

	pr_debug("%s: %s new rate %ld [ndiv=%u] [idf=%u]\n",
		 __func__, __clk_get_name(hw->clk),
		 rate, (unsigned int)params.ndiv,
		 (unsigned int)params.idf);

	return rate;
}

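/*
 * Reprogram the 3200 C32 dividers: compute NDIV/IDF/CP for the new rate,
 * take the PLL down, write the fields under the (optional) generator lock,
 * then re-enable it, which also waits for lock again.
 */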
static int set_rate_stm_pll3200c32(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	struct stm_pll params;
	long hwrate = 0;
	unsigned long flags = 0;

	if (!rate || !parent_rate)
		return -EINVAL;

	if (!clk_pll3200c32_get_params(parent_rate, rate, &params))
		clk_pll3200c32_get_rate(parent_rate, &params, &hwrate);

	pr_debug("%s: %s new rate %ld [ndiv=0x%x] [idf=0x%x]\n",
		 __func__, __clk_get_name(hw->clk),
		 hwrate, (unsigned int)params.ndiv,
		 (unsigned int)params.idf);

	if (!hwrate)
		return -EINVAL;

	pll->ndiv = params.ndiv;
	pll->idf = params.idf;
	pll->cp = params.cp;

	__clkgen_pll_disable(hw);

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	CLKGEN_WRITE(pll, ndiv, pll->ndiv);
	CLKGEN_WRITE(pll, idf, pll->idf);
	CLKGEN_WRITE(pll, cp, pll->cp);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	__clkgen_pll_enable(hw);

	return 0;
}
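/*
 * Pick IDF/NDIV for the 4600 C28 PLL.  Constraints enforced below: the
 * divided input (Fin / IDF) must stay within 4MHz..50MHz, NDIV within 8..246
 * and the output (2 * NDIV * Fin / IDF) within 19MHz..3GHz.  Candidates below
 * the requested rate are rejected, so the result is the closest achievable
 * rate at or above the request.
 */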
static int clk_pll4600c28_get_params(unsigned long input, unsigned long output,
		struct stm_pll *pll)
{
	unsigned long i, infin, n;
	unsigned long deviation = ~0;
	unsigned long new_freq, new_deviation;

	if (output < 19000000 || output > 3000000000u)
		return -EINVAL;

	for (i = 1; i <= 7 && deviation; i++) {
		infin = input / i;
		if (infin < 4000000 || infin > 50000000)
			continue;

		n = output / (infin * 2);
		if (n < 8 || n > 246)
			continue;
		if (n < 246)
			n++;

		for (; n >= 8 && deviation; n--) {
			new_freq = infin * 2 * n;
			if (new_freq < output)
				break;

			new_deviation = new_freq - output;
			if (!new_deviation || new_deviation < deviation) {
				pll->idf = i;
				pll->ndiv = n;
				deviation = new_deviation;
			}
		}
	}

	if (deviation == ~0)
		return -EINVAL;

	return 0;
}

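/*
 * Output rate for the 4600 C28 dividers: rate = (Fin / IDF) * 2 * NDIV
 * (an IDF of 0 is treated as 1).  Illustrative example (values assumed):
 * Fin = 30MHz, IDF = 2, NDIV = 50 gives 15MHz * 2 * 50 = 1.5GHz.
 */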
static int clk_pll4600c28_get_rate(unsigned long input, struct stm_pll *pll,
		unsigned long *rate)
{
	if (!pll->idf)
		pll->idf = 1;

	*rate = (input / pll->idf) * 2 * pll->ndiv;

	return 0;
}

static unsigned long recalc_stm_pll4600c28(struct clk_hw *hw,
		unsigned long parent_rate)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	struct stm_pll params;
	unsigned long rate;

	if (!clkgen_pll_is_enabled(hw) || !clkgen_pll_is_locked(hw))
		return 0;

	params.ndiv = CLKGEN_READ(pll, ndiv);
	params.idf = CLKGEN_READ(pll, idf);

	clk_pll4600c28_get_rate(parent_rate, &params, &rate);

	pr_debug("%s:%s rate %lu\n", __clk_get_name(hw->clk), __func__, rate);

	return rate;
}

static long round_rate_stm_pll4600c28(struct clk_hw *hw, unsigned long rate,
		unsigned long *prate)
{
	struct stm_pll params;

	if (!clk_pll4600c28_get_params(*prate, rate, &params)) {
		clk_pll4600c28_get_rate(*prate, &params, &rate);
	} else {
		pr_debug("%s: %s rate %ld Invalid\n", __func__,
			 __clk_get_name(hw->clk), rate);
		return 0;
	}

	pr_debug("%s: %s new rate %ld [ndiv=%u] [idf=%u]\n",
		 __func__, __clk_get_name(hw->clk),
		 rate, (unsigned int)params.ndiv,
		 (unsigned int)params.idf);

	return rate;
}

static int set_rate_stm_pll4600c28(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate)
{
	struct clkgen_pll *pll = to_clkgen_pll(hw);
	struct stm_pll params;
	long hwrate;
	unsigned long flags = 0;

	if (!rate || !parent_rate)
		return -EINVAL;

	if (!clk_pll4600c28_get_params(parent_rate, rate, &params)) {
		clk_pll4600c28_get_rate(parent_rate, &params, &hwrate);
	} else {
		pr_debug("%s: %s rate %ld Invalid\n", __func__,
			 __clk_get_name(hw->clk), rate);
		return -EINVAL;
	}

	pr_debug("%s: %s new rate %ld [ndiv=0x%x] [idf=0x%x]\n",
		 __func__, __clk_get_name(hw->clk),
		 hwrate, (unsigned int)params.ndiv,
		 (unsigned int)params.idf);

	if (!hwrate)
		return -EINVAL;

	pll->ndiv = params.ndiv;
	pll->idf = params.idf;

	__clkgen_pll_disable(hw);

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	CLKGEN_WRITE(pll, ndiv, pll->ndiv);
	CLKGEN_WRITE(pll, idf, pll->idf);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	__clkgen_pll_enable(hw);

	return 0;
}

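/*
 * The plain C32 ops only report the rate (presumably left as programmed by
 * the bootloader), while the A9 ops also implement round_rate/set_rate for
 * CPU frequency scaling.
 */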
static const struct clk_ops stm_pll3200c32_ops = {
	.enable = clkgen_pll_enable,
	.disable = clkgen_pll_disable,
	.is_enabled = clkgen_pll_is_enabled,
	.recalc_rate = recalc_stm_pll3200c32,
};

static const struct clk_ops stm_pll3200c32_a9_ops = {
	.enable = clkgen_pll_enable,
	.disable = clkgen_pll_disable,
	.is_enabled = clkgen_pll_is_enabled,
	.recalc_rate = recalc_stm_pll3200c32,
	.round_rate = round_rate_stm_pll3200c32,
	.set_rate = set_rate_stm_pll3200c32,
};

static const struct clk_ops stm_pll4600c28_ops = {
	.enable = clkgen_pll_enable,
	.disable = clkgen_pll_disable,
	.is_enabled = clkgen_pll_is_enabled,
	.recalc_rate = recalc_stm_pll4600c28,
	.round_rate = round_rate_stm_pll4600c28,
	.set_rate = set_rate_stm_pll4600c28,
};

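/*
 * Register the PLL itself with the common clock framework.
 * CLK_GET_RATE_NOCACHE is always OR'd in so the rate is re-read from the
 * hardware on every query.
 */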
static struct clk * __init clkgen_pll_register(const char *parent_name,
		struct clkgen_pll_data *pll_data,
		void __iomem *reg, unsigned long pll_flags,
		const char *clk_name, spinlock_t *lock)
{
	struct clkgen_pll *pll;
	struct clk *clk;
	struct clk_init_data init;

	pll = kzalloc(sizeof(*pll), GFP_KERNEL);
	if (!pll)
		return ERR_PTR(-ENOMEM);

	init.name = clk_name;
	init.ops = pll_data->ops;
	init.flags = pll_flags | CLK_GET_RATE_NOCACHE;
	init.parent_names = &parent_name;
	init.num_parents = 1;

	pll->data = pll_data;
	pll->regs_base = reg;
	pll->hw.init = &init;
	pll->lock = lock;

	clk = clk_register(NULL, &pll->hw);
	if (IS_ERR(clk)) {
		kfree(pll);
		return clk;
	}

	pr_debug("%s: parent %s rate %lu\n",
		 __clk_get_name(clk),
		 __clk_get_name(clk_get_parent(clk)),
		 clk_get_rate(clk));

	return clk;
}

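/*
 * The PLL control registers live in the surrounding clockgen's register
 * bank, so the base address is mapped from the parent of the PLL's
 * device-tree node.
 */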
static void __iomem * __init clkgen_get_register_base(
		struct device_node *np)
{
	struct device_node *pnode;
	void __iomem *reg = NULL;

	pnode = of_get_parent(np);
	if (!pnode)
		return NULL;

	reg = of_iomap(pnode, 0);

	of_node_put(pnode);
	return reg;
}

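/*
 * Each output divider is registered as a composite clock made of a
 * clk_divider (the ODF field) and a clk_gate (the ODF gate bit, where
 * setting the bit disables the output, hence CLK_GATE_SET_TO_DISABLE),
 * both serialised by the generator-wide ODF spinlock.
 */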
static struct clk * __init clkgen_odf_register(const char *parent_name,
		void __iomem *reg,
		struct clkgen_pll_data *pll_data,
		unsigned long pll_flags, int odf,
		spinlock_t *odf_lock,
		const char *odf_name)
{
	struct clk *clk;
	unsigned long flags;
	struct clk_gate *gate;
	struct clk_divider *div;

	flags = pll_flags | CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT;

	gate = kzalloc(sizeof(*gate), GFP_KERNEL);
	if (!gate)
		return ERR_PTR(-ENOMEM);

	gate->flags = CLK_GATE_SET_TO_DISABLE;
	gate->reg = reg + pll_data->odf_gate[odf].offset;
	gate->bit_idx = pll_data->odf_gate[odf].shift;
	gate->lock = odf_lock;

	div = kzalloc(sizeof(*div), GFP_KERNEL);
	if (!div) {
		kfree(gate);
		return ERR_PTR(-ENOMEM);
	}

	div->flags = CLK_DIVIDER_ONE_BASED | CLK_DIVIDER_ALLOW_ZERO;
	div->reg = reg + pll_data->odf[odf].offset;
	div->shift = pll_data->odf[odf].shift;
	div->width = fls(pll_data->odf[odf].mask);
	div->lock = odf_lock;

	clk = clk_register_composite(NULL, odf_name, &parent_name, 1,
				     NULL, NULL,
				     &div->hw, &clk_divider_ops,
				     &gate->hw, &clk_gate_ops,
				     flags);
	if (IS_ERR(clk))
		return clk;

	pr_debug("%s: parent %s rate %lu\n",
		 __clk_get_name(clk),
		 __clk_get_name(clk_get_parent(clk)),
		 clk_get_rate(clk));
	return clk;
}
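/*
 * Device-tree setup shared by all variants: map the registers, register the
 * PLL against its parent clock, then register one ODF clock per
 * "clock-output-names" entry and expose them through a onecell provider.
 * of_clk_detect_critical() marks the PLL and ODFs critical where the DT
 * requests it.
 */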
static void __init clkgen_c32_pll_setup(struct device_node *np,
		struct clkgen_pll_data *data)
{
	struct clk *clk;
	const char *parent_name, *pll_name;
	void __iomem *pll_base;
	int num_odfs, odf;
	struct clk_onecell_data *clk_data;
	unsigned long pll_flags = 0;

	parent_name = of_clk_get_parent_name(np, 0);
	if (!parent_name)
		return;

	pll_base = clkgen_get_register_base(np);
	if (!pll_base)
		return;

	of_clk_detect_critical(np, 0, &pll_flags);

	clk = clkgen_pll_register(parent_name, data, pll_base, pll_flags,
				  np->name, data->lock);
	if (IS_ERR(clk))
		return;

	pll_name = __clk_get_name(clk);

	num_odfs = data->num_odfs;

	clk_data = kzalloc(sizeof(*clk_data), GFP_KERNEL);
	if (!clk_data)
		return;

	clk_data->clk_num = num_odfs;
	clk_data->clks = kcalloc(clk_data->clk_num, sizeof(struct clk *),
				 GFP_KERNEL);

	if (!clk_data->clks)
		goto err;

	for (odf = 0; odf < num_odfs; odf++) {
		struct clk *clk;
		const char *clk_name;
		unsigned long odf_flags = 0;

		if (of_property_read_string_index(np, "clock-output-names",
						  odf, &clk_name))
			return;

		of_clk_detect_critical(np, odf, &odf_flags);

		clk = clkgen_odf_register(pll_name, pll_base, data, odf_flags,
					  odf, &clkgena_c32_odf_lock, clk_name);
		if (IS_ERR(clk))
			goto err;

		clk_data->clks[odf] = clk;
	}

	of_clk_add_provider(np, of_clk_src_onecell_get, clk_data);
	return;

err:
	kfree(pll_name);
	kfree(clk_data->clks);
	kfree(clk_data);
}
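/*
 * One CLK_OF_DECLARE() entry per compatible string, each binding a
 * device-tree match to the corresponding register-layout table above.
 */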
static void __init clkgen_c32_pll0_setup(struct device_node *np)
{
	clkgen_c32_pll_setup(np,
			(struct clkgen_pll_data *) &st_pll3200c32_cx_0);
}
CLK_OF_DECLARE(c32_pll0, "st,clkgen-pll0", clkgen_c32_pll0_setup);

static void __init clkgen_c32_pll1_setup(struct device_node *np)
{
	clkgen_c32_pll_setup(np,
			(struct clkgen_pll_data *) &st_pll3200c32_cx_1);
}
CLK_OF_DECLARE(c32_pll1, "st,clkgen-pll1", clkgen_c32_pll1_setup);

static void __init clkgen_c32_plla9_setup(struct device_node *np)
{
	clkgen_c32_pll_setup(np,
			(struct clkgen_pll_data *) &st_pll3200c32_407_a9);
}
CLK_OF_DECLARE(c32_plla9, "st,stih407-clkgen-plla9", clkgen_c32_plla9_setup);

static void __init clkgen_c28_plla9_setup(struct device_node *np)
{
	clkgen_c32_pll_setup(np,
			(struct clkgen_pll_data *) &st_pll4600c28_418_a9);
}
CLK_OF_DECLARE(c28_plla9, "st,stih418-clkgen-plla9", clkgen_c28_plla9_setup);