root/drivers/clk/qcom/clk-rcg2.c

/* [<][>][^][v][top][bottom][index][help] */

DEFINITIONS

This source file includes the following definitions.
  1. clk_rcg2_is_enabled
  2. clk_rcg2_get_parent
  3. update_config
  4. clk_rcg2_set_parent
  5. calc_rate
  6. clk_rcg2_recalc_rate
  7. _freq_tbl_determine_rate
  8. clk_rcg2_determine_rate
  9. clk_rcg2_determine_floor_rate
  10. __clk_rcg2_configure
  11. clk_rcg2_configure
  12. __clk_rcg2_set_rate
  13. clk_rcg2_set_rate
  14. clk_rcg2_set_floor_rate
  15. clk_rcg2_set_rate_and_parent
  16. clk_rcg2_set_floor_rate_and_parent
  17. clk_edp_pixel_set_rate
  18. clk_edp_pixel_set_rate_and_parent
  19. clk_edp_pixel_determine_rate
  20. clk_byte_determine_rate
  21. clk_byte_set_rate
  22. clk_byte_set_rate_and_parent
  23. clk_byte2_determine_rate
  24. clk_byte2_set_rate
  25. clk_byte2_set_rate_and_parent
  26. clk_pixel_determine_rate
  27. clk_pixel_set_rate
  28. clk_pixel_set_rate_and_parent
  29. clk_gfx3d_determine_rate
  30. clk_gfx3d_set_rate_and_parent
  31. clk_gfx3d_set_rate
  32. clk_rcg2_set_force_enable
  33. clk_rcg2_clear_force_enable
  34. clk_rcg2_shared_force_enable_clear
  35. clk_rcg2_shared_set_rate
  36. clk_rcg2_shared_set_rate_and_parent
  37. clk_rcg2_shared_enable
  38. clk_rcg2_shared_disable
  39. clk_rcg2_dfs_populate_freq
  40. clk_rcg2_dfs_populate_freq_table
  41. clk_rcg2_dfs_determine_rate
  42. clk_rcg2_dfs_recalc_rate
  43. clk_rcg2_enable_dfs
  44. qcom_cc_register_rcg_dfs

   1 // SPDX-License-Identifier: GPL-2.0
   2 /*
   3  * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
   4  */
   5 
   6 #include <linux/kernel.h>
   7 #include <linux/bitops.h>
   8 #include <linux/err.h>
   9 #include <linux/bug.h>
  10 #include <linux/export.h>
  11 #include <linux/clk-provider.h>
  12 #include <linux/delay.h>
  13 #include <linux/regmap.h>
  14 #include <linux/math64.h>
  15 #include <linux/slab.h>
  16 
  17 #include <asm/div64.h>
  18 
  19 #include "clk-rcg.h"
  20 #include "common.h"
  21 
  22 #define CMD_REG                 0x0
  23 #define CMD_UPDATE              BIT(0)
  24 #define CMD_ROOT_EN             BIT(1)
  25 #define CMD_DIRTY_CFG           BIT(4)
  26 #define CMD_DIRTY_N             BIT(5)
  27 #define CMD_DIRTY_M             BIT(6)
  28 #define CMD_DIRTY_D             BIT(7)
  29 #define CMD_ROOT_OFF            BIT(31)
  30 
  31 #define CFG_REG                 0x4
  32 #define CFG_SRC_DIV_SHIFT       0
  33 #define CFG_SRC_SEL_SHIFT       8
  34 #define CFG_SRC_SEL_MASK        (0x7 << CFG_SRC_SEL_SHIFT)
  35 #define CFG_MODE_SHIFT          12
  36 #define CFG_MODE_MASK           (0x3 << CFG_MODE_SHIFT)
  37 #define CFG_MODE_DUAL_EDGE      (0x2 << CFG_MODE_SHIFT)
  38 #define CFG_HW_CLK_CTRL_MASK    BIT(20)
  39 
  40 #define M_REG                   0x8
  41 #define N_REG                   0xc
  42 #define D_REG                   0x10
  43 
  44 #define RCG_CFG_OFFSET(rcg)     ((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
  45 #define RCG_M_OFFSET(rcg)       ((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
  46 #define RCG_N_OFFSET(rcg)       ((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
  47 #define RCG_D_OFFSET(rcg)       ((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)
  48 
  49 /* Dynamic Frequency Scaling */
  50 #define MAX_PERF_LEVEL          8
  51 #define SE_CMD_DFSR_OFFSET      0x14
  52 #define SE_CMD_DFS_EN           BIT(0)
  53 #define SE_PERF_DFSR(level)     (0x1c + 0x4 * (level))
  54 #define SE_PERF_M_DFSR(level)   (0x5c + 0x4 * (level))
  55 #define SE_PERF_N_DFSR(level)   (0x9c + 0x4 * (level))
  56 
/*
 * Rounding policy when matching a requested rate against the frequency
 * table: FLOOR picks the highest entry <= rate, CEIL the lowest >= rate.
 */
enum freq_policy {
	FLOOR,
	CEIL,
};
  61 
  62 static int clk_rcg2_is_enabled(struct clk_hw *hw)
  63 {
  64         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  65         u32 cmd;
  66         int ret;
  67 
  68         ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
  69         if (ret)
  70                 return ret;
  71 
  72         return (cmd & CMD_ROOT_OFF) == 0;
  73 }
  74 
/*
 * Read back the CFG source-select field and translate it to a parent
 * index via the rcg's parent_map.  Falls back to index 0 (with a debug
 * message) if the register read fails or the selector is unknown.
 */
static u8 clk_rcg2_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 cfg;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);
	if (ret)
		goto err;

	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}
  98 
  99 static int update_config(struct clk_rcg2 *rcg)
 100 {
 101         int count, ret;
 102         u32 cmd;
 103         struct clk_hw *hw = &rcg->clkr.hw;
 104         const char *name = clk_hw_get_name(hw);
 105 
 106         ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
 107                                  CMD_UPDATE, CMD_UPDATE);
 108         if (ret)
 109                 return ret;
 110 
 111         /* Wait for update to take effect */
 112         for (count = 500; count > 0; count--) {
 113                 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
 114                 if (ret)
 115                         return ret;
 116                 if (!(cmd & CMD_UPDATE))
 117                         return 0;
 118                 udelay(1);
 119         }
 120 
 121         WARN(1, "%s: rcg didn't update its configuration.", name);
 122         return -EBUSY;
 123 }
 124 
/*
 * Program the CFG source-select field for @index and latch the change
 * through the CMD_UPDATE handshake.
 */
static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;
	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	ret = regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
				 CFG_SRC_SEL_MASK, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}
 138 
/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            hid_div       n
 *
 * @hid_div is the raw register value (encodes 2*d - 1; 0 means
 * divide-by-1) and the m/n scaling is applied only when @mode != 0.
 */
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
	if (hid_div) {
		/* raw divider field encodes 2*d - 1 */
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		/* 64-bit intermediate avoids overflow of rate * m */
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}
 163 
/*
 * Reconstruct the current output rate from the CFG/M/N registers and the
 * given parent rate.  Register read errors are ignored here; cfg/m/n
 * then keep their previous (or zero-initialized) values.
 */
static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &n);
		/* N register holds ~(n - m); recover n */
		n =  ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}
 190 
 191 static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
 192                                     struct clk_rate_request *req,
 193                                     enum freq_policy policy)
 194 {
 195         unsigned long clk_flags, rate = req->rate;
 196         struct clk_hw *p;
 197         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 198         int index;
 199 
 200         switch (policy) {
 201         case FLOOR:
 202                 f = qcom_find_freq_floor(f, rate);
 203                 break;
 204         case CEIL:
 205                 f = qcom_find_freq(f, rate);
 206                 break;
 207         default:
 208                 return -EINVAL;
 209         };
 210 
 211         if (!f)
 212                 return -EINVAL;
 213 
 214         index = qcom_find_src_index(hw, rcg->parent_map, f->src);
 215         if (index < 0)
 216                 return index;
 217 
 218         clk_flags = clk_hw_get_flags(hw);
 219         p = clk_hw_get_parent_by_index(hw, index);
 220         if (!p)
 221                 return -EINVAL;
 222 
 223         if (clk_flags & CLK_SET_RATE_PARENT) {
 224                 rate = f->freq;
 225                 if (f->pre_div) {
 226                         if (!rate)
 227                                 rate = req->rate;
 228                         rate /= 2;
 229                         rate *= f->pre_div + 1;
 230                 }
 231 
 232                 if (f->n) {
 233                         u64 tmp = rate;
 234                         tmp = tmp * f->n;
 235                         do_div(tmp, f->m);
 236                         rate = tmp;
 237                 }
 238         } else {
 239                 rate =  clk_hw_get_rate(p);
 240         }
 241         req->best_parent_hw = p;
 242         req->best_parent_rate = rate;
 243         req->rate = f->freq;
 244 
 245         return 0;
 246 }
 247 
/* determine_rate callback: round the request up (CEIL) via the freq table. */
static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
}
 255 
/* determine_rate callback: round the request down (FLOOR) via the freq table. */
static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
}
 263 
/*
 * Write the M/N/D and CFG registers for frequency table entry @f without
 * triggering the CMD_UPDATE handshake; callers decide when to latch the
 * new configuration.
 */
static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
				RCG_M_OFFSET(rcg), mask, f->m);
		if (ret)
			return ret;

		/* N register is programmed as ~(n - m) */
		ret = regmap_update_bits(rcg->clkr.regmap,
				RCG_N_OFFSET(rcg), mask, ~(f->n - f->m));
		if (ret)
			return ret;

		/* D register is programmed as ~n */
		ret = regmap_update_bits(rcg->clkr.regmap,
				RCG_D_OFFSET(rcg), mask, ~f->n);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	/* Dual-edge mode is selected whenever a fractional m/n is in use */
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;
	return regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg),
					mask, cfg);
}
 300 
 301 static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
 302 {
 303         int ret;
 304 
 305         ret = __clk_rcg2_configure(rcg, f);
 306         if (ret)
 307                 return ret;
 308 
 309         return update_config(rcg);
 310 }
 311 
 312 static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
 313                                enum freq_policy policy)
 314 {
 315         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 316         const struct freq_tbl *f;
 317 
 318         switch (policy) {
 319         case FLOOR:
 320                 f = qcom_find_freq_floor(rcg->freq_tbl, rate);
 321                 break;
 322         case CEIL:
 323                 f = qcom_find_freq(rcg->freq_tbl, rate);
 324                 break;
 325         default:
 326                 return -EINVAL;
 327         };
 328 
 329         if (!f)
 330                 return -EINVAL;
 331 
 332         return clk_rcg2_configure(rcg, f);
 333 }
 334 
/* set_rate callback: program @rate, rounding up against the freq table. */
static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}
 340 
/* set_rate callback: program @rate, rounding down against the freq table. */
static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}
 346 
/* Combined rate+parent change; the parent comes from the freq table entry. */
static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}
 352 
/* Combined rate+parent change with FLOOR rounding (parent from freq table). */
static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}
 358 
/* Standard RCG2 clk_ops: rates rounded up (CEIL) against the freq table. */
const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);
 369 
/* RCG2 clk_ops variant: rates rounded down (FLOOR) against the freq table. */
const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);
 380 
/* One m/n fraction candidate for the pixel/eDP rate search tables. */
struct frac_entry {
	int num;	/* numerator (m) */
	int den;	/* denominator (n) */
};
 385 
/* Zero-terminated fraction table; entry comments give the pixel rates. */
static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};
 396 
 397 static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
 398         { 31, 211 },    /* 119 M */
 399         { 32, 199 },    /* 130.25 M */
 400         { 63, 307 },    /* 138.50 M */
 401         { 11, 60 },     /* 148.50 M */
 402         { 50, 263 },    /* 154 M */
 403         { 31, 120 },    /* 205.25 M */
 404         { 119, 359 },   /* 268.50 M */
 405         { },
 406 };
 407 
/*
 * Find an m/n fraction (chosen by the fixed 810M/675M parent rate) that
 * reproduces @rate within +/-100kHz of @parent_rate, keep the currently
 * programmed HID divider, and program the result.  Returns -EINVAL when
 * no fraction matches.
 */
static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* acceptable error in Hz */
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* parent rate needed for this fraction: rate * den / num */
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		/* Reuse the HID divider currently in the CFG register */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}
 446 
/* Combined rate+parent change for the eDP pixel clock. */
static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}
 453 
/*
 * determine_rate for the eDP pixel clock: force the parent given by the
 * first frequency table entry, then search the fraction table for an
 * entry reaching @req->rate within +/-100kHz of the parent rate, using
 * the HID divider currently programmed in hardware.
 */
static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* acceptable error in Hz */
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* parent rate needed for this fraction: rate * den / num */
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}
 496 
/* clk_ops for the eDP pixel clock (fraction-table based rate selection). */
const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
 507 
/*
 * determine_rate for the byte clock: force the parent from the first
 * frequency table entry, round the parent rate, and derive the HID
 * divider (clamped to the field width) for the requested rate.
 */
static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;

	if (req->rate == 0)
		return -EINVAL;

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);

	/* HID divider encodes 2*d - 1 */
	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}
 531 
 532 static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
 533                          unsigned long parent_rate)
 534 {
 535         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 536         struct freq_tbl f = *rcg->freq_tbl;
 537         unsigned long div;
 538         u32 mask = BIT(rcg->hid_width) - 1;
 539 
 540         div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
 541         div = min_t(u32, div, mask);
 542 
 543         f.pre_div = div;
 544 
 545         return clk_rcg2_configure(rcg, &f);
 546 }
 547 
/* Combined rate+parent change for the byte clock. */
static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}
 554 
/* clk_ops for the byte clock (parent fixed by the frequency table). */
const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);
 565 
/*
 * determine_rate for the byte2 clock: unlike clk_byte, keep whatever
 * parent the framework proposed, round its rate, and compute the HID
 * divider for the request.
 */
static int clk_byte2_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk_hw *p;
	unsigned long rate = req->rate;

	if (rate == 0)
		return -EINVAL;

	p = req->best_parent_hw;
	req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);

	/* HID divider encodes 2*d - 1 */
	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}
 588 
/*
 * Program the byte2 clock's HID divider for @rate, keeping the source
 * that is currently selected in the CFG register (looked up through
 * parent_map).  Returns -EINVAL if the hardware selector is unknown.
 */
static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			 unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	/* HID divider encodes 2*d - 1 */
	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	/* Preserve the currently selected source */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;
}
 617 
/* Combined rate+parent change for the byte2 clock. */
static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_byte2_set_rate(hw, rate, parent_rate);
}
 624 
/* clk_ops for the byte2 clock (parent read back from hardware). */
const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);
 635 
/* Zero-terminated m/n candidates tried in order by the pixel clock. */
static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};
 643 
/*
 * determine_rate for the pixel clock: for each candidate fraction, ask
 * the proposed parent to round the required rate and accept the first
 * fraction the parent can hit within +/-100kHz.
 */
static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	unsigned long request, src_rate;
	int delta = 100000;	/* acceptable error in Hz */
	const struct frac_entry *frac = frac_table_pixel;

	for (; frac->num; frac++) {
		/* parent rate needed for this fraction */
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);
		if ((src_rate < (request - delta)) ||
			(src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}
 666 
/*
 * Program the pixel clock: keep the source currently selected in CFG,
 * find a fraction that maps @parent_rate to @rate within +/-100kHz, and
 * program it together with the HID divider already in hardware.
 */
static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;	/* acceptable error in Hz */
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	/* Preserve the currently selected source */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		/* parent rate needed for this fraction */
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
			(parent_rate > (request + delta)))
			continue;

		/* Reuse the HID divider currently in the CFG register */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}
 708 
/* Combined rate+parent change for the pixel clock. */
static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}
 714 
/* clk_ops for the pixel clock (fraction-table based rate selection). */
const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);
 725 
/*
 * determine_rate for the gfx3d clock.  Fixed parent indices: 0 = XO,
 * 2 = PLL9 (fixed rate), 3 = PLL2, 4 = PLL8.  Rates at or above PLL9's
 * rate are served by PLL9 directly; otherwise the request is bounced to
 * a PLL other than the one currently in use.
 * NOTE(review): presumably the ping-pong between PLL8/PLL2 lets the idle
 * PLL be reprogrammed before switching — confirm against the GPU clock
 * plan.
 */
static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { };
	struct clk_hw *p2, *p8, *p9, *xo;
	unsigned long p9_rate;
	int ret;

	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	p9 = clk_hw_get_parent_by_index(hw, 2);
	p2 = clk_hw_get_parent_by_index(hw, 3);
	p8 = clk_hw_get_parent_by_index(hw, 4);

	/* PLL9 is a fixed rate PLL */
	p9_rate = clk_hw_get_rate(p9);

	parent_req.rate = req->rate = min(req->rate, p9_rate);
	if (req->rate == p9_rate) {
		req->rate = req->best_parent_rate = p9_rate;
		req->best_parent_hw = p9;
		return 0;
	}

	if (req->best_parent_hw == p9) {
		/* Are we going back to a previously used rate? */
		if (clk_hw_get_rate(p8) == req->rate)
			req->best_parent_hw = p8;
		else
			req->best_parent_hw = p2;
	} else if (req->best_parent_hw == p8) {
		req->best_parent_hw = p2;
	} else {
		req->best_parent_hw = p8;
	}

	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;

	return 0;
}
 774 
/*
 * Switch the gfx3d source mux to @index.  The full CFG register is
 * written (not update_bits), clearing the divider and m/n mode fields.
 */
static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;
	int ret;

	/* Just mux it, we don't use the division or m/n hardware */
	cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}
 790 
/* Intentional no-op; see the comment below for why it cannot be reached. */
static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}
 801 
/* clk_ops for the gfx3d clock (mux-only, PLL ping-pong rate changes). */
const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);
 812 
/*
 * Force the RCG root on via CMD_ROOT_EN and busy-wait (up to ~500us)
 * until the RCG reports enabled.  Returns -ETIMEDOUT if it never does.
 */
static int clk_rcg2_set_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const char *name = clk_hw_get_name(hw);
	int ret, count;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_ROOT_EN, CMD_ROOT_EN);
	if (ret)
		return ret;

	/* wait for RCG to turn ON */
	for (count = 500; count > 0; count--) {
		if (clk_rcg2_is_enabled(hw))
			return 0;

		udelay(1);
	}

	pr_err("%s: RCG did not turn on\n", name);
	return -ETIMEDOUT;
}
 835 
/* Drop the CMD_ROOT_EN force-enable bit set by clk_rcg2_set_force_enable(). */
static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
					CMD_ROOT_EN, 0);
}
 843 
 844 static int
 845 clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
 846 {
 847         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 848         int ret;
 849 
 850         ret = clk_rcg2_set_force_enable(hw);
 851         if (ret)
 852                 return ret;
 853 
 854         ret = clk_rcg2_configure(rcg, f);
 855         if (ret)
 856                 return ret;
 857 
 858         return clk_rcg2_clear_force_enable(hw);
 859 }
 860 
 861 static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
 862                                     unsigned long parent_rate)
 863 {
 864         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 865         const struct freq_tbl *f;
 866 
 867         f = qcom_find_freq(rcg->freq_tbl, rate);
 868         if (!f)
 869                 return -EINVAL;
 870 
 871         /*
 872          * In case clock is disabled, update the CFG, M, N and D registers
 873          * and don't hit the update bit of CMD register.
 874          */
 875         if (!__clk_is_enabled(hw->clk))
 876                 return __clk_rcg2_configure(rcg, f);
 877 
 878         return clk_rcg2_shared_force_enable_clear(hw, f);
 879 }
 880 
 881 static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
 882                 unsigned long rate, unsigned long parent_rate, u8 index)
 883 {
 884         return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
 885 }
 886 
 887 static int clk_rcg2_shared_enable(struct clk_hw *hw)
 888 {
 889         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 890         int ret;
 891 
 892         /*
 893          * Set the update bit because required configuration has already
 894          * been written in clk_rcg2_shared_set_rate()
 895          */
 896         ret = clk_rcg2_set_force_enable(hw);
 897         if (ret)
 898                 return ret;
 899 
 900         ret = update_config(rcg);
 901         if (ret)
 902                 return ret;
 903 
 904         return clk_rcg2_clear_force_enable(hw);
 905 }
 906 
/*
 * .disable for shared RCGs: park the root on the always-on safe source
 * while caching the active configuration so the next enable restores it.
 *
 * The register sequence below is order-sensitive; regmap return values
 * are ignored because .disable returns void and there is no recovery
 * path here anyway.
 */
static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;

	/*
	 * Store current configuration as switching to safe source would clear
	 * the SRC and DIV of CFG register
	 */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	/*
	 * Park the RCG at a safe configuration - sourced off of safe source.
	 * Force enable and disable the RCG while configuring it to safeguard
	 * against any update signal coming from the downstream clock.
	 * The current parent is still prepared and enabled at this point, and
	 * the safe source is always on while application processor subsystem
	 * is online. Therefore, the RCG can safely switch its parent.
	 */
	clk_rcg2_set_force_enable(hw);

	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
		     rcg->safe_src_index << CFG_SRC_SEL_SHIFT);

	/* Latch the safe-source selection into the running RCG. */
	update_config(rcg);

	clk_rcg2_clear_force_enable(hw);

	/* Write back the stored configuration corresponding to current rate */
	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
}
 938 
/*
 * Ops for "shared" RCGs whose source must never be gated while the
 * application processor subsystem is up: disable parks the root on the
 * safe source instead of turning it off, and enable latches the
 * previously programmed configuration.
 */
const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);
 950 
 951 /* Common APIs to be used for DFS based RCGR */
 952 static void clk_rcg2_dfs_populate_freq(struct clk_hw *hw, unsigned int l,
 953                                        struct freq_tbl *f)
 954 {
 955         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
 956         struct clk_hw *p;
 957         unsigned long prate = 0;
 958         u32 val, mask, cfg, mode, src;
 959         int i, num_parents;
 960 
 961         regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(l), &cfg);
 962 
 963         mask = BIT(rcg->hid_width) - 1;
 964         f->pre_div = 1;
 965         if (cfg & mask)
 966                 f->pre_div = cfg & mask;
 967 
 968         src = cfg & CFG_SRC_SEL_MASK;
 969         src >>= CFG_SRC_SEL_SHIFT;
 970 
 971         num_parents = clk_hw_get_num_parents(hw);
 972         for (i = 0; i < num_parents; i++) {
 973                 if (src == rcg->parent_map[i].cfg) {
 974                         f->src = rcg->parent_map[i].src;
 975                         p = clk_hw_get_parent_by_index(&rcg->clkr.hw, i);
 976                         prate = clk_hw_get_rate(p);
 977                 }
 978         }
 979 
 980         mode = cfg & CFG_MODE_MASK;
 981         mode >>= CFG_MODE_SHIFT;
 982         if (mode) {
 983                 mask = BIT(rcg->mnd_width) - 1;
 984                 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_M_DFSR(l),
 985                             &val);
 986                 val &= mask;
 987                 f->m = val;
 988 
 989                 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_N_DFSR(l),
 990                             &val);
 991                 val = ~val;
 992                 val &= mask;
 993                 val += f->m;
 994                 f->n = val;
 995         }
 996 
 997         f->freq = calc_rate(prate, f->m, f->n, mode, f->pre_div);
 998 }
 999 
1000 static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2 *rcg)
1001 {
1002         struct freq_tbl *freq_tbl;
1003         int i;
1004 
1005         /* Allocate space for 1 extra since table is NULL terminated */
1006         freq_tbl = kcalloc(MAX_PERF_LEVEL + 1, sizeof(*freq_tbl), GFP_KERNEL);
1007         if (!freq_tbl)
1008                 return -ENOMEM;
1009         rcg->freq_tbl = freq_tbl;
1010 
1011         for (i = 0; i < MAX_PERF_LEVEL; i++)
1012                 clk_rcg2_dfs_populate_freq(&rcg->clkr.hw, i, freq_tbl + i);
1013 
1014         return 0;
1015 }
1016 
1017 static int clk_rcg2_dfs_determine_rate(struct clk_hw *hw,
1018                                    struct clk_rate_request *req)
1019 {
1020         struct clk_rcg2 *rcg = to_clk_rcg2(hw);
1021         int ret;
1022 
1023         if (!rcg->freq_tbl) {
1024                 ret = clk_rcg2_dfs_populate_freq_table(rcg);
1025                 if (ret) {
1026                         pr_err("Failed to update DFS tables for %s\n",
1027                                         clk_hw_get_name(hw));
1028                         return ret;
1029                 }
1030         }
1031 
1032         return clk_rcg2_determine_rate(hw, req);
1033 }
1034 
/*
 * .recalc_rate for DFS RCGs: read the currently selected performance
 * level from SE_CMD_DFSR and return its frequency.  If the lazily built
 * table exists, use the cached value; otherwise decode the level's DFS
 * registers directly, assuming @parent_rate is the real parent rate.
 */
static unsigned long
clk_rcg2_dfs_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 level, mask, cfg, m = 0, n = 0, mode, pre_div;

	/* Current performance level lives in bits [4:1] of SE_CMD_DFSR. */
	regmap_read(rcg->clkr.regmap,
		    rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &level);
	level &= GENMASK(4, 1);
	level >>= 1;

	if (rcg->freq_tbl)
		return rcg->freq_tbl[level].freq;

	/*
	 * Assume that parent_rate is actually the parent because
	 * we can't do any better at figuring it out when the table
	 * hasn't been populated yet. We only populate the table
	 * in determine_rate because we can't guarantee the parents
	 * will be registered with the framework until then.
	 */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(level),
		    &cfg);

	/* Divider field of 0 means "divide by 1". */
	mask = BIT(rcg->hid_width) - 1;
	pre_div = 1;
	if (cfg & mask)
		pre_div = cfg & mask;

	/* Non-zero mode means the MND counter is active for this level. */
	mode = cfg & CFG_MODE_MASK;
	mode >>= CFG_MODE_SHIFT;
	if (mode) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap,
			    rcg->cmd_rcgr + SE_PERF_M_DFSR(level), &m);
		m &= mask;

		/* N is stored as not(N - M); undo the encoding. */
		regmap_read(rcg->clkr.regmap,
			    rcg->cmd_rcgr + SE_PERF_N_DFSR(level), &n);
		n = ~n;
		n &= mask;
		n += m;
	}

	return calc_rate(parent_rate, m, n, mode, pre_div);
}
1081 
/*
 * Ops installed by clk_rcg2_enable_dfs() when hardware DFS is active.
 * No set_rate: the rate is controlled by the consumer's own DFS
 * registers, so software only reads back the selected level.
 */
static const struct clk_ops clk_rcg2_dfs_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.determine_rate = clk_rcg2_dfs_determine_rate,
	.recalc_rate = clk_rcg2_dfs_recalc_rate,
};
1088 
1089 static int clk_rcg2_enable_dfs(const struct clk_rcg_dfs_data *data,
1090                                struct regmap *regmap)
1091 {
1092         struct clk_rcg2 *rcg = data->rcg;
1093         struct clk_init_data *init = data->init;
1094         u32 val;
1095         int ret;
1096 
1097         ret = regmap_read(regmap, rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &val);
1098         if (ret)
1099                 return -EINVAL;
1100 
1101         if (!(val & SE_CMD_DFS_EN))
1102                 return 0;
1103 
1104         /*
1105          * Rate changes with consumer writing a register in
1106          * their own I/O region
1107          */
1108         init->flags |= CLK_GET_RATE_NOCACHE;
1109         init->ops = &clk_rcg2_dfs_ops;
1110 
1111         rcg->freq_tbl = NULL;
1112 
1113         return 0;
1114 }
1115 
1116 int qcom_cc_register_rcg_dfs(struct regmap *regmap,
1117                              const struct clk_rcg_dfs_data *rcgs, size_t len)
1118 {
1119         int i, ret;
1120 
1121         for (i = 0; i < len; i++) {
1122                 ret = clk_rcg2_enable_dfs(&rcgs[i], regmap);
1123                 if (ret)
1124                         return ret;
1125         }
1126 
1127         return 0;
1128 }
1129 EXPORT_SYMBOL_GPL(qcom_cc_register_rcg_dfs);

/* [<][>][^][v][top][bottom][index][help] */