pll_hw 18 drivers/clk/actions/owl-pll.c static u32 owl_pll_calculate_mul(struct owl_pll_hw *pll_hw, unsigned long rate)
pll_hw 22 drivers/clk/actions/owl-pll.c mul = DIV_ROUND_CLOSEST(rate, pll_hw->bfreq);
pll_hw 23 drivers/clk/actions/owl-pll.c if (mul < pll_hw->min_mul)
pll_hw 24 drivers/clk/actions/owl-pll.c mul = pll_hw->min_mul;
pll_hw 25 drivers/clk/actions/owl-pll.c else if (mul > pll_hw->max_mul)
pll_hw 26 drivers/clk/actions/owl-pll.c mul = pll_hw->max_mul;
pll_hw 28 drivers/clk/actions/owl-pll.c return mul &= mul_mask(pll_hw);
pll_hw 63 drivers/clk/actions/owl-pll.c struct owl_pll_hw *pll_hw = &pll->pll_hw;
pll_hw 67 drivers/clk/actions/owl-pll.c if (pll_hw->table) {
pll_hw 68 drivers/clk/actions/owl-pll.c clkt = _get_pll_table(pll_hw->table, rate);
pll_hw 73 drivers/clk/actions/owl-pll.c if (pll_hw->width == 0)
pll_hw 74 drivers/clk/actions/owl-pll.c return pll_hw->bfreq;
pll_hw 76 drivers/clk/actions/owl-pll.c mul = owl_pll_calculate_mul(pll_hw, rate);
pll_hw 78 drivers/clk/actions/owl-pll.c return pll_hw->bfreq * mul;
pll_hw 85 drivers/clk/actions/owl-pll.c struct owl_pll_hw *pll_hw = &pll->pll_hw;
pll_hw 89 drivers/clk/actions/owl-pll.c if (pll_hw->table) {
pll_hw 90 drivers/clk/actions/owl-pll.c regmap_read(common->regmap, pll_hw->reg, &val);
pll_hw 92 drivers/clk/actions/owl-pll.c val = val >> pll_hw->shift;
pll_hw 93 drivers/clk/actions/owl-pll.c val &= mul_mask(pll_hw);
pll_hw 95 drivers/clk/actions/owl-pll.c return _get_table_rate(pll_hw->table, val);
pll_hw 99 drivers/clk/actions/owl-pll.c if (pll_hw->width == 0)
pll_hw 100 drivers/clk/actions/owl-pll.c return pll_hw->bfreq;
pll_hw 102 drivers/clk/actions/owl-pll.c regmap_read(common->regmap, pll_hw->reg, &val);
pll_hw 104 drivers/clk/actions/owl-pll.c val = val >> pll_hw->shift;
pll_hw 105 drivers/clk/actions/owl-pll.c val &= mul_mask(pll_hw);
pll_hw 107 drivers/clk/actions/owl-pll.c return pll_hw->bfreq * val;
pll_hw 113 drivers/clk/actions/owl-pll.c struct owl_pll_hw *pll_hw = &pll->pll_hw;
pll_hw 117 drivers/clk/actions/owl-pll.c regmap_read(common->regmap, pll_hw->reg, &reg);
pll_hw 119 drivers/clk/actions/owl-pll.c return !!(reg & BIT(pll_hw->bit_idx));
pll_hw 123 drivers/clk/actions/owl-pll.c const struct owl_pll_hw *pll_hw, bool enable)
pll_hw 127 drivers/clk/actions/owl-pll.c regmap_read(common->regmap, pll_hw->reg, &reg);
pll_hw 130 drivers/clk/actions/owl-pll.c reg |= BIT(pll_hw->bit_idx);
pll_hw 132 drivers/clk/actions/owl-pll.c reg &= ~BIT(pll_hw->bit_idx);
pll_hw 134 drivers/clk/actions/owl-pll.c regmap_write(common->regmap, pll_hw->reg, reg);
pll_hw 142 drivers/clk/actions/owl-pll.c owl_pll_set(common, &pll->pll_hw, true);
pll_hw 152 drivers/clk/actions/owl-pll.c owl_pll_set(common, &pll->pll_hw, false);
pll_hw 159 drivers/clk/actions/owl-pll.c struct owl_pll_hw *pll_hw = &pll->pll_hw;
pll_hw 165 drivers/clk/actions/owl-pll.c if (pll_hw->width == 0)
pll_hw 168 drivers/clk/actions/owl-pll.c if (pll_hw->table) {
pll_hw 169 drivers/clk/actions/owl-pll.c clkt = _get_pll_table(pll_hw->table, rate);
pll_hw 172 drivers/clk/actions/owl-pll.c val = owl_pll_calculate_mul(pll_hw, rate);
pll_hw 175 drivers/clk/actions/owl-pll.c regmap_read(common->regmap, pll_hw->reg, &reg);
pll_hw 177 drivers/clk/actions/owl-pll.c reg &= ~mul_mask(pll_hw);
pll_hw 178 drivers/clk/actions/owl-pll.c reg |= val << pll_hw->shift;
pll_hw 180 drivers/clk/actions/owl-pll.c regmap_write(common->regmap, pll_hw->reg, reg);
pll_hw 182 drivers/clk/actions/owl-pll.c udelay(pll_hw->delay);
pll_hw 37 drivers/clk/actions/owl-pll.h struct owl_pll_hw pll_hw;
pll_hw 58 drivers/clk/actions/owl-pll.h .pll_hw = OWL_PLL_HW(_reg, _bfreq, _bit_idx, _shift, \
pll_hw 73 drivers/clk/actions/owl-pll.h .pll_hw = OWL_PLL_HW(_reg, _bfreq, _bit_idx, _shift, \
pll_hw 88 drivers/clk/actions/owl-pll.h .pll_hw = OWL_PLL_HW(_reg, _bfreq, _bit_idx, _shift, \
pll_hw 749 drivers/clk/clk-stm32f4.c struct clk_hw *pll_hw, spinlock_t *lock)
pll_hw 776 drivers/clk/clk-stm32f4.c pll_div->hw_pll = pll_hw;
pll_hw 795 drivers/clk/clk-stm32f4.c struct clk_hw *pll_hw;
pll_hw 825 drivers/clk/clk-stm32f4.c pll_hw = &pll->gate.hw;
pll_hw 826 drivers/clk/clk-stm32f4.c ret = clk_hw_register(NULL, pll_hw);
pll_hw 842 drivers/clk/clk-stm32f4.c pll_hw,
pll_hw 844 drivers/clk/clk-stm32f4.c return pll_hw;
pll_hw 53 drivers/gpu/drm/mediatek/mtk_hdmi_phy.c return container_of(hw, struct mtk_hdmi_phy, pll_hw);
pll_hw 146 drivers/gpu/drm/mediatek/mtk_hdmi_phy.c hdmi_phy->pll_hw.init = &clk_init;
pll_hw 147 drivers/gpu/drm/mediatek/mtk_hdmi_phy.c hdmi_phy->pll = devm_clk_register(dev, &hdmi_phy->pll_hw);
pll_hw 35 drivers/gpu/drm/mediatek/mtk_hdmi_phy.h struct clk_hw pll_hw;
pll_hw 131 drivers/gpu/drm/mediatek/mtk_mipi_tx.c struct clk_hw pll_hw;
pll_hw 137 drivers/gpu/drm/mediatek/mtk_mipi_tx.c return container_of(hw, struct mtk_mipi_tx, pll_hw);
pll_hw 426 drivers/gpu/drm/mediatek/mtk_mipi_tx.c mipi_tx->pll_hw.init = &clk_init;
pll_hw 427 drivers/gpu/drm/mediatek/mtk_mipi_tx.c mipi_tx->pll = devm_clk_register(dev, &mipi_tx->pll_hw);
pll_hw 13 drivers/gpu/drm/msm/disp/mdp4/mdp4_lvds_pll.c struct clk_hw pll_hw;
pll_hw 17 drivers/gpu/drm/msm/disp/mdp4/mdp4_lvds_pll.c #define to_mdp4_lvds_pll(x) container_of(x, struct mdp4_lvds_pll, pll_hw)
pll_hw 150 drivers/gpu/drm/msm/disp/mdp4/mdp4_lvds_pll.c lvds_pll->pll_hw.init = &pll_init;
pll_hw 151 drivers/gpu/drm/msm/disp/mdp4/mdp4_lvds_pll.c clk = devm_clk_register(dev->dev, &lvds_pll->pll_hw);
pll_hw 312 drivers/gpu/drm/omapdrm/dss/dsi.c const struct dss_pll_hw *pll_hw;
pll_hw 5031 drivers/gpu/drm/omapdrm/dss/dsi.c pll->hw = dsi->data->pll_hw;
pll_hw 5199 drivers/gpu/drm/omapdrm/dss/dsi.c .pll_hw = &dss_omap3_dsi_pll_hw,
pll_hw 5211 drivers/gpu/drm/omapdrm/dss/dsi.c .pll_hw = &dss_omap3_dsi_pll_hw,
pll_hw 5223 drivers/gpu/drm/omapdrm/dss/dsi.c .pll_hw = &dss_omap4_dsi_pll_hw,
pll_hw 5237 drivers/gpu/drm/omapdrm/dss/dsi.c .pll_hw = &dss_omap5_dsi_pll_hw,
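
The drivers/clk/actions/owl-pll.c hits above all orbit one pattern: owl_pll_calculate_mul() derives a multiplier by dividing the requested rate by the PLL's base frequency (bfreq), rounding to the nearest integer, clamping to the hardware's [min_mul, max_mul] range and masking to the register field width; set_rate and recalc_rate then program or decode that field through regmap. The following is a minimal standalone sketch of that multiplier selection; the struct, helper names and the 24 MHz / 7-bit example values are illustrative stand-ins, not the kernel's struct owl_pll_hw or its actual parameters.

/*
 * Sketch of the multiplier selection seen in drivers/clk/actions/owl-pll.c.
 * struct pll_params only mirrors the fields used here; it is not the
 * kernel's struct owl_pll_hw.
 */
#include <stdint.h>
#include <stdio.h>

struct pll_params {
	unsigned long bfreq;	/* base (parent) frequency in Hz */
	uint32_t min_mul;	/* smallest multiplier the PLL accepts */
	uint32_t max_mul;	/* largest multiplier the PLL accepts */
	uint8_t width;		/* width of the multiplier register field */
};

/* DIV_ROUND_CLOSEST() equivalent for unsigned operands */
static unsigned long div_round_closest(unsigned long n, unsigned long d)
{
	return (n + d / 2) / d;
}

static uint32_t calculate_mul(const struct pll_params *p, unsigned long rate)
{
	uint32_t mul = div_round_closest(rate, p->bfreq);
	uint32_t mask = (1U << p->width) - 1;	/* mul_mask() equivalent */

	/* clamp to what the PLL can actually lock to */
	if (mul < p->min_mul)
		mul = p->min_mul;
	else if (mul > p->max_mul)
		mul = p->max_mul;

	/* the value must fit in the register field */
	return mul & mask;
}

int main(void)
{
	/* hypothetical 24 MHz base frequency, 7-bit multiplier field */
	struct pll_params p = { 24000000UL, 2, 120, 7 };
	unsigned long target = 1104000000UL;	/* ask for 1.104 GHz */
	uint32_t mul = calculate_mul(&p, target);

	printf("mul = %u -> %lu Hz\n", (unsigned)mul,
	       p.bfreq * (unsigned long)mul);
	return 0;
}

With the assumed values this prints mul = 46 -> 1104000000 Hz, i.e. the achieved rate is bfreq * mul, which is exactly what the owl-pll recalc_rate hits (pll_hw->bfreq * mul / val) return.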
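The mtk_hdmi_phy, mtk_mipi_tx and mdp4_lvds_pll hits show the other common shape of pll_hw: a struct clk_hw embedded as a member of a driver-private struct, registered with devm_clk_register(), and recovered inside clk_ops callbacks with container_of(). Below is a minimal sketch of that recovery step outside the kernel; container_of is reimplemented via offsetof, and clk_hw_stub, example_phy and example_recalc_rate() are made-up names used only to illustrate the offset arithmetic, not kernel or driver APIs.

/*
 * Sketch of the container_of pattern behind to_mtk_hdmi_phy(),
 * to_mtk_mipi_tx() and to_mdp4_lvds_pll(): the framework hands the
 * callback a pointer to the embedded member, and the driver subtracts
 * the member offset to get back to its own state.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* stand-in for struct clk_hw, which the clk framework passes around */
struct clk_hw_stub {
	const char *name;
};

/* hypothetical driver state embedding the clk_hw as "pll_hw" */
struct example_phy {
	void *regs;
	unsigned long rate;
	struct clk_hw_stub pll_hw;
};

static struct example_phy *to_example_phy(struct clk_hw_stub *hw)
{
	/* same shape as the to_*() helpers in the listing above */
	return container_of(hw, struct example_phy, pll_hw);
}

/* a clk_ops-style callback only ever receives the embedded clk_hw */
static unsigned long example_recalc_rate(struct clk_hw_stub *hw)
{
	return to_example_phy(hw)->rate;
}

int main(void)
{
	struct example_phy phy = { NULL, 148500000UL, { "example-pll" } };

	printf("%s: %lu Hz\n", phy.pll_hw.name,
	       example_recalc_rate(&phy.pll_hw));
	return 0;
}

The stm32f4, omapdrm and owl-pll.h entries use pll_hw differently (a plain struct clk_hw pointer, a const struct dss_pll_hw descriptor, and an OWL_PLL_HW initializer macro respectively), so the sketch above only covers the embedded-member cases.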