bus_clks          105 drivers/gpu/drm/msm/dsi/dsi_host.c 	struct clk *bus_clks[DSI_BUS_CLK_MAX];
bus_clks          387 drivers/gpu/drm/msm/dsi/dsi_host.c 		msm_host->bus_clks[i] = msm_clk_get(pdev,
bus_clks          389 drivers/gpu/drm/msm/dsi/dsi_host.c 		if (IS_ERR(msm_host->bus_clks[i])) {
bus_clks          390 drivers/gpu/drm/msm/dsi/dsi_host.c 			ret = PTR_ERR(msm_host->bus_clks[i]);
bus_clks          453 drivers/gpu/drm/msm/dsi/dsi_host.c 		ret = clk_prepare_enable(msm_host->bus_clks[i]);
bus_clks          464 drivers/gpu/drm/msm/dsi/dsi_host.c 		clk_disable_unprepare(msm_host->bus_clks[i]);
bus_clks          477 drivers/gpu/drm/msm/dsi/dsi_host.c 		clk_disable_unprepare(msm_host->bus_clks[i]);
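
The dsi_host.c hits above show the per-clock pattern: each bus clock is looked up individually (the driver uses the MSM-specific msm_clk_get() helper), enabled with clk_prepare_enable(), and wound back with clk_disable_unprepare() on error and on teardown. A minimal self-contained sketch of that pattern follows; "struct my_host", the clock-name table, and devm_clk_get() in place of msm_clk_get() are illustrative assumptions, not the actual msm_dsi_host layout.

/*
 * Sketch of the per-clock pattern seen in dsi_host.c above.
 * "struct my_host" and "bus_clk_names" are stand-ins; the real driver
 * keeps the array in struct msm_dsi_host and takes the names from its
 * per-SoC config.
 */
#include <linux/clk.h>
#include <linux/err.h>
#include <linux/platform_device.h>

#define BUS_CLK_MAX 4

static const char * const bus_clk_names[BUS_CLK_MAX] = {
	"mdp_core", "iface", "bus", "core_mmss",
};

struct my_host {
	struct clk *bus_clks[BUS_CLK_MAX];
};

static int my_host_get_clks(struct my_host *host, struct platform_device *pdev)
{
	int i;

	for (i = 0; i < BUS_CLK_MAX; i++) {
		/* generic devm_clk_get() used here in place of msm_clk_get() */
		host->bus_clks[i] = devm_clk_get(&pdev->dev, bus_clk_names[i]);
		if (IS_ERR(host->bus_clks[i]))
			return PTR_ERR(host->bus_clks[i]);
	}
	return 0;
}

static int my_host_enable_clks(struct my_host *host)
{
	int i, ret;

	for (i = 0; i < BUS_CLK_MAX; i++) {
		ret = clk_prepare_enable(host->bus_clks[i]);
		if (ret)
			goto err;
	}
	return 0;
err:
	/* unwind only the clocks that were successfully enabled */
	while (--i >= 0)
		clk_disable_unprepare(host->bus_clks[i]);
	return ret;
}

static void my_host_disable_clks(struct my_host *host)
{
	int i;

	for (i = BUS_CLK_MAX - 1; i >= 0; i--)
		clk_disable_unprepare(host->bus_clks[i]);
}
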
bus_clks          114 drivers/interconnect/qcom/qcs404.c 	struct clk_bulk_data *bus_clks;
bus_clks          396 drivers/interconnect/qcom/qcs404.c 		ret = clk_set_rate(qp->bus_clks[i].clk, rate);
bus_clks          399 drivers/interconnect/qcom/qcs404.c 			       qp->bus_clks[i].id, ret);
bus_clks          441 drivers/interconnect/qcom/qcs404.c 	qp->bus_clks = devm_kmemdup(dev, bus_clocks, sizeof(bus_clocks),
bus_clks          443 drivers/interconnect/qcom/qcs404.c 	if (!qp->bus_clks)
bus_clks          447 drivers/interconnect/qcom/qcs404.c 	ret = devm_clk_bulk_get(dev, qp->num_clks, qp->bus_clks);
bus_clks          451 drivers/interconnect/qcom/qcs404.c 	ret = clk_bulk_prepare_enable(qp->num_clks, qp->bus_clks);
bus_clks          466 drivers/interconnect/qcom/qcs404.c 		clk_bulk_disable_unprepare(qp->num_clks, qp->bus_clks);
bus_clks          501 drivers/interconnect/qcom/qcs404.c 	clk_bulk_disable_unprepare(qp->num_clks, qp->bus_clks);
bus_clks          517 drivers/interconnect/qcom/qcs404.c 	clk_bulk_disable_unprepare(qp->num_clks, qp->bus_clks);
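
The qcs404.c hits show the clk_bulk alternative: a static clk_bulk_data table is copied with devm_kmemdup(), all clocks are acquired with devm_clk_bulk_get() and turned on with clk_bulk_prepare_enable(), rates are still set per clock through the .clk member, and every error and remove path drops them with a single clk_bulk_disable_unprepare() call. A condensed sketch follows; "struct my_provider" and the clock ids are illustrative stand-ins for the driver's own qcom_icc_provider.

/*
 * Sketch of the clk_bulk pattern seen in qcs404.c above; struct names
 * and clock ids are assumptions for illustration only.
 */
#include <linux/clk.h>
#include <linux/device.h>
#include <linux/kernel.h>
#include <linux/slab.h>

static const struct clk_bulk_data bus_clocks[] = {
	{ .id = "bus" },
	{ .id = "bus_a" },
};

struct my_provider {
	struct device *dev;
	int num_clks;
	struct clk_bulk_data *bus_clks;
};

static int my_provider_init_clks(struct my_provider *qp, struct device *dev)
{
	int ret;

	qp->dev = dev;
	/* keep a writable, per-device copy of the static table */
	qp->bus_clks = devm_kmemdup(dev, bus_clocks, sizeof(bus_clocks),
				    GFP_KERNEL);
	if (!qp->bus_clks)
		return -ENOMEM;
	qp->num_clks = ARRAY_SIZE(bus_clocks);

	ret = devm_clk_bulk_get(dev, qp->num_clks, qp->bus_clks);
	if (ret)
		return ret;

	return clk_bulk_prepare_enable(qp->num_clks, qp->bus_clks);
}

static int my_provider_set_rate(struct my_provider *qp, unsigned long rate)
{
	int i, ret;

	/* rates are still set one clock at a time, via the .clk member */
	for (i = 0; i < qp->num_clks; i++) {
		ret = clk_set_rate(qp->bus_clks[i].clk, rate);
		if (ret) {
			dev_err(qp->dev, "%s clk_set_rate error: %d\n",
				qp->bus_clks[i].id, ret);
			return ret;
		}
	}
	return 0;
}

static void my_provider_teardown(struct my_provider *qp)
{
	/* one call undoes the whole bulk prepare/enable */
	clk_bulk_disable_unprepare(qp->num_clks, qp->bus_clks);
}
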