cur_pll           300 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 	struct gk20a_pll cur_pll;
cur_pll           303 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 	gk20a_pllg_read_mnp(clk, &cur_pll);
cur_pll           337 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 	struct gk20a_pll cur_pll;
cur_pll           341 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 		gk20a_pllg_read_mnp(clk, &cur_pll);
cur_pll           344 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 		if (pll->m == cur_pll.m && pll->pl == cur_pll.pl)
cur_pll           348 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 		cur_pll.n = gk20a_pllg_n_lo(clk, &cur_pll);
cur_pll           349 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 		ret = gk20a_pllg_slide(clk, cur_pll.n);
cur_pll           355 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 	cur_pll = *pll;
cur_pll           356 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 	cur_pll.n = gk20a_pllg_n_lo(clk, &cur_pll);
cur_pll           357 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c 	ret = gk20a_pllg_program_mnp(clk, &cur_pll);
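
Read together, the gk20a.c hits above (the read at line 303 and the sequence at lines 337-357) trace a glitchless reprogramming pattern: read the live coefficients into cur_pll, slide only NDIV when M and PL are unchanged, otherwise slide down to the current NDIV_LO, program the new M/N/PL at its NDIV_LO, and slide back up. The following is a minimal user-space sketch of that pattern, not the driver's code: the struct layout, the enable guard, the final slide and all helper bodies are assumptions modeled on the calls visible in the listing (gk20a_pllg_read_mnp, gk20a_pllg_n_lo, gk20a_pllg_slide, gk20a_pllg_program_mnp).

/*
 * Simplified, user-space reconstruction of the cur_pll pattern seen in
 * gk20a.c:337-357.  Everything below is a stand-in: the real driver
 * programs GPCPLL registers, and the guard and final slide that are not
 * visible in the listing are assumptions.
 */
#include <stdio.h>
#include <stdbool.h>

struct gk20a_pll {
	unsigned int m;		/* input divider */
	unsigned int n;		/* feedback divider (NDIV) */
	unsigned int pl;	/* post divider */
};

struct gk20a_clk {
	struct gk20a_pll hw;	/* simulated hardware state */
	bool enabled;
};

/* Stub helpers standing in for the register accessors named in the listing. */
static bool gk20a_pllg_is_enabled(struct gk20a_clk *clk)	/* assumed guard */
{
	return clk->enabled;
}

static void gk20a_pllg_read_mnp(struct gk20a_clk *clk, struct gk20a_pll *pll)
{
	*pll = clk->hw;
}

static unsigned int gk20a_pllg_n_lo(struct gk20a_clk *clk, const struct gk20a_pll *pll)
{
	(void)clk;
	/* placeholder; the real helper derives the lowest NDIV keeping the VCO in range */
	return pll->n > 1 ? pll->n / 2 : 1;
}

static int gk20a_pllg_slide(struct gk20a_clk *clk, unsigned int n)
{
	clk->hw.n = n;		/* the real helper slides NDIV without dropping the clock */
	return 0;
}

static int gk20a_pllg_program_mnp(struct gk20a_clk *clk, const struct gk20a_pll *pll)
{
	clk->hw = *pll;		/* the real helper rewrites and re-locks the PLL */
	clk->enabled = true;
	return 0;
}

/* The sequence the cur_pll hits at lines 337-357 appear to implement. */
static int pllg_program_mnp_slide(struct gk20a_clk *clk, const struct gk20a_pll *pll)
{
	struct gk20a_pll cur_pll;
	int ret;

	if (gk20a_pllg_is_enabled(clk)) {
		gk20a_pllg_read_mnp(clk, &cur_pll);

		/* M and PL unchanged: sliding NDIV alone is enough (assumed) */
		if (pll->m == cur_pll.m && pll->pl == cur_pll.pl)
			return gk20a_pllg_slide(clk, pll->n);

		/* otherwise slide down to the current NDIV_LO first */
		cur_pll.n = gk20a_pllg_n_lo(clk, &cur_pll);
		ret = gk20a_pllg_slide(clk, cur_pll.n);
		if (ret)
			return ret;
	}

	/* program the new coefficients at their NDIV_LO... */
	cur_pll = *pll;
	cur_pll.n = gk20a_pllg_n_lo(clk, &cur_pll);
	ret = gk20a_pllg_program_mnp(clk, &cur_pll);
	if (ret)
		return ret;

	/* ...then slide up to the target NDIV (assumed final step) */
	return gk20a_pllg_slide(clk, pll->n);
}

int main(void)
{
	struct gk20a_clk clk = { .hw = { .m = 1, .n = 34, .pl = 2 }, .enabled = true };
	struct gk20a_pll target = { .m = 1, .n = 60, .pl = 3 };

	pllg_program_mnp_slide(&clk, &target);
	printf("m=%u n=%u pl=%u\n", clk.hw.m, clk.hw.n, clk.hw.pl);
	return 0;
}
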
cur_pll           364 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 	struct gm20b_pll cur_pll;
cur_pll           371 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 	gm20b_pllg_read_mnp(clk, &cur_pll);
cur_pll           372 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 	pdiv_only = cur_pll.base.n == n_int && cur_pll.sdm_din == sdm_din &&
cur_pll           373 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		    cur_pll.base.m == pll->m;
cur_pll           389 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		u32 old = cur_pll.base.pl;
cur_pll           399 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 			cur_pll.base.pl = min(old | BIT(ffs(new) - 1),
cur_pll           401 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 			gk20a_pllg_write_mnp(&clk->base, &cur_pll.base);
cur_pll           404 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		cur_pll.base.pl = new;
cur_pll           405 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		gk20a_pllg_write_mnp(&clk->base, &cur_pll.base);
cur_pll           410 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		cur_pll.base = *pll;
cur_pll           411 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		cur_pll.base.n = n_int;
cur_pll           412 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		cur_pll.sdm_din = sdm_din;
cur_pll           413 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		gm20b_pllg_write_mnp(clk, &cur_pll);
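
The gm20b.c hits at lines 364-413 show a two-level cur_pll (cur_pll.base.m/n/pl plus cur_pll.sdm_din) and a "pdiv_only" shortcut: when M, the integer N and the fractional sdm_din already match, only the post divider is rewritten, with an interim PL inserted first if the old and new values share no set bit; otherwise (lines 410-413) the whole target is copied into cur_pll and written in one go. Below is a hedged, self-contained sketch of the PDIV-only branch. The struct layout is inferred from the field accesses above, the helper body is a stand-in for gk20a_pllg_write_mnp, and the second operand of the min() at line 399 is cut off in the listing, so the symmetric form used here is an assumption.

/*
 * Sketch of the gm20b "PDIV-only" path suggested by gm20b.c:364-413.
 * Not the driver's code: layouts and helpers are stand-ins.
 */
#include <stdio.h>
#include <strings.h>		/* ffs() */

#define BIT(x) (1u << (x))

struct gk20a_pll {
	unsigned int m, n, pl;
};

struct gm20b_pll {
	struct gk20a_pll base;	/* integer M/N/PL, as on gk20a (inferred) */
	unsigned int sdm_din;	/* fractional (sigma-delta) part (inferred) */
};

/* Stand-in for gk20a_pllg_write_mnp(): just record what would be written. */
static void write_mnp(const struct gk20a_pll *pll)
{
	printf("write M=%u N=%u PL=%u\n", pll->m, pll->n, pll->pl);
}

/*
 * When only PL changes, the transition stays glitchless as long as the old
 * and new PL values share a set bit; if they do not, an interim PL that
 * overlaps both is programmed first.
 */
static void change_pdiv_only(struct gm20b_pll *cur_pll, unsigned int new)
{
	unsigned int old = cur_pll->base.pl;

	if ((old & new) == 0) {
		/* interim PL sharing a bit with both (assumed symmetric form) */
		unsigned int a = old | BIT(ffs(new) - 1);
		unsigned int b = new | BIT(ffs(old) - 1);

		cur_pll->base.pl = a < b ? a : b;
		write_mnp(&cur_pll->base);
	}

	cur_pll->base.pl = new;
	write_mnp(&cur_pll->base);
}

int main(void)
{
	struct gm20b_pll cur_pll = { .base = { .m = 1, .n = 52, .pl = 2 }, .sdm_din = 0 };

	/* 0b010 -> 0b100: no common bit, so the interim value 0b110 is written first */
	change_pdiv_only(&cur_pll, 4);
	return 0;
}
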
cur_pll           435 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 	struct gk20a_pll cur_pll;
cur_pll           439 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		gk20a_pllg_read_mnp(&clk->base, &cur_pll);
cur_pll           442 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		if (pll->m == cur_pll.m && pll->pl == cur_pll.pl)
cur_pll           446 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		cur_pll.n = gk20a_pllg_n_lo(&clk->base, &cur_pll);
cur_pll           447 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 		ret = gm20b_pllg_slide(clk, cur_pll.n);
cur_pll           453 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 	cur_pll = *pll;
cur_pll           454 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 	cur_pll.n = gk20a_pllg_n_lo(&clk->base, &cur_pll);
cur_pll           455 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c 	ret = gm20b_pllg_program_mnp(clk, &cur_pll);
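
Finally, the hits at gm20b.c:435-455 mirror the gk20a sequence almost line for line; what the listing adds is the layering: the shared helpers (gk20a_pllg_read_mnp, gk20a_pllg_n_lo) are called on &clk->base while sliding and programming go through gm20b-specific entry points. A compile-only skeleton of that shape follows; the gm20b_clk layout, the enable guard and all prototypes are assumptions, not the driver's definitions.

/*
 * Compile-only skeleton of the layering suggested by gm20b.c:435-455.
 * Struct layouts and prototypes are assumptions.
 */
struct gk20a_pll {
	unsigned int m, n, pl;
};

struct gk20a_clk {
	int placeholder;	/* the real struct holds registers, PLL parameters, ... */
};

struct gm20b_clk {
	struct gk20a_clk base;	/* embedding inferred from the &clk->base arguments */
	/* gm20b-specific state (fractional NDIV, DVFS) would follow */
};

int gk20a_pllg_is_enabled(struct gk20a_clk *clk);	/* assumed guard */
void gk20a_pllg_read_mnp(struct gk20a_clk *clk, struct gk20a_pll *pll);
unsigned int gk20a_pllg_n_lo(struct gk20a_clk *clk, const struct gk20a_pll *pll);
int gm20b_pllg_slide(struct gm20b_clk *clk, unsigned int n);
int gm20b_pllg_program_mnp(struct gm20b_clk *clk, const struct gk20a_pll *pll);

/* Same shape as the gk20a sequence, with the shared helpers taking &clk->base. */
int gm20b_program_mnp_slide(struct gm20b_clk *clk, const struct gk20a_pll *pll)
{
	struct gk20a_pll cur_pll;
	int ret;

	if (gk20a_pllg_is_enabled(&clk->base)) {
		gk20a_pllg_read_mnp(&clk->base, &cur_pll);

		/* NDIV-only change: slide and stop (assumed) */
		if (pll->m == cur_pll.m && pll->pl == cur_pll.pl)
			return gm20b_pllg_slide(clk, pll->n);

		/* slide down to the current NDIV_LO before touching M/PL */
		cur_pll.n = gk20a_pllg_n_lo(&clk->base, &cur_pll);
		ret = gm20b_pllg_slide(clk, cur_pll.n);
		if (ret)
			return ret;
	}

	/* reprogram at the new NDIV_LO (the final slide up is not visible in the listing) */
	cur_pll = *pll;
	cur_pll.n = gk20a_pllg_n_lo(&clk->base, &cur_pll);
	return gm20b_pllg_program_mnp(clk, &cur_pll);
}
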