Identifier search: tmu

drivers/clocksource/sh_tmu.c:
   35:	struct sh_tmu_device *tmu;
   85:		switch (ch->tmu->model) {
   87:			return ioread8(ch->tmu->mapbase + 2);
   89:			return ioread8(ch->tmu->mapbase + 4);
  107:		switch (ch->tmu->model) {
  109:			return iowrite8(value, ch->tmu->mapbase + 2);
  111:			return iowrite8(value, ch->tmu->mapbase + 4);
  128:	raw_spin_lock_irqsave(&ch->tmu->lock, flags);
  137:	raw_spin_unlock_irqrestore(&ch->tmu->lock, flags);
  145:	ret = clk_enable(ch->tmu->clk);
  147:		dev_err(&ch->tmu->pdev->dev, "ch%u: cannot enable clock\n",
  173:	pm_runtime_get_sync(&ch->tmu->pdev->dev);
  174:	dev_pm_syscore_device(&ch->tmu->pdev->dev, true);
  188:	clk_disable(ch->tmu->clk);
  201:	dev_pm_syscore_device(&ch->tmu->pdev->dev, false);
  202:	pm_runtime_put(&ch->tmu->pdev->dev);
  291:		pm_genpd_syscore_poweroff(&ch->tmu->pdev->dev);
  303:		pm_genpd_syscore_poweron(&ch->tmu->pdev->dev);
  323:	dev_info(&ch->tmu->pdev->dev, "ch%u: used as clock source\n",
  326:	clocksource_register_hz(cs, ch->tmu->rate);
  340:		ch->periodic = (ch->tmu->rate + HZ/2) / HZ;
  363:	dev_info(&ch->tmu->pdev->dev, "ch%u: used for %s clock events\n",
  393:	pm_genpd_syscore_poweroff(&ced_to_sh_tmu(ced)->tmu->pdev->dev);
  398:	pm_genpd_syscore_poweron(&ced_to_sh_tmu(ced)->tmu->pdev->dev);
  419:	dev_info(&ch->tmu->pdev->dev, "ch%u: used for clock events\n",
  422:	clockevents_config_and_register(ced, ch->tmu->rate, 0x300, 0xffffffff);
  426:			  dev_name(&ch->tmu->pdev->dev), ch);
  428:		dev_err(&ch->tmu->pdev->dev, "ch%u: failed to request irq %d\n",
  438:		ch->tmu->has_clockevent = true;
  441:		ch->tmu->has_clocksource = true;
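These channel-runtime hits show the driver's two recurring patterns: the register accessors at 85-111 switch on ch->tmu->model to pick a model-specific offset from the shared mapbase, and the lock pair at 128/137 serializes access to the start/stop register that every channel shares. A hedged sketch of the helper that lock pair belongs to, assuming the driver's own sh_tmu_read()/sh_tmu_write() accessors and its TSTR register index:

	static void sh_tmu_start_stop_ch(struct sh_tmu_channel *ch, int start)
	{
		unsigned long flags, value;

		/* The start/stop register is shared by all channels of the
		 * device, so flip this channel's bit under the device lock. */
		raw_spin_lock_irqsave(&ch->tmu->lock, flags);
		value = sh_tmu_read(ch, TSTR);

		if (start)
			value |= 1 << ch->index;
		else
			value &= ~(1 << ch->index);

		sh_tmu_write(ch, TSTR, value);
		raw_spin_unlock_irqrestore(&ch->tmu->lock, flags);
	}

The remaining sh_tmu.c hits come from the probe-time setup path.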
drivers/clocksource/sh_tmu.c (continued):
  450:				struct sh_tmu_device *tmu)
  456:	ch->tmu = tmu;
  459:	if (tmu->model == SH_TMU_SH3)
  460:		ch->base = tmu->mapbase + 4 + ch->index * 12;
  462:		ch->base = tmu->mapbase + 8 + ch->index * 12;
  464:	ch->irq = platform_get_irq(tmu->pdev, index);
  471:	return sh_tmu_register(ch, dev_name(&tmu->pdev->dev),
  475:	static int sh_tmu_map_memory(struct sh_tmu_device *tmu)
  479:	res = platform_get_resource(tmu->pdev, IORESOURCE_MEM, 0);
  481:		dev_err(&tmu->pdev->dev, "failed to get I/O memory\n");
  485:	tmu->mapbase = ioremap_nocache(res->start, resource_size(res));
  486:	if (tmu->mapbase == NULL)
  492:	static int sh_tmu_parse_dt(struct sh_tmu_device *tmu)
  494:	struct device_node *np = tmu->pdev->dev.of_node;
  496:	tmu->model = SH_TMU;
  497:	tmu->num_channels = 3;
  499:	of_property_read_u32(np, "#renesas,channels", &tmu->num_channels);
  501:	if (tmu->num_channels != 2 && tmu->num_channels != 3) {
  502:		dev_err(&tmu->pdev->dev, "invalid number of channels %u\n",
  503:			tmu->num_channels);
  510:	static int sh_tmu_setup(struct sh_tmu_device *tmu, struct platform_device *pdev)
  515:	tmu->pdev = pdev;
  517:	raw_spin_lock_init(&tmu->lock);
  520:		ret = sh_tmu_parse_dt(tmu);
  527:		tmu->model = id->driver_data;
  528:		tmu->num_channels = hweight8(cfg->channels_mask);
  530:		dev_err(&tmu->pdev->dev, "missing platform data\n");
  535:	tmu->clk = clk_get(&tmu->pdev->dev, "fck");
  536:	if (IS_ERR(tmu->clk)) {
  537:		dev_err(&tmu->pdev->dev, "cannot get clock\n");
  538:		return PTR_ERR(tmu->clk);
  541:	ret = clk_prepare(tmu->clk);
  546:	ret = clk_enable(tmu->clk);
  550:	tmu->rate = clk_get_rate(tmu->clk) / 4;
  551:	clk_disable(tmu->clk);
  554:	ret = sh_tmu_map_memory(tmu);
  556:		dev_err(&tmu->pdev->dev, "failed to remap I/O memory\n");
  561:	tmu->channels = kcalloc(tmu->num_channels, sizeof(*tmu->channels),
  563:	if (tmu->channels == NULL) {
  572:	for (i = 0; i < tmu->num_channels; ++i) {
  573:		ret = sh_tmu_channel_setup(&tmu->channels[i], i,
  574:					   i == 0, i == 1, tmu);
  579:	platform_set_drvdata(pdev, tmu);
  584:	kfree(tmu->channels);
  585:	iounmap(tmu->mapbase);
  587:	clk_unprepare(tmu->clk);
  589:	clk_put(tmu->clk);
  595:	struct sh_tmu_device *tmu = platform_get_drvdata(pdev);
  603:	if (tmu) {
  608:	tmu = kzalloc(sizeof(*tmu), GFP_KERNEL);
  609:	if (tmu == NULL)
  612:	ret = sh_tmu_setup(tmu, pdev);
  614:		kfree(tmu);
  622:	if (tmu->has_clockevent || tmu->has_clocksource)
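The setup hits read in acquisition order: spinlock init (517), DT or platform data (520-530), functional clock (535-541), base rate (550), I/O mapping (554), channel array (561), then per-channel registration (572-574), with 584-589 as the matching unwind in reverse order. A hedged sketch of the clock bracket around the rate probe, assuming the error-label names implied by that unwind:

	ret = clk_prepare(tmu->clk);
	if (ret < 0)
		goto err_clk_put;

	/* Enable the clock only long enough to read its rate; the TMU
	 * counts at the module clock divided by 4, its smallest prescaler
	 * setting, hence rate = clk_get_rate(tmu->clk) / 4. */
	ret = clk_enable(tmu->clk);
	if (ret < 0)
		goto err_clk_unprepare;

	tmu->rate = clk_get_rate(tmu->clk) / 4;
	clk_disable(tmu->clk);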
drivers/gpu/drm/vc4/vc4_validate_shaders.c:
  157:		      int tmu)
  170:	       &validation_state->tmu_setup[tmu],
  177:		validation_state->tmu_setup[tmu].p_offset[i] = ~0;
  193:	int tmu = waddr > QPU_W_TMU0_B;
  195:	bool is_direct = submit && validation_state->tmu_write_count[tmu] == 0;
  236:		validation_state->tmu_setup[tmu].p_offset[1] =
  245:		validation_state->tmu_setup[tmu].is_direct = true;
  255:	if (validation_state->tmu_write_count[tmu] >= 4) {
  257:		  tmu);
  260:	validation_state->tmu_setup[tmu].p_offset[validation_state->tmu_write_count[tmu]] =
  262:	validation_state->tmu_write_count[tmu]++;
  277:					   validation_state, tmu)) {
  281:	validation_state->tmu_write_count[tmu] = 0;
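The vc4 hits are per-texture-unit bookkeeping in the shader validator: waddr > QPU_W_TMU0_B selects TMU0 vs. TMU1 (193), each parameter write records where its uniform lands (260-262), and a completed submit resets the count (281). A hedged sketch of the bounded-write check at 255-262, assuming the surrounding validator state types:

	/* The hardware accepts at most four parameter writes (S, T, R, B)
	 * per TMU before a texture fetch is dispatched; reject a fifth and
	 * record the uniform offset of each accepted write. */
	if (validation_state->tmu_write_count[tmu] >= 4) {
		DRM_ERROR("TMU %d got too many parameters before dispatch\n",
			  tmu);
		return false;
	}
	validation_state->tmu_setup[tmu].p_offset[validation_state->tmu_write_count[tmu]] =
		validated_shader->uniforms_size;
	validation_state->tmu_write_count[tmu]++;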
drivers/pinctrl/sh-pfc/pfc-r8a7795-es1.c:
 4740:	SH_PFC_FUNCTION(tmu),
drivers/pinctrl/sh-pfc/pfc-r8a7795.c:
 5083:	SH_PFC_FUNCTION(tmu),
drivers/pinctrl/sh-pfc/pfc-r8a7796.c:
 5046:		SH_PFC_FUNCTION(tmu),
drivers/pinctrl/sh-pfc/pfc-r8a77965.c:
 5293:	SH_PFC_FUNCTION(tmu),
drivers/pinctrl/sh-pfc/pfc-r8a77970.c:
 2047:	SH_PFC_FUNCTION(tmu),
drivers/pinctrl/sh-pfc/pfc-r8a77980.c:
 2468:	SH_PFC_FUNCTION(tmu),
drivers/pinctrl/sh-pfc/pfc-r8a77990.c:
 4508:		SH_PFC_FUNCTION(tmu),
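All seven pinctrl hits are the same one-line registration: each R-Car pin controller exposes a "tmu" pin function built from its TMU clock-input pin groups, and SH_PFC_FUNCTION(n) merely ties the function name to its n##_groups table. A hedged sketch of the surrounding table entries; the group names are illustrative, not copied from any one SoC:

	static const char * const tmu_groups[] = {
		"tmu_tclk1_a",
		"tmu_tclk1_b",
		"tmu_tclk2_a",
		"tmu_tclk2_b",
	};

	/* ... later, in the SoC's function list: */
	static const struct sh_pfc_function pinmux_functions[] = {
		SH_PFC_FUNCTION(tmu),
	};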