rpm 193 arch/sparc/include/asm/vio.h u16 rpm; /* Revolutions per minute */
rpm 145 drivers/clk/qcom/clk-rpm.c struct qcom_rpm *rpm;
rpm 150 drivers/clk/qcom/clk-rpm.c struct qcom_rpm *rpm;
rpm 177 drivers/clk/qcom/clk-rpm.c ret = qcom_rpm_write(r->rpm, QCOM_RPM_ACTIVE_STATE,
rpm 181 drivers/clk/qcom/clk-rpm.c ret = qcom_rpm_write(r->rpm, QCOM_RPM_SLEEP_STATE,
rpm 193 drivers/clk/qcom/clk-rpm.c return qcom_rpm_write(r->rpm, QCOM_RPM_ACTIVE_STATE,
rpm 201 drivers/clk/qcom/clk-rpm.c return qcom_rpm_write(r->rpm, QCOM_RPM_SLEEP_STATE,
rpm 313 drivers/clk/qcom/clk-rpm.c ret = qcom_rpm_write(r->rpm, QCOM_RPM_ACTIVE_STATE, clk_id, &value, 1);
rpm 334 drivers/clk/qcom/clk-rpm.c ret = qcom_rpm_write(r->rpm, QCOM_RPM_ACTIVE_STATE, clk_id, &value, 1);
rpm 349 drivers/clk/qcom/clk-rpm.c ret = qcom_rpm_write(r->rpm, QCOM_RPM_ACTIVE_STATE,
rpm 363 drivers/clk/qcom/clk-rpm.c ret = qcom_rpm_write(r->rpm, QCOM_RPM_ACTIVE_STATE,
rpm 573 drivers/clk/qcom/clk-rpm.c struct qcom_rpm *rpm;
rpm 577 drivers/clk/qcom/clk-rpm.c rpm = dev_get_drvdata(pdev->dev.parent);
rpm 578 drivers/clk/qcom/clk-rpm.c if (!rpm) {
rpm 602 drivers/clk/qcom/clk-rpm.c rpm_clks[i]->rpm = rpm;
rpm 132 drivers/clk/qcom/clk-smd-rpm.c struct qcom_smd_rpm *rpm;
rpm 142 drivers/clk/qcom/clk-smd-rpm.c struct qcom_rpm *rpm;
rpm 163 drivers/clk/qcom/clk-smd-rpm.c ret = qcom_rpm_smd_write(r->rpm, QCOM_SMD_RPM_ACTIVE_STATE,
rpm 168 drivers/clk/qcom/clk-smd-rpm.c ret = qcom_rpm_smd_write(r->rpm, QCOM_SMD_RPM_SLEEP_STATE,
rpm 186 drivers/clk/qcom/clk-smd-rpm.c return qcom_rpm_smd_write(r->rpm, QCOM_SMD_RPM_ACTIVE_STATE,
rpm 200 drivers/clk/qcom/clk-smd-rpm.c return qcom_rpm_smd_write(r->rpm, QCOM_SMD_RPM_SLEEP_STATE,
rpm 367 drivers/clk/qcom/clk-smd-rpm.c static int clk_smd_rpm_enable_scaling(struct qcom_smd_rpm *rpm)
rpm 376 drivers/clk/qcom/clk-smd-rpm.c ret = qcom_rpm_smd_write(rpm, QCOM_SMD_RPM_SLEEP_STATE,
rpm 384 drivers/clk/qcom/clk-smd-rpm.c ret = qcom_rpm_smd_write(rpm, QCOM_SMD_RPM_ACTIVE_STATE,
rpm 747 drivers/clk/qcom/clk-smd-rpm.c struct qcom_smd_rpm *rpm;
rpm 751 drivers/clk/qcom/clk-smd-rpm.c rpm = dev_get_drvdata(pdev->dev.parent);
rpm 752 drivers/clk/qcom/clk-smd-rpm.c if (!rpm) {
rpm 775 drivers/clk/qcom/clk-smd-rpm.c rpm_smd_clks[i]->rpm = rpm;
rpm 782 drivers/clk/qcom/clk-smd-rpm.c ret = clk_smd_rpm_enable_scaling(rpm);
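The clk-rpm.c and clk-smd-rpm.c hits above all follow the same shape: fetch the RPM handle that the parent MFD device stored in its drvdata, then write one or more u32 words against a resource for either the active or the sleep state. A minimal sketch of that pattern with qcom_rpm_write(), whose full prototype appears under include/linux/mfd/qcom_rpm.h near the end of this index; MY_RPM_CLK_ID and the function name are hypothetical, used only for illustration.

#include <linux/mfd/qcom_rpm.h>
#include <linux/platform_device.h>

#define MY_RPM_CLK_ID	42	/* hypothetical resource id, for illustration only */

static int my_rpm_clk_set_rate_khz(struct platform_device *pdev, u32 rate_khz)
{
	/* The qcom_rpm MFD parent publishes its handle via drvdata. */
	struct qcom_rpm *rpm = dev_get_drvdata(pdev->dev.parent);
	u32 value = rate_khz;

	if (!rpm)
		return -ENODEV;

	/* One u32 of payload, applied to the active (awake) state. */
	return qcom_rpm_write(rpm, QCOM_RPM_ACTIVE_STATE, MY_RPM_CLK_ID,
			      &value, 1);
}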
rpm 1793 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c u32 rpm = 0;
rpm 1801 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c err = smu_get_fan_speed_rpm(&adev->smu, &rpm);
rpm 1805 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c err = amdgpu_dpm_get_fan_speed_rpm(adev, &rpm);
rpm 1810 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return sprintf(buf, "%i\n", rpm);
rpm 256 drivers/gpu/drm/amd/include/kgd_pp_interface.h int (*get_fan_speed_rpm)(void *handle, uint32_t *rpm);
rpm 257 drivers/gpu/drm/amd/include/kgd_pp_interface.h int (*set_fan_speed_rpm)(void *handle, uint32_t rpm);
rpm 571 drivers/gpu/drm/amd/powerplay/amd_powerplay.c static int pp_dpm_get_fan_speed_rpm(void *handle, uint32_t *rpm)
rpm 583 drivers/gpu/drm/amd/powerplay/amd_powerplay.c ret = hwmgr->hwmgr_func->get_fan_speed_rpm(hwmgr, rpm);
rpm 588 drivers/gpu/drm/amd/powerplay/amd_powerplay.c static int pp_dpm_set_fan_speed_rpm(void *handle, uint32_t rpm)
rpm 601 drivers/gpu/drm/amd/powerplay/amd_powerplay.c ret = hwmgr->hwmgr_func->set_fan_speed_rpm(hwmgr, rpm);
rpm 1963 drivers/gpu/drm/i915/display/intel_display_power.c struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
rpm 1972 drivers/gpu/drm/i915/display/intel_display_power.c assert_rpm_raw_wakeref_held(rpm);
rpm 1973 drivers/gpu/drm/i915/display/intel_display_power.c wakeref = intel_runtime_pm_get(rpm);
rpm 1981 drivers/gpu/drm/i915/display/intel_display_power.c intel_runtime_pm_put(rpm, wakeref);
rpm 1991 drivers/gpu/drm/i915/display/intel_display_power.c struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
rpm 1992 drivers/gpu/drm/i915/display/intel_display_power.c intel_wakeref_t new_work_wakeref = intel_runtime_pm_get_raw(rpm);
rpm 2022 drivers/gpu/drm/i915/display/intel_display_power.c intel_runtime_pm_put_raw(rpm, old_work_wakeref);
rpm 2024 drivers/gpu/drm/i915/display/intel_display_power.c intel_runtime_pm_put_raw(rpm, new_work_wakeref);
rpm 2042 drivers/gpu/drm/i915/display/intel_display_power.c struct intel_runtime_pm *rpm = &i915->runtime_pm;
rpm 2043 drivers/gpu/drm/i915/display/intel_display_power.c intel_wakeref_t work_wakeref = intel_runtime_pm_get_raw(rpm);
rpm 2070 drivers/gpu/drm/i915/display/intel_display_power.c intel_runtime_pm_put_raw(rpm, work_wakeref);
rpm 2072 drivers/gpu/drm/i915/display/intel_display_power.c intel_runtime_pm_put(rpm, wakeref);
rpm 224 drivers/gpu/drm/i915/gem/i915_gem_mman.c struct intel_runtime_pm *rpm = &i915->runtime_pm;
rpm 246 drivers/gpu/drm/i915/gem/i915_gem_mman.c wakeref = intel_runtime_pm_get(rpm);
rpm 307 drivers/gpu/drm/i915/gem/i915_gem_mman.c assert_rpm_wakelock_held(rpm);
rpm 334 drivers/gpu/drm/i915/gem/i915_gem_mman.c intel_runtime_pm_put(rpm, wakeref);
rpm 172 drivers/gpu/drm/i915/gt/intel_engine_pm.c struct intel_runtime_pm *rpm = &engine->i915->runtime_pm;
rpm 174 drivers/gpu/drm/i915/gt/intel_engine_pm.c intel_wakeref_init(&engine->wakeref, rpm, &wf_ops);
rpm 191 drivers/gpu/drm/i915/gvt/aperture_gm.c struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
rpm 195 drivers/gpu/drm/i915/gvt/aperture_gm.c intel_runtime_pm_get(rpm);
rpm 211 drivers/gpu/drm/i915/gvt/aperture_gm.c intel_runtime_pm_put_unchecked(rpm);
rpm 224 drivers/gpu/drm/i915/gvt/aperture_gm.c intel_runtime_pm_put_unchecked(rpm);
rpm 1001 drivers/gpu/drm/i915/gvt/scheduler.c struct intel_runtime_pm *rpm = &gvt->dev_priv->runtime_pm;
rpm 1025 drivers/gpu/drm/i915/gvt/scheduler.c intel_runtime_pm_get(rpm);
rpm 1063 drivers/gpu/drm/i915/gvt/scheduler.c intel_runtime_pm_put_unchecked(rpm);
rpm 1677 drivers/gpu/drm/i915/i915_drv.c struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
rpm 1679 drivers/gpu/drm/i915/i915_drv.c disable_rpm_wakeref_asserts(rpm);
rpm 1690 drivers/gpu/drm/i915/i915_drv.c enable_rpm_wakeref_asserts(rpm);
rpm 1691 drivers/gpu/drm/i915/i915_drv.c intel_runtime_pm_driver_release(rpm);
rpm 1843 drivers/gpu/drm/i915/i915_drv.c struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
rpm 1846 drivers/gpu/drm/i915/i915_drv.c disable_rpm_wakeref_asserts(rpm);
rpm 1886 drivers/gpu/drm/i915/i915_drv.c enable_rpm_wakeref_asserts(rpm);
rpm 1888 drivers/gpu/drm/i915/i915_drv.c intel_runtime_pm_driver_release(rpm);
rpm 2594 drivers/gpu/drm/i915/i915_drv.c struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
rpm 2605 drivers/gpu/drm/i915/i915_drv.c disable_rpm_wakeref_asserts(rpm);
rpm 2634 drivers/gpu/drm/i915/i915_drv.c enable_rpm_wakeref_asserts(rpm);
rpm 2639 drivers/gpu/drm/i915/i915_drv.c enable_rpm_wakeref_asserts(rpm);
rpm 2640 drivers/gpu/drm/i915/i915_drv.c intel_runtime_pm_driver_release(rpm);
rpm 2645 drivers/gpu/drm/i915/i915_drv.c rpm->suspended = true;
rpm 2682 drivers/gpu/drm/i915/i915_drv.c struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
rpm 2690 drivers/gpu/drm/i915/i915_drv.c WARN_ON_ONCE(atomic_read(&rpm->wakeref_count));
rpm 2691 drivers/gpu/drm/i915/i915_drv.c disable_rpm_wakeref_asserts(rpm);
rpm 2694 drivers/gpu/drm/i915/i915_drv.c rpm->suspended = false;
rpm 2724 drivers/gpu/drm/i915/i915_drv.c enable_rpm_wakeref_asserts(rpm);
rpm 525 drivers/gpu/drm/i915/i915_gem.c struct intel_runtime_pm *rpm = &i915->runtime_pm;
rpm 546 drivers/gpu/drm/i915/i915_gem.c wakeref = intel_runtime_pm_get_if_in_use(rpm);
rpm 553 drivers/gpu/drm/i915/i915_gem.c wakeref = intel_runtime_pm_get(rpm);
rpm 646 drivers/gpu/drm/i915/i915_gem.c intel_runtime_pm_put(rpm, wakeref);
rpm 452 drivers/gpu/drm/i915/i915_pmu.c struct intel_runtime_pm *rpm = &i915->runtime_pm;
rpm 458 drivers/gpu/drm/i915/i915_pmu.c wakeref = intel_runtime_pm_get_if_in_use(rpm);
rpm 461 drivers/gpu/drm/i915/i915_pmu.c intel_runtime_pm_put(rpm, wakeref);
rpm 480 drivers/gpu/drm/i915/i915_pmu.c struct device *kdev = rpm->kdev;
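The i915_gem.c and i915_pmu.c hits just above show the conditional form of the wakeref API: intel_runtime_pm_get_if_in_use() hands back a wakeref only when the device is already awake, so sampling paths can skip the hardware instead of resuming it. A hedged sketch of that pattern, assuming the usual i915 headers are in scope; the function itself is invented for illustration, only the intel_runtime_pm calls come from the driver.

/* Sketch of the conditional-wakeref pattern seen in i915_pmu.c above. */
#include "i915_drv.h"

static bool sample_hw_if_awake(struct drm_i915_private *i915)
{
	struct intel_runtime_pm *rpm = &i915->runtime_pm;
	intel_wakeref_t wakeref;

	wakeref = intel_runtime_pm_get_if_in_use(rpm);
	if (!wakeref)
		return false;	/* runtime suspended: skip the read, do not wake the device */

	/* ... read whatever hardware counters are needed here ... */

	intel_runtime_pm_put(rpm, wakeref);
	return true;
}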
rpm 79 drivers/gpu/drm/i915/intel_runtime_pm.c static void init_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm)
rpm 81 drivers/gpu/drm/i915/intel_runtime_pm.c spin_lock_init(&rpm->debug.lock);
rpm 85 drivers/gpu/drm/i915/intel_runtime_pm.c track_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm)
rpm 90 drivers/gpu/drm/i915/intel_runtime_pm.c if (!rpm->available)
rpm 97 drivers/gpu/drm/i915/intel_runtime_pm.c spin_lock_irqsave(&rpm->debug.lock, flags);
rpm 99 drivers/gpu/drm/i915/intel_runtime_pm.c if (!rpm->debug.count)
rpm 100 drivers/gpu/drm/i915/intel_runtime_pm.c rpm->debug.last_acquire = stack;
rpm 102 drivers/gpu/drm/i915/intel_runtime_pm.c stacks = krealloc(rpm->debug.owners,
rpm 103 drivers/gpu/drm/i915/intel_runtime_pm.c (rpm->debug.count + 1) * sizeof(*stacks),
rpm 106 drivers/gpu/drm/i915/intel_runtime_pm.c stacks[rpm->debug.count++] = stack;
rpm 107 drivers/gpu/drm/i915/intel_runtime_pm.c rpm->debug.owners = stacks;
rpm 112 drivers/gpu/drm/i915/intel_runtime_pm.c spin_unlock_irqrestore(&rpm->debug.lock, flags);
rpm 117 drivers/gpu/drm/i915/intel_runtime_pm.c static void untrack_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm,
rpm 126 drivers/gpu/drm/i915/intel_runtime_pm.c spin_lock_irqsave(&rpm->debug.lock, flags);
rpm 127 drivers/gpu/drm/i915/intel_runtime_pm.c for (n = rpm->debug.count; n--; ) {
rpm 128 drivers/gpu/drm/i915/intel_runtime_pm.c if (rpm->debug.owners[n] == stack) {
rpm 129 drivers/gpu/drm/i915/intel_runtime_pm.c memmove(rpm->debug.owners + n,
rpm 130 drivers/gpu/drm/i915/intel_runtime_pm.c rpm->debug.owners + n + 1,
rpm 131 drivers/gpu/drm/i915/intel_runtime_pm.c (--rpm->debug.count - n) * sizeof(stack));
rpm 136 drivers/gpu/drm/i915/intel_runtime_pm.c spin_unlock_irqrestore(&rpm->debug.lock, flags);
rpm 140 drivers/gpu/drm/i915/intel_runtime_pm.c rpm->debug.count, atomic_read(&rpm->wakeref_count))) {
rpm 150 drivers/gpu/drm/i915/intel_runtime_pm.c stack = READ_ONCE(rpm->debug.last_release);
rpm 235 drivers/gpu/drm/i915/intel_runtime_pm.c __intel_wakeref_dec_and_check_tracking(struct intel_runtime_pm *rpm)
rpm 240 drivers/gpu/drm/i915/intel_runtime_pm.c if (!atomic_dec_and_lock_irqsave(&rpm->wakeref_count,
rpm 241 drivers/gpu/drm/i915/intel_runtime_pm.c &rpm->debug.lock,
rpm 245 drivers/gpu/drm/i915/intel_runtime_pm.c __untrack_all_wakerefs(&rpm->debug, &dbg);
rpm 246 drivers/gpu/drm/i915/intel_runtime_pm.c spin_unlock_irqrestore(&rpm->debug.lock, flags);
rpm 252 drivers/gpu/drm/i915/intel_runtime_pm.c untrack_all_intel_runtime_pm_wakerefs(struct intel_runtime_pm *rpm)
rpm 257 drivers/gpu/drm/i915/intel_runtime_pm.c spin_lock_irqsave(&rpm->debug.lock, flags);
rpm 258 drivers/gpu/drm/i915/intel_runtime_pm.c __untrack_all_wakerefs(&rpm->debug, &dbg);
rpm 259 drivers/gpu/drm/i915/intel_runtime_pm.c spin_unlock_irqrestore(&rpm->debug.lock, flags);
rpm 264 drivers/gpu/drm/i915/intel_runtime_pm.c void print_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm,
rpm 273 drivers/gpu/drm/i915/intel_runtime_pm.c spin_lock_irq(&rpm->debug.lock);
rpm 274 drivers/gpu/drm/i915/intel_runtime_pm.c dbg.count = rpm->debug.count;
rpm 277 drivers/gpu/drm/i915/intel_runtime_pm.c rpm->debug.owners,
rpm 280 drivers/gpu/drm/i915/intel_runtime_pm.c dbg.last_acquire = rpm->debug.last_acquire;
rpm 281 drivers/gpu/drm/i915/intel_runtime_pm.c dbg.last_release = rpm->debug.last_release;
rpm 282 drivers/gpu/drm/i915/intel_runtime_pm.c spin_unlock_irq(&rpm->debug.lock);
rpm 303 drivers/gpu/drm/i915/intel_runtime_pm.c static void init_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm)
rpm 308 drivers/gpu/drm/i915/intel_runtime_pm.c track_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm)
rpm 313 drivers/gpu/drm/i915/intel_runtime_pm.c static void untrack_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm,
rpm 319 drivers/gpu/drm/i915/intel_runtime_pm.c __intel_wakeref_dec_and_check_tracking(struct intel_runtime_pm *rpm)
rpm 321 drivers/gpu/drm/i915/intel_runtime_pm.c atomic_dec(&rpm->wakeref_count);
rpm 325 drivers/gpu/drm/i915/intel_runtime_pm.c untrack_all_intel_runtime_pm_wakerefs(struct intel_runtime_pm *rpm)
rpm 332 drivers/gpu/drm/i915/intel_runtime_pm.c intel_runtime_pm_acquire(struct intel_runtime_pm *rpm, bool wakelock)
rpm 335 drivers/gpu/drm/i915/intel_runtime_pm.c atomic_add(1 + INTEL_RPM_WAKELOCK_BIAS, &rpm->wakeref_count);
rpm 336 drivers/gpu/drm/i915/intel_runtime_pm.c assert_rpm_wakelock_held(rpm);
rpm 338 drivers/gpu/drm/i915/intel_runtime_pm.c atomic_inc(&rpm->wakeref_count);
rpm 339 drivers/gpu/drm/i915/intel_runtime_pm.c assert_rpm_raw_wakeref_held(rpm);
rpm 344 drivers/gpu/drm/i915/intel_runtime_pm.c intel_runtime_pm_release(struct intel_runtime_pm *rpm, int wakelock)
rpm 347 drivers/gpu/drm/i915/intel_runtime_pm.c assert_rpm_wakelock_held(rpm);
rpm 348 drivers/gpu/drm/i915/intel_runtime_pm.c atomic_sub(INTEL_RPM_WAKELOCK_BIAS, &rpm->wakeref_count);
rpm 350 drivers/gpu/drm/i915/intel_runtime_pm.c assert_rpm_raw_wakeref_held(rpm);
rpm 353 drivers/gpu/drm/i915/intel_runtime_pm.c __intel_wakeref_dec_and_check_tracking(rpm);
rpm 356 drivers/gpu/drm/i915/intel_runtime_pm.c static intel_wakeref_t __intel_runtime_pm_get(struct intel_runtime_pm *rpm,
rpm 361 drivers/gpu/drm/i915/intel_runtime_pm.c ret = pm_runtime_get_sync(rpm->kdev);
rpm 364 drivers/gpu/drm/i915/intel_runtime_pm.c intel_runtime_pm_acquire(rpm, wakelock);
rpm 366 drivers/gpu/drm/i915/intel_runtime_pm.c return track_intel_runtime_pm_wakeref(rpm);
rpm 386 drivers/gpu/drm/i915/intel_runtime_pm.c intel_wakeref_t intel_runtime_pm_get_raw(struct intel_runtime_pm *rpm)
rpm 388 drivers/gpu/drm/i915/intel_runtime_pm.c return __intel_runtime_pm_get(rpm, false);
rpm 403 drivers/gpu/drm/i915/intel_runtime_pm.c intel_wakeref_t intel_runtime_pm_get(struct intel_runtime_pm *rpm)
rpm 405 drivers/gpu/drm/i915/intel_runtime_pm.c return __intel_runtime_pm_get(rpm, true);
rpm 422 drivers/gpu/drm/i915/intel_runtime_pm.c intel_wakeref_t intel_runtime_pm_get_if_in_use(struct intel_runtime_pm *rpm)
rpm 431 drivers/gpu/drm/i915/intel_runtime_pm.c if (pm_runtime_get_if_in_use(rpm->kdev) <= 0)
rpm 435 drivers/gpu/drm/i915/intel_runtime_pm.c intel_runtime_pm_acquire(rpm, true);
rpm 437 drivers/gpu/drm/i915/intel_runtime_pm.c return track_intel_runtime_pm_wakeref(rpm);
rpm 459 drivers/gpu/drm/i915/intel_runtime_pm.c intel_wakeref_t intel_runtime_pm_get_noresume(struct intel_runtime_pm *rpm)
rpm 461 drivers/gpu/drm/i915/intel_runtime_pm.c assert_rpm_wakelock_held(rpm);
rpm 462 drivers/gpu/drm/i915/intel_runtime_pm.c pm_runtime_get_noresume(rpm->kdev);
rpm 464 drivers/gpu/drm/i915/intel_runtime_pm.c intel_runtime_pm_acquire(rpm, true);
rpm 466 drivers/gpu/drm/i915/intel_runtime_pm.c return track_intel_runtime_pm_wakeref(rpm);
rpm 469 drivers/gpu/drm/i915/intel_runtime_pm.c static void __intel_runtime_pm_put(struct intel_runtime_pm *rpm,
rpm 473 drivers/gpu/drm/i915/intel_runtime_pm.c struct device *kdev = rpm->kdev;
rpm 475 drivers/gpu/drm/i915/intel_runtime_pm.c untrack_intel_runtime_pm_wakeref(rpm, wref);
rpm 477 drivers/gpu/drm/i915/intel_runtime_pm.c intel_runtime_pm_release(rpm, wakelock);
rpm 493 drivers/gpu/drm/i915/intel_runtime_pm.c intel_runtime_pm_put_raw(struct intel_runtime_pm *rpm, intel_wakeref_t wref)
rpm 495 drivers/gpu/drm/i915/intel_runtime_pm.c __intel_runtime_pm_put(rpm, wref, false);
rpm 510 drivers/gpu/drm/i915/intel_runtime_pm.c void intel_runtime_pm_put_unchecked(struct intel_runtime_pm *rpm)
rpm 512 drivers/gpu/drm/i915/intel_runtime_pm.c __intel_runtime_pm_put(rpm, -1, true);
rpm 525 drivers/gpu/drm/i915/intel_runtime_pm.c void intel_runtime_pm_put(struct intel_runtime_pm *rpm, intel_wakeref_t wref)
rpm 527 drivers/gpu/drm/i915/intel_runtime_pm.c __intel_runtime_pm_put(rpm, wref, true);
rpm 541 drivers/gpu/drm/i915/intel_runtime_pm.c void intel_runtime_pm_enable(struct intel_runtime_pm *rpm)
rpm 543 drivers/gpu/drm/i915/intel_runtime_pm.c struct device *kdev = rpm->kdev;
rpm 564 drivers/gpu/drm/i915/intel_runtime_pm.c if (!rpm->available) {
rpm 582 drivers/gpu/drm/i915/intel_runtime_pm.c void intel_runtime_pm_disable(struct intel_runtime_pm *rpm)
rpm 584 drivers/gpu/drm/i915/intel_runtime_pm.c struct device *kdev = rpm->kdev;
rpm 592 drivers/gpu/drm/i915/intel_runtime_pm.c if (!rpm->available)
rpm 596 drivers/gpu/drm/i915/intel_runtime_pm.c void intel_runtime_pm_driver_release(struct intel_runtime_pm *rpm)
rpm 598 drivers/gpu/drm/i915/intel_runtime_pm.c int count = atomic_read(&rpm->wakeref_count);
rpm 605 drivers/gpu/drm/i915/intel_runtime_pm.c untrack_all_intel_runtime_pm_wakerefs(rpm);
rpm 608 drivers/gpu/drm/i915/intel_runtime_pm.c void intel_runtime_pm_init_early(struct intel_runtime_pm *rpm)
rpm 611 drivers/gpu/drm/i915/intel_runtime_pm.c container_of(rpm, struct drm_i915_private, runtime_pm);
rpm 615 drivers/gpu/drm/i915/intel_runtime_pm.c rpm->kdev = kdev;
rpm 616 drivers/gpu/drm/i915/intel_runtime_pm.c rpm->available = HAS_RUNTIME_PM(i915);
rpm 618 drivers/gpu/drm/i915/intel_runtime_pm.c init_intel_runtime_pm_wakeref(rpm);
rpm 96 drivers/gpu/drm/i915/intel_runtime_pm.h assert_rpm_device_not_suspended(struct intel_runtime_pm *rpm)
rpm 98 drivers/gpu/drm/i915/intel_runtime_pm.h WARN_ONCE(rpm->suspended,
rpm 103 drivers/gpu/drm/i915/intel_runtime_pm.h __assert_rpm_raw_wakeref_held(struct intel_runtime_pm *rpm, int wakeref_count)
rpm 105 drivers/gpu/drm/i915/intel_runtime_pm.h assert_rpm_device_not_suspended(rpm);
rpm 111 drivers/gpu/drm/i915/intel_runtime_pm.h __assert_rpm_wakelock_held(struct intel_runtime_pm *rpm, int wakeref_count)
rpm 113 drivers/gpu/drm/i915/intel_runtime_pm.h __assert_rpm_raw_wakeref_held(rpm, wakeref_count);
rpm 119 drivers/gpu/drm/i915/intel_runtime_pm.h assert_rpm_raw_wakeref_held(struct intel_runtime_pm *rpm)
rpm 121 drivers/gpu/drm/i915/intel_runtime_pm.h __assert_rpm_raw_wakeref_held(rpm, atomic_read(&rpm->wakeref_count));
rpm 125 drivers/gpu/drm/i915/intel_runtime_pm.h assert_rpm_wakelock_held(struct intel_runtime_pm *rpm)
rpm 127 drivers/gpu/drm/i915/intel_runtime_pm.h __assert_rpm_wakelock_held(rpm, atomic_read(&rpm->wakeref_count));
rpm 149 drivers/gpu/drm/i915/intel_runtime_pm.h disable_rpm_wakeref_asserts(struct intel_runtime_pm *rpm)
rpm 152 drivers/gpu/drm/i915/intel_runtime_pm.h &rpm->wakeref_count);
rpm 167 drivers/gpu/drm/i915/intel_runtime_pm.h enable_rpm_wakeref_asserts(struct intel_runtime_pm *rpm)
rpm 170 drivers/gpu/drm/i915/intel_runtime_pm.h &rpm->wakeref_count);
rpm 173 drivers/gpu/drm/i915/intel_runtime_pm.h void intel_runtime_pm_init_early(struct intel_runtime_pm *rpm);
rpm 174 drivers/gpu/drm/i915/intel_runtime_pm.h void intel_runtime_pm_enable(struct intel_runtime_pm *rpm);
rpm 175 drivers/gpu/drm/i915/intel_runtime_pm.h void intel_runtime_pm_disable(struct intel_runtime_pm *rpm);
rpm 176 drivers/gpu/drm/i915/intel_runtime_pm.h void intel_runtime_pm_driver_release(struct intel_runtime_pm *rpm);
rpm 178 drivers/gpu/drm/i915/intel_runtime_pm.h intel_wakeref_t intel_runtime_pm_get(struct intel_runtime_pm *rpm);
rpm 179 drivers/gpu/drm/i915/intel_runtime_pm.h intel_wakeref_t intel_runtime_pm_get_if_in_use(struct intel_runtime_pm *rpm);
rpm 180 drivers/gpu/drm/i915/intel_runtime_pm.h intel_wakeref_t intel_runtime_pm_get_noresume(struct intel_runtime_pm *rpm);
rpm 181 drivers/gpu/drm/i915/intel_runtime_pm.h intel_wakeref_t intel_runtime_pm_get_raw(struct intel_runtime_pm *rpm);
rpm 183 drivers/gpu/drm/i915/intel_runtime_pm.h #define with_intel_runtime_pm(rpm, wf) \
rpm 184 drivers/gpu/drm/i915/intel_runtime_pm.h for ((wf) = intel_runtime_pm_get(rpm); (wf); \
rpm 185 drivers/gpu/drm/i915/intel_runtime_pm.h intel_runtime_pm_put((rpm), (wf)), (wf) = 0)
rpm 187 drivers/gpu/drm/i915/intel_runtime_pm.h #define with_intel_runtime_pm_if_in_use(rpm, wf) \
rpm 188 drivers/gpu/drm/i915/intel_runtime_pm.h for ((wf) = intel_runtime_pm_get_if_in_use(rpm); (wf); \
rpm 189 drivers/gpu/drm/i915/intel_runtime_pm.h intel_runtime_pm_put((rpm), (wf)), (wf) = 0)
rpm 191 drivers/gpu/drm/i915/intel_runtime_pm.h void intel_runtime_pm_put_unchecked(struct intel_runtime_pm *rpm);
rpm 193 drivers/gpu/drm/i915/intel_runtime_pm.h void intel_runtime_pm_put(struct intel_runtime_pm *rpm, intel_wakeref_t wref);
rpm 196 drivers/gpu/drm/i915/intel_runtime_pm.h intel_runtime_pm_put(struct intel_runtime_pm *rpm, intel_wakeref_t wref)
rpm 198 drivers/gpu/drm/i915/intel_runtime_pm.h intel_runtime_pm_put_unchecked(rpm);
rpm 201 drivers/gpu/drm/i915/intel_runtime_pm.h void intel_runtime_pm_put_raw(struct intel_runtime_pm *rpm, intel_wakeref_t wref);
rpm 204 drivers/gpu/drm/i915/intel_runtime_pm.h void print_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm,
rpm 207 drivers/gpu/drm/i915/intel_runtime_pm.h static inline void print_intel_runtime_pm_wakeref(struct intel_runtime_pm *rpm,
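The intel_runtime_pm.h hits above include the with_intel_runtime_pm() and with_intel_runtime_pm_if_in_use() helpers, which wrap the get/put pair in a for-loop so the wakeref is always released when the body ends. A short usage sketch, assuming the usual i915 headers and device pointer; the function itself is invented for illustration.

/* Usage sketch for the with_intel_runtime_pm() helpers listed above. */
static void frob_device_registers(struct drm_i915_private *i915)
{
	struct intel_runtime_pm *rpm = &i915->runtime_pm;
	intel_wakeref_t wakeref;

	/* Expands to: (wf) = intel_runtime_pm_get(rpm); ...; intel_runtime_pm_put(rpm, wf) */
	with_intel_runtime_pm(rpm, wakeref) {
		/* hardware access goes here: the device is guaranteed awake */
	}

	/* Variant whose body only runs if the device is already awake. */
	with_intel_runtime_pm_if_in_use(rpm, wakeref) {
		/* optional, best-effort hardware access */
	}
}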
rpm 378 drivers/gpu/drm/i915/intel_uncore.c assert_rpm_device_not_suspended(uncore->rpm);
rpm 624 drivers/gpu/drm/i915/intel_uncore.c assert_rpm_wakelock_held(uncore->rpm);
rpm 778 drivers/gpu/drm/i915/intel_uncore.c assert_rpm_wakelock_held(uncore->rpm);
rpm 1129 drivers/gpu/drm/i915/intel_uncore.c assert_rpm_wakelock_held(uncore->rpm);
rpm 1171 drivers/gpu/drm/i915/intel_uncore.c assert_rpm_wakelock_held(uncore->rpm); \
rpm 1241 drivers/gpu/drm/i915/intel_uncore.c assert_rpm_wakelock_held(uncore->rpm); \
rpm 1279 drivers/gpu/drm/i915/intel_uncore.c assert_rpm_wakelock_held(uncore->rpm); \
rpm 1588 drivers/gpu/drm/i915/intel_uncore.c disable_rpm_wakeref_asserts(uncore->rpm);
rpm 1590 drivers/gpu/drm/i915/intel_uncore.c enable_rpm_wakeref_asserts(uncore->rpm);
rpm 1642 drivers/gpu/drm/i915/intel_uncore.c uncore->rpm = &i915->runtime_pm;
rpm 113 drivers/gpu/drm/i915/intel_uncore.h struct intel_runtime_pm *rpm;
rpm 14 drivers/gpu/drm/i915/intel_wakeref.c wf->wakeref = intel_runtime_pm_get(wf->rpm);
rpm 21 drivers/gpu/drm/i915/intel_wakeref.c intel_runtime_pm_put(wf->rpm, wakeref);
rpm 96 drivers/gpu/drm/i915/intel_wakeref.c struct intel_runtime_pm *rpm,
rpm 100 drivers/gpu/drm/i915/intel_wakeref.c wf->rpm = rpm;
rpm 128 drivers/gpu/drm/i915/intel_wakeref.c intel_runtime_pm_put(wf->rpm, wakeref);
rpm 132 drivers/gpu/drm/i915/intel_wakeref.c struct intel_runtime_pm *rpm)
rpm 138 drivers/gpu/drm/i915/intel_wakeref.c wf->rpm = rpm;
rpm 152 drivers/gpu/drm/i915/intel_wakeref.c assert_rpm_wakelock_held(wf->rpm);
rpm 158 drivers/gpu/drm/i915/intel_wakeref.c wf->wakeref = intel_runtime_pm_get_if_in_use(wf->rpm);
rpm 43 drivers/gpu/drm/i915/intel_wakeref.h struct intel_runtime_pm *rpm;
rpm 50 drivers/gpu/drm/i915/intel_wakeref.h struct intel_runtime_pm *rpm,
rpm 53 drivers/gpu/drm/i915/intel_wakeref.h #define intel_wakeref_init(wf, rpm, ops) do { \
rpm 56 drivers/gpu/drm/i915/intel_wakeref.h __intel_wakeref_init((wf), (rpm), (ops), &__key); \
rpm 191 drivers/gpu/drm/i915/intel_wakeref.h struct intel_runtime_pm *rpm;
rpm 216 drivers/gpu/drm/i915/intel_wakeref.h struct intel_runtime_pm *rpm);
rpm 264 drivers/hwmon/adt7475.c static inline u16 rpm2tach(unsigned long rpm)
rpm 266 drivers/hwmon/adt7475.c if (rpm == 0)
rpm 269 drivers/hwmon/adt7475.c return clamp_val((90000 * 60) / rpm, 1, 0xFFFF);
rpm 113 drivers/hwmon/asb100.c static u8 FAN_TO_REG(long rpm, int div)
rpm 115 drivers/hwmon/asb100.c if (rpm == -1)
rpm 117 drivers/hwmon/asb100.c if (rpm == 0)
rpm 119 drivers/hwmon/asb100.c rpm = clamp_val(rpm, 1, 1000000);
rpm 120 drivers/hwmon/asb100.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 610 drivers/hwmon/aspeed-pwm-tacho.c int rpm;
rpm 613 drivers/hwmon/aspeed-pwm-tacho.c rpm = aspeed_get_fan_tach_ch_rpm(priv, index);
rpm 614 drivers/hwmon/aspeed-pwm-tacho.c if (rpm < 0)
rpm 615 drivers/hwmon/aspeed-pwm-tacho.c return rpm;
rpm 617 drivers/hwmon/aspeed-pwm-tacho.c return sprintf(buf, "%d\n", rpm);
rpm 667 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan1_input, rpm, 0);
rpm 668 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan2_input, rpm, 1);
rpm 669 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan3_input, rpm, 2);
rpm 670 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan4_input, rpm, 3);
rpm 671 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan5_input, rpm, 4);
rpm 672 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan6_input, rpm, 5);
rpm 673 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan7_input, rpm, 6);
rpm 674 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan8_input, rpm, 7);
rpm 675 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan9_input, rpm, 8);
rpm 676 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan10_input, rpm, 9);
rpm 677 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan11_input, rpm, 10);
rpm 678 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan12_input, rpm, 11);
rpm 679 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan13_input, rpm, 12);
rpm 680 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan14_input, rpm, 13);
rpm 681 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan15_input, rpm, 14);
rpm 682 drivers/hwmon/aspeed-pwm-tacho.c static SENSOR_DEVICE_ATTR_RO(fan16_input, rpm, 15);
rpm 279 drivers/hwmon/emc2103.c int rpm = 0;
rpm 281 drivers/hwmon/emc2103.c rpm = (FAN_RPM_FACTOR * data->fan_multiplier) / data->fan_tach;
rpm 282 drivers/hwmon/emc2103.c return sprintf(buf, "%d\n", rpm);
rpm 364 drivers/hwmon/emc2103.c int rpm = 0;
rpm 368 drivers/hwmon/emc2103.c rpm = (FAN_RPM_FACTOR * data->fan_multiplier)
rpm 371 drivers/hwmon/emc2103.c return sprintf(buf, "%d\n", rpm);
rpm 263 drivers/hwmon/emc6w201.c unsigned rpm;
rpm 266 drivers/hwmon/emc6w201.c rpm = 0;
rpm 268 drivers/hwmon/emc6w201.c rpm = 5400000U / data->fan[sf][nr];
rpm 270 drivers/hwmon/emc6w201.c return sprintf(buf, "%u\n", rpm);
rpm 235 drivers/hwmon/f71805f.c static inline u16 fan_to_reg(long rpm)
rpm 242 drivers/hwmon/f71805f.c if (rpm < 367)
rpm 244 drivers/hwmon/f71805f.c return 1500000 / rpm;
rpm 246 drivers/hwmon/f75375s.c static inline u16 rpm_to_reg(int rpm)
rpm 248 drivers/hwmon/f75375s.c if (rpm < 367 || rpm > 0xffff)
rpm 250 drivers/hwmon/f75375s.c return 1500000 / rpm;
rpm 110 drivers/hwmon/g760a.c unsigned int rpm = 0;
rpm 114 drivers/hwmon/g760a.c rpm = rpm_from_cnt(data->act_cnt, data->clk, data->fan_div);
rpm 117 drivers/hwmon/g760a.c return sprintf(buf, "%d\n", rpm);
rpm 182 drivers/hwmon/g762.c static inline unsigned char cnt_from_rpm(unsigned long rpm, u32 clk_freq, u16 p,
rpm 188 drivers/hwmon/g762.c if (!rpm) /* to stop the fan, set cnt to 255 */
rpm 191 drivers/hwmon/g762.c rpm = clamp_val(rpm, f1 / (255 * f2), ULONG_MAX / f2);
rpm 192 drivers/hwmon/g762.c return DIV_ROUND_CLOSEST(f1, rpm * f2);
rpm 727 drivers/hwmon/g762.c unsigned int rpm = 0;
rpm 735 drivers/hwmon/g762.c rpm = rpm_from_cnt(data->act_cnt, data->clk_freq,
rpm 742 drivers/hwmon/g762.c return sprintf(buf, "%u\n", rpm);
rpm 931 drivers/hwmon/g762.c unsigned int rpm;
rpm 937 drivers/hwmon/g762.c rpm = rpm_from_cnt(data->set_cnt, data->clk_freq,
rpm 943 drivers/hwmon/g762.c return sprintf(buf, "%u\n", rpm);
rpm 80 drivers/hwmon/gl518sm.c static inline u8 FAN_TO_REG(long rpm, int div)
rpm 83 drivers/hwmon/gl518sm.c if (rpm == 0)
rpm 85 drivers/hwmon/gl518sm.c rpmdiv = clamp_val(rpm, 1, 960000) * div;
rpm 25 drivers/hwmon/gpio-fan.c int rpm;
rpm 153 drivers/hwmon/gpio-fan.c static int rpm_to_speed_index(struct gpio_fan_data *fan_data, unsigned long rpm)
rpm 159 drivers/hwmon/gpio-fan.c if (speed[i].rpm >= rpm)
rpm 246 drivers/hwmon/gpio-fan.c return sprintf(buf, "%d\n", fan_data->speed[0].rpm);
rpm 255 drivers/hwmon/gpio-fan.c fan_data->speed[fan_data->num_speed - 1].rpm);
rpm 263 drivers/hwmon/gpio-fan.c return sprintf(buf, "%d\n", fan_data->speed[fan_data->speed_index].rpm);
rpm 270 drivers/hwmon/gpio-fan.c unsigned long rpm;
rpm 273 drivers/hwmon/gpio-fan.c if (kstrtoul(buf, 10, &rpm))
rpm 283 drivers/hwmon/gpio-fan.c set_fan_speed(fan_data, rpm_to_speed_index(fan_data, rpm));
rpm 471 drivers/hwmon/gpio-fan.c speed[i].rpm = u;
rpm 592 drivers/hwmon/it87.c static inline u8 FAN_TO_REG(long rpm, int div)
rpm 594 drivers/hwmon/it87.c if (rpm == 0)
rpm 596 drivers/hwmon/it87.c rpm = clamp_val(rpm, 1, 1000000);
rpm 597 drivers/hwmon/it87.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 600 drivers/hwmon/it87.c static inline u16 FAN16_TO_REG(long rpm)
rpm 602 drivers/hwmon/it87.c if (rpm == 0)
rpm 604 drivers/hwmon/it87.c return clamp_val((1350000 + rpm) / (rpm * 2), 1, 0xfffe);
rpm 79 drivers/hwmon/lm78.c static inline u8 FAN_TO_REG(long rpm, int div)
rpm 81 drivers/hwmon/lm78.c if (rpm <= 0)
rpm 83 drivers/hwmon/lm78.c if (rpm > 1350000)
rpm 85 drivers/hwmon/lm78.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 65 drivers/hwmon/lm80.c static inline unsigned char FAN_TO_REG(unsigned rpm, unsigned div)
rpm 67 drivers/hwmon/lm80.c if (rpm == 0)
rpm 69 drivers/hwmon/lm80.c rpm = clamp_val(rpm, 1, 1000000);
rpm 70 drivers/hwmon/lm80.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 603 drivers/hwmon/lm93.c static u16 LM93_FAN_TO_REG(long rpm)
rpm 607 drivers/hwmon/lm93.c if (rpm == 0) {
rpm 610 drivers/hwmon/lm93.c rpm = clamp_val(rpm, 1, 1000000);
rpm 611 drivers/hwmon/lm93.c count = clamp_val((1350000 + rpm) / rpm, 1, 0x3ffe);
rpm 45 drivers/hwmon/max31790.c #define RPM_TO_REG(rpm, sr) ((60 * (sr) * 8192) / ((rpm) * 2))
rpm 142 drivers/hwmon/max31790.c static u8 bits_for_tach_period(int rpm)
rpm 146 drivers/hwmon/max31790.c if (rpm < 500)
rpm 148 drivers/hwmon/max31790.c else if (rpm < 1000)
rpm 150 drivers/hwmon/max31790.c else if (rpm < 2000)
rpm 152 drivers/hwmon/max31790.c else if (rpm < 4000)
rpm 154 drivers/hwmon/max31790.c else if (rpm < 8000)
rpm 166 drivers/hwmon/max31790.c int sr, rpm;
rpm 174 drivers/hwmon/max31790.c rpm = RPM_FROM_REG(data->tach[channel], sr);
rpm 175 drivers/hwmon/max31790.c *val = rpm;
rpm 179 drivers/hwmon/max31790.c rpm = RPM_FROM_REG(data->target_count[channel], sr);
rpm 180 drivers/hwmon/max31790.c *val = rpm;
rpm 262 drivers/hwmon/max6650.c static int max6650_set_target(struct max6650_data *data, unsigned long rpm)
rpm 266 drivers/hwmon/max6650.c if (rpm == 0)
rpm 269 drivers/hwmon/max6650.c rpm = clamp_val(rpm, FAN_RPM_MIN, FAN_RPM_MAX);
rpm 279 drivers/hwmon/max6650.c ktach = ((clock * kscale) / (256 * rpm / 60)) - 1;
rpm 318 drivers/hwmon/nct6683.c unsigned int rpm[NCT6683_NUM_REG_FAN];
rpm 629 drivers/hwmon/nct6683.c for (i = 0; i < ARRAY_SIZE(data->rpm); i++) {
rpm 633 drivers/hwmon/nct6683.c data->rpm[i] = nct6683_read16(data,
rpm 716 drivers/hwmon/nct6683.c return sprintf(buf, "%d\n", data->rpm[sattr->index]);
rpm 1147 drivers/hwmon/nct6775.c unsigned int rpm[NUM_FAN];
rpm 1795 drivers/hwmon/nct6775.c for (i = 0; i < ARRAY_SIZE(data->rpm); i++) {
rpm 1802 drivers/hwmon/nct6775.c data->rpm[i] = data->fan_from_reg(reg,
rpm 2095 drivers/hwmon/nct6775.c return sprintf(buf, "%d\n", data->rpm[nr]);
rpm 190 drivers/hwmon/nct7904.c unsigned int cnt, rpm;
rpm 201 drivers/hwmon/nct7904.c rpm = 0;
rpm 203 drivers/hwmon/nct7904.c rpm = 1350000 / cnt;
rpm 204 drivers/hwmon/nct7904.c *val = rpm;
rpm 213 drivers/hwmon/nct7904.c rpm = 0;
rpm 215 drivers/hwmon/nct7904.c rpm = 1350000 / cnt;
rpm 216 drivers/hwmon/nct7904.c *val = rpm;
rpm 208 drivers/hwmon/pmbus/max31785.c rate = pmbus_get_fan_rate_cached(client, page, 0, rpm);
rpm 309 drivers/hwmon/pmbus/pmbus_core.c rv = pmbus_get_fan_rate_device(client, page, id, rpm);
rpm 425 drivers/hwmon/pmbus/pmbus_core.c want_rpm = (mode == rpm);
rpm 32 drivers/hwmon/pwm-fan.c unsigned int rpm;
rpm 64 drivers/hwmon/pwm-fan.c ctx->rpm = tmp;
rpm 135 drivers/hwmon/pwm-fan.c return sprintf(buf, "%u\n", ctx->rpm);
rpm 139 drivers/hwmon/pwm-fan.c static SENSOR_DEVICE_ATTR_RO(fan1_input, rpm, 0);
rpm 126 drivers/hwmon/sis5595.c static inline u8 FAN_TO_REG(long rpm, int div)
rpm 128 drivers/hwmon/sis5595.c if (rpm <= 0)
rpm 130 drivers/hwmon/sis5595.c if (rpm > 1350000)
rpm 132 drivers/hwmon/sis5595.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 209 drivers/hwmon/smsc47m1.c int rpm = (data->pwm[nr] & 0x7F) == 0x00 ? 0 :
rpm 213 drivers/hwmon/smsc47m1.c return sprintf(buf, "%d\n", rpm);
rpm 222 drivers/hwmon/smsc47m1.c int rpm = MIN_FROM_REG(data->fan_preload[nr],
rpm 224 drivers/hwmon/smsc47m1.c return sprintf(buf, "%d\n", rpm);
rpm 102 drivers/hwmon/ultra45_env.c int rpm, period;
rpm 108 drivers/hwmon/ultra45_env.c rpm = FAN_PERIOD_TO_RPM(period);
rpm 110 drivers/hwmon/ultra45_env.c rpm = 0;
rpm 112 drivers/hwmon/ultra45_env.c return sprintf(buf, "%d\n", rpm);
rpm 119 drivers/hwmon/ultra45_env.c unsigned long rpm;
rpm 125 drivers/hwmon/ultra45_env.c err = kstrtoul(buf, 10, &rpm);
rpm 129 drivers/hwmon/ultra45_env.c if (!rpm)
rpm 132 drivers/hwmon/ultra45_env.c period = FAN_RPM_TO_PERIOD(rpm);
rpm 157 drivers/hwmon/via686a.c static inline u8 FAN_TO_REG(long rpm, int div)
rpm 159 drivers/hwmon/via686a.c if (rpm == 0)
rpm 161 drivers/hwmon/via686a.c rpm = clamp_val(rpm, 1, 1000000);
rpm 162 drivers/hwmon/via686a.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 255);
rpm 133 drivers/hwmon/vt8231.c static inline u8 FAN_TO_REG(long rpm, int div)
rpm 135 drivers/hwmon/vt8231.c if (rpm <= 0 || rpm > 1310720)
rpm 137 drivers/hwmon/vt8231.c return clamp_val(1310720 / (rpm * div), 1, 255);
rpm 455 drivers/hwmon/w83627ehf.c unsigned int rpm[5];
rpm 820 drivers/hwmon/w83627ehf.c data->rpm[i] = data->fan_from_reg(reg,
rpm 1037 drivers/hwmon/w83627ehf.c return sprintf(buf, "%d\n", data->rpm[nr]);
rpm 253 drivers/hwmon/w83627hf.c static inline u8 FAN_TO_REG(long rpm, int div)
rpm 255 drivers/hwmon/w83627hf.c if (rpm == 0)
rpm 257 drivers/hwmon/w83627hf.c rpm = clamp_val(rpm, 1, 1000000);
rpm 258 drivers/hwmon/w83627hf.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 153 drivers/hwmon/w83781d.c FAN_TO_REG(long rpm, int div)
rpm 155 drivers/hwmon/w83781d.c if (rpm == 0)
rpm 157 drivers/hwmon/w83781d.c rpm = clamp_val(rpm, 1, 1000000);
rpm 158 drivers/hwmon/w83781d.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 213 drivers/hwmon/w83791d.c static u8 fan_to_reg(long rpm, int div)
rpm 215 drivers/hwmon/w83791d.c if (rpm == 0)
rpm 217 drivers/hwmon/w83791d.c rpm = clamp_val(rpm, 1, 1000000);
rpm 218 drivers/hwmon/w83791d.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 221 drivers/hwmon/w83792d.c FAN_TO_REG(long rpm, int div)
rpm 223 drivers/hwmon/w83792d.c if (rpm == 0)
rpm 225 drivers/hwmon/w83792d.c rpm = clamp_val(rpm, 1, 1000000);
rpm 226 drivers/hwmon/w83792d.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
rpm 177 drivers/hwmon/w83793.c static inline u16 FAN_TO_REG(long rpm)
rpm 179 drivers/hwmon/w83793.c if (rpm <= 0)
rpm 181 drivers/hwmon/w83793.c return clamp_val((1350000 + (rpm >> 1)) / rpm, 1, 0xffe);
rpm 249 drivers/hwmon/w83795.c static inline u16 fan_to_reg(long rpm)
rpm 251 drivers/hwmon/w83795.c if (rpm <= 0)
rpm 253 drivers/hwmon/w83795.c return clamp_val((1350000 + (rpm >> 1)) / rpm, 1, 0xffe);
rpm 72 drivers/hwmon/w83l786ng.c FAN_TO_REG(long rpm, int div)
rpm 74 drivers/hwmon/w83l786ng.c if (rpm == 0)
rpm 76 drivers/hwmon/w83l786ng.c rpm = clamp_val(rpm, 1, 1000000);
rpm 77 drivers/hwmon/w83l786ng.c return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
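Most of the FAN_TO_REG() helpers in the hwmon hits above share one conversion: the register holds 1350000 / (rpm * div) clamped to 1..254, with 255 returned when the requested RPM is zero (a few chips, such as vt8231 and the 16-bit variants, deviate slightly). A standalone sketch of that round trip, written as plain user-space C so the arithmetic can be compiled and checked directly; it illustrates the shared formula rather than any single driver.

/* Sketch of the shared fan tachometer conversion: reg = 1350000 / (rpm * div). */
#include <stdio.h>

static long clamp_val(long val, long lo, long hi)
{
	return val < lo ? lo : (val > hi ? hi : val);
}

static unsigned char fan_to_reg(long rpm, int div)
{
	if (rpm <= 0)
		return 255;			/* same convention as the drivers above */
	rpm = clamp_val(rpm, 1, 1000000);
	return clamp_val((1350000 + rpm * div / 2) / (rpm * div), 1, 254);
}

static long fan_from_reg(unsigned char reg, int div)
{
	if (reg == 0 || reg == 255)
		return 0;			/* 0 and 255 are treated as special values */
	return 1350000 / ((long)reg * div);
}

int main(void)
{
	int div = 2;
	long rpm = 3000;
	unsigned char reg = fan_to_reg(rpm, div);

	/* Round trip: 3000 RPM -> register 225 -> 3000 RPM. */
	printf("%ld RPM -> reg %u -> %ld RPM\n", rpm, reg, fan_from_reg(reg, div));
	return 0;
}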
rpm 322 drivers/ide/ide-floppy.c u16 transfer_rate, sector_size, cyls, rpm;
rpm 344 drivers/ide/ide-floppy.c rpm = be16_to_cpup((__be16 *)&buf[8 + 28]);
rpm 354 drivers/ide/ide-floppy.c sectors, transfer_rate / 8, sector_size, rpm);
rpm 58 drivers/mfd/qcom_rpm.c #define RPM_STATUS_REG(rpm, i) ((rpm)->status_regs + (i) * 4)
rpm 59 drivers/mfd/qcom_rpm.c #define RPM_CTRL_REG(rpm, i) ((rpm)->ctrl_regs + (i) * 4)
rpm 60 drivers/mfd/qcom_rpm.c #define RPM_REQ_REG(rpm, i) ((rpm)->req_regs + (i) * 4)
rpm 445 drivers/mfd/qcom_rpm.c int qcom_rpm_write(struct qcom_rpm *rpm,
rpm 451 drivers/mfd/qcom_rpm.c const struct qcom_rpm_data *data = rpm->data;
rpm 464 drivers/mfd/qcom_rpm.c mutex_lock(&rpm->lock);
rpm 467 drivers/mfd/qcom_rpm.c writel_relaxed(buf[i], RPM_REQ_REG(rpm, res->target_id + i));
rpm 470 drivers/mfd/qcom_rpm.c for (i = 0; i < rpm->data->req_sel_size; i++) {
rpm 472 drivers/mfd/qcom_rpm.c RPM_CTRL_REG(rpm, rpm->data->req_sel_off + i));
rpm 475 drivers/mfd/qcom_rpm.c writel_relaxed(BIT(state), RPM_CTRL_REG(rpm, rpm->data->req_ctx_off));
rpm 477 drivers/mfd/qcom_rpm.c reinit_completion(&rpm->ack);
rpm 478 drivers/mfd/qcom_rpm.c regmap_write(rpm->ipc_regmap, rpm->ipc_offset, BIT(rpm->ipc_bit));
rpm 480 drivers/mfd/qcom_rpm.c left = wait_for_completion_timeout(&rpm->ack, RPM_REQUEST_TIMEOUT);
rpm 483 drivers/mfd/qcom_rpm.c else if (rpm->ack_status & RPM_REJECTED)
rpm 486 drivers/mfd/qcom_rpm.c mutex_unlock(&rpm->lock);
rpm 494 drivers/mfd/qcom_rpm.c struct qcom_rpm *rpm = dev;
rpm 498 drivers/mfd/qcom_rpm.c ack = readl_relaxed(RPM_CTRL_REG(rpm, rpm->data->ack_ctx_off));
rpm 499 drivers/mfd/qcom_rpm.c for (i = 0; i < rpm->data->ack_sel_size; i++)
rpm 501 drivers/mfd/qcom_rpm.c RPM_CTRL_REG(rpm, rpm->data->ack_sel_off + i));
rpm 502 drivers/mfd/qcom_rpm.c writel(0, RPM_CTRL_REG(rpm, rpm->data->ack_ctx_off));
rpm 505 drivers/mfd/qcom_rpm.c dev_warn(rpm->dev, "ignoring notification!\n");
rpm 507 drivers/mfd/qcom_rpm.c rpm->ack_status = ack;
rpm 508 drivers/mfd/qcom_rpm.c complete(&rpm->ack);
rpm 516 drivers/mfd/qcom_rpm.c struct qcom_rpm *rpm = dev;
rpm 518 drivers/mfd/qcom_rpm.c regmap_write(rpm->ipc_regmap, rpm->ipc_offset, BIT(rpm->ipc_bit));
rpm 519 drivers/mfd/qcom_rpm.c dev_err(rpm->dev, "RPM triggered fatal error\n");
rpm 534 drivers/mfd/qcom_rpm.c struct qcom_rpm *rpm;
rpm 541 drivers/mfd/qcom_rpm.c rpm = devm_kzalloc(&pdev->dev, sizeof(*rpm), GFP_KERNEL);
rpm 542 drivers/mfd/qcom_rpm.c if (!rpm)
rpm 545 drivers/mfd/qcom_rpm.c rpm->dev = &pdev->dev;
rpm 546 drivers/mfd/qcom_rpm.c mutex_init(&rpm->lock);
rpm 547 drivers/mfd/qcom_rpm.c init_completion(&rpm->ack);
rpm 550 drivers/mfd/qcom_rpm.c rpm->ramclk = devm_clk_get(&pdev->dev, "ram");
rpm 551 drivers/mfd/qcom_rpm.c if (IS_ERR(rpm->ramclk)) {
rpm 552 drivers/mfd/qcom_rpm.c ret = PTR_ERR(rpm->ramclk);
rpm 559 drivers/mfd/qcom_rpm.c rpm->ramclk = NULL;
rpm 561 drivers/mfd/qcom_rpm.c clk_prepare_enable(rpm->ramclk); /* Accepts NULL */
rpm 578 drivers/mfd/qcom_rpm.c rpm->data = match->data;
rpm 581 drivers/mfd/qcom_rpm.c rpm->status_regs = devm_ioremap_resource(&pdev->dev, res);
rpm 582 drivers/mfd/qcom_rpm.c if (IS_ERR(rpm->status_regs))
rpm 583 drivers/mfd/qcom_rpm.c return PTR_ERR(rpm->status_regs);
rpm 584 drivers/mfd/qcom_rpm.c rpm->ctrl_regs = rpm->status_regs + 0x400;
rpm 585 drivers/mfd/qcom_rpm.c rpm->req_regs = rpm->status_regs + 0x600;
rpm 593 drivers/mfd/qcom_rpm.c rpm->ipc_regmap = syscon_node_to_regmap(syscon_np);
rpm 595 drivers/mfd/qcom_rpm.c if (IS_ERR(rpm->ipc_regmap))
rpm 596 drivers/mfd/qcom_rpm.c return PTR_ERR(rpm->ipc_regmap);
rpm 599 drivers/mfd/qcom_rpm.c &rpm->ipc_offset);
rpm 606 drivers/mfd/qcom_rpm.c &rpm->ipc_bit);
rpm 612 drivers/mfd/qcom_rpm.c dev_set_drvdata(&pdev->dev, rpm);
rpm 614 drivers/mfd/qcom_rpm.c fw_version[0] = readl(RPM_STATUS_REG(rpm, 0));
rpm 615 drivers/mfd/qcom_rpm.c fw_version[1] = readl(RPM_STATUS_REG(rpm, 1));
rpm 616 drivers/mfd/qcom_rpm.c fw_version[2] = readl(RPM_STATUS_REG(rpm, 2));
rpm 617 drivers/mfd/qcom_rpm.c if (fw_version[0] != rpm->data->version) {
rpm 623 drivers/mfd/qcom_rpm.c rpm->data->version);
rpm 627 drivers/mfd/qcom_rpm.c writel(fw_version[0], RPM_CTRL_REG(rpm, 0));
rpm 628 drivers/mfd/qcom_rpm.c writel(fw_version[1], RPM_CTRL_REG(rpm, 1));
rpm 629 drivers/mfd/qcom_rpm.c writel(fw_version[2], RPM_CTRL_REG(rpm, 2));
rpm 640 drivers/mfd/qcom_rpm.c rpm);
rpm 655 drivers/mfd/qcom_rpm.c rpm);
rpm 666 drivers/mfd/qcom_rpm.c rpm);
rpm 681 drivers/mfd/qcom_rpm.c struct qcom_rpm *rpm = dev_get_drvdata(&pdev->dev);
rpm 684 drivers/mfd/qcom_rpm.c clk_disable_unprepare(rpm->ramclk);
rpm 8071 drivers/net/ethernet/mellanox/mlxsw/reg.h MLXSW_ITEM32(reg, mfsm, rpm, 0x04, 0, 16);
rpm 48 drivers/regulator/qcom_rpm-regulator.c struct qcom_rpm *rpm;
rpm 199 drivers/regulator/qcom_rpm-regulator.c return qcom_rpm_write(vreg->rpm,
rpm 945 drivers/regulator/qcom_rpm-regulator.c struct qcom_rpm *rpm;
rpm 947 drivers/regulator/qcom_rpm-regulator.c rpm = dev_get_drvdata(pdev->dev.parent);
rpm 948 drivers/regulator/qcom_rpm-regulator.c if (!rpm) {
rpm 969 drivers/regulator/qcom_rpm-regulator.c vreg->rpm = rpm;
rpm 17 drivers/regulator/qcom_smd-regulator.c struct qcom_smd_rpm *rpm;
rpm 73 drivers/regulator/qcom_smd-regulator.c ret = qcom_rpm_smd_write(vreg->rpm, QCOM_SMD_RPM_ACTIVE_STATE,
rpm 786 drivers/regulator/qcom_smd-regulator.c struct qcom_smd_rpm *rpm;
rpm 788 drivers/regulator/qcom_smd-regulator.c rpm = dev_get_drvdata(pdev->dev.parent);
rpm 789 drivers/regulator/qcom_smd-regulator.c if (!rpm) {
rpm 808 drivers/regulator/qcom_smd-regulator.c vreg->rpm = rpm;
rpm 352 drivers/scsi/aacraid/aacraid.h u32 rpm; /* Drive rotational speed in rpm */
rpm 782 drivers/scsi/hpsa_cmd.h __le32 rpm; /* Drive rotational speed in rpm */
rpm 1274 drivers/scsi/smartpqi/smartpqi.h __le32 rpm; /* drive rotational speed in RPM */
rpm 37 drivers/scsi/ufs/ufs-sysfs.c bool rpm)
rpm 49 drivers/scsi/ufs/ufs-sysfs.c if (rpm)
rpm 129 drivers/soc/fsl/dpio/qbman-portal.c static inline u32 qbman_set_swp_cfg(u8 max_fill, u8 wn, u8 est, u8 rpm, u8 dcm,
rpm 136 drivers/soc/fsl/dpio/qbman-portal.c rpm << SWP_CFG_RPM_SHIFT |
rpm 105 drivers/soc/qcom/rpmpd.c struct qcom_smd_rpm *rpm;
rpm 215 drivers/soc/qcom/rpmpd.c return qcom_rpm_smd_write(pd->rpm, QCOM_SMD_RPM_ACTIVE_STATE,
rpm 227 drivers/soc/qcom/rpmpd.c return qcom_rpm_smd_write(pd->rpm, state, pd->res_type, pd->res_id,
rpm 342 drivers/soc/qcom/rpmpd.c struct qcom_smd_rpm *rpm;
rpm 346 drivers/soc/qcom/rpmpd.c rpm = dev_get_drvdata(pdev->dev.parent);
rpm 347 drivers/soc/qcom/rpmpd.c if (!rpm) {
rpm 374 drivers/soc/qcom/rpmpd.c rpmpds[i]->rpm = rpm;
rpm 92 drivers/soc/qcom/smd-rpm.c int qcom_rpm_smd_write(struct qcom_smd_rpm *rpm,
rpm 116 drivers/soc/qcom/smd-rpm.c mutex_lock(&rpm->lock);
rpm 128 drivers/soc/qcom/smd-rpm.c ret = rpmsg_send(rpm->rpm_channel, pkt, size);
rpm 132 drivers/soc/qcom/smd-rpm.c left = wait_for_completion_timeout(&rpm->ack, RPM_REQUEST_TIMEOUT);
rpm 136 drivers/soc/qcom/smd-rpm.c ret = rpm->ack_status;
rpm 140 drivers/soc/qcom/smd-rpm.c mutex_unlock(&rpm->lock);
rpm 154 drivers/soc/qcom/smd-rpm.c struct qcom_smd_rpm *rpm = dev_get_drvdata(&rpdev->dev);
rpm 163 drivers/soc/qcom/smd-rpm.c dev_err(rpm->dev, "invalid request\n");
rpm 188 drivers/soc/qcom/smd-rpm.c rpm->ack_status = status;
rpm 189 drivers/soc/qcom/smd-rpm.c complete(&rpm->ack);
rpm 195 drivers/soc/qcom/smd-rpm.c struct qcom_smd_rpm *rpm;
rpm 197 drivers/soc/qcom/smd-rpm.c rpm = devm_kzalloc(&rpdev->dev, sizeof(*rpm), GFP_KERNEL);
rpm 198 drivers/soc/qcom/smd-rpm.c if (!rpm)
rpm 201 drivers/soc/qcom/smd-rpm.c mutex_init(&rpm->lock);
rpm 202 drivers/soc/qcom/smd-rpm.c init_completion(&rpm->ack);
rpm 204 drivers/soc/qcom/smd-rpm.c rpm->dev = &rpdev->dev;
rpm 205 drivers/soc/qcom/smd-rpm.c rpm->rpm_channel = rpdev->ept;
rpm 206 drivers/soc/qcom/smd-rpm.c dev_set_drvdata(&rpdev->dev, rpm);
rpm 80 drivers/thunderbolt/icm.c bool rpm;
rpm 90 drivers/thunderbolt/icm.c size_t *nboot_acl, bool *rpm);
rpm 416 drivers/thunderbolt/icm.c size_t *nboot_acl, bool *rpm)
rpm 599 drivers/thunderbolt/icm.c sw->rpm = !!(vss->flags & INTEL_VSS_FLAGS_RTD3);
rpm 962 drivers/thunderbolt/icm.c size_t *nboot_acl, bool *rpm)
rpm 981 drivers/thunderbolt/icm.c if (rpm)
rpm 982 drivers/thunderbolt/icm.c *rpm = !!(reply.hdr.flags & ICM_TR_FLAGS_RTD3);
rpm 1212 drivers/thunderbolt/icm.c sw->rpm = true;
rpm 1405 drivers/thunderbolt/icm.c size_t *nboot_acl, bool *rpm)
rpm 1424 drivers/thunderbolt/icm.c if (rpm)
rpm 1425 drivers/thunderbolt/icm.c *rpm = !!(reply.hdr.flags & ICM_AR_FLAGS_RTD3);
rpm 1535 drivers/thunderbolt/icm.c size_t *nboot_acl, bool *rpm)
rpm 1550 drivers/thunderbolt/icm.c if (rpm)
rpm 1551 drivers/thunderbolt/icm.c *rpm = true;
rpm 1645 drivers/thunderbolt/icm.c size_t *nboot_acl, bool *rpm)
rpm 1651 drivers/thunderbolt/icm.c ret = icm->driver_ready(tb, security_level, nboot_acl, rpm);
rpm 1863 drivers/thunderbolt/icm.c &icm->rpm);
rpm 2046 drivers/thunderbolt/icm.c tb->root_switch->rpm = icm->rpm;
rpm 1861 drivers/thunderbolt/switch.c if (sw->rpm) {
rpm 1884 drivers/thunderbolt/switch.c if (sw->rpm) {
rpm 109 drivers/thunderbolt/tb.h bool rpm;
rpm 12 include/linux/mfd/qcom_rpm.h int qcom_rpm_write(struct qcom_rpm *rpm, int state, int resource, u32 *buf, size_t count);
rpm 1688 include/linux/platform_data/cros_ec_commands.h uint32_t rpm;
rpm 1696 include/linux/platform_data/cros_ec_commands.h uint32_t rpm;
rpm 1701 include/linux/platform_data/cros_ec_commands.h uint32_t rpm;
rpm 36 include/linux/soc/qcom/smd-rpm.h int qcom_rpm_smd_write(struct qcom_smd_rpm *rpm,
rpm 4 include/trace/events/rpm.h #define TRACE_SYSTEM rpm
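The smd-rpm.c hits above implement qcom_rpm_smd_write(), and the rpmpd.c, qcom_smd-regulator.c and clk-smd-rpm.c call sites pass it a (handle, state, resource type, resource id, buffer, length) argument list. A hedged usage sketch of that call shape: the resource type/id values and the bare u32 payload are placeholders for illustration (real callers typically send a small key/value request structure), and the trailing parameters are inferred from the call sites listed in this index, since the declaration hit above is truncated.

#include <linux/platform_device.h>
#include <linux/soc/qcom/smd-rpm.h>

#define MY_RES_TYPE	0x1234	/* hypothetical resource type, illustration only */
#define MY_RES_ID	0	/* hypothetical resource id, illustration only */

static int my_send_level(struct platform_device *pdev, u32 level)
{
	/* The smd-rpm parent publishes its handle via drvdata (see rpmpd.c above). */
	struct qcom_smd_rpm *rpm = dev_get_drvdata(pdev->dev.parent);

	if (!rpm)
		return -ENODEV;

	/* One u32 of payload against the active state, mirroring the rpmpd.c hits. */
	return qcom_rpm_smd_write(rpm, QCOM_SMD_RPM_ACTIVE_STATE,
				  MY_RES_TYPE, MY_RES_ID,
				  &level, sizeof(level));
}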