wf               4775 arch/x86/kvm/mmu.c 		u8 wf = (pfec & PFERR_WRITE_MASK) ? (u8)~w : 0;
wf               4795 arch/x86/kvm/mmu.c 				wf = (pfec & PFERR_USER_MASK) ? wf : 0;
wf               4821 arch/x86/kvm/mmu.c 		mmu->permissions[byte] = ff | uf | wf | smepf | smapf;
wf               4870 arch/x86/kvm/mmu.c 		bool check_pkey, check_write, ff, uf, wf, pte_user;
wf               4875 arch/x86/kvm/mmu.c 		wf = pfec & PFERR_WRITE_MASK;
wf               4889 arch/x86/kvm/mmu.c 		check_write = check_pkey && wf && (uf || wp);
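The mmu.c hits above compute per-access-type fault masks from the page-fault error code. A minimal stand-alone sketch of the wf line, assuming only that the write-fault bit of the error code is bit 1 (PFERR_WRITE_MASK) and using hypothetical permission values:

#include <stdint.h>
#include <stdio.h>

#define PFERR_WRITE_MASK (1u << 1)	/* assumption: write fault is bit 1 of the error code */

int main(void)
{
	uint32_t pfec = PFERR_WRITE_MASK;	/* hypothetical error code: the access was a write */
	uint8_t w = 0x0f;			/* hypothetical: pte-access combos 0-3 grant write */

	/*
	 * wf gets a 1 bit for every pte-access combination that would fault:
	 * zero unless the access was a write, and then exactly the
	 * combinations that lack write permission (~w).
	 */
	uint8_t wf = (pfec & PFERR_WRITE_MASK) ? (uint8_t)~w : 0;

	printf("wf = 0x%02x\n", wf);	/* prints 0xf0 for the values above */
	return 0;
}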
wf               10211 drivers/gpu/drm/i915/display/intel_display.c 	intel_wakeref_t wf;
wf               10284 drivers/gpu/drm/i915/display/intel_display.c 	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
wf               10285 drivers/gpu/drm/i915/display/intel_display.c 	if (!wf)
wf               10288 drivers/gpu/drm/i915/display/intel_display.c 	wakerefs[power_domain] = wf;
wf               10305 drivers/gpu/drm/i915/display/intel_display.c 	intel_wakeref_t wf;
wf               10318 drivers/gpu/drm/i915/display/intel_display.c 		wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
wf               10319 drivers/gpu/drm/i915/display/intel_display.c 		if (!wf)
wf               10322 drivers/gpu/drm/i915/display/intel_display.c 		wakerefs[power_domain] = wf;
wf               10404 drivers/gpu/drm/i915/display/intel_display.c 	intel_wakeref_t wakerefs[POWER_DOMAIN_NUM], wf;
wf               10412 drivers/gpu/drm/i915/display/intel_display.c 	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
wf               10413 drivers/gpu/drm/i915/display/intel_display.c 	if (!wf)
wf               10416 drivers/gpu/drm/i915/display/intel_display.c 	wakerefs[power_domain] = wf;
wf               10464 drivers/gpu/drm/i915/display/intel_display.c 	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
wf               10465 drivers/gpu/drm/i915/display/intel_display.c 	if (wf) {
wf               10466 drivers/gpu/drm/i915/display/intel_display.c 		wakerefs[power_domain] = wf;
wf                319 drivers/gpu/drm/i915/display/intel_display_power.h #define with_intel_display_power(i915, domain, wf) \
wf                320 drivers/gpu/drm/i915/display/intel_display_power.h 	for ((wf) = intel_display_power_get((i915), (domain)); (wf); \
wf                321 drivers/gpu/drm/i915/display/intel_display_power.h 	     intel_display_power_put_async((i915), (domain), (wf)), (wf) = 0)
wf                720 drivers/gpu/drm/i915/display/intel_dp.c #define with_pps_lock(dp, wf) \
wf                721 drivers/gpu/drm/i915/display/intel_dp.c 	for ((wf) = pps_lock(dp); (wf); (wf) = pps_unlock((dp), (wf)))
wf                 15 drivers/gpu/drm/i915/gt/intel_engine_pm.c static int __engine_unpark(struct intel_wakeref *wf)
wf                 18 drivers/gpu/drm/i915/gt/intel_engine_pm.c 		container_of(wf, typeof(*engine), wakeref);
wf                126 drivers/gpu/drm/i915/gt/intel_engine_pm.c static int __engine_park(struct intel_wakeref *wf)
wf                129 drivers/gpu/drm/i915/gt/intel_engine_pm.c 		container_of(wf, typeof(*engine), wakeref);
wf                 20 drivers/gpu/drm/i915/gt/intel_gt_pm.c static int __gt_unpark(struct intel_wakeref *wf)
wf                 22 drivers/gpu/drm/i915/gt/intel_gt_pm.c 	struct intel_gt *gt = container_of(wf, typeof(*gt), wakeref);
wf                 59 drivers/gpu/drm/i915/gt/intel_gt_pm.c static int __gt_park(struct intel_wakeref *wf)
wf                 62 drivers/gpu/drm/i915/gt/intel_gt_pm.c 		container_of(wf, typeof(*i915), gt.wakeref);
wf                183 drivers/gpu/drm/i915/intel_runtime_pm.h #define with_intel_runtime_pm(rpm, wf) \
wf                184 drivers/gpu/drm/i915/intel_runtime_pm.h 	for ((wf) = intel_runtime_pm_get(rpm); (wf); \
wf                185 drivers/gpu/drm/i915/intel_runtime_pm.h 	     intel_runtime_pm_put((rpm), (wf)), (wf) = 0)
wf                187 drivers/gpu/drm/i915/intel_runtime_pm.h #define with_intel_runtime_pm_if_in_use(rpm, wf) \
wf                188 drivers/gpu/drm/i915/intel_runtime_pm.h 	for ((wf) = intel_runtime_pm_get_if_in_use(rpm); (wf); \
wf                189 drivers/gpu/drm/i915/intel_runtime_pm.h 	     intel_runtime_pm_put((rpm), (wf)), (wf) = 0)
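The with_intel_runtime_pm() macro above (like with_intel_display_power() and with_pps_lock() earlier in this list) wraps wakeref acquire/release in a for-loop scope. A hedged usage sketch, assuming the i915 driver build context where struct drm_i915_private carries a runtime_pm member and an uncore; example_read_register() is a hypothetical caller:

/* Sketch only: assumes the i915 driver build environment. */
#include "intel_runtime_pm.h"

static u32 example_read_register(struct drm_i915_private *i915, i915_reg_t reg)
{
	intel_wakeref_t wf;
	u32 val = 0;

	/*
	 * The device is held awake for exactly the body of the hidden
	 * for-loop; intel_runtime_pm_put() runs when the block is left.
	 */
	with_intel_runtime_pm(&i915->runtime_pm, wf) {
		val = intel_uncore_read(&i915->uncore, reg);
	}

	return val;
}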
wf                 12 drivers/gpu/drm/i915/intel_wakeref.c static void rpm_get(struct intel_wakeref *wf)
wf                 14 drivers/gpu/drm/i915/intel_wakeref.c 	wf->wakeref = intel_runtime_pm_get(wf->rpm);
wf                 17 drivers/gpu/drm/i915/intel_wakeref.c static void rpm_put(struct intel_wakeref *wf)
wf                 19 drivers/gpu/drm/i915/intel_wakeref.c 	intel_wakeref_t wakeref = fetch_and_zero(&wf->wakeref);
wf                 21 drivers/gpu/drm/i915/intel_wakeref.c 	intel_runtime_pm_put(wf->rpm, wakeref);
wf                 25 drivers/gpu/drm/i915/intel_wakeref.c int __intel_wakeref_get_first(struct intel_wakeref *wf)
wf                 33 drivers/gpu/drm/i915/intel_wakeref.c 	mutex_lock_nested(&wf->mutex, SINGLE_DEPTH_NESTING);
wf                 34 drivers/gpu/drm/i915/intel_wakeref.c 	if (!atomic_read(&wf->count)) {
wf                 37 drivers/gpu/drm/i915/intel_wakeref.c 		rpm_get(wf);
wf                 39 drivers/gpu/drm/i915/intel_wakeref.c 		err = wf->ops->get(wf);
wf                 41 drivers/gpu/drm/i915/intel_wakeref.c 			rpm_put(wf);
wf                 42 drivers/gpu/drm/i915/intel_wakeref.c 			mutex_unlock(&wf->mutex);
wf                 48 drivers/gpu/drm/i915/intel_wakeref.c 	atomic_inc(&wf->count);
wf                 49 drivers/gpu/drm/i915/intel_wakeref.c 	mutex_unlock(&wf->mutex);
wf                 51 drivers/gpu/drm/i915/intel_wakeref.c 	INTEL_WAKEREF_BUG_ON(atomic_read(&wf->count) <= 0);
wf                 55 drivers/gpu/drm/i915/intel_wakeref.c static void ____intel_wakeref_put_last(struct intel_wakeref *wf)
wf                 57 drivers/gpu/drm/i915/intel_wakeref.c 	if (!atomic_dec_and_test(&wf->count))
wf                 61 drivers/gpu/drm/i915/intel_wakeref.c 	if (likely(!wf->ops->put(wf))) {
wf                 62 drivers/gpu/drm/i915/intel_wakeref.c 		rpm_put(wf);
wf                 63 drivers/gpu/drm/i915/intel_wakeref.c 		wake_up_var(&wf->wakeref);
wf                 67 drivers/gpu/drm/i915/intel_wakeref.c 	mutex_unlock(&wf->mutex);
wf                 70 drivers/gpu/drm/i915/intel_wakeref.c void __intel_wakeref_put_last(struct intel_wakeref *wf)
wf                 72 drivers/gpu/drm/i915/intel_wakeref.c 	INTEL_WAKEREF_BUG_ON(work_pending(&wf->work));
wf                 75 drivers/gpu/drm/i915/intel_wakeref.c 	if (wf->ops->flags & INTEL_WAKEREF_PUT_ASYNC ||
wf                 76 drivers/gpu/drm/i915/intel_wakeref.c 	    !mutex_trylock(&wf->mutex)) {
wf                 77 drivers/gpu/drm/i915/intel_wakeref.c 		schedule_work(&wf->work);
wf                 81 drivers/gpu/drm/i915/intel_wakeref.c 	____intel_wakeref_put_last(wf);
wf                 86 drivers/gpu/drm/i915/intel_wakeref.c 	struct intel_wakeref *wf = container_of(wrk, typeof(*wf), work);
wf                 88 drivers/gpu/drm/i915/intel_wakeref.c 	if (atomic_add_unless(&wf->count, -1, 1))
wf                 91 drivers/gpu/drm/i915/intel_wakeref.c 	mutex_lock(&wf->mutex);
wf                 92 drivers/gpu/drm/i915/intel_wakeref.c 	____intel_wakeref_put_last(wf);
wf                 95 drivers/gpu/drm/i915/intel_wakeref.c void __intel_wakeref_init(struct intel_wakeref *wf,
wf                100 drivers/gpu/drm/i915/intel_wakeref.c 	wf->rpm = rpm;
wf                101 drivers/gpu/drm/i915/intel_wakeref.c 	wf->ops = ops;
wf                103 drivers/gpu/drm/i915/intel_wakeref.c 	__mutex_init(&wf->mutex, "wakeref", key);
wf                104 drivers/gpu/drm/i915/intel_wakeref.c 	atomic_set(&wf->count, 0);
wf                105 drivers/gpu/drm/i915/intel_wakeref.c 	wf->wakeref = 0;
wf                107 drivers/gpu/drm/i915/intel_wakeref.c 	INIT_WORK(&wf->work, __intel_wakeref_put_work);
wf                110 drivers/gpu/drm/i915/intel_wakeref.c int intel_wakeref_wait_for_idle(struct intel_wakeref *wf)
wf                112 drivers/gpu/drm/i915/intel_wakeref.c 	return wait_var_event_killable(&wf->wakeref,
wf                113 drivers/gpu/drm/i915/intel_wakeref.c 				       !intel_wakeref_is_active(wf));
wf                118 drivers/gpu/drm/i915/intel_wakeref.c 	struct intel_wakeref_auto *wf = from_timer(wf, t, timer);
wf                122 drivers/gpu/drm/i915/intel_wakeref.c 	if (!refcount_dec_and_lock_irqsave(&wf->count, &wf->lock, &flags))
wf                125 drivers/gpu/drm/i915/intel_wakeref.c 	wakeref = fetch_and_zero(&wf->wakeref);
wf                126 drivers/gpu/drm/i915/intel_wakeref.c 	spin_unlock_irqrestore(&wf->lock, flags);
wf                128 drivers/gpu/drm/i915/intel_wakeref.c 	intel_runtime_pm_put(wf->rpm, wakeref);
wf                131 drivers/gpu/drm/i915/intel_wakeref.c void intel_wakeref_auto_init(struct intel_wakeref_auto *wf,
wf                134 drivers/gpu/drm/i915/intel_wakeref.c 	spin_lock_init(&wf->lock);
wf                135 drivers/gpu/drm/i915/intel_wakeref.c 	timer_setup(&wf->timer, wakeref_auto_timeout, 0);
wf                136 drivers/gpu/drm/i915/intel_wakeref.c 	refcount_set(&wf->count, 0);
wf                137 drivers/gpu/drm/i915/intel_wakeref.c 	wf->wakeref = 0;
wf                138 drivers/gpu/drm/i915/intel_wakeref.c 	wf->rpm = rpm;
wf                141 drivers/gpu/drm/i915/intel_wakeref.c void intel_wakeref_auto(struct intel_wakeref_auto *wf, unsigned long timeout)
wf                146 drivers/gpu/drm/i915/intel_wakeref.c 		if (del_timer_sync(&wf->timer))
wf                147 drivers/gpu/drm/i915/intel_wakeref.c 			wakeref_auto_timeout(&wf->timer);
wf                152 drivers/gpu/drm/i915/intel_wakeref.c 	assert_rpm_wakelock_held(wf->rpm);
wf                154 drivers/gpu/drm/i915/intel_wakeref.c 	if (!refcount_inc_not_zero(&wf->count)) {
wf                155 drivers/gpu/drm/i915/intel_wakeref.c 		spin_lock_irqsave(&wf->lock, flags);
wf                156 drivers/gpu/drm/i915/intel_wakeref.c 		if (!refcount_inc_not_zero(&wf->count)) {
wf                157 drivers/gpu/drm/i915/intel_wakeref.c 			INTEL_WAKEREF_BUG_ON(wf->wakeref);
wf                158 drivers/gpu/drm/i915/intel_wakeref.c 			wf->wakeref = intel_runtime_pm_get_if_in_use(wf->rpm);
wf                159 drivers/gpu/drm/i915/intel_wakeref.c 			refcount_set(&wf->count, 1);
wf                161 drivers/gpu/drm/i915/intel_wakeref.c 		spin_unlock_irqrestore(&wf->lock, flags);
wf                169 drivers/gpu/drm/i915/intel_wakeref.c 	if (mod_timer(&wf->timer, jiffies + timeout))
wf                170 drivers/gpu/drm/i915/intel_wakeref.c 		wakeref_auto_timeout(&wf->timer);
wf                173 drivers/gpu/drm/i915/intel_wakeref.c void intel_wakeref_auto_fini(struct intel_wakeref_auto *wf)
wf                175 drivers/gpu/drm/i915/intel_wakeref.c 	intel_wakeref_auto(wf, 0);
wf                176 drivers/gpu/drm/i915/intel_wakeref.c 	INTEL_WAKEREF_BUG_ON(wf->wakeref);
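The wakeref_auto_timeout()/intel_wakeref_auto() hits above implement a wakeref that is held for at least a caller-chosen timeout and then dropped from a timer. A hedged sketch of the intended call pattern, built only from the functions listed here; struct example_owner and its field name are placeholders:

/* Sketch only: assumes i915 driver context. */
#include "intel_wakeref.h"

struct example_owner {
	struct intel_wakeref_auto userfault_wakeref;	/* hypothetical field */
};

static void example_init(struct example_owner *owner, struct intel_runtime_pm *rpm)
{
	intel_wakeref_auto_init(&owner->userfault_wakeref, rpm);
}

static void example_touch(struct example_owner *owner)
{
	/*
	 * Keep the device awake for at least another second; refreshing the
	 * timer on every call coalesces bursts of activity into one wakeref.
	 */
	intel_wakeref_auto(&owner->userfault_wakeref, HZ);
}

static void example_fini(struct example_owner *owner)
{
	/* A timeout of 0 flushes the timer and drops any pending wakeref. */
	intel_wakeref_auto_fini(&owner->userfault_wakeref);
}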
wf                 30 drivers/gpu/drm/i915/intel_wakeref.h 	int (*get)(struct intel_wakeref *wf);
wf                 31 drivers/gpu/drm/i915/intel_wakeref.h 	int (*put)(struct intel_wakeref *wf);
wf                 49 drivers/gpu/drm/i915/intel_wakeref.h void __intel_wakeref_init(struct intel_wakeref *wf,
wf                 53 drivers/gpu/drm/i915/intel_wakeref.h #define intel_wakeref_init(wf, rpm, ops) do {				\
wf                 56 drivers/gpu/drm/i915/intel_wakeref.h 	__intel_wakeref_init((wf), (rpm), (ops), &__key);		\
wf                 59 drivers/gpu/drm/i915/intel_wakeref.h int __intel_wakeref_get_first(struct intel_wakeref *wf);
wf                 60 drivers/gpu/drm/i915/intel_wakeref.h void __intel_wakeref_put_last(struct intel_wakeref *wf);
wf                 79 drivers/gpu/drm/i915/intel_wakeref.h intel_wakeref_get(struct intel_wakeref *wf)
wf                 81 drivers/gpu/drm/i915/intel_wakeref.h 	if (unlikely(!atomic_inc_not_zero(&wf->count)))
wf                 82 drivers/gpu/drm/i915/intel_wakeref.h 		return __intel_wakeref_get_first(wf);
wf                 97 drivers/gpu/drm/i915/intel_wakeref.h intel_wakeref_get_if_active(struct intel_wakeref *wf)
wf                 99 drivers/gpu/drm/i915/intel_wakeref.h 	return atomic_inc_not_zero(&wf->count);
wf                119 drivers/gpu/drm/i915/intel_wakeref.h intel_wakeref_put(struct intel_wakeref *wf)
wf                121 drivers/gpu/drm/i915/intel_wakeref.h 	INTEL_WAKEREF_BUG_ON(atomic_read(&wf->count) <= 0);
wf                122 drivers/gpu/drm/i915/intel_wakeref.h 	if (unlikely(!atomic_add_unless(&wf->count, -1, 1)))
wf                123 drivers/gpu/drm/i915/intel_wakeref.h 		__intel_wakeref_put_last(wf);
wf                135 drivers/gpu/drm/i915/intel_wakeref.h intel_wakeref_lock(struct intel_wakeref *wf)
wf                136 drivers/gpu/drm/i915/intel_wakeref.h 	__acquires(wf->mutex)
wf                138 drivers/gpu/drm/i915/intel_wakeref.h 	mutex_lock(&wf->mutex);
wf                148 drivers/gpu/drm/i915/intel_wakeref.h intel_wakeref_unlock(struct intel_wakeref *wf)
wf                149 drivers/gpu/drm/i915/intel_wakeref.h 	__releases(wf->mutex)
wf                151 drivers/gpu/drm/i915/intel_wakeref.h 	mutex_unlock(&wf->mutex);
wf                161 drivers/gpu/drm/i915/intel_wakeref.h intel_wakeref_is_active(const struct intel_wakeref *wf)
wf                163 drivers/gpu/drm/i915/intel_wakeref.h 	return READ_ONCE(wf->wakeref);
wf                171 drivers/gpu/drm/i915/intel_wakeref.h __intel_wakeref_defer_park(struct intel_wakeref *wf)
wf                173 drivers/gpu/drm/i915/intel_wakeref.h 	INTEL_WAKEREF_BUG_ON(atomic_read(&wf->count));
wf                174 drivers/gpu/drm/i915/intel_wakeref.h 	atomic_set_release(&wf->count, 1);
wf                188 drivers/gpu/drm/i915/intel_wakeref.h int intel_wakeref_wait_for_idle(struct intel_wakeref *wf);
wf                213 drivers/gpu/drm/i915/intel_wakeref.h void intel_wakeref_auto(struct intel_wakeref_auto *wf, unsigned long timeout);
wf                215 drivers/gpu/drm/i915/intel_wakeref.h void intel_wakeref_auto_init(struct intel_wakeref_auto *wf,
wf                217 drivers/gpu/drm/i915/intel_wakeref.h void intel_wakeref_auto_fini(struct intel_wakeref_auto *wf);
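The intel_wakeref.h entries above are the reference-counted side of the API: a provider supplies get/put callbacks (compare __engine_unpark()/__engine_park() and __gt_unpark()/__gt_park() earlier in this list), and consumers bracket activity with intel_wakeref_get()/intel_wakeref_put(). A hedged sketch of that wiring; struct example_unit and the callback bodies are placeholders, and the ops type name (struct intel_wakeref_ops) is assumed from the same header:

/* Sketch only: assumes i915 driver context. */
#include "intel_wakeref.h"

struct example_unit {
	struct intel_wakeref wakeref;
};

static int example_unpark(struct intel_wakeref *wf)	/* runs on the 0 -> 1 transition */
{
	struct example_unit *unit = container_of(wf, typeof(*unit), wakeref);

	/* power up, restart timers, etc.; the runtime-pm ref is already held */
	return 0;
}

static int example_park(struct intel_wakeref *wf)	/* runs on the 1 -> 0 transition */
{
	struct example_unit *unit = container_of(wf, typeof(*unit), wakeref);

	/* quiesce and power down; returning 0 lets the runtime-pm ref be dropped */
	return 0;
}

static const struct intel_wakeref_ops example_ops = {
	.get = example_unpark,
	.put = example_park,
};

static void example_setup(struct example_unit *unit, struct intel_runtime_pm *rpm)
{
	intel_wakeref_init(&unit->wakeref, rpm, &example_ops);
}

static int example_use(struct example_unit *unit)
{
	int err;

	err = intel_wakeref_get(&unit->wakeref);	/* may call .get if currently idle */
	if (err)
		return err;

	/* ... do work while the unit is guaranteed awake ... */

	intel_wakeref_put(&unit->wakeref);		/* may call .put if this was the last ref */
	return 0;
}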