Searched refs:static_key_false (Results 1 – 24 of 24) sorted by relevance
123 static __always_inline bool static_key_false(struct static_key *key) in static_key_false() function
130 return !static_key_false(key); in static_key_true()
162 static __always_inline bool static_key_false(struct static_key *key) in static_key_false() function
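The two jump_label.h definitions above are the arch-patched variant and the generic fallback. A minimal sketch of the fallback, assuming only that struct static_key keeps an atomic 'enabled' count (the patched variant instead emits a no-op via arch_static_branch() that is rewritten into a jump when the key is enabled):

/* Sketch of the non-jump-label fallback: a plain unlikely() test. */
static __always_inline bool static_key_false(struct static_key *key)
{
	if (unlikely(atomic_read(&key->enabled) > 0))
		return true;
	return false;
}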
154 if (static_key_false(&__tracepoint_##name.key)) \
181 if (static_key_false(&__tracepoint_##name.key)) \
213 return static_key_false(&__tracepoint_##name.key); \
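The tracepoint.h hits show each tracepoint gating its probe calls behind its own key. A simplified sketch of what the generated trace_<name>() helper does (not the real macro body; my_tp and do_trace_probes() are hypothetical names):

/* Illustration only: probes run only while the tracepoint's key is enabled. */
static inline void trace_my_tp(int arg)
{
	if (static_key_false(&__tracepoint_my_tp.key))
		do_trace_probes(arg);	/* hypothetical out-of-line probe walk */
}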
28 return static_key_false(&context_tracking_enabled); in context_tracking_is_enabled()
773 if (static_key_false(&perf_swevent_enabled[event_id])) in perf_sw_event()
787 if (static_key_false(&perf_swevent_enabled[event_id])) { in perf_sw_event_sched()
800 if (static_key_false(&perf_sched_events.key)) in perf_event_task_sched_in()
809 if (static_key_false(&perf_sched_events.key)) in perf_event_task_sched_out()
22 return static_key_false(&cpusets_enabled_key); in cpusets_enabled()
415 return static_key_false(&memcg_kmem_enabled_key); in memcg_kmem_enabled()
138 return static_key_false(&nf_hooks_needed[pf][hook]); in nf_hooks_active()
107 if (static_key_false(&have_mvcos)) in __copy_from_user()
180 if (static_key_false(&have_mvcos)) in __copy_to_user()
243 if (static_key_false(&have_mvcos)) in __copy_in_user()
315 if (static_key_false(&have_mvcos)) in __clear_user()
108 if (static_key_false(&kvm_no_apic_vcpu)) in kvm_vcpu_has_lapic()
117 if (static_key_false(&apic_hw_disabled.key)) in kvm_apic_hw_enabled()
126 if (static_key_false(&apic_sw_disabled.key)) in kvm_apic_sw_enabled()
249 if (static_key_false((&mmu_audit_key))) in kvm_mmu_audit()
16 if (static_key_false(&key))
27 The static_key_false() branch will be generated into the code with as little
59 if (static_key_false(&key))
94 if (static_key_false(&key))
107 'static_key_false()' construct. Likewise, a key initialized via
130 if (static_key_false(&__tracepoint_##name.key)) \
176 + if (static_key_false(&key))
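The Documentation hits above describe the usage model. A minimal sketch of that model under the same API shown in these results (struct static_key, STATIC_KEY_INIT_FALSE, static_key_slow_inc/dec); my_key, my_hot_path() and the enable/disable helpers are hypothetical names:

#include <linux/jump_label.h>

static struct static_key my_key = STATIC_KEY_INIT_FALSE;

void my_hot_path(void)
{
	/* Compiles to a no-op in the fast path; patched to a jump when enabled. */
	if (static_key_false(&my_key)) {
		/* rarely taken slow path */
	}
	/* default fast path continues here */
}

/* Slow-path configuration code flips the key. */
void my_feature_enable(void)
{
	static_key_slow_inc(&my_key);
}

void my_feature_disable(void)
{
	static_key_slow_dec(&my_key);
}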
43 static __always_inline bool static_key_false(struct static_key *key);
145 static_key_false(&paravirt_ticketlocks_enabled)) { in arch_spin_unlock()
26 if (static_key_false(&rdpmc_always_available) || in load_mm_cr4()
30 if (static_key_false(&crct10dif_fallback)) in crc_t10dif()
85 return static_key_false(&__sched_clock_stable); in sched_clock_stable()
260 if (static_key_false(&paravirt_steal_enabled)) { in steal_account_process_tick()
3396 return static_key_false(&__cfs_bandwidth_used); in cfs_bandwidth_used()
869 if (static_key_false((&paravirt_steal_rq_enabled))) { in update_rq_clock_task()
288 if (!static_key_false(&__use_tsc)) { in native_sched_clock()
631 if (static_key_false(&udpv6_encap_needed) && up->encap_type) { in udpv6_queue_rcv_skb()
1407 if (static_key_false(&udpv6_encap_needed) && up->encap_type) { in udpv6_destroy_sock()
752 return static_key_false(&memalloc_socks); in sk_memalloc_socks()
1123 #define mem_cgroup_sockets_enabled static_key_false(&memcg_socket_limit_enabled)
1684 if (static_key_false(&netstamp_needed)) in net_timestamp_set()
1689 if (static_key_false(&netstamp_needed)) { \
3387 if (static_key_false(&rps_needed)) { in netif_rx_internal()
3710 if (static_key_false(&ingress_needed)) { in __netif_receive_skb_core()
3837 if (static_key_false(&rps_needed)) { in netif_receive_skb_internal()
2024 if (static_key_false(&i2c_trace_msg)) { in __i2c_transfer()
2043 if (static_key_false(&i2c_trace_msg)) { in __i2c_transfer()
1507 if (static_key_false(&udp_encap_needed) && up->encap_type) { in udp_queue_rcv_skb()
2026 if (static_key_false(&udp_encap_needed) && up->encap_type) { in udp_destroy_sock()