__always_inline   105 arch/alpha/include/asm/xchg.h static __always_inline unsigned long
__always_inline   228 arch/alpha/include/asm/xchg.h static __always_inline unsigned long
__always_inline    26 arch/arm/include/asm/arch_timer.h static __always_inline
__always_inline    52 arch/arm/include/asm/arch_timer.h static __always_inline
__always_inline     5 arch/arm/include/asm/bitrev.h static __always_inline __attribute_const__ u32 __arch_bitrev32(u32 x)
__always_inline    11 arch/arm/include/asm/bitrev.h static __always_inline __attribute_const__ u16 __arch_bitrev16(u16 x)
__always_inline    16 arch/arm/include/asm/bitrev.h static __always_inline __attribute_const__ u8 __arch_bitrev8(u8 x)
__always_inline    85 arch/arm/include/asm/domain.h static __always_inline unsigned int get_domain(void)
__always_inline    97 arch/arm/include/asm/domain.h static __always_inline void set_domain(unsigned int val)
__always_inline   105 arch/arm/include/asm/domain.h static __always_inline unsigned int get_domain(void)
__always_inline   110 arch/arm/include/asm/domain.h static __always_inline void set_domain(unsigned int val)
__always_inline    12 arch/arm/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
__always_inline    26 arch/arm/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
__always_inline   423 arch/arm/include/asm/kvm_mmu.h static __always_inline u64 kvm_get_vttbr(struct kvm *kvm)
__always_inline    31 arch/arm/include/asm/stackprotector.h static __always_inline void boot_init_stack_canary(void)
__always_inline    18 arch/arm/include/asm/stacktrace.h static __always_inline
__always_inline    25 arch/arm/include/asm/uaccess.h static __always_inline unsigned int uaccess_save_and_enable(void)
__always_inline    40 arch/arm/include/asm/uaccess.h static __always_inline void uaccess_restore(unsigned int flags)
__always_inline   103 arch/arm64/include/asm/arch_timer.h static __always_inline
__always_inline   129 arch/arm64/include/asm/arch_timer.h static __always_inline
__always_inline   186 arch/arm64/include/asm/arch_timer.h static __always_inline u64 __arch_counter_get_cntpct_stable(void)
__always_inline   196 arch/arm64/include/asm/arch_timer.h static __always_inline u64 __arch_counter_get_cntpct(void)
__always_inline   206 arch/arm64/include/asm/arch_timer.h static __always_inline u64 __arch_counter_get_cntvct_stable(void)
__always_inline   216 arch/arm64/include/asm/arch_timer.h static __always_inline u64 __arch_counter_get_cntvct(void)
__always_inline   341 arch/arm64/include/asm/atomic_lse.h static __always_inline u##sz						\
__always_inline   384 arch/arm64/include/asm/atomic_lse.h static __always_inline long						\
__always_inline     4 arch/arm64/include/asm/bitrev.h static __always_inline __attribute_const__ u32 __arch_bitrev32(u32 x)
__always_inline    10 arch/arm64/include/asm/bitrev.h static __always_inline __attribute_const__ u16 __arch_bitrev16(u16 x)
__always_inline    15 arch/arm64/include/asm/bitrev.h static __always_inline __attribute_const__ u8 __arch_bitrev8(u8 x)
__always_inline    65 arch/arm64/include/asm/cmpxchg.h static __always_inline  unsigned long __xchg##sfx(unsigned long x,	\
__always_inline   151 arch/arm64/include/asm/cmpxchg.h static __always_inline unsigned long __cmpxchg##sfx(volatile void *ptr,	\
__always_inline   258 arch/arm64/include/asm/cmpxchg.h static __always_inline void __cmpwait##sfx(volatile void *ptr,		\
__always_inline   394 arch/arm64/include/asm/cpufeature.h static __always_inline bool __cpus_have_const_cap(int num)
__always_inline   408 arch/arm64/include/asm/cpufeature.h static __always_inline bool cpus_have_const_cap(int num)
__always_inline    15 arch/arm64/include/asm/current.h static __always_inline struct task_struct *get_current(void)
__always_inline   260 arch/arm64/include/asm/insn.h static __always_inline bool aarch64_insn_is_##abbr(u32 code)		\
__always_inline   265 arch/arm64/include/asm/insn.h static __always_inline u32 aarch64_insn_get_##abbr##_value(void)	\
__always_inline    18 arch/arm64/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key,
__always_inline    35 arch/arm64/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key,
__always_inline    88 arch/arm64/include/asm/kvm_hyp.h static __always_inline void __hyp_text __load_guest_stage2(struct kvm *kvm)
__always_inline   595 arch/arm64/include/asm/kvm_mmu.h static __always_inline u64 kvm_get_vttbr(struct kvm *kvm)
__always_inline    27 arch/arm64/include/asm/stackprotector.h static __always_inline void boot_init_stack_canary(void)
__always_inline    21 arch/arm64/include/asm/vdso/compat_gettimeofday.h static __always_inline
__always_inline    39 arch/arm64/include/asm/vdso/compat_gettimeofday.h static __always_inline
__always_inline    56 arch/arm64/include/asm/vdso/compat_gettimeofday.h static __always_inline
__always_inline    73 arch/arm64/include/asm/vdso/compat_gettimeofday.h static __always_inline
__always_inline    94 arch/arm64/include/asm/vdso/compat_gettimeofday.h static __always_inline
__always_inline   115 arch/arm64/include/asm/vdso/compat_gettimeofday.h static __always_inline u64 __arch_get_hw_counter(s32 clock_mode)
__always_inline   141 arch/arm64/include/asm/vdso/compat_gettimeofday.h static __always_inline const struct vdso_data *__arch_get_vdso_data(void)
__always_inline    17 arch/arm64/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline    35 arch/arm64/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline    52 arch/arm64/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline    69 arch/arm64/include/asm/vdso/gettimeofday.h static __always_inline u64 __arch_get_hw_counter(s32 clock_mode)
__always_inline    95 arch/arm64/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline    17 arch/arm64/include/asm/vdso/vsyscall.h static __always_inline
__always_inline    24 arch/arm64/include/asm/vdso/vsyscall.h static __always_inline
__always_inline    33 arch/arm64/include/asm/vdso/vsyscall.h static __always_inline
__always_inline    30 arch/csky/include/asm/bitops.h static __always_inline unsigned long __ffs(unsigned long x)
__always_inline    43 arch/csky/include/asm/bitops.h static __always_inline int fls(unsigned int x)
__always_inline    56 arch/csky/include/asm/bitops.h static __always_inline unsigned long __fls(unsigned long x)
__always_inline    43 arch/ia64/include/asm/spinlock.h static __always_inline void __ticket_spin_lock(arch_spinlock_t *lock)
__always_inline    63 arch/ia64/include/asm/spinlock.h static __always_inline int __ticket_spin_trylock(arch_spinlock_t *lock)
__always_inline    72 arch/ia64/include/asm/spinlock.h static __always_inline void __ticket_spin_unlock(arch_spinlock_t *lock)
__always_inline    96 arch/ia64/include/asm/spinlock.h static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)
__always_inline   112 arch/ia64/include/asm/spinlock.h static __always_inline void arch_spin_lock(arch_spinlock_t *lock)
__always_inline   117 arch/ia64/include/asm/spinlock.h static __always_inline int arch_spin_trylock(arch_spinlock_t *lock)
__always_inline   122 arch/ia64/include/asm/spinlock.h static __always_inline void arch_spin_unlock(arch_spinlock_t *lock)
__always_inline   127 arch/ia64/include/asm/spinlock.h static __always_inline void arch_spin_lock_flags(arch_spinlock_t *lock,
__always_inline   136 arch/ia64/include/asm/spinlock.h static __always_inline void
__always_inline   188 arch/ia64/include/asm/spinlock.h static __always_inline void
__always_inline   234 arch/m68k/include/asm/uaccess_mm.h static __always_inline unsigned long
__always_inline   315 arch/m68k/include/asm/uaccess_mm.h static __always_inline unsigned long
__always_inline   498 arch/mips/include/asm/bitops.h static __always_inline unsigned long __fls(unsigned long word)
__always_inline   564 arch/mips/include/asm/bitops.h static __always_inline unsigned long __ffs(unsigned long word)
__always_inline     7 arch/mips/include/asm/bitrev.h static __always_inline __attribute_const__ u32 __arch_bitrev32(u32 x)
__always_inline    15 arch/mips/include/asm/bitrev.h static __always_inline __attribute_const__ u16 __arch_bitrev16(u16 x)
__always_inline    23 arch/mips/include/asm/bitrev.h static __always_inline __attribute_const__ u8 __arch_bitrev8(u8 x)
__always_inline    80 arch/mips/include/asm/cmpxchg.h static __always_inline
__always_inline   156 arch/mips/include/asm/cmpxchg.h static __always_inline
__always_inline    22 arch/mips/include/asm/ginvt.h static __always_inline void ginvt(unsigned long addr, enum ginvt_type type)
__always_inline    35 arch/mips/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
__always_inline    49 arch/mips/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
__always_inline    29 arch/mips/include/asm/stackprotector.h static __always_inline void boot_init_stack_canary(void)
__always_inline    34 arch/mips/include/asm/stacktrace.h static __always_inline void prepare_frametrace(struct pt_regs *regs)
__always_inline    29 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline long gettimeofday_fallback(
__always_inline    49 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline long clock_gettime_fallback(
__always_inline    73 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline int clock_getres_fallback(
__always_inline   101 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline long clock_gettime32_fallback(
__always_inline   121 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline int clock_getres32_fallback(
__always_inline   144 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline u64 read_r4k_count(void)
__always_inline   162 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline u64 read_gic_count(const struct vdso_data *data)
__always_inline   178 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline u64 __arch_get_hw_counter(s32 clock_mode)
__always_inline   204 arch/mips/include/asm/vdso/gettimeofday.h static __always_inline const struct vdso_data *__arch_get_vdso_data(void)
__always_inline    15 arch/mips/include/asm/vdso/vsyscall.h static __always_inline
__always_inline    22 arch/mips/include/asm/vdso/vsyscall.h static __always_inline
__always_inline    27 arch/mips/kernel/cpu-bugs64.c static __always_inline __init
__always_inline    42 arch/mips/kernel/cpu-bugs64.c static __always_inline __init
__always_inline   118 arch/mips/kernel/cpu-bugs64.c static __always_inline __init void check_mult_sh(void)
__always_inline    61 arch/openrisc/include/asm/fixmap.h static __always_inline unsigned long fix_to_virt(const unsigned int idx)
__always_inline   117 arch/parisc/include/asm/hash.h static __always_inline u32 __attribute_const__
__always_inline    12 arch/parisc/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
__always_inline    27 arch/parisc/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
__always_inline   105 arch/powerpc/include/asm/book3s/32/kup.h static __always_inline void allow_user_access(void __user *to, const void __user *from,
__always_inline   124 arch/powerpc/include/asm/book3s/32/kup.h static __always_inline void prevent_user_access(void __user *to, const void __user *from,
__always_inline    80 arch/powerpc/include/asm/book3s/64/kup-radix.h static __always_inline void allow_user_access(void __user *to, const void __user *from,
__always_inline    88 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   105 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   123 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   140 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   158 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   177 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   222 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   245 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   267 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   295 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   318 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   340 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   361 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   380 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   402 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   422 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   442 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline   462 arch/powerpc/include/asm/cmpxchg.h static __always_inline unsigned long
__always_inline    23 arch/powerpc/include/asm/cpu_has_feature.h static __always_inline bool cpu_has_feature(unsigned long feature)
__always_inline    18 arch/powerpc/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
__always_inline    32 arch/powerpc/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
__always_inline   196 arch/powerpc/include/asm/mmu.h static __always_inline bool mmu_has_feature(unsigned long feature)
__always_inline    51 arch/powerpc/include/asm/spinlock.h static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)
__always_inline    22 arch/powerpc/include/asm/stackprotector.h static __always_inline void boot_init_stack_canary(void)
__always_inline   390 arch/powerpc/include/asm/uaccess.h static __always_inline unsigned long __must_check
__always_inline    59 arch/powerpc/mm/book3s64/hash_native.c static __always_inline void tlbiel_hash_set_isa300(unsigned int set, unsigned int is,
__always_inline    28 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void tlbiel_radix_set_isa300(unsigned int set, unsigned int is,
__always_inline    93 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbiel_pid(unsigned long pid, int set,
__always_inline   109 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbie_pid(unsigned long pid, unsigned long ric)
__always_inline   123 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbie_lpid(unsigned long lpid, unsigned long ric)
__always_inline   137 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbie_lpid_guest(unsigned long lpid, unsigned long ric)
__always_inline   151 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbiel_va(unsigned long va, unsigned long pid,
__always_inline   167 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbie_va(unsigned long va, unsigned long pid,
__always_inline   183 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbie_lpid_va(unsigned long va, unsigned long lpid,
__always_inline   284 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void _tlbiel_pid(unsigned long pid, unsigned long ric)
__always_inline   393 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void _tlbie_lpid_guest(unsigned long lpid, unsigned long ric)
__always_inline   426 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void _tlbiel_va(unsigned long va, unsigned long pid,
__always_inline   460 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void _tlbie_va(unsigned long va, unsigned long pid,
__always_inline   518 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void _tlbie_lpid_va(unsigned long va, unsigned long lpid,
__always_inline  1073 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __radix__flush_tlb_range_psize(struct mm_struct *mm,
__always_inline    30 arch/riscv/include/asm/atomic.h static __always_inline int atomic_read(const atomic_t *v)
__always_inline    34 arch/riscv/include/asm/atomic.h static __always_inline void atomic_set(atomic_t *v, int i)
__always_inline    41 arch/riscv/include/asm/atomic.h static __always_inline s64 atomic64_read(const atomic64_t *v)
__always_inline    45 arch/riscv/include/asm/atomic.h static __always_inline void atomic64_set(atomic64_t *v, s64 i)
__always_inline    57 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline    91 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   103 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   116 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   122 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   202 arch/riscv/include/asm/atomic.h static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
__always_inline   222 arch/riscv/include/asm/atomic.h static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
__always_inline   248 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   253 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   258 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   263 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   268 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   274 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   280 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   286 arch/riscv/include/asm/atomic.h static __always_inline							\
__always_inline   315 arch/riscv/include/asm/atomic.h static __always_inline int atomic_sub_if_positive(atomic_t *v, int offset)
__always_inline   336 arch/riscv/include/asm/atomic.h static __always_inline s64 atomic64_sub_if_positive(atomic64_t *v, s64 offset)
__always_inline    27 arch/riscv/include/asm/current.h static __always_inline struct task_struct *get_current(void)
__always_inline    44 arch/s390/include/asm/atomic_ops.h static __always_inline void op_name(op_type val, op_type *ptr)		\
__always_inline    59 arch/s390/include/asm/bitops.h static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
__always_inline    80 arch/s390/include/asm/bitops.h static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
__always_inline   101 arch/s390/include/asm/bitops.h static __always_inline void arch_change_bit(unsigned long nr,
__always_inline   174 arch/s390/include/asm/cpacf.h static __always_inline void __cpacf_query(unsigned int opcode, cpacf_mask_t *mask)
__always_inline   189 arch/s390/include/asm/cpacf.h static __always_inline int __cpacf_check_opcode(unsigned int opcode)
__always_inline   214 arch/s390/include/asm/cpacf.h static __always_inline int cpacf_query(unsigned int opcode, cpacf_mask_t *mask)
__always_inline   229 arch/s390/include/asm/cpacf.h static __always_inline int cpacf_query_func(unsigned int opcode, unsigned int func)
__always_inline   227 arch/s390/include/asm/cpu_mf.h static __always_inline int stcctm(enum stcctm_ctr_set set, u64 range, u64 *dest)
__always_inline    58 arch/s390/include/asm/ctl_reg.h static __always_inline void __ctl_set_bit(unsigned int cr, unsigned int bit)
__always_inline    67 arch/s390/include/asm/ctl_reg.h static __always_inline void __ctl_clear_bit(unsigned int cr, unsigned int bit)
__always_inline    78 arch/s390/include/asm/irq.h static __always_inline void inc_irq_stat(enum interruption_class irq)
__always_inline    23 arch/s390/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
__always_inline    37 arch/s390/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
__always_inline  1006 arch/s390/include/asm/pgtable.h static __always_inline void __ptep_ipte(unsigned long address, pte_t *ptep,
__always_inline  1029 arch/s390/include/asm/pgtable.h static __always_inline void __ptep_ipte_range(unsigned long address, int nr,
__always_inline  1445 arch/s390/include/asm/pgtable.h static __always_inline void __pmdp_idte(unsigned long addr, pmd_t *pmdp,
__always_inline  1471 arch/s390/include/asm/pgtable.h static __always_inline void __pudp_idte(unsigned long addr, pud_t *pudp,
__always_inline    86 arch/s390/include/asm/uaccess.h static __always_inline int __put_user_fn(void *x, void __user *ptr, unsigned long size)
__always_inline   116 arch/s390/include/asm/uaccess.h static __always_inline int __get_user_fn(void *x, const void __user *ptr, unsigned long size)
__always_inline   335 arch/s390/kvm/kvm-s390.c static __always_inline void __insn32_query(unsigned int opcode, u8 *query)
__always_inline    69 arch/s390/pci/pci_clp.c static __always_inline int clp_req(void *data, unsigned int lps)
__always_inline   203 arch/sh/include/asm/dwarf.h static __always_inline unsigned long dwarf_read_arch_reg(unsigned int reg)
__always_inline    16 arch/sh/include/asm/stackprotector.h static __always_inline void boot_init_stack_canary(void)
__always_inline   113 arch/sh/include/asm/uaccess.h static __always_inline unsigned long
__always_inline   119 arch/sh/include/asm/uaccess.h static __always_inline unsigned long __must_check
__always_inline    35 arch/sh/include/asm/unaligned-sh4a.h static __always_inline u32 sh4a_get_unaligned_cpu32(const u8 *p)
__always_inline    72 arch/sh/mm/pmb.c static __always_inline unsigned long mk_pmb_entry(unsigned int entry)
__always_inline    77 arch/sh/mm/pmb.c static __always_inline unsigned long mk_pmb_addr(unsigned int entry)
__always_inline    82 arch/sh/mm/pmb.c static __always_inline unsigned long mk_pmb_data(unsigned int entry)
__always_inline    87 arch/sh/mm/pmb.c static __always_inline unsigned int pmb_ppn_in_range(unsigned long ppn)
__always_inline    99 arch/sh/mm/pmb.c static __always_inline unsigned long pmb_cache_flags(void)
__always_inline    11 arch/sparc/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
__always_inline    27 arch/sparc/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
__always_inline    51 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline struct vvar_data *get_vvar_data(void)
__always_inline    89 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline u64 vread_tick(void)
__always_inline    97 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline u64 vread_tick_stick(void)
__always_inline   105 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline u64 vread_tick(void)
__always_inline   115 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline u64 vread_tick_stick(void)
__always_inline   126 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline u64 vgetsns(struct vvar_data *vvar)
__always_inline   136 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline u64 vgetsns_stick(struct vvar_data *vvar)
__always_inline   146 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline int do_realtime(struct vvar_data *vvar,
__always_inline   166 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline int do_realtime_stick(struct vvar_data *vvar,
__always_inline   186 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline int do_monotonic(struct vvar_data *vvar,
__always_inline   206 arch/sparc/vdso/vclock_gettime.c notrace static __always_inline int do_monotonic_stick(struct vvar_data *vvar,
__always_inline   311 arch/x86/entry/common.c static __always_inline void do_syscall_32_irqs_on(struct pt_regs *regs)
__always_inline    15 arch/x86/include/asm/arch_hweight.h static __always_inline unsigned int __arch_hweight32(unsigned int w)
__always_inline    43 arch/x86/include/asm/arch_hweight.h static __always_inline unsigned long __arch_hweight64(__u64 w)
__always_inline    25 arch/x86/include/asm/atomic.h static __always_inline int arch_atomic_read(const atomic_t *v)
__always_inline    41 arch/x86/include/asm/atomic.h static __always_inline void arch_atomic_set(atomic_t *v, int i)
__always_inline    53 arch/x86/include/asm/atomic.h static __always_inline void arch_atomic_add(int i, atomic_t *v)
__always_inline    67 arch/x86/include/asm/atomic.h static __always_inline void arch_atomic_sub(int i, atomic_t *v)
__always_inline    83 arch/x86/include/asm/atomic.h static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
__always_inline    95 arch/x86/include/asm/atomic.h static __always_inline void arch_atomic_inc(atomic_t *v)
__always_inline   108 arch/x86/include/asm/atomic.h static __always_inline void arch_atomic_dec(atomic_t *v)
__always_inline   123 arch/x86/include/asm/atomic.h static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
__always_inline   137 arch/x86/include/asm/atomic.h static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
__always_inline   152 arch/x86/include/asm/atomic.h static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v)
__always_inline   165 arch/x86/include/asm/atomic.h static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
__always_inline   177 arch/x86/include/asm/atomic.h static __always_inline int arch_atomic_sub_return(int i, atomic_t *v)
__always_inline   182 arch/x86/include/asm/atomic.h static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
__always_inline   187 arch/x86/include/asm/atomic.h static __always_inline int arch_atomic_fetch_sub(int i, atomic_t *v)
__always_inline   192 arch/x86/include/asm/atomic.h static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
__always_inline   198 arch/x86/include/asm/atomic.h static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
__always_inline    44 arch/x86/include/asm/atomic64_64.h static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
__always_inline    86 arch/x86/include/asm/atomic64_64.h static __always_inline void arch_atomic64_inc(atomic64_t *v)
__always_inline   100 arch/x86/include/asm/atomic64_64.h static __always_inline void arch_atomic64_dec(atomic64_t *v)
__always_inline   158 arch/x86/include/asm/atomic64_64.h static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
__always_inline   184 arch/x86/include/asm/atomic64_64.h static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
__always_inline    51 arch/x86/include/asm/bitops.h static __always_inline void
__always_inline    65 arch/x86/include/asm/bitops.h static __always_inline void
__always_inline    71 arch/x86/include/asm/bitops.h static __always_inline void
__always_inline    84 arch/x86/include/asm/bitops.h static __always_inline void
__always_inline    91 arch/x86/include/asm/bitops.h static __always_inline void
__always_inline    97 arch/x86/include/asm/bitops.h static __always_inline bool
__always_inline   110 arch/x86/include/asm/bitops.h static __always_inline void
__always_inline   116 arch/x86/include/asm/bitops.h static __always_inline void
__always_inline   122 arch/x86/include/asm/bitops.h static __always_inline void
__always_inline   135 arch/x86/include/asm/bitops.h static __always_inline bool
__always_inline   141 arch/x86/include/asm/bitops.h static __always_inline bool
__always_inline   147 arch/x86/include/asm/bitops.h static __always_inline bool
__always_inline   159 arch/x86/include/asm/bitops.h static __always_inline bool
__always_inline   173 arch/x86/include/asm/bitops.h static __always_inline bool
__always_inline   185 arch/x86/include/asm/bitops.h static __always_inline bool
__always_inline   198 arch/x86/include/asm/bitops.h static __always_inline bool
__always_inline   204 arch/x86/include/asm/bitops.h static __always_inline bool constant_test_bit(long nr, const volatile unsigned long *addr)
__always_inline   210 arch/x86/include/asm/bitops.h static __always_inline bool variable_test_bit(long nr, volatile const unsigned long *addr)
__always_inline   233 arch/x86/include/asm/bitops.h static __always_inline unsigned long __ffs(unsigned long word)
__always_inline   247 arch/x86/include/asm/bitops.h static __always_inline unsigned long ffz(unsigned long word)
__always_inline   261 arch/x86/include/asm/bitops.h static __always_inline unsigned long __fls(unsigned long word)
__always_inline   283 arch/x86/include/asm/bitops.h static __always_inline int ffs(int x)
__always_inline   324 arch/x86/include/asm/bitops.h static __always_inline int fls(unsigned int x)
__always_inline   366 arch/x86/include/asm/bitops.h static __always_inline int fls64(__u64 x)
__always_inline   173 arch/x86/include/asm/cpufeature.h static __always_inline bool _static_cpu_has(u16 bit)
__always_inline    13 arch/x86/include/asm/current.h static __always_inline struct task_struct *get_current(void)
__always_inline    11 arch/x86/include/asm/dmi.h static __always_inline __init void *dmi_alloc(unsigned len)
__always_inline    61 arch/x86/include/asm/fpu/internal.h static __always_inline __pure bool use_xsaveopt(void)
__always_inline    66 arch/x86/include/asm/fpu/internal.h static __always_inline __pure bool use_xsave(void)
__always_inline    71 arch/x86/include/asm/fpu/internal.h static __always_inline __pure bool use_fxsr(void)
__always_inline    23 arch/x86/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
__always_inline    39 arch/x86/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
__always_inline   201 arch/x86/include/asm/msr.h static __always_inline unsigned long long rdtsc(void)
__always_inline   218 arch/x86/include/asm/msr.h static __always_inline unsigned long long rdtsc_ordered(void)
__always_inline   262 arch/x86/include/asm/nospec-branch.h static __always_inline
__always_inline   639 arch/x86/include/asm/paravirt.h static __always_inline void pv_queued_spin_lock_slowpath(struct qspinlock *lock,
__always_inline   645 arch/x86/include/asm/paravirt.h static __always_inline void pv_queued_spin_unlock(struct qspinlock *lock)
__always_inline   650 arch/x86/include/asm/paravirt.h static __always_inline void pv_wait(u8 *ptr, u8 val)
__always_inline   655 arch/x86/include/asm/paravirt.h static __always_inline void pv_kick(int cpu)
__always_inline   660 arch/x86/include/asm/paravirt.h static __always_inline bool pv_vcpu_is_preempted(long cpu)
__always_inline   524 arch/x86/include/asm/percpu.h static __always_inline bool x86_this_cpu_constant_test_bit(unsigned int nr,
__always_inline    24 arch/x86/include/asm/preempt.h static __always_inline int preempt_count(void)
__always_inline    29 arch/x86/include/asm/preempt.h static __always_inline void preempt_count_set(int pc)
__always_inline    58 arch/x86/include/asm/preempt.h static __always_inline void set_preempt_need_resched(void)
__always_inline    63 arch/x86/include/asm/preempt.h static __always_inline void clear_preempt_need_resched(void)
__always_inline    68 arch/x86/include/asm/preempt.h static __always_inline bool test_preempt_need_resched(void)
__always_inline    77 arch/x86/include/asm/preempt.h static __always_inline void __preempt_count_add(int val)
__always_inline    82 arch/x86/include/asm/preempt.h static __always_inline void __preempt_count_sub(int val)
__always_inline    92 arch/x86/include/asm/preempt.h static __always_inline bool __preempt_count_dec_and_test(void)
__always_inline   100 arch/x86/include/asm/preempt.h static __always_inline bool should_resched(int preempt_offset)
__always_inline   648 arch/x86/include/asm/processor.h static __always_inline void rep_nop(void)
__always_inline   653 arch/x86/include/asm/processor.h static __always_inline void cpu_relax(void)
__always_inline    20 arch/x86/include/asm/pvclock.h static __always_inline
__always_inline    29 arch/x86/include/asm/pvclock.h static __always_inline
__always_inline    81 arch/x86/include/asm/pvclock.h static __always_inline
__always_inline    14 arch/x86/include/asm/qspinlock.h static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
__always_inline    42 arch/x86/include/asm/refcount.h static __always_inline void refcount_add(unsigned int i, refcount_t *r)
__always_inline    51 arch/x86/include/asm/refcount.h static __always_inline void refcount_inc(refcount_t *r)
__always_inline    59 arch/x86/include/asm/refcount.h static __always_inline void refcount_dec(refcount_t *r)
__always_inline    67 arch/x86/include/asm/refcount.h static __always_inline __must_check
__always_inline    82 arch/x86/include/asm/refcount.h static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
__always_inline    96 arch/x86/include/asm/refcount.h static __always_inline __must_check
__always_inline   121 arch/x86/include/asm/refcount.h static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
__always_inline    44 arch/x86/include/asm/smap.h static __always_inline void clac(void)
__always_inline    50 arch/x86/include/asm/smap.h static __always_inline void stac(void)
__always_inline    56 arch/x86/include/asm/smap.h static __always_inline unsigned long smap_save(void)
__always_inline    67 arch/x86/include/asm/smap.h static __always_inline void smap_restore(unsigned long flags)
__always_inline    66 arch/x86/include/asm/stackprotector.h static __always_inline void boot_init_stack_canary(void)
__always_inline    33 arch/x86/include/asm/string_32.h static __always_inline void *__memcpy(void *to, const void *from, size_t n)
__always_inline    52 arch/x86/include/asm/string_32.h static __always_inline void *__constant_memcpy(void *to, const void *from,
__always_inline   105 arch/x86/include/asm/string_64.h static __always_inline __must_check unsigned long
__always_inline   120 arch/x86/include/asm/string_64.h static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
__always_inline   712 arch/x86/include/asm/uaccess.h static __must_check __always_inline bool user_access_begin(const void __user *ptr, size_t len)
__always_inline    17 arch/x86/include/asm/uaccess_32.h static __always_inline unsigned long __must_check
__always_inline    23 arch/x86/include/asm/uaccess_32.h static __always_inline unsigned long
__always_inline    56 arch/x86/include/asm/uaccess_32.h static __always_inline unsigned long
__always_inline    27 arch/x86/include/asm/uaccess_64.h static __always_inline __must_check unsigned long
__always_inline    49 arch/x86/include/asm/uaccess_64.h static __always_inline __must_check unsigned long
__always_inline    65 arch/x86/include/asm/uaccess_64.h static __always_inline __must_check unsigned long
__always_inline   122 arch/x86/include/asm/uaccess_64.h static __always_inline __must_check unsigned long
__always_inline   181 arch/x86/include/asm/uaccess_64.h static __always_inline __must_check
__always_inline    61 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline    73 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline    85 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline   101 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline   118 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline   135 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline   153 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline long
__always_inline   170 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline   263 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline const struct vdso_data *__arch_get_vdso_data(void)
__always_inline   286 arch/x86/include/asm/vdso/gettimeofday.h static __always_inline
__always_inline    19 arch/x86/include/asm/vdso/vsyscall.h static __always_inline
__always_inline    26 arch/x86/include/asm/vdso/vsyscall.h static __always_inline
__always_inline   220 arch/x86/include/asm/xen/hypercall.h static __always_inline void __xen_stac(void)
__always_inline   230 arch/x86/include/asm/xen/hypercall.h static __always_inline void __xen_clac(void)
__always_inline   316 arch/x86/kernel/cpu/common.c static __always_inline void setup_smep(struct cpuinfo_x86 *c)
__always_inline   329 arch/x86/kernel/cpu/common.c static __always_inline void setup_smap(struct cpuinfo_x86 *c)
__always_inline   345 arch/x86/kernel/cpu/common.c static __always_inline void setup_umip(struct cpuinfo_x86 *c)
__always_inline   445 arch/x86/kernel/cpu/common.c static __always_inline void setup_pku(struct cpuinfo_x86 *c)
__always_inline   360 arch/x86/kernel/process.c static __always_inline void amd_set_core_ssb_state(unsigned long tifn)
__always_inline   399 arch/x86/kernel/process.c static __always_inline void amd_set_core_ssb_state(unsigned long tifn)
__always_inline   407 arch/x86/kernel/process.c static __always_inline void amd_set_ssb_virt_state(unsigned long tifn)
__always_inline   422 arch/x86/kernel/process.c static __always_inline void __speculation_ctrl_update(unsigned long tifp,
__always_inline   160 arch/x86/kernel/process_64.c static __always_inline void save_base_legacy(struct task_struct *prev_p,
__always_inline   199 arch/x86/kernel/process_64.c static __always_inline void save_fsgs(struct task_struct *task)
__always_inline   221 arch/x86/kernel/process_64.c static __always_inline void loadseg(enum which_selector which,
__always_inline   230 arch/x86/kernel/process_64.c static __always_inline void load_seg_legacy(unsigned short prev_index,
__always_inline   280 arch/x86/kernel/process_64.c static __always_inline void x86_fsgsbase_load(struct thread_struct *prev,
__always_inline    62 arch/x86/kernel/tsc.c __always_inline void cyc2ns_read_begin(struct cyc2ns_data *data)
__always_inline    79 arch/x86/kernel/tsc.c __always_inline void cyc2ns_read_end(void)
__always_inline   108 arch/x86/kernel/tsc.c static __always_inline unsigned long long cycles_2_ns(unsigned long long cyc)
__always_inline    58 arch/x86/kvm/cpuid.h static __always_inline struct cpuid_reg x86_feature_cpuid(unsigned x86_feature)
__always_inline    68 arch/x86/kvm/cpuid.h static __always_inline int *guest_cpuid_get_register(struct kvm_vcpu *vcpu, unsigned x86_feature)
__always_inline    92 arch/x86/kvm/cpuid.h static __always_inline bool guest_cpuid_has(struct kvm_vcpu *vcpu, unsigned x86_feature)
__always_inline   107 arch/x86/kvm/cpuid.h static __always_inline void guest_cpuid_clear(struct kvm_vcpu *vcpu, unsigned x86_feature)
__always_inline   702 arch/x86/kvm/emulate.c static __always_inline int __linearize(struct x86_emulate_ctxt *ctxt,
__always_inline   921 arch/x86/kvm/emulate.c static __always_inline int do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt,
__always_inline  1050 arch/x86/kvm/emulate.c static __always_inline u8 test_cc(unsigned int condition, unsigned long flags)
__always_inline  1324 arch/x86/kvm/hyperv.c static __always_inline unsigned long *sparse_set_to_vcpu_mask(
__always_inline    13 arch/x86/kvm/kvm_cache_regs.h static __always_inline unsigned long kvm_##lname##_read(struct kvm_vcpu *vcpu)\
__always_inline    17 arch/x86/kvm/kvm_cache_regs.h static __always_inline void kvm_##lname##_write(struct kvm_vcpu *vcpu,	      \
__always_inline  5690 arch/x86/kvm/mmu.c static __always_inline bool
__always_inline  5723 arch/x86/kvm/mmu.c static __always_inline bool
__always_inline  5734 arch/x86/kvm/mmu.c static __always_inline bool
__always_inline  5742 arch/x86/kvm/mmu.c static __always_inline bool
__always_inline  5750 arch/x86/kvm/mmu.c static __always_inline bool
__always_inline    77 arch/x86/kvm/vmx/evmcs.h static __always_inline int get_evmcs_offset(unsigned long field,
__always_inline    24 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_check16(unsigned long field)
__always_inline    36 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_check32(unsigned long field)
__always_inline    44 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_check64(unsigned long field)
__always_inline    56 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_checkl(unsigned long field)
__always_inline    68 arch/x86/kvm/vmx/ops.h static __always_inline unsigned long __vmcs_readl(unsigned long field)
__always_inline   104 arch/x86/kvm/vmx/ops.h static __always_inline u16 vmcs_read16(unsigned long field)
__always_inline   112 arch/x86/kvm/vmx/ops.h static __always_inline u32 vmcs_read32(unsigned long field)
__always_inline   120 arch/x86/kvm/vmx/ops.h static __always_inline u64 vmcs_read64(unsigned long field)
__always_inline   132 arch/x86/kvm/vmx/ops.h static __always_inline unsigned long vmcs_readl(unsigned long field)
__always_inline   170 arch/x86/kvm/vmx/ops.h static __always_inline void __vmcs_writel(unsigned long field, unsigned long value)
__always_inline   175 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_write16(unsigned long field, u16 value)
__always_inline   184 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_write32(unsigned long field, u32 value)
__always_inline   193 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_write64(unsigned long field, u64 value)
__always_inline   205 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_writel(unsigned long field, unsigned long value)
__always_inline   214 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_clear_bits(unsigned long field, u32 mask)
__always_inline   224 arch/x86/kvm/vmx/ops.h static __always_inline void vmcs_set_bits(unsigned long field, u32 mask)
__always_inline   346 arch/x86/kvm/vmx/vmx.c static __always_inline void vmx_disable_intercept_for_msr(unsigned long *msr_bitmap,
__always_inline  3572 arch/x86/kvm/vmx/vmx.c static __always_inline void vmx_disable_intercept_for_msr(unsigned long *msr_bitmap,
__always_inline  3610 arch/x86/kvm/vmx/vmx.c static __always_inline void vmx_enable_intercept_for_msr(unsigned long *msr_bitmap,
__always_inline  3648 arch/x86/kvm/vmx/vmx.c static __always_inline void vmx_set_intercept_for_msr(unsigned long *msr_bitmap,
__always_inline     9 arch/x86/lib/iomem.c static __always_inline void rep_movs(void *to, const void *from, size_t n)
__always_inline  1532 arch/x86/mm/fault.c static __always_inline void
__always_inline    60 arch/xtensa/include/asm/fixmap.h static __always_inline unsigned long fix_to_virt(const unsigned int idx)
__always_inline    13 arch/xtensa/include/asm/jump_label.h static __always_inline bool arch_static_branch(struct static_key *key,
__always_inline    28 arch/xtensa/include/asm/jump_label.h static __always_inline bool arch_static_branch_jump(struct static_key *key,
__always_inline    28 arch/xtensa/include/asm/stackprotector.h static __always_inline void boot_init_stack_canary(void)
__always_inline    20 arch/xtensa/include/asm/stacktrace.h static __always_inline unsigned long *stack_pointer(struct task_struct *task)
__always_inline    27 crypto/aegis.h static __always_inline void crypto_aegis_block_xor(union aegis_block *dst,
__always_inline    34 crypto/aegis.h static __always_inline void crypto_aegis_block_and(union aegis_block *dst,
__always_inline    41 crypto/aegis.h static __always_inline void crypto_aegis_aesenc(union aegis_block *dst,
__always_inline    92 drivers/base/devres.c static __always_inline struct devres * alloc_dr(dr_release_t release,
__always_inline    87 drivers/clocksource/arm_arch_timer.c static __always_inline
__always_inline   116 drivers/clocksource/arm_arch_timer.c static __always_inline
__always_inline   623 drivers/clocksource/arm_arch_timer.c static __always_inline irqreturn_t timer_handler(const int access,
__always_inline   667 drivers/clocksource/arm_arch_timer.c static __always_inline int timer_shutdown(const int access,
__always_inline   699 drivers/clocksource/arm_arch_timer.c static __always_inline void set_next_event(const int access, unsigned long evt,
__always_inline   193 drivers/crypto/bcm/spu.h static __always_inline  bool spu_req_incl_icv(enum spu_cipher_mode cipher_mode,
__always_inline   204 drivers/crypto/bcm/spu.h static __always_inline u32 spu_real_db_size(u32 assoc_size,
__always_inline    12 drivers/gpu/drm/i915/gem/i915_gem_busy.c static __always_inline u32 __busy_read_flag(u16 id)
__always_inline    21 drivers/gpu/drm/i915/gem/i915_gem_busy.c static __always_inline u32 __busy_write_id(u16 id)
__always_inline    38 drivers/gpu/drm/i915/gem/i915_gem_busy.c static __always_inline unsigned int
__always_inline    64 drivers/gpu/drm/i915/gem/i915_gem_busy.c static __always_inline unsigned int
__always_inline    70 drivers/gpu/drm/i915/gem/i915_gem_busy.c static __always_inline unsigned int
__always_inline  2926 drivers/gpu/drm/i915/gt/intel_lrc.c static __always_inline u32*
__always_inline  1866 drivers/gpu/drm/i915/i915_drv.h static __always_inline unsigned int
__always_inline  1880 drivers/gpu/drm/i915/i915_drv.h static __always_inline unsigned int
__always_inline  1898 drivers/gpu/drm/i915/i915_drv.h static __always_inline bool
__always_inline  1910 drivers/gpu/drm/i915/i915_drv.h static __always_inline bool
__always_inline  1166 drivers/gpu/drm/i915/i915_gem_gtt.c static __always_inline u64
__always_inline   181 drivers/gpu/drm/i915/i915_params.c static __always_inline void _print_param(struct drm_printer *p,
__always_inline   213 drivers/gpu/drm/i915/i915_params.c static __always_inline void dup_param(const char *type, void *x)
__always_inline   227 drivers/gpu/drm/i915/i915_params.c static __always_inline void free_param(const char *type, void *x)
__always_inline    19 drivers/gpu/drm/i915/i915_scatterlist.h static __always_inline struct sgt_iter {
__always_inline   247 drivers/gpu/drm/i915/i915_vma.h static __always_inline ptrdiff_t ptrdiff(const void *a, const void *b)
__always_inline  3340 drivers/infiniband/hw/mlx4/qp.c static __always_inline void set_raddr_seg(struct mlx4_wqe_raddr_seg *rseg,
__always_inline  4087 drivers/infiniband/hw/mlx5/qp.c static __always_inline void set_raddr_seg(struct mlx5_wqe_raddr_seg *rseg,
__always_inline  1584 drivers/infiniband/hw/mthca/mthca_qp.c static __always_inline void set_raddr_seg(struct mthca_raddr_seg *rseg,
__always_inline  1592 drivers/infiniband/hw/mthca/mthca_qp.c static __always_inline void set_atomic_seg(struct mthca_atomic_seg *aseg,
__always_inline   116 drivers/infiniband/hw/mthca/mthca_wqe.h static __always_inline void mthca_set_data_seg(struct mthca_data_seg *dseg,
__always_inline   124 drivers/infiniband/hw/mthca/mthca_wqe.h static __always_inline void mthca_set_data_seg_inval(struct mthca_data_seg *dseg)
__always_inline   413 drivers/md/bcache/bset.h static __always_inline int64_t bkey_cmp(const struct bkey *l,
__always_inline   730 drivers/md/dm-cache-target.c __always_inline
__always_inline   360 drivers/md/dm-switch.c static __always_inline unsigned long parse_hex(const char **string)
__always_inline    62 drivers/misc/mic/scif/scif_fd.c static __always_inline void scif_err_debug(int err, const char *str)
__always_inline    14 drivers/misc/mic/scif/scif_map.h static __always_inline void *
__always_inline    34 drivers/misc/mic/scif/scif_map.h static __always_inline void
__always_inline    48 drivers/misc/mic/scif/scif_map.h static __always_inline int
__always_inline    69 drivers/misc/mic/scif/scif_map.h static __always_inline void
__always_inline    81 drivers/misc/mic/scif/scif_map.h static __always_inline void *
__always_inline    95 drivers/misc/mic/scif/scif_map.h static __always_inline void
__always_inline   105 drivers/misc/mic/scif/scif_map.h static __always_inline int
__always_inline   275 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   701 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   728 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   746 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   767 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   885 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   937 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   955 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   971 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   986 drivers/misc/mic/scif/scif_nodeqp.c static __always_inline void
__always_inline   506 drivers/misc/vmw_balloon.c static __always_inline unsigned long
__always_inline   313 drivers/net/ethernet/intel/i40e/i40e_xsk.c static __always_inline bool
__always_inline   311 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c static __always_inline bool
__always_inline  3465 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c static __always_inline void brcmf_delay(u32 ms)
__always_inline   128 drivers/net/wireless/quantenna/qtnfmac/bus.h static __always_inline void qtnf_bus_lock(struct qtnf_bus *bus)
__always_inline   133 drivers/net/wireless/quantenna/qtnfmac/bus.h static __always_inline void qtnf_bus_unlock(struct qtnf_bus *bus)
__always_inline    32 drivers/video/fbdev/c2p_core.h static __always_inline u32 get_mask(unsigned int n)
__always_inline    60 drivers/video/fbdev/c2p_core.h static __always_inline void transp8(u32 d[], unsigned int n, unsigned int m)
__always_inline   102 drivers/video/fbdev/c2p_core.h static __always_inline void transp4(u32 d[], unsigned int n, unsigned int m)
__always_inline   129 drivers/video/fbdev/c2p_core.h static __always_inline void transp4x(u32 d[], unsigned int n, unsigned int m)
__always_inline  1040 fs/namei.c     static __always_inline
__always_inline  1243 fs/userfaultfd.c static __always_inline void wake_userfault(struct userfaultfd_ctx *ctx,
__always_inline  1274 fs/userfaultfd.c static __always_inline int validate_range(struct mm_struct *mm,
__always_inline    13 include/asm-generic/bitops/__ffs.h static __always_inline unsigned long __ffs(unsigned long word)
__always_inline    13 include/asm-generic/bitops/__fls.h static __always_inline unsigned long __fls(unsigned long word)
__always_inline    11 include/asm-generic/bitops/builtin-__ffs.h static __always_inline unsigned long __ffs(unsigned long word)
__always_inline    11 include/asm-generic/bitops/builtin-__fls.h static __always_inline unsigned long __fls(unsigned long word)
__always_inline    13 include/asm-generic/bitops/builtin-ffs.h static __always_inline int ffs(int x)
__always_inline    12 include/asm-generic/bitops/builtin-fls.h static __always_inline int fls(unsigned int x)
__always_inline    13 include/asm-generic/bitops/fls.h static __always_inline int fls(unsigned int x)
__always_inline    19 include/asm-generic/bitops/fls64.h static __always_inline int fls64(__u64 x)
__always_inline    27 include/asm-generic/bitops/fls64.h static __always_inline int fls64(__u64 x)
__always_inline    30 include/asm-generic/fixmap.h static __always_inline unsigned long fix_to_virt(const unsigned int idx)
__always_inline    18 include/asm-generic/kprobes.h # define nokprobe_inline	__always_inline
__always_inline     9 include/asm-generic/preempt.h static __always_inline int preempt_count(void)
__always_inline    14 include/asm-generic/preempt.h static __always_inline volatile int *preempt_count_ptr(void)
__always_inline    19 include/asm-generic/preempt.h static __always_inline void preempt_count_set(int pc)
__always_inline    35 include/asm-generic/preempt.h static __always_inline void set_preempt_need_resched(void)
__always_inline    39 include/asm-generic/preempt.h static __always_inline void clear_preempt_need_resched(void)
__always_inline    43 include/asm-generic/preempt.h static __always_inline bool test_preempt_need_resched(void)
__always_inline    52 include/asm-generic/preempt.h static __always_inline void __preempt_count_add(int val)
__always_inline    57 include/asm-generic/preempt.h static __always_inline void __preempt_count_sub(int val)
__always_inline    62 include/asm-generic/preempt.h static __always_inline bool __preempt_count_dec_and_test(void)
__always_inline    75 include/asm-generic/preempt.h static __always_inline bool should_resched(int preempt_offset)
__always_inline    20 include/asm-generic/qspinlock.h static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
__always_inline    39 include/asm-generic/qspinlock.h static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
__always_inline    49 include/asm-generic/qspinlock.h static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
__always_inline    58 include/asm-generic/qspinlock.h static __always_inline int queued_spin_trylock(struct qspinlock *lock)
__always_inline    74 include/asm-generic/qspinlock.h static __always_inline void queued_spin_lock(struct qspinlock *lock)
__always_inline    89 include/asm-generic/qspinlock.h static __always_inline void queued_spin_unlock(struct qspinlock *lock)
__always_inline    99 include/asm-generic/qspinlock.h static __always_inline bool virt_spin_lock(struct qspinlock *lock)
__always_inline     8 include/asm-generic/vdso/vsyscall.h static __always_inline struct vdso_data *__arch_get_k_vdso_data(void)
__always_inline    15 include/asm-generic/vdso/vsyscall.h static __always_inline bool __arch_update_vdso_data(void)
__always_inline    22 include/asm-generic/vdso/vsyscall.h static __always_inline int __arch_get_clock_mode(struct timekeeper *tk)
__always_inline    29 include/asm-generic/vdso/vsyscall.h static __always_inline void __arch_update_vsyscall(struct vdso_data *vdata,
__always_inline    36 include/asm-generic/vdso/vsyscall.h static __always_inline void __arch_sync_vdso_data(struct vdso_data *vdata)
__always_inline    17 include/linux/async_tx.h #define __async_inline __always_inline
__always_inline   103 include/linux/bitfield.h static __always_inline u64 field_multiplier(u64 field)
__always_inline   109 include/linux/bitfield.h static __always_inline u64 field_mask(u64 field)
__always_inline   114 include/linux/bitfield.h static __always_inline __##type type##_encode_bits(base v, base field)	\
__always_inline   120 include/linux/bitfield.h static __always_inline __##type type##_replace_bits(__##type old,	\
__always_inline   125 include/linux/bitfield.h static __always_inline void type##p_replace_bits(__##type *p,		\
__always_inline   130 include/linux/bitfield.h static __always_inline base type##_get_bits(__##type v, base field)	\
__always_inline   382 include/linux/bitmap.h static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
__always_inline   389 include/linux/bitmap.h static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
__always_inline   403 include/linux/bitmap.h static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
__always_inline    58 include/linux/bitops.h static __always_inline unsigned long hweight_long(unsigned long w)
__always_inline   225 include/linux/bitops.h static __always_inline void assign_bit(long nr, volatile unsigned long *addr,
__always_inline   234 include/linux/bitops.h static __always_inline void __assign_bit(long nr, volatile unsigned long *addr,
__always_inline    10 include/linux/bottom_half.h static __always_inline void __local_bh_disable_ip(unsigned long ip, unsigned int cnt)
__always_inline    88 include/linux/buffer_head.h static __always_inline void set_buffer_##name(struct buffer_head *bh)	\
__always_inline    93 include/linux/buffer_head.h static __always_inline void clear_buffer_##name(struct buffer_head *bh)	\
__always_inline    97 include/linux/buffer_head.h static __always_inline int buffer_##name(const struct buffer_head *bh)	\
__always_inline   106 include/linux/buffer_head.h static __always_inline int test_set_buffer_##name(struct buffer_head *bh) \
__always_inline   110 include/linux/buffer_head.h static __always_inline int test_clear_buffer_##name(struct buffer_head *bh) \
__always_inline   196 include/linux/compiler.h static __always_inline
__always_inline   211 include/linux/compiler.h # define __no_kasan_or_inline __always_inline
__always_inline   220 include/linux/compiler.h static __always_inline void __write_once_size(volatile void *p, void *res, int size)
__always_inline    27 include/linux/dma-noncoherent.h static __always_inline bool dma_alloc_need_uncached(struct device *dev,
__always_inline   465 include/linux/filter.h 	static __always_inline						       \
__always_inline   472 include/linux/filter.h 	static __always_inline						       \
__always_inline   696 include/linux/filter.h static __always_inline u32 bpf_prog_run_xdp(const struct bpf_prog *prog,
__always_inline    77 include/linux/hash.h static __always_inline u32 hash_64_generic(u64 val, unsigned int bits)
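
hash_64_generic() is the multiplicative-hash fallback: multiply by a 64-bit odd constant derived from the golden ratio and keep the top bits of the product. The constant below is quoted from memory and should be treated as an assumption, as should the toy_ name:

#include <stdint.h>

#define __always_inline inline __attribute__((__always_inline__))

/* assumed value of the kernel's GOLDEN_RATIO_64 (2^64 / phi, forced odd) */
#define TOY_GOLDEN_RATIO_64 0x61c8864680b583ebULL

/* bits must be in 1..32; the high bits of the product are the best mixed */
static __always_inline uint32_t toy_hash_64(uint64_t val, unsigned int bits)
{
        return (uint32_t)((val * TOY_GOLDEN_RATIO_64) >> (64 - bits));
}
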
__always_inline   364 include/linux/jiffies.h static __always_inline unsigned long msecs_to_jiffies(const unsigned int m)
__always_inline   411 include/linux/jiffies.h static __always_inline unsigned long usecs_to_jiffies(const unsigned int u)
__always_inline   198 include/linux/jump_label.h static __always_inline bool static_key_false(struct static_key *key)
__always_inline   203 include/linux/jump_label.h static __always_inline bool static_key_true(struct static_key *key)
__always_inline   257 include/linux/jump_label.h static __always_inline void jump_label_init(void)
__always_inline   262 include/linux/jump_label.h static __always_inline bool static_key_false(struct static_key *key)
__always_inline   269 include/linux/jump_label.h static __always_inline bool static_key_true(struct static_key *key)
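
The jump_label.h names appear twice because there are two implementations: a self-patching one built on asm goto and a plain fallback. The fallback reduces to a branch on an atomic counter, roughly like this userspace analogue (toy_* names are illustrative, not the kernel's code):

#include <stdatomic.h>
#include <stdbool.h>

#define __always_inline inline __attribute__((__always_inline__))
#define unlikely(x)     __builtin_expect(!!(x), 0)
#define likely(x)       __builtin_expect(!!(x), 1)

struct toy_static_key {
        atomic_int enabled;
};

/* no code patching here: just a predictable branch on a counter */
static __always_inline bool toy_static_key_false(struct toy_static_key *key)
{
        return unlikely(atomic_load_explicit(&key->enabled, memory_order_relaxed) > 0);
}

static __always_inline bool toy_static_key_true(struct toy_static_key *key)
{
        return likely(atomic_load_explicit(&key->enabled, memory_order_relaxed) > 0);
}
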
__always_inline   145 include/linux/math64.h static __always_inline u32
__always_inline  1326 include/linux/mm.h static __always_inline void *lowmem_page_address(const struct page *page)
__always_inline    26 include/linux/mm_inline.h static __always_inline void __update_lru_size(struct lruvec *lruvec,
__always_inline    37 include/linux/mm_inline.h static __always_inline void update_lru_size(struct lruvec *lruvec,
__always_inline    47 include/linux/mm_inline.h static __always_inline void add_page_to_lru_list(struct page *page,
__always_inline    54 include/linux/mm_inline.h static __always_inline void add_page_to_lru_list_tail(struct page *page,
__always_inline    61 include/linux/mm_inline.h static __always_inline void del_page_from_lru_list(struct page *page,
__always_inline    90 include/linux/mm_inline.h static __always_inline enum lru_list page_off_lru(struct page *page)
__always_inline   114 include/linux/mm_inline.h static __always_inline enum lru_list page_lru(struct page *page)
__always_inline  1038 include/linux/mmzone.h static __always_inline struct zoneref *next_zones_zonelist(struct zoneref *z,
__always_inline  3074 include/linux/netdevice.h static __always_inline void netif_tx_start_queue(struct netdev_queue *dev_queue)
__always_inline  3124 include/linux/netdevice.h static __always_inline void netif_tx_stop_queue(struct netdev_queue *dev_queue)
__always_inline  3708 include/linux/netdevice.h static __always_inline int ____dev_forward_skb(struct net_device *dev,
__always_inline   128 include/linux/nodemask.h static __always_inline void __node_set(int node, volatile nodemask_t *dstp)
__always_inline   181 include/linux/page-flags.h static __always_inline int PageTail(struct page *page)
__always_inline   186 include/linux/page-flags.h static __always_inline int PageCompound(struct page *page)
__always_inline   247 include/linux/page-flags.h static __always_inline int Page##uname(struct page *page)		\
__always_inline   251 include/linux/page-flags.h static __always_inline void SetPage##uname(struct page *page)		\
__always_inline   255 include/linux/page-flags.h static __always_inline void ClearPage##uname(struct page *page)		\
__always_inline   259 include/linux/page-flags.h static __always_inline void __SetPage##uname(struct page *page)		\
__always_inline   263 include/linux/page-flags.h static __always_inline void __ClearPage##uname(struct page *page)	\
__always_inline   267 include/linux/page-flags.h static __always_inline int TestSetPage##uname(struct page *page)	\
__always_inline   271 include/linux/page-flags.h static __always_inline int TestClearPage##uname(struct page *page)	\
__always_inline   380 include/linux/page-flags.h static __always_inline int PageSwapCache(struct page *page)
__always_inline   456 include/linux/page-flags.h static __always_inline int PageMappingFlags(struct page *page)
__always_inline   461 include/linux/page-flags.h static __always_inline int PageAnon(struct page *page)
__always_inline   467 include/linux/page-flags.h static __always_inline int __PageMovable(struct page *page)
__always_inline   480 include/linux/page-flags.h static __always_inline int PageKsm(struct page *page)
__always_inline   511 include/linux/page-flags.h static __always_inline void __SetPageUptodate(struct page *page)
__always_inline   518 include/linux/page-flags.h static __always_inline void SetPageUptodate(struct page *page)
__always_inline   552 include/linux/page-flags.h static __always_inline void set_compound_head(struct page *page, struct page *head)
__always_inline   557 include/linux/page-flags.h static __always_inline void clear_compound_head(struct page *page)
__always_inline   737 include/linux/page-flags.h static __always_inline int Page##uname(struct page *page)		\
__always_inline   741 include/linux/page-flags.h static __always_inline void __SetPage##uname(struct page *page)		\
__always_inline   746 include/linux/page-flags.h static __always_inline void __ClearPage##uname(struct page *page)	\
__always_inline  1105 include/linux/perf_event.h static __always_inline void
__always_inline  1119 include/linux/perf_event.h static __always_inline void
__always_inline  1132 include/linux/perf_event.h static __always_inline bool
__always_inline  1426 include/linux/perf_event.h static __always_inline bool perf_raw_frag_last(const struct perf_raw_frag *frag)
__always_inline   501 include/linux/quota.h static __always_inline unsigned dquot_state_types(unsigned flags, unsigned flag)
__always_inline   268 include/linux/radix-tree.h static __always_inline void __rcu **
__always_inline   360 include/linux/radix-tree.h static __always_inline long
__always_inline   385 include/linux/radix-tree.h static __always_inline void __rcu **radix_tree_next_slot(void __rcu **slot,
__always_inline   197 include/linux/rbtree_augmented.h static __always_inline struct rb_node *
__always_inline   301 include/linux/rbtree_augmented.h static __always_inline void
__always_inline   310 include/linux/rbtree_augmented.h static __always_inline void
__always_inline    69 include/linux/rbtree_latch.h static __always_inline struct latch_tree_node *
__always_inline    75 include/linux/rbtree_latch.h static __always_inline void
__always_inline    99 include/linux/rbtree_latch.h static __always_inline void
__always_inline   105 include/linux/rbtree_latch.h static __always_inline struct latch_tree_node *
__always_inline   143 include/linux/rbtree_latch.h static __always_inline void
__always_inline   170 include/linux/rbtree_latch.h static __always_inline void
__always_inline   199 include/linux/rbtree_latch.h static __always_inline struct latch_tree_node *
__always_inline   595 include/linux/rcupdate.h static __always_inline void rcu_read_lock(void)
__always_inline  1804 include/linux/sched.h static __always_inline bool need_resched(void)
__always_inline    10 include/linux/sched/smt.h static __always_inline bool sched_smt_active(void)
__always_inline   221 include/linux/sched/topology.h static __always_inline
__always_inline  3293 include/linux/skbuff.h static __always_inline void
__always_inline  3321 include/linux/skbuff.h static __always_inline void
__always_inline   322 include/linux/slab.h static __always_inline enum kmalloc_cache_type kmalloc_type(gfp_t flags)
__always_inline   350 include/linux/slab.h static __always_inline unsigned int kmalloc_index(size_t size)
__always_inline   411 include/linux/slab.h static __always_inline void kfree_bulk(size_t size, void **p)
__always_inline   420 include/linux/slab.h static __always_inline void *__kmalloc_node(size_t size, gfp_t flags, int node)
__always_inline   425 include/linux/slab.h static __always_inline void *kmem_cache_alloc_node(struct kmem_cache *s, gfp_t flags, int node)
__always_inline   439 include/linux/slab.h static __always_inline void *
__always_inline   449 include/linux/slab.h static __always_inline void *kmem_cache_alloc_trace(struct kmem_cache *s,
__always_inline   458 include/linux/slab.h static __always_inline void *
__always_inline   475 include/linux/slab.h static __always_inline void *
__always_inline   482 include/linux/slab.h static __always_inline void *kmalloc_large(size_t size, gfp_t flags)
__always_inline   542 include/linux/slab.h static __always_inline void *kmalloc(size_t size, gfp_t flags)
__always_inline   569 include/linux/slab.h static __always_inline unsigned int kmalloc_size(unsigned int n)
__always_inline   584 include/linux/slab.h static __always_inline void *kmalloc_node(size_t size, gfp_t flags, int node)
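
Most of the include/linux/slab.h wrappers above exist so that kmalloc() with a compile-time-constant size can resolve to a specific cache at build time via kmalloc_index(), while non-constant sizes take the generic path; __always_inline plus __builtin_constant_p is what makes that fold away. A schematic userspace sketch of the dispatch pattern, with malloc standing in for the slab caches and all names hypothetical:

#include <stdlib.h>
#include <stdio.h>

#define __always_inline inline __attribute__((__always_inline__))

/* stand-ins for the per-size caches and the generic allocation path */
static void *toy_alloc_from_cache(unsigned int index)
{
        return malloc(1UL << index);
}

static void *toy_alloc_generic(size_t size)
{
        return malloc(size);
}

/* maps a size to a power-of-two cache index; folds to a constant for constant sizes */
static __always_inline unsigned int toy_size_index(size_t size)
{
        unsigned int index = 3;                 /* smallest cache: 8 bytes */

        while ((1UL << index) < size)
                index++;
        return index;
}

static __always_inline void *toy_alloc(size_t size)
{
        if (__builtin_constant_p(size))
                return toy_alloc_from_cache(toy_size_index(size));
        return toy_alloc_generic(size);
}

int main(void)
{
        void *p = toy_alloc(24);                /* constant size: index 5, a 32-byte cache */

        printf("allocated %p\n", p);
        free(p);
        return 0;
}
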
__always_inline   325 include/linux/spinlock.h static __always_inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
__always_inline   336 include/linux/spinlock.h static __always_inline void spin_lock(spinlock_t *lock)
__always_inline   341 include/linux/spinlock.h static __always_inline void spin_lock_bh(spinlock_t *lock)
__always_inline   346 include/linux/spinlock.h static __always_inline int spin_trylock(spinlock_t *lock)
__always_inline   361 include/linux/spinlock.h static __always_inline void spin_lock_irq(spinlock_t *lock)
__always_inline   376 include/linux/spinlock.h static __always_inline void spin_unlock(spinlock_t *lock)
__always_inline   381 include/linux/spinlock.h static __always_inline void spin_unlock_bh(spinlock_t *lock)
__always_inline   386 include/linux/spinlock.h static __always_inline void spin_unlock_irq(spinlock_t *lock)
__always_inline   391 include/linux/spinlock.h static __always_inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
__always_inline   396 include/linux/spinlock.h static __always_inline int spin_trylock_bh(spinlock_t *lock)
__always_inline   401 include/linux/spinlock.h static __always_inline int spin_trylock_irq(spinlock_t *lock)
__always_inline   429 include/linux/spinlock.h static __always_inline int spin_is_locked(spinlock_t *lock)
__always_inline   434 include/linux/spinlock.h static __always_inline int spin_is_contended(spinlock_t *lock)
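
The include/linux/spinlock.h entries are the spin_lock()/spin_unlock() family, which as far as I recall are thin __always_inline forwarders onto the embedded raw lock, so the underlying implementation can change without touching callers. A userspace sketch of that forwarding layer (toy_* names, C11 atomic_flag as the raw lock, no debugging or IRQ handling):

#include <stdatomic.h>

#define __always_inline inline __attribute__((__always_inline__))

struct toy_raw_spinlock {
        atomic_flag locked;                     /* initialise with ATOMIC_FLAG_INIT */
};

struct toy_spinlock {
        struct toy_raw_spinlock rlock;          /* the wrapper just embeds the raw lock */
};

static __always_inline void toy_raw_spin_lock(struct toy_raw_spinlock *lock)
{
        while (atomic_flag_test_and_set_explicit(&lock->locked, memory_order_acquire))
                ;
}

static __always_inline void toy_raw_spin_unlock(struct toy_raw_spinlock *lock)
{
        atomic_flag_clear_explicit(&lock->locked, memory_order_release);
}

/* the spin_lock() layer is pure forwarding, which is why it is __always_inline */
static __always_inline void toy_spin_lock(struct toy_spinlock *lock)
{
        toy_raw_spin_lock(&lock->rlock);
}

static __always_inline void toy_spin_unlock(struct toy_spinlock *lock)
{
        toy_raw_spin_unlock(&lock->rlock);
}
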
__always_inline   262 include/linux/string.h #define __FORTIFY_INLINE extern __always_inline __attribute__((gnu_inline))
__always_inline   500 include/linux/string.h static __always_inline size_t str_has_prefix(const char *str, const char *prefix)
__always_inline   115 include/linux/thread_info.h static __always_inline void check_object_size(const void *ptr, unsigned long n,
__always_inline   137 include/linux/thread_info.h static __always_inline __must_check bool
__always_inline   171 include/linux/time32.h static __always_inline void timespec_add_ns(struct timespec *a, u64 ns)
__always_inline   154 include/linux/time64.h static __always_inline void timespec64_add_ns(struct timespec64 *a, u64 ns)
__always_inline    58 include/linux/uaccess.h static __always_inline __must_check unsigned long
__always_inline    66 include/linux/uaccess.h static __always_inline __must_check unsigned long
__always_inline    88 include/linux/uaccess.h static __always_inline __must_check unsigned long
__always_inline    96 include/linux/uaccess.h static __always_inline __must_check unsigned long
__always_inline   140 include/linux/uaccess.h static __always_inline unsigned long __must_check
__always_inline   148 include/linux/uaccess.h static __always_inline unsigned long __must_check
__always_inline   156 include/linux/uaccess.h static __always_inline unsigned long __must_check
__always_inline   166 include/linux/uaccess.h static __always_inline void pagefault_disabled_inc(void)
__always_inline   171 include/linux/uaccess.h static __always_inline void pagefault_disabled_dec(void)
__always_inline   283 include/linux/uaccess.h static __always_inline __must_check int
__always_inline   132 include/linux/uio.h static __always_inline __must_check
__always_inline   141 include/linux/uio.h static __always_inline __must_check
__always_inline   150 include/linux/uio.h static __always_inline __must_check
__always_inline   159 include/linux/uio.h static __always_inline __must_check
__always_inline   168 include/linux/uio.h static __always_inline __must_check
__always_inline   195 include/linux/uio.h static __always_inline __must_check
__always_inline   204 include/linux/uio.h static __always_inline __must_check
__always_inline     8 include/linux/unaligned/access_ok.h static __always_inline u16 get_unaligned_le16(const void *p)
__always_inline    13 include/linux/unaligned/access_ok.h static __always_inline u32 get_unaligned_le32(const void *p)
__always_inline    18 include/linux/unaligned/access_ok.h static __always_inline u64 get_unaligned_le64(const void *p)
__always_inline    23 include/linux/unaligned/access_ok.h static __always_inline u16 get_unaligned_be16(const void *p)
__always_inline    28 include/linux/unaligned/access_ok.h static __always_inline u32 get_unaligned_be32(const void *p)
__always_inline    33 include/linux/unaligned/access_ok.h static __always_inline u64 get_unaligned_be64(const void *p)
__always_inline    38 include/linux/unaligned/access_ok.h static __always_inline void put_unaligned_le16(u16 val, void *p)
__always_inline    43 include/linux/unaligned/access_ok.h static __always_inline void put_unaligned_le32(u32 val, void *p)
__always_inline    48 include/linux/unaligned/access_ok.h static __always_inline void put_unaligned_le64(u64 val, void *p)
__always_inline    53 include/linux/unaligned/access_ok.h static __always_inline void put_unaligned_be16(u16 val, void *p)
__always_inline    58 include/linux/unaligned/access_ok.h static __always_inline void put_unaligned_be32(u32 val, void *p)
__always_inline    63 include/linux/unaligned/access_ok.h static __always_inline void put_unaligned_be64(u64 val, void *p)
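
The include/linux/unaligned/access_ok.h accessors are for architectures where unaligned loads and stores are permitted, so the real helpers can simply dereference. A portable sketch that works anywhere assembles and splits bytes explicitly (same function names reused purely for illustration; this is not the kernel's implementation):

#include <stdint.h>
#include <string.h>

#define __always_inline inline __attribute__((__always_inline__))

static __always_inline uint32_t get_unaligned_le32(const void *p)
{
        const uint8_t *b = p;

        /* assemble byte by byte so the result is correct on any host endianness */
        return (uint32_t)b[0] | ((uint32_t)b[1] << 8) |
               ((uint32_t)b[2] << 16) | ((uint32_t)b[3] << 24);
}

static __always_inline uint16_t get_unaligned_be16(const void *p)
{
        const uint8_t *b = p;

        return (uint16_t)((b[0] << 8) | b[1]);
}

static __always_inline void put_unaligned_le32(uint32_t val, void *p)
{
        uint8_t b[4] = { val & 0xff, (val >> 8) & 0xff, (val >> 16) & 0xff, val >> 24 };

        memcpy(p, b, sizeof(b));
}
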
__always_inline   647 include/net/sock.h static __always_inline void sock_hold(struct sock *sk)
__always_inline   655 include/net/sock.h static __always_inline void __sock_put(struct sock *sk)
__always_inline   159 include/net/xdp.h static __always_inline void
__always_inline   165 include/net/xdp.h static __always_inline bool
__always_inline    44 include/uapi/linux/byteorder/big_endian.h static __always_inline __le64 __cpu_to_le64p(const __u64 *p)
__always_inline    48 include/uapi/linux/byteorder/big_endian.h static __always_inline __u64 __le64_to_cpup(const __le64 *p)
__always_inline    52 include/uapi/linux/byteorder/big_endian.h static __always_inline __le32 __cpu_to_le32p(const __u32 *p)
__always_inline    56 include/uapi/linux/byteorder/big_endian.h static __always_inline __u32 __le32_to_cpup(const __le32 *p)
__always_inline    60 include/uapi/linux/byteorder/big_endian.h static __always_inline __le16 __cpu_to_le16p(const __u16 *p)
__always_inline    64 include/uapi/linux/byteorder/big_endian.h static __always_inline __u16 __le16_to_cpup(const __le16 *p)
__always_inline    68 include/uapi/linux/byteorder/big_endian.h static __always_inline __be64 __cpu_to_be64p(const __u64 *p)
__always_inline    72 include/uapi/linux/byteorder/big_endian.h static __always_inline __u64 __be64_to_cpup(const __be64 *p)
__always_inline    76 include/uapi/linux/byteorder/big_endian.h static __always_inline __be32 __cpu_to_be32p(const __u32 *p)
__always_inline    80 include/uapi/linux/byteorder/big_endian.h static __always_inline __u32 __be32_to_cpup(const __be32 *p)
__always_inline    84 include/uapi/linux/byteorder/big_endian.h static __always_inline __be16 __cpu_to_be16p(const __u16 *p)
__always_inline    88 include/uapi/linux/byteorder/big_endian.h static __always_inline __u16 __be16_to_cpup(const __be16 *p)
__always_inline    44 include/uapi/linux/byteorder/little_endian.h static __always_inline __le64 __cpu_to_le64p(const __u64 *p)
__always_inline    48 include/uapi/linux/byteorder/little_endian.h static __always_inline __u64 __le64_to_cpup(const __le64 *p)
__always_inline    52 include/uapi/linux/byteorder/little_endian.h static __always_inline __le32 __cpu_to_le32p(const __u32 *p)
__always_inline    56 include/uapi/linux/byteorder/little_endian.h static __always_inline __u32 __le32_to_cpup(const __le32 *p)
__always_inline    60 include/uapi/linux/byteorder/little_endian.h static __always_inline __le16 __cpu_to_le16p(const __u16 *p)
__always_inline    64 include/uapi/linux/byteorder/little_endian.h static __always_inline __u16 __le16_to_cpup(const __le16 *p)
__always_inline    68 include/uapi/linux/byteorder/little_endian.h static __always_inline __be64 __cpu_to_be64p(const __u64 *p)
__always_inline    72 include/uapi/linux/byteorder/little_endian.h static __always_inline __u64 __be64_to_cpup(const __be64 *p)
__always_inline    76 include/uapi/linux/byteorder/little_endian.h static __always_inline __be32 __cpu_to_be32p(const __u32 *p)
__always_inline    80 include/uapi/linux/byteorder/little_endian.h static __always_inline __u32 __be32_to_cpup(const __be32 *p)
__always_inline    84 include/uapi/linux/byteorder/little_endian.h static __always_inline __be16 __cpu_to_be16p(const __u16 *p)
__always_inline    88 include/uapi/linux/byteorder/little_endian.h static __always_inline __u16 __be16_to_cpup(const __be16 *p)
__always_inline     4 include/uapi/linux/stddef.h #ifndef __always_inline
__always_inline   136 include/uapi/linux/swab.h static __always_inline unsigned long __swab(const unsigned long y)
__always_inline   171 include/uapi/linux/swab.h static __always_inline __u16 __swab16p(const __u16 *p)
__always_inline   184 include/uapi/linux/swab.h static __always_inline __u32 __swab32p(const __u32 *p)
__always_inline   197 include/uapi/linux/swab.h static __always_inline __u64 __swab64p(const __u64 *p)
__always_inline   252 include/uapi/linux/swab.h static __always_inline void __swab32s(__u32 *p)
__always_inline   265 include/uapi/linux/swab.h static __always_inline void __swab64s(__u64 *p)
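
The byteorder and swab.h entries are the __cpu_to_le/be and __swabNN conversion families; on a host of the opposite endianness they come down to a byte swap, which compilers recognise from the shift-and-mask form below (toy_* names are illustrative):

#include <stdint.h>

#define __always_inline inline __attribute__((__always_inline__))

/* GCC and clang turn this pattern into a single bswap instruction */
static __always_inline uint32_t toy_swab32(uint32_t x)
{
        return ((x & 0x000000ffu) << 24) |
               ((x & 0x0000ff00u) << 8)  |
               ((x & 0x00ff0000u) >> 8)  |
               ((x & 0xff000000u) >> 24);
}

static __always_inline uint16_t toy_swab16(uint16_t x)
{
        return (uint16_t)((x << 8) | (x >> 8));
}

/* __cpu_to_le32p()-style pointer variant: swap only on a big-endian host */
static __always_inline uint32_t toy_cpu_to_le32p(const uint32_t *p)
{
#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
        return toy_swab32(*p);
#else
        return *p;
#endif
}
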
__always_inline     9 include/vdso/helpers.h static __always_inline u32 vdso_read_begin(const struct vdso_data *vd)
__always_inline    20 include/vdso/helpers.h static __always_inline u32 vdso_read_retry(const struct vdso_data *vd,
__always_inline    30 include/vdso/helpers.h static __always_inline void vdso_write_begin(struct vdso_data *vd)
__always_inline    42 include/vdso/helpers.h static __always_inline void vdso_write_end(struct vdso_data *vd)
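
vdso_read_begin()/vdso_read_retry() implement the reader side of a sequence counter: wait until the sequence is even (no writer in flight), read the data, then confirm the sequence has not moved. A simplified userspace analogue with C11 atomics (the real code's memory ordering and data layout differ; toy_* names are illustrative):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define __always_inline inline __attribute__((__always_inline__))

struct toy_vdso_data {
        atomic_uint seq;
        uint64_t cycles;                        /* example payload */
};

static __always_inline unsigned int toy_read_begin(struct toy_vdso_data *vd)
{
        unsigned int seq;

        /* an odd sequence means a writer is mid-update: wait for it to finish */
        while ((seq = atomic_load_explicit(&vd->seq, memory_order_acquire)) & 1)
                ;
        return seq;
}

static __always_inline bool toy_read_retry(struct toy_vdso_data *vd, unsigned int start)
{
        /* the read must be retried if the writer bumped the sequence meanwhile */
        return atomic_load_explicit(&vd->seq, memory_order_acquire) != start;
}

A reader then loops: seq = toy_read_begin(vd); value = vd->cycles; and repeats while toy_read_retry(vd, seq) returns true.
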
__always_inline   526 kernel/bpf/core.c static __always_inline void
__always_inline   575 kernel/bpf/core.c static __always_inline unsigned long
__always_inline   587 kernel/bpf/core.c static __always_inline bool bpf_tree_less(struct latch_tree_node *a,
__always_inline   593 kernel/bpf/core.c static __always_inline int bpf_tree_comp(void *key, struct latch_tree_node *n)
__always_inline   518 kernel/bpf/hashtab.c static __always_inline void *__htab_lru_map_lookup_elem(struct bpf_map *map,
__always_inline   613 kernel/events/core.c static __always_inline enum perf_event_state
__always_inline   624 kernel/events/core.c static __always_inline void
__always_inline  6702 kernel/events/core.c static __always_inline int
__always_inline   137 kernel/events/ring_buffer.c static __always_inline bool
__always_inline   148 kernel/events/ring_buffer.c static __always_inline int
__always_inline   452 kernel/events/ring_buffer.c static __always_inline bool rb_need_aux_wakeup(struct ring_buffer *rb)
__always_inline   980 kernel/fork.c  static __always_inline void mm_clear_owner(struct mm_struct *mm,
__always_inline  1744 kernel/fork.c  static __always_inline void delayed_free_task(struct task_struct *tsk)
__always_inline   229 kernel/irq/chip.c static __always_inline int
__always_inline   360 kernel/irq/internals.h static __always_inline void irq_timings_push(u64 ts, int irq)
__always_inline   378 kernel/irq/internals.h static __always_inline void record_irq_time(struct irq_desc *desc)
__always_inline   435 kernel/irq/timings.c static __always_inline int irq_timings_interval_index(u64 interval)
__always_inline   446 kernel/irq/timings.c static __always_inline void __irq_timings_store(int irq, struct irqt_stat *irqs,
__always_inline   166 kernel/locking/mutex.c static __always_inline bool __mutex_trylock_fast(struct mutex *lock)
__always_inline   177 kernel/locking/mutex.c static __always_inline bool __mutex_unlock_fast(struct mutex *lock)
__always_inline   305 kernel/locking/mutex.c static __always_inline void
__always_inline   464 kernel/locking/mutex.c static __always_inline void
__always_inline   637 kernel/locking/mutex.c static __always_inline bool
__always_inline   713 kernel/locking/mutex.c static __always_inline bool
__always_inline   775 kernel/locking/mutex.c static __always_inline int __sched
__always_inline   925 kernel/locking/mutex.c static __always_inline int __sched
__always_inline   147 kernel/locking/qspinlock.c static __always_inline void clear_pending(struct qspinlock *lock)
__always_inline   160 kernel/locking/qspinlock.c static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
__always_inline   175 kernel/locking/qspinlock.c static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
__always_inline   193 kernel/locking/qspinlock.c static __always_inline void clear_pending(struct qspinlock *lock)
__always_inline   204 kernel/locking/qspinlock.c static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
__always_inline   219 kernel/locking/qspinlock.c static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
__always_inline   248 kernel/locking/qspinlock.c static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
__always_inline   260 kernel/locking/qspinlock.c static __always_inline void set_locked(struct qspinlock *lock)
__always_inline   271 kernel/locking/qspinlock.c static __always_inline void __pv_init_node(struct mcs_spinlock *node) { }
__always_inline   272 kernel/locking/qspinlock.c static __always_inline void __pv_wait_node(struct mcs_spinlock *node,
__always_inline   274 kernel/locking/qspinlock.c static __always_inline void __pv_kick_node(struct qspinlock *lock,
__always_inline   276 kernel/locking/qspinlock.c static __always_inline u32  __pv_wait_head_or_lock(struct qspinlock *lock,
__always_inline   109 kernel/locking/qspinlock_paravirt.h static __always_inline void set_pending(struct qspinlock *lock)
__always_inline   119 kernel/locking/qspinlock_paravirt.h static __always_inline int trylock_clear_pending(struct qspinlock *lock)
__always_inline   126 kernel/locking/qspinlock_paravirt.h static __always_inline void set_pending(struct qspinlock *lock)
__always_inline   131 kernel/locking/qspinlock_paravirt.h static __always_inline int trylock_clear_pending(struct qspinlock *lock)
__always_inline   105 kernel/module.c static __always_inline unsigned long __mod_tree_val(struct latch_tree_node *n)
__always_inline   112 kernel/module.c static __always_inline unsigned long __mod_tree_size(struct latch_tree_node *n)
__always_inline   119 kernel/module.c static __always_inline bool
__always_inline   125 kernel/module.c static __always_inline int
__always_inline   635 kernel/rcu/tree.c static __always_inline void rcu_nmi_exit_common(bool irq)
__always_inline   801 kernel/rcu/tree.c static __always_inline void rcu_nmi_enter_common(bool irq)
__always_inline  3028 kernel/sched/core.c static __always_inline void fire_sched_in_preempt_notifiers(struct task_struct *curr)
__always_inline  3044 kernel/sched/core.c static __always_inline void
__always_inline  3328 kernel/sched/core.c static __always_inline struct rq *
__always_inline   234 kernel/sched/cputime.c static __always_inline u64 steal_account_process_time(u64 maxtime)
__always_inline   499 kernel/sched/fair.c static __always_inline
__always_inline  4066 kernel/sched/fair.c static __always_inline void return_cfs_rq_runtime(struct cfs_rq *cfs_rq);
__always_inline  4428 kernel/sched/fair.c static __always_inline
__always_inline  4805 kernel/sched/fair.c static __always_inline void return_cfs_rq_runtime(struct cfs_rq *cfs_rq)
__always_inline  5096 kernel/sched/fair.c static __always_inline void return_cfs_rq_runtime(struct cfs_rq *cfs_rq) {}
__always_inline   109 kernel/sched/pelt.c static __always_inline u32
__always_inline   175 kernel/sched/pelt.c static __always_inline int
__always_inline   226 kernel/sched/pelt.c static __always_inline void
__always_inline  1578 kernel/sched/sched.h static __always_inline bool static_branch_##name(struct static_key *key) \
__always_inline  1987 kernel/sched/sched.h static __always_inline
__always_inline  2320 kernel/sched/sched.h static __always_inline
__always_inline   107 kernel/smp.c   static __always_inline void csd_lock_wait(call_single_data_t *csd)
__always_inline   112 kernel/smp.c   static __always_inline void csd_lock(call_single_data_t *csd)
__always_inline   125 kernel/smp.c   static __always_inline void csd_unlock(call_single_data_t *csd)
__always_inline   451 kernel/time/timekeeping.c static __always_inline u64 __ktime_get_fast_ns(struct tk_fast *tkf)
__always_inline   517 kernel/time/timekeeping.c static __always_inline u64 __ktime_get_real_fast_ns(struct tk_fast *tkf)
__always_inline  1821 kernel/time/timekeeping.c static __always_inline void timekeeping_apply_adjustment(struct timekeeper *tk,
__always_inline   346 kernel/trace/bpf_trace.c static __always_inline int
__always_inline   419 kernel/trace/bpf_trace.c static __always_inline u64
__always_inline  1329 kernel/trace/bpf_trace.c static __always_inline
__always_inline  1115 kernel/trace/ftrace.c static __always_inline unsigned long
__always_inline  1125 kernel/trace/ftrace.c static __always_inline struct ftrace_func_entry *
__always_inline   247 kernel/trace/ring_buffer.c static __always_inline void *
__always_inline  1905 kernel/trace/ring_buffer.c static __always_inline void *__rb_page_index(struct buffer_page *bpage, unsigned index)
__always_inline  1910 kernel/trace/ring_buffer.c static __always_inline struct ring_buffer_event *
__always_inline  1917 kernel/trace/ring_buffer.c static __always_inline struct ring_buffer_event *
__always_inline  1923 kernel/trace/ring_buffer.c static __always_inline unsigned rb_page_commit(struct buffer_page *bpage)
__always_inline  1929 kernel/trace/ring_buffer.c static __always_inline unsigned rb_page_size(struct buffer_page *bpage)
__always_inline  1934 kernel/trace/ring_buffer.c static __always_inline unsigned
__always_inline  1940 kernel/trace/ring_buffer.c static __always_inline unsigned
__always_inline  2460 kernel/trace/ring_buffer.c static __always_inline void
__always_inline  2515 kernel/trace/ring_buffer.c static __always_inline void rb_end_commit(struct ring_buffer_per_cpu *cpu_buffer)
__always_inline  2560 kernel/trace/ring_buffer.c static __always_inline bool
__always_inline  2574 kernel/trace/ring_buffer.c static __always_inline void
__always_inline  2611 kernel/trace/ring_buffer.c static __always_inline void
__always_inline  2691 kernel/trace/ring_buffer.c static __always_inline int
__always_inline  2713 kernel/trace/ring_buffer.c static __always_inline void
__always_inline  2870 kernel/trace/ring_buffer.c static __always_inline struct ring_buffer_event *
__always_inline   758 kernel/trace/trace.c static __always_inline void
__always_inline   767 kernel/trace/trace.c static __always_inline struct ring_buffer_event *
__always_inline   812 kernel/trace/trace.c static __always_inline void
__always_inline   621 kernel/trace/trace.h static __always_inline int trace_get_context_bit(void)
__always_inline   639 kernel/trace/trace.h static __always_inline int trace_test_and_set_recursion(int start, int max)
__always_inline   659 kernel/trace/trace.h static __always_inline void trace_clear_recursion(int bit)
__always_inline   889 kernel/trace/trace.h static __always_inline bool ftrace_hash_empty(struct ftrace_hash *hash)
__always_inline  1994 kernel/trace/trace.h static __always_inline void trace_iterator_reset(struct trace_iterator *iter)
__always_inline   402 kernel/trace/trace_functions.c static __always_inline void trace_stack(struct trace_array *tr)
__always_inline   147 lib/crypto/aes.c static __always_inline u32 subshift(u32 in[], int pos)
__always_inline   155 lib/crypto/aes.c static __always_inline u32 inv_subshift(u32 in[], int pos)
__always_inline    41 lib/lz4/lz4defs.h #define FORCE_INLINE __always_inline
__always_inline   188 lib/radix-tree.c static __always_inline unsigned long
__always_inline    84 lib/rbtree.c   static __always_inline void
__always_inline   226 lib/rbtree.c   static __always_inline void
__always_inline    32 lib/sort.c     __attribute_const__ __always_inline
__always_inline   177 lib/sort.c     __attribute_const__ __always_inline
__always_inline    34 lib/vdso/gettimeofday.c static __always_inline
__always_inline   478 lib/xz/xz_dec_lzma2.c static __always_inline void rc_normalize(struct rc_dec *rc)
__always_inline   497 lib/xz/xz_dec_lzma2.c static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
__always_inline   519 lib/xz/xz_dec_lzma2.c static __always_inline uint32_t rc_bittree(struct rc_dec *rc,
__always_inline   535 lib/xz/xz_dec_lzma2.c static __always_inline void rc_bittree_reverse(struct rc_dec *rc,
__always_inline    43 lib/zstd/fse_compress.c #define FORCE_INLINE static __always_inline
__always_inline    43 lib/zstd/fse_decompress.c #define FORCE_INLINE static __always_inline
__always_inline    43 lib/zstd/huf_decompress.c #define FORCE_INLINE static __always_inline
__always_inline    23 lib/zstd/zstd_internal.h #define FORCE_INLINE static __always_inline
__always_inline  1013 mm/gup.c       static __always_inline long __get_user_pages_locked(struct task_struct *tsk,
__always_inline  1602 mm/gup.c       static __always_inline long __gup_longterm_locked(struct task_struct *tsk,
__always_inline    49 mm/kasan/generic.c static __always_inline bool memory_is_poisoned_1(unsigned long addr)
__always_inline    61 mm/kasan/generic.c static __always_inline bool memory_is_poisoned_2_4_8(unsigned long addr,
__always_inline    76 mm/kasan/generic.c static __always_inline bool memory_is_poisoned_16(unsigned long addr)
__always_inline    87 mm/kasan/generic.c static __always_inline unsigned long bytes_is_nonzero(const u8 *start,
__always_inline   100 mm/kasan/generic.c static __always_inline unsigned long memory_is_nonzero(const void *start,
__always_inline   129 mm/kasan/generic.c static __always_inline bool memory_is_poisoned_n(unsigned long addr,
__always_inline   148 mm/kasan/generic.c static __always_inline bool memory_is_poisoned(unsigned long addr, size_t size)
__always_inline   168 mm/kasan/generic.c static __always_inline bool check_memory_region_inline(unsigned long addr,
__always_inline   338 mm/ksm.c       static __always_inline bool is_stable_node_chain(struct stable_node *chain)
__always_inline   343 mm/ksm.c       static __always_inline bool is_stable_node_dup(struct stable_node *dup)
__always_inline  1334 mm/ksm.c       static __always_inline
__always_inline  1348 mm/ksm.c       static __always_inline
__always_inline  1522 mm/ksm.c       static __always_inline struct page *chain_prune(struct stable_node **s_n_d,
__always_inline  1529 mm/ksm.c       static __always_inline struct page *chain(struct stable_node **s_n_d,
__always_inline    60 mm/list_lru.c  static __always_inline struct mem_cgroup *mem_cgroup_from_kmem(void *ptr)
__always_inline     9 mm/maccess.c   static __always_inline long
__always_inline    21 mm/maccess.c   static __always_inline long
__always_inline  1007 mm/memcontrol.c static __always_inline struct mem_cgroup *get_mem_cgroup_from_current(void)
__always_inline   106 mm/mempool.c   static __always_inline void kasan_poison_element(mempool_t *pool, void *element)
__always_inline   122 mm/mempool.c   static __always_inline void add_element(mempool_t *pool, void *element)
__always_inline   468 mm/mmap.c      static __always_inline void vma_rb_erase_ignore(struct vm_area_struct *vma,
__always_inline   482 mm/mmap.c      static __always_inline void vma_rb_erase(struct vm_area_struct *vma,
__always_inline   677 mm/mmap.c      static __always_inline void __vma_unlink_common(struct mm_struct *mm,
__always_inline   479 mm/page_alloc.c static __always_inline unsigned long __get_pfnblock_flags_mask(struct page *page,
__always_inline   505 mm/page_alloc.c static __always_inline int get_pfnblock_migratetype(struct page *page, unsigned long pfn)
__always_inline  1121 mm/page_alloc.c static __always_inline bool free_pages_prepare(struct page *page,
__always_inline  2182 mm/page_alloc.c static __always_inline
__always_inline  2223 mm/page_alloc.c static __always_inline struct page *__rmqueue_cma_fallback(struct zone *zone,
__always_inline  2635 mm/page_alloc.c static __always_inline bool
__always_inline  2717 mm/page_alloc.c static __always_inline struct page *
__always_inline    56 mm/page_owner.c static __always_inline depot_stack_handle_t create_dummy_stack(void)
__always_inline  2885 mm/slab.c      static __always_inline int alloc_block(struct kmem_cache *cachep,
__always_inline  3217 mm/slab.c      static __always_inline void *
__always_inline  3266 mm/slab.c      static __always_inline void *
__always_inline  3290 mm/slab.c      static __always_inline void *
__always_inline  3298 mm/slab.c      static __always_inline void *
__always_inline  3421 mm/slab.c      static __always_inline void __cache_free(struct kmem_cache *cachep, void *objp,
__always_inline  3492 mm/slab.c      static __always_inline void
__always_inline  3603 mm/slab.c      static __always_inline void *
__always_inline  3642 mm/slab.c      static __always_inline void *__do_kmalloc(size_t size, gfp_t flags,
__always_inline   347 mm/slab.h      static __always_inline int memcg_charge_slab(struct page *page,
__always_inline   387 mm/slab.h      static __always_inline void memcg_uncharge_slab(struct page *page, int order,
__always_inline   479 mm/slab.h      static __always_inline int charge_slab_page(struct page *page,
__always_inline   492 mm/slab.h      static __always_inline void uncharge_slab_page(struct page *page, int order,
__always_inline  1651 mm/slab_common.c static __always_inline void *__do_krealloc(const void *p, size_t new_size,
__always_inline   468 mm/slob.c      static __always_inline void *
__always_inline   352 mm/slub.c      static __always_inline void slab_lock(struct page *page)
__always_inline   358 mm/slub.c      static __always_inline void slab_unlock(struct page *page)
__always_inline  1396 mm/slub.c      static __always_inline void kfree_hook(void *x)
__always_inline  1402 mm/slub.c      static __always_inline bool slab_free_hook(struct kmem_cache *s, void *x)
__always_inline  2665 mm/slub.c      static __always_inline void maybe_wipe_obj_freeptr(struct kmem_cache *s,
__always_inline  2682 mm/slub.c      static __always_inline void *slab_alloc_node(struct kmem_cache *s,
__always_inline  2771 mm/slub.c      static __always_inline void *slab_alloc(struct kmem_cache *s,
__always_inline  2964 mm/slub.c      static __always_inline void do_slab_free(struct kmem_cache *s,
__always_inline  3006 mm/slub.c      static __always_inline void slab_free(struct kmem_cache *s, struct page *page,
__always_inline   173 mm/userfaultfd.c static __always_inline ssize_t __mcopy_atomic_hugetlb(struct mm_struct *dst_mm,
__always_inline   393 mm/userfaultfd.c static __always_inline ssize_t mfill_atomic_pte(struct mm_struct *dst_mm,
__always_inline   433 mm/userfaultfd.c static __always_inline ssize_t __mcopy_atomic(struct mm_struct *dst_mm,
__always_inline   374 mm/vmalloc.c   static __always_inline unsigned long
__always_inline   380 mm/vmalloc.c   static __always_inline unsigned long
__always_inline   392 mm/vmalloc.c   static __always_inline unsigned long
__always_inline   437 mm/vmalloc.c   static __always_inline struct rb_node **
__always_inline   482 mm/vmalloc.c   static __always_inline struct list_head *
__always_inline   500 mm/vmalloc.c   static __always_inline void
__always_inline   539 mm/vmalloc.c   static __always_inline void
__always_inline   624 mm/vmalloc.c   static __always_inline void
__always_inline   686 mm/vmalloc.c   static __always_inline void
__always_inline   764 mm/vmalloc.c   static __always_inline bool
__always_inline   788 mm/vmalloc.c   static __always_inline struct vmap_area *
__always_inline   890 mm/vmalloc.c   static __always_inline enum fit_type
__always_inline   916 mm/vmalloc.c   static __always_inline int
__always_inline  1007 mm/vmalloc.c   static __always_inline unsigned long
__always_inline  1646 mm/vmscan.c    static __always_inline void update_lru_sizes(struct lruvec *lruvec,
__always_inline  1397 net/core/flow_dissector.c static __always_inline void __flow_hash_secret_init(void)
__always_inline    26 net/core/secure_seq.c static __always_inline void net_secret_init(void)
__always_inline    31 net/core/secure_seq.c static __always_inline void ts_secret_init(void)
__always_inline  4114 net/core/skbuff.c static __always_inline unsigned int skb_ext_total_length(void)
__always_inline    36 net/ipv6/ila/ila_xlat.c static __always_inline void __ila_hash_secret_init(void)
__always_inline  2406 net/netfilter/nf_conntrack_core.c static __always_inline unsigned int total_extension_size(void)
__always_inline     9 samples/bpf/hash_func01.h static __always_inline
__always_inline   140 samples/bpf/hbm_kern.h static __always_inline void hbm_init_vqueue(struct hbm_vqueue *qdp, int rate)
__always_inline   148 samples/bpf/hbm_kern.h static __always_inline void hbm_init_edt_vqueue(struct hbm_vqueue *qdp,
__always_inline   160 samples/bpf/hbm_kern.h static __always_inline void hbm_update_stats(struct hbm_queue_stats *qsp,
__always_inline   138 samples/bpf/sockex3_kern.c static __always_inline void parse_ip_proto(struct __sk_buff *skb,
__always_inline    35 samples/bpf/syscall_tp_kern.c static __always_inline void count(void *map)
__always_inline    48 samples/bpf/tc_l2_redirect_kern.c static __always_inline bool is_vip_addr(__be16 eth_proto, __be32 daddr)
__always_inline    73 samples/bpf/test_map_in_map_kern.c static __always_inline int do_reg_lookup(void *inner_map, u32 port)
__always_inline    81 samples/bpf/test_map_in_map_kern.c static __always_inline int do_inline_array_lookup(void *inner_map, u32 port)
__always_inline    92 samples/bpf/test_map_in_map_kern.c static __always_inline int do_inline_hash_lookup(void *inner_map, u32 port)
__always_inline    35 samples/bpf/xdp_adjust_tail_kern.c static __always_inline void count_icmp(void)
__always_inline    45 samples/bpf/xdp_adjust_tail_kern.c static __always_inline void swap_mac(void *data, struct ethhdr *orig_eth)
__always_inline    55 samples/bpf/xdp_adjust_tail_kern.c static __always_inline __u16 csum_fold_helper(__u32 csum)
__always_inline    60 samples/bpf/xdp_adjust_tail_kern.c static __always_inline void ipv4_csum(void *data_start, int data_size,
__always_inline    67 samples/bpf/xdp_adjust_tail_kern.c static __always_inline int send_icmp4_too_big(struct xdp_md *xdp)
__always_inline   117 samples/bpf/xdp_adjust_tail_kern.c static __always_inline int handle_ipv4(struct xdp_md *xdp)
__always_inline    34 samples/bpf/xdp_fwd_kern.c static __always_inline int ip_decrease_ttl(struct iphdr *iph)
__always_inline    43 samples/bpf/xdp_fwd_kern.c static __always_inline int xdp_fwd_flags(struct xdp_md *ctx, u32 flags)
__always_inline    44 samples/bpf/xdp_monitor_kern.c static __always_inline
__always_inline   111 samples/bpf/xdp_redirect_cpu_kern.c static __always_inline
__always_inline   154 samples/bpf/xdp_redirect_cpu_kern.c static __always_inline
__always_inline   176 samples/bpf/xdp_redirect_cpu_kern.c static __always_inline
__always_inline   188 samples/bpf/xdp_redirect_cpu_kern.c static __always_inline
__always_inline   468 samples/bpf/xdp_redirect_cpu_kern.c static __always_inline
__always_inline   485 samples/bpf/xdp_redirect_cpu_kern.c static __always_inline
__always_inline   591 samples/bpf/xdp_redirect_cpu_kern.c static __always_inline
__always_inline    57 samples/bpf/xdp_rxq_info_kern.c static __always_inline
__always_inline    36 samples/bpf/xdp_tx_iptunnel_kern.c static __always_inline void count_tx(u32 protocol)
__always_inline    45 samples/bpf/xdp_tx_iptunnel_kern.c static __always_inline int get_dport(void *trans_data, void *data_end,
__always_inline    67 samples/bpf/xdp_tx_iptunnel_kern.c static __always_inline void set_ethhdr(struct ethhdr *new_eth,
__always_inline    77 samples/bpf/xdp_tx_iptunnel_kern.c static __always_inline int handle_ipv4(struct xdp_md *xdp)
__always_inline   152 samples/bpf/xdp_tx_iptunnel_kern.c static __always_inline int handle_ipv6(struct xdp_md *xdp)
__always_inline   204 sound/soc/soc-dapm.c static __always_inline void dapm_widget_invalidate_paths(
__always_inline  1186 sound/soc/soc-dapm.c static __always_inline int is_connected_ep(struct snd_soc_dapm_widget *widget,
__always_inline    68 tools/arch/x86/include/asm/atomic.h static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
__always_inline    14 tools/include/asm-generic/bitops/__ffs.h static __always_inline unsigned long __ffs(unsigned long word)
__always_inline    13 tools/include/asm-generic/bitops/__fls.h static __always_inline unsigned long __fls(unsigned long word)
__always_inline    13 tools/include/asm-generic/bitops/fls.h static __always_inline int fls(unsigned int x)
__always_inline    19 tools/include/asm-generic/bitops/fls64.h static __always_inline int fls64(__u64 x)
__always_inline    27 tools/include/asm-generic/bitops/fls64.h static __always_inline int fls64(__u64 x)
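
The tools/ bitops headers mirror the kernel's find-first/last-set helpers; with compiler builtins available they are one-liners, which is exactly the case __always_inline is meant for. A sketch (toy_ prefixes added to avoid clashing with the real names):

#include <stdint.h>

#define __always_inline inline __attribute__((__always_inline__))

/* index of the least significant set bit; undefined for word == 0, like the kernel's __ffs() */
static __always_inline unsigned long toy__ffs(unsigned long word)
{
        return (unsigned long)__builtin_ctzl(word);
}

/* one-based index of the most significant set bit, 0 when x == 0 */
static __always_inline int toy_fls(unsigned int x)
{
        return x ? 32 - __builtin_clz(x) : 0;
}

static __always_inline int toy_fls64(uint64_t x)
{
        return x ? 64 - __builtin_clzll(x) : 0;
}
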
__always_inline    17 tools/include/linux/compiler.h #ifndef __always_inline
__always_inline   105 tools/include/linux/compiler.h static __always_inline void __read_once_size(const volatile void *p, void *res, int size)
__always_inline   119 tools/include/linux/compiler.h static __always_inline void __write_once_size(volatile void *p, void *res, int size)
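
__read_once_size()/__write_once_size() copy through a volatile pointer of the matching width so the compiler can neither tear nor cache the access; the switch on size disappears at compile time because the function is always inlined with a constant sizeof. A read-side sketch (no barrier or ordering semantics implied; TOY_READ_ONCE and toy_read_once_size are stand-ins, not the real interfaces):

#include <stdint.h>
#include <string.h>

#define __always_inline inline __attribute__((__always_inline__))

static __always_inline void toy_read_once_size(const volatile void *p, void *res, int size)
{
        switch (size) {
        case 1: *(uint8_t  *)res = *(const volatile uint8_t  *)p; break;
        case 2: *(uint16_t *)res = *(const volatile uint16_t *)p; break;
        case 4: *(uint32_t *)res = *(const volatile uint32_t *)p; break;
        case 8: *(uint64_t *)res = *(const volatile uint64_t *)p; break;
        default:
                memcpy(res, (const void *)p, (size_t)size);     /* odd sizes: plain copy */
        }
}

#define TOY_READ_ONCE(x)                                                        \
        ({ typeof(x) __val; toy_read_once_size(&(x), &__val, sizeof(x)); __val; })
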
__always_inline    77 tools/include/linux/hash.h static __always_inline u32 hash_64_generic(u64 val, unsigned int bits)
__always_inline   186 tools/include/linux/rbtree_augmented.h static __always_inline struct rb_node *
__always_inline   290 tools/include/linux/rbtree_augmented.h static __always_inline void
__always_inline   299 tools/include/linux/rbtree_augmented.h static __always_inline void
__always_inline    84 tools/lib/rbtree.c static __always_inline void
__always_inline   226 tools/lib/rbtree.c static __always_inline void
__always_inline    74 tools/testing/selftests/bpf/progs/bpf_flow.c static __always_inline int export_flow_keys(struct bpf_flow_keys *keys,
__always_inline    91 tools/testing/selftests/bpf/progs/bpf_flow.c static __always_inline void *bpf_flow_dissect_get_header(struct __sk_buff *skb,
__always_inline   115 tools/testing/selftests/bpf/progs/bpf_flow.c static __always_inline int parse_eth_proto(struct __sk_buff *skb, __be16 proto)
__always_inline   151 tools/testing/selftests/bpf/progs/bpf_flow.c static __always_inline int parse_ip_proto(struct __sk_buff *skb, __u8 proto)
__always_inline   242 tools/testing/selftests/bpf/progs/bpf_flow.c static __always_inline int parse_ipv6_proto(struct __sk_buff *skb, __u8 nexthdr)
__always_inline    70 tools/testing/selftests/bpf/progs/pyperf.h static __always_inline void *get_thread_state(void *tls_base, PidData *pidData)
__always_inline    81 tools/testing/selftests/bpf/progs/pyperf.h static __always_inline bool get_frame_data(void *frame_ptr, PidData *pidData,
__always_inline   155 tools/testing/selftests/bpf/progs/pyperf.h static __always_inline int __on_event(struct pt_regs *ctx)
__always_inline   269 tools/testing/selftests/bpf/progs/strobemeta.h static __always_inline void *calc_location(struct strobe_value_loc *loc,
__always_inline   330 tools/testing/selftests/bpf/progs/strobemeta.h static __always_inline void read_int_var(struct strobemeta_cfg *cfg,
__always_inline   345 tools/testing/selftests/bpf/progs/strobemeta.h static __always_inline uint64_t read_str_var(struct strobemeta_cfg *cfg,
__always_inline   375 tools/testing/selftests/bpf/progs/strobemeta.h static __always_inline void *read_map_var(struct strobemeta_cfg *cfg,
__always_inline   443 tools/testing/selftests/bpf/progs/strobemeta.h static __always_inline void *read_strobe_meta(struct task_struct *task,
__always_inline     7 tools/testing/selftests/bpf/progs/test_jhash.h static __always_inline u32 rol32(u32 word, unsigned int shift)
__always_inline   207 tools/testing/selftests/bpf/progs/test_l4lb.c static __always_inline __u32 get_packet_hash(struct packet_description *pckt,
__always_inline   217 tools/testing/selftests/bpf/progs/test_l4lb.c static __always_inline bool get_packet_dst(struct real_definition **real,
__always_inline   236 tools/testing/selftests/bpf/progs/test_l4lb.c static __always_inline int parse_icmpv6(void *data, void *data_end, __u64 off,
__always_inline   258 tools/testing/selftests/bpf/progs/test_l4lb.c static __always_inline int parse_icmp(void *data, void *data_end, __u64 off,
__always_inline   283 tools/testing/selftests/bpf/progs/test_l4lb.c static __always_inline bool parse_udp(void *data, __u64 off, void *data_end,
__always_inline   302 tools/testing/selftests/bpf/progs/test_l4lb.c static __always_inline bool parse_tcp(void *data, __u64 off, void *data_end,
__always_inline   324 tools/testing/selftests/bpf/progs/test_l4lb.c static __always_inline int process_packet(void *data, __u64 off, void *data_end,
__always_inline    53 tools/testing/selftests/bpf/progs/test_lwt_seg6local.c static __always_inline struct ip6_srh_t *get_srh(struct __sk_buff *skb)
__always_inline    87 tools/testing/selftests/bpf/progs/test_lwt_seg6local.c static __always_inline
__always_inline   117 tools/testing/selftests/bpf/progs/test_lwt_seg6local.c static __always_inline
__always_inline   176 tools/testing/selftests/bpf/progs/test_lwt_seg6local.c static __always_inline
__always_inline   220 tools/testing/selftests/bpf/progs/test_lwt_seg6local.c static __always_inline
__always_inline   258 tools/testing/selftests/bpf/progs/test_lwt_seg6local.c static __always_inline
__always_inline    53 tools/testing/selftests/bpf/progs/test_seg6_loop.c static __always_inline struct ip6_srh_t *get_srh(struct __sk_buff *skb)
__always_inline    87 tools/testing/selftests/bpf/progs/test_seg6_loop.c static __always_inline int update_tlv_pad(struct __sk_buff *skb,
__always_inline   117 tools/testing/selftests/bpf/progs/test_seg6_loop.c static __always_inline int is_valid_tlv_boundary(struct __sk_buff *skb,
__always_inline   179 tools/testing/selftests/bpf/progs/test_seg6_loop.c static __always_inline int add_tlv(struct __sk_buff *skb,
__always_inline    20 tools/testing/selftests/bpf/progs/test_skb_cgroup_id_kern.c static __always_inline void log_nth_level(struct __sk_buff *skb, __u32 level)
__always_inline    21 tools/testing/selftests/bpf/progs/test_sysctl_loop1.c static __always_inline int is_tcp_mem(struct bpf_sysctl *ctx)
__always_inline    22 tools/testing/selftests/bpf/progs/test_sysctl_prog.c static __always_inline int is_tcp_mem(struct bpf_sysctl *ctx)
__always_inline    57 tools/testing/selftests/bpf/progs/test_tc_tunnel.c static __always_inline void set_ipv4_csum(struct iphdr *iph)
__always_inline    72 tools/testing/selftests/bpf/progs/test_tc_tunnel.c static __always_inline int encap_ipv4(struct __sk_buff *skb, __u8 encap_proto,
__always_inline   217 tools/testing/selftests/bpf/progs/test_tc_tunnel.c static __always_inline int encap_ipv6(struct __sk_buff *skb, __u8 encap_proto,
__always_inline    26 tools/testing/selftests/bpf/progs/test_tcp_check_syncookie_kern.c static __always_inline __s64 gen_syncookie(void *data_end, struct bpf_sock *sk,
__always_inline    45 tools/testing/selftests/bpf/progs/test_tcp_check_syncookie_kern.c static __always_inline void check_syncookie(void *ctx, void *data,
__always_inline   163 tools/testing/selftests/bpf/progs/test_tcp_estats.c static __always_inline void tcp_estats_ev_init(struct tcp_estats_event *event,
__always_inline   171 tools/testing/selftests/bpf/progs/test_tcp_estats.c static __always_inline void unaligned_u32_set(unsigned char *to, __u8 *from)
__always_inline   179 tools/testing/selftests/bpf/progs/test_tcp_estats.c static __always_inline void conn_id_ipv4_init(struct tcp_estats_conn_id *conn_id,
__always_inline   188 tools/testing/selftests/bpf/progs/test_tcp_estats.c static __always_inline void conn_id_ipv6_init(struct tcp_estats_conn_id *conn_id,
__always_inline   211 tools/testing/selftests/bpf/progs/test_tcp_estats.c static __always_inline void tcp_estats_conn_id_init(struct tcp_estats_conn_id *conn_id,
__always_inline   227 tools/testing/selftests/bpf/progs/test_tcp_estats.c static __always_inline void tcp_estats_init(struct sock *sk,
__always_inline   236 tools/testing/selftests/bpf/progs/test_tcp_estats.c static __always_inline void send_basic_event(struct sock *sk,
__always_inline     5 tools/testing/selftests/bpf/progs/test_verif_scale2.c #define ATTR __always_inline
__always_inline    39 tools/testing/selftests/bpf/progs/test_xdp.c static __always_inline void count_tx(__u32 protocol)
__always_inline    48 tools/testing/selftests/bpf/progs/test_xdp.c static __always_inline int get_dport(void *trans_data, void *data_end,
__always_inline    70 tools/testing/selftests/bpf/progs/test_xdp.c static __always_inline void set_ethhdr(struct ethhdr *new_eth,
__always_inline    80 tools/testing/selftests/bpf/progs/test_xdp.c static __always_inline int handle_ipv4(struct xdp_md *xdp)
__always_inline   153 tools/testing/selftests/bpf/progs/test_xdp.c static __always_inline int handle_ipv6(struct xdp_md *xdp)
__always_inline    35 tools/testing/selftests/bpf/progs/test_xdp_loop.c static __always_inline void count_tx(__u32 protocol)
__always_inline    44 tools/testing/selftests/bpf/progs/test_xdp_loop.c static __always_inline int get_dport(void *trans_data, void *data_end,
__always_inline    66 tools/testing/selftests/bpf/progs/test_xdp_loop.c static __always_inline void set_ethhdr(struct ethhdr *new_eth,
__always_inline    76 tools/testing/selftests/bpf/progs/test_xdp_loop.c static __always_inline int handle_ipv4(struct xdp_md *xdp)
__always_inline   149 tools/testing/selftests/bpf/progs/test_xdp_loop.c static __always_inline int handle_ipv6(struct xdp_md *xdp)
__always_inline    56 tools/testing/selftests/bpf/progs/test_xdp_vlan.c static __always_inline
__always_inline   212 tools/testing/selftests/bpf/progs/test_xdp_vlan.c static __always_inline
__always_inline   225 tools/testing/selftests/bpf/progs/test_xdp_vlan.c static __always_inline
__always_inline    27 tools/testing/selftests/bpf/progs/xdping_kern.c static __always_inline void swap_src_dst_mac(void *data)
__always_inline    43 tools/testing/selftests/bpf/progs/xdping_kern.c static __always_inline __u16 csum_fold_helper(__wsum sum)
__always_inline    49 tools/testing/selftests/bpf/progs/xdping_kern.c static __always_inline __u16 ipv4_csum(void *data_start, int data_size)
__always_inline    59 tools/testing/selftests/bpf/progs/xdping_kern.c static __always_inline int icmp_check(struct xdp_md *ctx, int type)
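
csum_fold_helper() and ipv4_csum() in the xdping sample compute the usual Internet (RFC 1071) ones'-complement checksum: accumulate 16-bit words into a wider register, fold the carries back down, and invert. A self-contained sketch under those assumptions (toy_* names, even-length and 2-byte-aligned buffers only):

#include <stdint.h>
#include <stddef.h>

#define __always_inline inline __attribute__((__always_inline__))

/* fold a 32-bit accumulator down to 16 bits, adding the carries back in */
static __always_inline uint16_t toy_csum_fold(uint32_t csum)
{
        csum = (csum & 0xffff) + (csum >> 16);
        csum = (csum & 0xffff) + (csum >> 16);          /* second pass absorbs the new carry */
        return (uint16_t)~csum;
}

static __always_inline uint16_t toy_ipv4_csum(const void *data, size_t len)
{
        const uint16_t *p = data;                       /* assumes 2-byte alignment */
        uint32_t sum = 0;

        while (len >= 2) {
                sum += *p++;
                len -= 2;
        }
        return toy_csum_fold(sum);
}
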
__always_inline   149 tools/virtio/ringtest/main.h static __always_inline
__always_inline   164 tools/virtio/ringtest/main.h static __always_inline void __write_once_size(volatile void *p, void *res, int size)