Searched refs:__always_inline (Results 1 - 124 of 124) sorted by relevance

/linux-4.1.27/tools/lib/lockdep/uinclude/linux/
rbtree_augmented.h 1 #define __always_inline macro
/linux-4.1.27/arch/arm64/include/asm/
bitrev.h 3 static __always_inline __attribute_const__ u32 __arch_bitrev32(u32 x) __arch_bitrev32()
9 static __always_inline __attribute_const__ u16 __arch_bitrev16(u16 x) __arch_bitrev16()
14 static __always_inline __attribute_const__ u8 __arch_bitrev8(u8 x) __arch_bitrev8()
jump_label.h 29 static __always_inline bool arch_static_branch(struct static_key *key) arch_static_branch()
stackprotector.h 26 static __always_inline void boot_init_stack_canary(void) boot_init_stack_canary()
arch_timer.h 35 static __always_inline arch_timer_reg_write_cp15()
61 static __always_inline arch_timer_reg_read_cp15()
insn.h 221 static __always_inline bool aarch64_insn_is_##abbr(u32 code) \
223 static __always_inline u32 aarch64_insn_get_##abbr##_value(void) \
/linux-4.1.27/arch/arm/include/asm/
bitrev.h 4 static __always_inline __attribute_const__ u32 __arch_bitrev32(u32 x) __arch_bitrev32()
10 static __always_inline __attribute_const__ u16 __arch_bitrev16(u16 x) __arch_bitrev16()
15 static __always_inline __attribute_const__ u8 __arch_bitrev8(u8 x) __arch_bitrev8()
jump_label.h 16 static __always_inline bool arch_static_branch(struct static_key *key) arch_static_branch()
stacktrace.h 17 static __always_inline arm_get_current_stackframe()
stackprotector.h 26 static __always_inline void boot_init_stack_canary(void) boot_init_stack_canary()
arch_timer.h 20 static __always_inline arch_timer_reg_write_cp15()
46 static __always_inline arch_timer_reg_read_cp15()
/linux-4.1.27/include/asm-generic/
preempt.h 8 static __always_inline int preempt_count(void) preempt_count()
13 static __always_inline int *preempt_count_ptr(void) preempt_count_ptr()
18 static __always_inline void preempt_count_set(int pc) preempt_count_set()
34 static __always_inline void set_preempt_need_resched(void) set_preempt_need_resched()
38 static __always_inline void clear_preempt_need_resched(void) clear_preempt_need_resched()
42 static __always_inline bool test_preempt_need_resched(void) test_preempt_need_resched()
51 static __always_inline void __preempt_count_add(int val) __preempt_count_add()
56 static __always_inline void __preempt_count_sub(int val) __preempt_count_sub()
61 static __always_inline bool __preempt_count_dec_and_test(void) __preempt_count_dec_and_test()
74 static __always_inline bool should_resched(int preempt_offset) should_resched()
fixmap.h 29 static __always_inline unsigned long fix_to_virt(const unsigned int idx) fix_to_virt()
/linux-4.1.27/arch/alpha/include/asm/
compiler.h 14 #undef __always_inline macro
15 #define __always_inline inline __attribute__((always_inline)) macro
xchg.h 108 static __always_inline unsigned long ____xchg()
240 static __always_inline unsigned long ____cmpxchg()
/linux-4.1.27/tools/include/linux/
compiler.h 4 #ifndef __always_inline
5 # define __always_inline inline __attribute__((always_inline)) macro
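
Both this tools/ copy and arch/alpha's compiler.h above pin __always_inline to the attribute form instead of trusting a bare inline. A minimal standalone sketch (not kernel code) of what the attribute buys, compilable with any GCC-compatible compiler:

    #include <stdio.h>

    #define __always_inline inline __attribute__((always_inline))

    /* A bare "inline" is only a hint the optimizer may ignore; with
     * always_inline the body is substituted at every call site even at
     * -O0, and GCC reports an error if it ever cannot inline it. */
    static __always_inline unsigned int rotl8(unsigned int x)
    {
        return (x << 8) | (x >> 24);
    }

    int main(void)
    {
        printf("%08x\n", rotl8(0x12345678u));
        return 0;
    }
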
/linux-4.1.27/arch/x86/include/asm/
preempt.h 20 static __always_inline int preempt_count(void) preempt_count()
25 static __always_inline void preempt_count_set(int pc) preempt_count_set()
51 static __always_inline void set_preempt_need_resched(void) set_preempt_need_resched()
56 static __always_inline void clear_preempt_need_resched(void) clear_preempt_need_resched()
61 static __always_inline bool test_preempt_need_resched(void) test_preempt_need_resched()
70 static __always_inline void __preempt_count_add(int val) __preempt_count_add()
75 static __always_inline void __preempt_count_sub(int val) __preempt_count_sub()
85 static __always_inline bool __preempt_count_dec_and_test(void) __preempt_count_dec_and_test()
93 static __always_inline bool should_resched(int preempt_offset) should_resched()
current.h 12 static __always_inline struct task_struct *get_current(void) get_current()
dmi.h 10 static __always_inline __init void *dmi_alloc(unsigned len) dmi_alloc()
jump_label.h 19 static __always_inline bool arch_static_branch(struct static_key *key) arch_static_branch()
smap.h 49 static __always_inline void clac(void) clac()
55 static __always_inline void stac(void) stac()
uaccess_64.h 26 static __always_inline __must_check unsigned long copy_user_generic()
51 static __always_inline __must_check __copy_from_user_nocheck()
94 static __always_inline __must_check __copy_from_user()
101 static __always_inline __must_check __copy_to_user_nocheck()
144 static __always_inline __must_check __copy_to_user()
151 static __always_inline __must_check __copy_in_user()
204 static __must_check __always_inline int __copy_from_user_inatomic()
210 static __must_check __always_inline int __copy_to_user_inatomic()
spinlock.h 43 static __always_inline bool static_key_false(struct static_key *key);
53 static __always_inline void __ticket_lock_spinning(arch_spinlock_t *lock, __ticket_lock_spinning()
84 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) arch_spin_value_unlocked()
102 static __always_inline void arch_spin_lock(arch_spinlock_t *lock) arch_spin_lock()
127 static __always_inline int arch_spin_trylock(arch_spinlock_t *lock) arch_spin_trylock()
142 static __always_inline void arch_spin_unlock(arch_spinlock_t *lock) arch_spin_unlock()
176 static __always_inline void arch_spin_lock_flags(arch_spinlock_t *lock, arch_spin_lock_flags()
pvclock.h 62 static __always_inline pvclock_get_nsec_offset()
70 static __always_inline __pvclock_read_cycles()
uaccess_32.h 43 static __always_inline unsigned long __must_check __copy_to_user_inatomic()
81 static __always_inline unsigned long __must_check __copy_to_user()
88 static __always_inline unsigned long __copy_from_user_inatomic()
136 static __always_inline unsigned long __copy_from_user()
158 static __always_inline unsigned long __copy_from_user_nocache(void *to, __copy_from_user_nocache()
180 static __always_inline unsigned long __copy_from_user_inatomic_nocache()
string_32.h 32 static __always_inline void *__memcpy(void *to, const void *from, size_t n) __memcpy()
51 static __always_inline void *__constant_memcpy(void *to, const void *from, __constant_memcpy()
226 static __always_inline __constant_c_memset()
256 static __always_inline __constant_c_and_count_memset()
tsc.h 35 static __always_inline cycles_t vget_cycles(void) vget_cycles()
bitops.h 8 * __always_inline to avoid problems with older gcc's inlining heuristics.
71 static __always_inline void set_bit()
109 static __always_inline void clear_bit()
216 static __always_inline int test_and_set_bit_lock()
308 static __always_inline int constant_test_bit(long nr, const volatile unsigned long *addr) constant_test_bit()
479 static __always_inline int fls64(__u64 x) fls64()
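
The bitops.h comment at line 8 states the motivation: these helpers are forced inline so the __builtin_constant_p() dispatch inside them is resolved at each call site rather than hidden in an out-of-line copy by older gcc heuristics; the constant_test_bit() hit at line 308 is the constant-path half of such a split. A rough standalone sketch of the pattern (illustrative names, not the kernel's exact code):

    #define __always_inline inline __attribute__((always_inline))
    #define LONG_BITS (8 * sizeof(unsigned long))

    /* Runtime-variable path, deliberately an ordinary out-of-line function. */
    static int variable_test_bit_sketch(long nr, const volatile unsigned long *addr)
    {
        return (addr[nr / LONG_BITS] >> (nr % LONG_BITS)) & 1;
    }

    /* Because the wrapper is always inlined, __builtin_constant_p(nr) is
     * evaluated per call site; a constant bit number collapses to one
     * mask test with no function call at all. */
    static __always_inline int test_bit_sketch(long nr, const volatile unsigned long *addr)
    {
        if (__builtin_constant_p(nr))
            return (addr[nr / LONG_BITS] >> (nr % LONG_BITS)) & 1;
        return variable_test_bit_sketch(nr, addr);
    }
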
string_64.h 9 static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n) __inline_memcpy()
barrier.h 99 static __always_inline void rdtsc_barrier(void) rdtsc_barrier()
fpu-internal.h 116 static __always_inline __pure bool use_eager_fpu(void) use_eager_fpu()
121 static __always_inline __pure bool use_xsaveopt(void) use_xsaveopt()
126 static __always_inline __pure bool use_xsave(void) use_xsave()
131 static __always_inline __pure bool use_fxsr(void) use_fxsr()
stackprotector.h 58 static __always_inline void boot_init_stack_canary(void) boot_init_stack_canary()
msr.h 113 static __always_inline unsigned long long __native_read_tsc(void) __native_read_tsc()
cpufeature.h 414 static __always_inline __pure bool __static_cpu_has(u16 bit) __static_cpu_has()
497 static __always_inline __pure bool _static_cpu_has_safe(u16 bit) _static_cpu_has_safe()
percpu.h 520 static __always_inline int x86_this_cpu_constant_test_bit(unsigned int nr, x86_this_cpu_constant_test_bit()
paravirt.h 715 static __always_inline void __ticket_lock_spinning(struct arch_spinlock *lock, __ticket_lock_spinning()
721 static __always_inline void __ticket_unlock_kick(struct arch_spinlock *lock, __ticket_unlock_kick()
/linux-4.1.27/include/asm-generic/bitops/
builtin-__ffs.h 10 static __always_inline unsigned long __ffs(unsigned long word) __ffs()
builtin-fls.h 11 static __always_inline int fls(int x) fls()
fls64.h 18 static __always_inline int fls64(__u64 x) fls64()
26 static __always_inline int fls64(__u64 x) fls64()
builtin-__fls.h 10 static __always_inline unsigned long __fls(unsigned long word) __fls()
builtin-ffs.h 12 static __always_inline int ffs(int x) ffs()
fls.h 12 static __always_inline int fls(int x) fls()
__ffs.h 12 static __always_inline unsigned long __ffs(unsigned long word) __ffs()
__fls.h 12 static __always_inline unsigned long __fls(unsigned long word) __fls()
/linux-4.1.27/arch/powerpc/include/asm/
cmpxchg.h 15 static __always_inline unsigned long __xchg_u32()
40 static __always_inline unsigned long __xchg_u32_local()
58 static __always_inline unsigned long __xchg_u64()
77 static __always_inline unsigned long __xchg_u64_local()
101 static __always_inline unsigned long __xchg()
116 static __always_inline unsigned long __xchg_local()
149 static __always_inline unsigned long __cmpxchg_u32()
172 static __always_inline unsigned long __cmpxchg_u32_local()
195 static __always_inline unsigned long __cmpxchg_u64()
217 static __always_inline unsigned long __cmpxchg_u64_local()
243 static __always_inline unsigned long __cmpxchg()
259 static __always_inline unsigned long __cmpxchg_local()
jump_label.h 21 static __always_inline bool arch_static_branch(struct static_key *key) arch_static_branch()
spinlock.h 57 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) arch_spin_value_unlocked()
/linux-4.1.27/arch/sparc/include/asm/
jump_label.h 10 static __always_inline bool arch_static_branch(struct static_key *key) arch_static_branch()
/linux-4.1.27/tools/include/asm-generic/bitops/
atomic.h 16 static __always_inline int test_bit(unsigned int nr, const unsigned long *addr) test_bit()
__ffs.h 12 static __always_inline unsigned long __ffs(unsigned long word) __ffs()
/linux-4.1.27/arch/s390/include/asm/
jump_label.h 15 static __always_inline bool arch_static_branch(struct static_key *key) arch_static_branch()
irq.h 79 static __always_inline void inc_irq_stat(enum interruption_class irq) inc_irq_stat()
/linux-4.1.27/arch/sh/include/asm/
stackprotector.h 15 static __always_inline void boot_init_stack_canary(void) boot_init_stack_canary()
uaccess.h 114 static __always_inline unsigned long __copy_from_user()
120 static __always_inline unsigned long __must_check __copy_to_user()
unaligned-sh4a.h 34 static __always_inline u32 sh4a_get_unaligned_cpu32(const u8 *p) sh4a_get_unaligned_cpu32()
dwarf.h 207 static __always_inline unsigned long dwarf_read_arch_reg(unsigned int reg) dwarf_read_arch_reg()
/linux-4.1.27/include/linux/
bottom_half.h 10 static __always_inline void __local_bh_disable_ip(unsigned long ip, unsigned int cnt) __local_bh_disable_ip()
mm_inline.h 25 static __always_inline void add_page_to_lru_list(struct page *page, add_page_to_lru_list()
34 static __always_inline void del_page_from_lru_list(struct page *page, del_page_from_lru_list()
65 static __always_inline enum lru_list page_off_lru(struct page *page) page_off_lru()
89 static __always_inline enum lru_list page_lru(struct page *page) page_lru()
jump_label.h 123 static __always_inline bool static_key_false(struct static_key *key) static_key_false()
128 static __always_inline bool static_key_true(struct static_key *key) static_key_true()
157 static __always_inline void jump_label_init(void) jump_label_init()
162 static __always_inline bool static_key_false(struct static_key *key) static_key_false()
169 static __always_inline bool static_key_true(struct static_key *key) static_key_true()
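
include/linux/jump_label.h lists two pairs of static_key_false()/static_key_true(): one backed by the per-arch arch_static_branch() hits above (asm-goto code patching) and a fallback pair for builds without jump labels. A hedged sketch of the fallback flavour only, which reduces to an always-inlined counter test (not the kernel's exact code):

    #include <stdatomic.h>
    #include <stdbool.h>

    #define __always_inline inline __attribute__((always_inline))

    struct static_key_sketch { atomic_int enabled; };

    /* Without code patching, a "static" key is just a counter that is
     * normally zero; forcing the helper inline keeps every branch site
     * down to a single load and test. */
    static __always_inline bool static_key_false_sketch(struct static_key_sketch *key)
    {
        return atomic_load(&key->enabled) > 0;
    }
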
slab.h 246 static __always_inline int kmalloc_index(size_t size) kmalloc_index()
297 static __always_inline void *__kmalloc_node(size_t size, gfp_t flags, int node) __kmalloc_node()
302 static __always_inline void *kmem_cache_alloc_node(struct kmem_cache *s, gfp_t flags, int node) kmem_cache_alloc_node()
316 static __always_inline void * kmem_cache_alloc_node_trace()
326 static __always_inline void *kmem_cache_alloc_trace(struct kmem_cache *s, kmem_cache_alloc_trace()
335 static __always_inline void * kmem_cache_alloc_node_trace()
352 static __always_inline void * kmalloc_order_trace()
359 static __always_inline void *kmalloc_large(size_t size, gfp_t flags) kmalloc_large()
418 static __always_inline void *kmalloc(size_t size, gfp_t flags) kmalloc()
443 static __always_inline int kmalloc_size(int n) kmalloc_size()
458 static __always_inline void *kmalloc_node(size_t size, gfp_t flags, int node) kmalloc_node()
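
The slab.h hits (kmalloc_index() at line 246, kmalloc() at line 418) show another common reason for __always_inline: when the requested size is a compile-time constant, the size-to-cache lookup folds away at each call site and only runtime-sized requests take the generic path. A simplified standalone sketch of that shape (hypothetical helper names, not the kernel's API):

    #include <stdlib.h>

    #define __always_inline inline __attribute__((always_inline))

    /* Hypothetical stand-ins for the per-size caches and the generic path. */
    static void *alloc_from_cache_sketch(unsigned int index) { return malloc(64u << index); }
    static void *alloc_generic_sketch(size_t size)           { return malloc(size); }

    static __always_inline unsigned int size_to_index_sketch(size_t size)
    {
        if (size <= 64)  return 0;
        if (size <= 128) return 1;
        if (size <= 256) return 2;
        return 3; /* sketch only; real code hands large sizes elsewhere */
    }

    /* With both helpers forced inline, a literal size such as 96 compiles
     * down to a direct call into one cache; a variable size falls back to
     * the generic allocator. */
    static __always_inline void *kmalloc_like_sketch(size_t size)
    {
        if (__builtin_constant_p(size))
            return alloc_from_cache_sketch(size_to_index_sketch(size));
        return alloc_generic_sketch(size);
    }
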
hash.h 51 static __always_inline u64 hash_64(u64 val, unsigned int bits) hash_64()
compiler.h 199 static __always_inline void __read_once_size(const volatile void *p, void *res, int size) __read_once_size()
213 static __always_inline void __write_once_size(volatile void *p, void *res, int size) __write_once_size()
331 #ifndef __always_inline
332 #define __always_inline inline macro
463 # define nokprobe_inline __always_inline
math64.h 117 static __always_inline u32 __iter_div_u64_rem()
rbtree_augmented.h 136 static __always_inline struct rb_node * __rb_erase_augmented()
233 static __always_inline void rb_erase_augmented()
memcontrol.h 522 static __always_inline struct kmem_cache * memcg_kmem_get_cache()
539 static __always_inline void memcg_kmem_put_cache(struct kmem_cache *cachep) memcg_kmem_put_cache()
545 static __always_inline struct mem_cgroup *mem_cgroup_from_kmem(void *ptr) mem_cgroup_from_kmem()
time64.h 182 static __always_inline void timespec64_add_ns(struct timespec64 *a, u64 ns) timespec64_add_ns()
nodemask.h 111 * freeable items not being freed). So we must use __always_inline here
113 * this situation they will also need to be annotated as __always_inline
116 static __always_inline void __node_set(int node, volatile nodemask_t *dstp) __node_set()
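
The truncated nodemask.h comment (lines 111-113) gives a different motivation: if one-line wrappers such as __node_set() are left out of line, the shared copy causes the section-mismatch problem the comment alludes to ("freeable items not being freed"), so they must be forced inline into every caller. A minimal sketch of such a wrapper (illustrative types, not the kernel's):

    #define __always_inline inline __attribute__((always_inline))

    /* Illustrative stand-ins for nodemask_t and set_bit(). */
    typedef struct { unsigned long bits[4]; } nodemask_sketch_t;

    static void set_bit_sketch(int nr, volatile unsigned long *addr)
    {
        addr[nr / (8 * sizeof(long))] |= 1UL << (nr % (8 * sizeof(long)));
    }

    /* Forcing the wrapper inline guarantees no standalone copy is ever
     * emitted; each caller simply contains the set_bit_sketch() call. */
    static __always_inline void node_set_sketch(int node, volatile nodemask_sketch_t *dstp)
    {
        set_bit_sketch(node, dstp->bits);
    }
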
radix-tree.h 340 static __always_inline void ** radix_tree_iter_init()
394 static __always_inline long radix_tree_chunk_size()
411 static __always_inline void ** radix_tree_next_slot()
time.h 233 static __always_inline void timespec_add_ns(struct timespec *a, u64 ns) timespec_add_ns()
async_tx.h 30 #define __async_inline __always_inline
compiler-gcc.h 245 #define __always_inline inline __attribute__((always_inline)) macro
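
Read together with the include/linux/compiler.h hits above (lines 331-332 and 463), this completes the definition chain: compiler-gcc.h supplies the mandatory attribute form, the generic compiler.h falls back to a plain inline hint for other compilers, and nokprobe_inline reuses whichever form won. A condensed sketch of that chain (layout mine, not the headers verbatim):

    /* compiler-gcc.h flavour: inlining is mandatory on GCC-compatible builds. */
    #ifdef __GNUC__
    # define __always_inline inline __attribute__((always_inline))
    #endif

    /* compiler.h fallback: if no compiler-specific header defined it,
     * degrade to an ordinary hint the optimizer is free to ignore. */
    #ifndef __always_inline
    # define __always_inline inline
    #endif

    /* Other annotations layer on top of whichever definition is active. */
    #define nokprobe_inline __always_inline
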
perf_event.h 770 static __always_inline void perf_sw_event()
784 static __always_inline void perf_sw_event_sched()
quota.h 488 static __always_inline unsigned dquot_state_types(unsigned flags, unsigned flag) dquot_state_types()
sched.h 2949 static __always_inline bool need_resched(void) need_resched()
mm.h 923 static __always_inline void *lowmem_page_address(const struct page *page) lowmem_page_address()
/linux-4.1.27/arch/ia64/include/asm/
spinlock.h 40 static __always_inline void __ticket_spin_lock(arch_spinlock_t *lock) __ticket_spin_lock()
60 static __always_inline int __ticket_spin_trylock(arch_spinlock_t *lock) __ticket_spin_trylock()
69 static __always_inline void __ticket_spin_unlock(arch_spinlock_t *lock) __ticket_spin_unlock()
77 static __always_inline void __ticket_spin_unlock_wait(arch_spinlock_t *lock) __ticket_spin_unlock_wait()
105 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) arch_spin_value_unlocked()
121 static __always_inline void arch_spin_lock(arch_spinlock_t *lock) arch_spin_lock()
126 static __always_inline int arch_spin_trylock(arch_spinlock_t *lock) arch_spin_trylock()
131 static __always_inline void arch_spin_unlock(arch_spinlock_t *lock) arch_spin_unlock()
136 static __always_inline void arch_spin_lock_flags(arch_spinlock_t *lock, arch_spin_lock_flags()
152 static __always_inline void arch_read_lock_flags()
203 static __always_inline void arch_write_lock_flags()
/linux-4.1.27/arch/xtensa/include/asm/
stacktrace.h 20 static __always_inline unsigned long *stack_pointer(struct task_struct *task) stack_pointer()
fixmap.h 60 static __always_inline unsigned long fix_to_virt(const unsigned int idx) fix_to_virt()
/linux-4.1.27/arch/mips/include/asm/
jump_label.h 29 static __always_inline bool arch_static_branch(struct static_key *key) arch_static_branch()
stackprotector.h 28 static __always_inline void boot_init_stack_canary(void) boot_init_stack_canary()
stacktrace.h 23 static __always_inline void prepare_frametrace(struct pt_regs *regs) prepare_frametrace()
/linux-4.1.27/mm/kasan/
kasan.c 69 static __always_inline bool memory_is_poisoned_1(unsigned long addr) memory_is_poisoned_1()
81 static __always_inline bool memory_is_poisoned_2(unsigned long addr) memory_is_poisoned_2()
98 static __always_inline bool memory_is_poisoned_4(unsigned long addr) memory_is_poisoned_4()
115 static __always_inline bool memory_is_poisoned_8(unsigned long addr) memory_is_poisoned_8()
132 static __always_inline bool memory_is_poisoned_16(unsigned long addr) memory_is_poisoned_16()
152 static __always_inline unsigned long bytes_is_zero(const u8 *start, bytes_is_zero()
165 static __always_inline unsigned long memory_is_zero(const void *start, memory_is_zero()
194 static __always_inline bool memory_is_poisoned_n(unsigned long addr, memory_is_poisoned_n()
213 static __always_inline bool memory_is_poisoned(unsigned long addr, size_t size) memory_is_poisoned()
236 static __always_inline void check_memory_region(unsigned long addr, check_memory_region()
/linux-4.1.27/drivers/infiniband/hw/mthca/
mthca_wqe.h 116 static __always_inline void mthca_set_data_seg(struct mthca_data_seg *dseg, mthca_set_data_seg()
124 static __always_inline void mthca_set_data_seg_inval(struct mthca_data_seg *dseg) mthca_set_data_seg_inval()
mthca_qp.c 1563 static __always_inline void set_raddr_seg(struct mthca_raddr_seg *rseg, set_raddr_seg()
1571 static __always_inline void set_atomic_seg(struct mthca_atomic_seg *aseg, set_atomic_seg()
/linux-4.1.27/arch/m32r/include/asm/
cmpxchg.h 16 static __always_inline unsigned long __xchg()
70 static __always_inline unsigned long __xchg_local()
/linux-4.1.27/arch/openrisc/include/asm/
fixmap.h 64 static __always_inline unsigned long fix_to_virt(const unsigned int idx) fix_to_virt()
/linux-4.1.27/net/core/
secure_seq.c 19 static __always_inline void net_secret_init(void) net_secret_init()
flow_dissector.c 272 static __always_inline void __flow_hash_secret_init(void) __flow_hash_secret_init()
277 static __always_inline u32 __flow_hash_3words(u32 a, u32 b, u32 c) __flow_hash_3words()
/linux-4.1.27/arch/x86/vdso/
vclock_gettime.c 230 notrace static int __always_inline do_realtime(struct timespec *ts) do_realtime()
251 notrace static int __always_inline do_monotonic(struct timespec *ts) do_monotonic()
/linux-4.1.27/arch/m68k/include/asm/
uaccess_mm.h 238 static __always_inline unsigned long __constant_copy_from_user()
319 static __always_inline unsigned long __constant_copy_to_user()
/linux-4.1.27/drivers/clocksource/
arm_arch_timer.c 78 static __always_inline arch_timer_reg_write()
107 static __always_inline arch_timer_reg_read()
140 static __always_inline irqreturn_t timer_handler(const int access, timer_handler()
184 static __always_inline void timer_set_mode(const int access, int mode, timer_set_mode()
224 static __always_inline void set_next_event(const int access, unsigned long evt, set_next_event()
/linux-4.1.27/arch/sh/mm/
pmb.c 72 static __always_inline unsigned long mk_pmb_entry(unsigned int entry) mk_pmb_entry()
77 static __always_inline unsigned long mk_pmb_addr(unsigned int entry) mk_pmb_addr()
82 static __always_inline unsigned long mk_pmb_data(unsigned int entry) mk_pmb_data()
87 static __always_inline unsigned int pmb_ppn_in_range(unsigned long ppn) pmb_ppn_in_range()
99 static __always_inline unsigned long pmb_cache_flags(void) pmb_cache_flags()
/linux-4.1.27/lib/
rbtree.c 72 static __always_inline void __rb_insert()
201 static __always_inline void ____rb_erase_color()
404 * This instantiates the same __always_inline functions as in the non-augmented
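
The rbtree.c comment at line 404 points at the instantiation trick used throughout lib/ and mm/: one worker parameterized by callbacks is force-inlined into thin wrappers, so each wrapper becomes a specialized copy with its callbacks folded in (and dummy callbacks optimized away). A compressed sketch of the pattern, not the actual rbtree code:

    #include <stddef.h>

    #define __always_inline inline __attribute__((always_inline))

    struct node_sketch { int val; };

    /* Generic worker: inlined into every wrapper below, so "augment" is a
     * compile-time constant there and can be inlined or dropped entirely. */
    static __always_inline void
    visit_sketch(struct node_sketch *n, void (*augment)(struct node_sketch *))
    {
        n->val++;
        if (augment)
            augment(n);
    }

    static void augment_cb_sketch(struct node_sketch *n) { n->val *= 2; }

    /* Two out-of-line entry points instantiated from the single worker. */
    void visit_plain_sketch(struct node_sketch *n)     { visit_sketch(n, NULL); }
    void visit_augmented_sketch(struct node_sketch *n) { visit_sketch(n, augment_cb_sketch); }
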
radix-tree.c 150 static __always_inline unsigned long radix_tree_find_next_bit()
/linux-4.1.27/kernel/locking/
qrwlock.c 33 static __always_inline void rspin_until_writer_unlock()
mutex.c 109 static __always_inline void ww_mutex_lock_acquired(struct ww_mutex *ww, ww_mutex_lock_acquired()
156 static __always_inline void ww_mutex_set_context_fastpath()
200 static __always_inline void ww_mutex_set_context_slowpath()
504 static __always_inline int __sched __mutex_lock_common()
/linux-4.1.27/mm/
slab.h 233 static __always_inline int memcg_charge_slab(struct kmem_cache *s, memcg_charge_slab()
243 static __always_inline void memcg_uncharge_slab(struct kmem_cache *s, int order) memcg_uncharge_slab()
swap.c 117 static __always_inline put_unrefcounted_compound_page()
165 static __always_inline put_refcounted_compound_page()
slob.c 426 static __always_inline void * __do_kmalloc_node()
gup.c 578 static __always_inline long __get_user_pages_locked(struct task_struct *tsk, __get_user_pages_locked()
715 __always_inline long __get_user_pages_unlocked(struct task_struct *tsk, struct mm_struct *mm, __get_user_pages_unlocked()
slab.c 3152 static __always_inline void * slab_alloc_node()
3210 static __always_inline void * __do_cache_alloc()
3234 static __always_inline void * __do_cache_alloc()
3242 static __always_inline void * slab_alloc()
3482 static __always_inline void * __do_kmalloc_node()
3513 static __always_inline void *__do_kmalloc(size_t size, gfp_t flags, __do_kmalloc()
slub.c 339 static __always_inline void slab_lock(struct page *page) slab_lock()
344 static __always_inline void slab_unlock(struct page *page) slab_unlock()
2425 static __always_inline void *slab_alloc_node(struct kmem_cache *s, slab_alloc_node()
2512 static __always_inline void *slab_alloc(struct kmem_cache *s, slab_alloc()
2701 static __always_inline void slab_free(struct kmem_cache *s, slab_free()
slab_common.c 1072 static __always_inline void *__do_krealloc(const void *p, size_t new_size, __do_krealloc()
/linux-4.1.27/lib/xz/
xz_dec_lzma2.c 478 static __always_inline void rc_normalize(struct rc_dec *rc) rc_normalize()
497 static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob) rc_bit()
519 static __always_inline uint32_t rc_bittree(struct rc_dec *rc, rc_bittree()
535 static __always_inline void rc_bittree_reverse(struct rc_dec *rc, rc_bittree_reverse()
/linux-4.1.27/kernel/trace/
trace.h 472 static __always_inline int trace_get_context_bit(void) trace_get_context_bit()
490 static __always_inline int trace_test_and_set_recursion(int start, int max) trace_test_and_set_recursion()
510 static __always_inline void trace_clear_recursion(int bit) trace_clear_recursion()
ring_buffer.c 2680 static __always_inline int trace_recursive_lock()
2705 static __always_inline void trace_recursive_unlock()
2815 static __always_inline void rb_wakeups()
ftrace.c 1210 static bool __always_inline ftrace_hash_empty(struct ftrace_hash *hash) ftrace_hash_empty()
/linux-4.1.27/drivers/md/
dm-switch.c 360 static __always_inline unsigned long parse_hex(const char **string) parse_hex()
dm-cache-target.c 557 __always_inline
/linux-4.1.27/arch/x86/kernel/cpu/
common.c 279 static __always_inline void setup_smep(struct cpuinfo_x86 *c) setup_smep()
292 static __always_inline void setup_smap(struct cpuinfo_x86 *c) setup_smap()
/linux-4.1.27/kernel/time/
timekeeping.c 413 static __always_inline u64 __ktime_get_fast_ns(struct tk_fast *tkf) __ktime_get_fast_ns()
1508 static __always_inline void timekeeping_apply_adjustment(struct timekeeper *tk, timekeeping_apply_adjustment()
1590 static __always_inline void timekeeping_freqadjust(struct timekeeper *tk, timekeeping_freqadjust()
/linux-4.1.27/drivers/md/bcache/
bset.h 395 static __always_inline int64_t bkey_cmp(const struct bkey *l, bkey_cmp()
/linux-4.1.27/kernel/sched/
cputime.c 257 static __always_inline bool steal_account_process_tick(void) steal_account_process_tick()
sched.h 978 static __always_inline bool static_branch_##name(struct static_key *key) \
1393 static __always_inline arch_scale_freq_capacity()
fair.c 431 static __always_inline
2436 static __always_inline u64 decay_load(u64 val, u64 n) decay_load()
2521 static __always_inline int __update_entity_runnable_avg(u64 now, int cpu, __update_entity_runnable_avg()
3144 static __always_inline void return_cfs_rq_runtime(struct cfs_rq *cfs_rq);
3563 static __always_inline account_cfs_rq_runtime()
3910 static __always_inline void return_cfs_rq_runtime(struct cfs_rq *cfs_rq) return_cfs_rq_runtime()
4130 static __always_inline void return_cfs_rq_runtime(struct cfs_rq *cfs_rq) {} return_cfs_rq_runtime()
/linux-4.1.27/drivers/base/
devres.c 84 static __always_inline struct devres * alloc_dr(dr_release_t release, alloc_dr()
/linux-4.1.27/fs/
namei.c 682 static __always_inline void set_root(struct nameidata *nd) set_root()
689 static __always_inline unsigned set_root_rcu(struct nameidata *nd) set_root_rcu()
870 static __always_inline int follow_link()
/linux-4.1.27/arch/x86/kvm/
vmx.c 1379 static __always_inline unsigned long vmcs_readl(unsigned long field) vmcs_readl()
1388 static __always_inline u16 vmcs_read16(unsigned long field) vmcs_read16()
1393 static __always_inline u32 vmcs_read32(unsigned long field) vmcs_read32()
1398 static __always_inline u64 vmcs_read64(unsigned long field) vmcs_read64()
emulate.c 643 static __always_inline int __linearize(struct x86_emulate_ctxt *ctxt, __linearize()
833 static __always_inline int do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt, do_insn_fetch_bytes()
/linux-4.1.27/scripts/
kernel-doc 2084 $prototype =~ s/^__always_inline +//;
checkpatch.pl 300 our $Inline = qr{inline|__always_inline|noinline|__inline|__inline__};
/linux-4.1.27/drivers/infiniband/hw/mlx4/
qp.c 2448 static __always_inline void set_raddr_seg(struct mlx4_wqe_raddr_seg *rseg, set_raddr_seg()
/linux-4.1.27/drivers/infiniband/hw/mlx5/
qp.c 1842 static __always_inline void set_raddr_seg(struct mlx5_wqe_raddr_seg *rseg, set_raddr_seg()
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmfmac/
cfg80211.c 2931 static __always_inline void brcmf_delay(u32 ms) brcmf_delay()

Completed in 4911 milliseconds