
Searched refs:lock (Results 1 – 200 of 4625) sorted by relevance


/linux-4.4.14/drivers/gpu/drm/ttm/
ttm_lock.c
45 void ttm_lock_init(struct ttm_lock *lock) in ttm_lock_init() argument
47 spin_lock_init(&lock->lock); in ttm_lock_init()
48 init_waitqueue_head(&lock->queue); in ttm_lock_init()
49 lock->rw = 0; in ttm_lock_init()
50 lock->flags = 0; in ttm_lock_init()
51 lock->kill_takers = false; in ttm_lock_init()
52 lock->signal = SIGKILL; in ttm_lock_init()
56 void ttm_read_unlock(struct ttm_lock *lock) in ttm_read_unlock() argument
58 spin_lock(&lock->lock); in ttm_read_unlock()
59 if (--lock->rw == 0) in ttm_read_unlock()
[all …]
ttm_bo_manager.c
47 spinlock_t lock; member
76 spin_lock(&rman->lock); in ttm_bo_man_get_node()
81 spin_unlock(&rman->lock); in ttm_bo_man_get_node()
99 spin_lock(&rman->lock); in ttm_bo_man_put_node()
101 spin_unlock(&rman->lock); in ttm_bo_man_put_node()
118 spin_lock_init(&rman->lock); in ttm_bo_man_init()
128 spin_lock(&rman->lock); in ttm_bo_man_takedown()
131 spin_unlock(&rman->lock); in ttm_bo_man_takedown()
136 spin_unlock(&rman->lock); in ttm_bo_man_takedown()
145 spin_lock(&rman->lock); in ttm_bo_man_debug()
[all …]
/linux-4.4.14/include/linux/
spinlock_api_up.h
19 #define assert_raw_spin_locked(lock) do { (void)(lock); } while (0) argument
27 #define ___LOCK(lock) \ argument
28 do { __acquire(lock); (void)(lock); } while (0)
30 #define __LOCK(lock) \ argument
31 do { preempt_disable(); ___LOCK(lock); } while (0)
33 #define __LOCK_BH(lock) \ argument
34 do { __local_bh_disable_ip(_THIS_IP_, SOFTIRQ_LOCK_OFFSET); ___LOCK(lock); } while (0)
36 #define __LOCK_IRQ(lock) \ argument
37 do { local_irq_disable(); __LOCK(lock); } while (0)
39 #define __LOCK_IRQSAVE(lock, flags) \ argument
[all …]
spinlock.h
93 extern void __raw_spin_lock_init(raw_spinlock_t *lock, const char *name,
95 # define raw_spin_lock_init(lock) \ argument
99 __raw_spin_lock_init((lock), #lock, &__key); \
103 # define raw_spin_lock_init(lock) \ argument
104 do { *(lock) = __RAW_SPIN_LOCK_UNLOCKED(lock); } while (0)
107 #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock) argument
110 #define raw_spin_is_contended(lock) ((lock)->break_lock) argument
114 #define raw_spin_is_contended(lock) arch_spin_is_contended(&(lock)->raw_lock) argument
116 #define raw_spin_is_contended(lock) (((void)(lock), 0)) argument
137 #define raw_spin_unlock_wait(lock) arch_spin_unlock_wait(&(lock)->raw_lock) argument
[all …]
rwlock_api_smp.h
18 void __lockfunc _raw_read_lock(rwlock_t *lock) __acquires(lock);
19 void __lockfunc _raw_write_lock(rwlock_t *lock) __acquires(lock);
20 void __lockfunc _raw_read_lock_bh(rwlock_t *lock) __acquires(lock);
21 void __lockfunc _raw_write_lock_bh(rwlock_t *lock) __acquires(lock);
22 void __lockfunc _raw_read_lock_irq(rwlock_t *lock) __acquires(lock);
23 void __lockfunc _raw_write_lock_irq(rwlock_t *lock) __acquires(lock);
24 unsigned long __lockfunc _raw_read_lock_irqsave(rwlock_t *lock)
25 __acquires(lock);
26 unsigned long __lockfunc _raw_write_lock_irqsave(rwlock_t *lock)
27 __acquires(lock);
[all …]
rwlock.h
18 extern void __rwlock_init(rwlock_t *lock, const char *name,
20 # define rwlock_init(lock) \ argument
24 __rwlock_init((lock), #lock, &__key); \
27 # define rwlock_init(lock) \ argument
28 do { *(lock) = __RW_LOCK_UNLOCKED(lock); } while (0)
32 extern void do_raw_read_lock(rwlock_t *lock) __acquires(lock);
33 #define do_raw_read_lock_flags(lock, flags) do_raw_read_lock(lock) argument
34 extern int do_raw_read_trylock(rwlock_t *lock);
35 extern void do_raw_read_unlock(rwlock_t *lock) __releases(lock);
36 extern void do_raw_write_lock(rwlock_t *lock) __acquires(lock);
[all …]
spinlock_api_smp.h
22 void __lockfunc _raw_spin_lock(raw_spinlock_t *lock) __acquires(lock);
23 void __lockfunc _raw_spin_lock_nested(raw_spinlock_t *lock, int subclass)
24 __acquires(lock);
25 void __lockfunc _raw_spin_lock_bh_nested(raw_spinlock_t *lock, int subclass)
26 __acquires(lock);
28 _raw_spin_lock_nest_lock(raw_spinlock_t *lock, struct lockdep_map *map)
29 __acquires(lock);
30 void __lockfunc _raw_spin_lock_bh(raw_spinlock_t *lock) __acquires(lock);
31 void __lockfunc _raw_spin_lock_irq(raw_spinlock_t *lock)
32 __acquires(lock);
[all …]
spinlock_up.h
28 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
30 lock->slock = 0; in arch_spin_lock()
35 arch_spin_lock_flags(arch_spinlock_t *lock, unsigned long flags) in arch_spin_lock_flags() argument
38 lock->slock = 0; in arch_spin_lock_flags()
42 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
44 char oldval = lock->slock; in arch_spin_trylock()
46 lock->slock = 0; in arch_spin_trylock()
52 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
55 lock->slock = 1; in arch_spin_unlock()
61 #define arch_read_lock(lock) do { barrier(); (void)(lock); } while (0) argument
[all …]
mutex.h
99 static inline void mutex_destroy(struct mutex *lock) {} in mutex_destroy() argument
119 extern void __mutex_init(struct mutex *lock, const char *name,
128 static inline int mutex_is_locked(struct mutex *lock) in mutex_is_locked() argument
130 return atomic_read(&lock->count) != 1; in mutex_is_locked()
138 extern void mutex_lock_nested(struct mutex *lock, unsigned int subclass);
139 extern void _mutex_lock_nest_lock(struct mutex *lock, struct lockdep_map *nest_lock);
141 extern int __must_check mutex_lock_interruptible_nested(struct mutex *lock,
143 extern int __must_check mutex_lock_killable_nested(struct mutex *lock,
146 #define mutex_lock(lock) mutex_lock_nested(lock, 0) argument
147 #define mutex_lock_interruptible(lock) mutex_lock_interruptible_nested(lock, 0) argument
[all …]
lockdep.h
267 extern void lockdep_reset_lock(struct lockdep_map *lock);
280 extern void lockdep_init_map(struct lockdep_map *lock, const char *name,
296 #define lockdep_set_class(lock, key) \ argument
297 lockdep_init_map(&(lock)->dep_map, #key, key, 0)
298 #define lockdep_set_class_and_name(lock, key, name) \ argument
299 lockdep_init_map(&(lock)->dep_map, name, key, 0)
300 #define lockdep_set_class_and_subclass(lock, key, sub) \ argument
301 lockdep_init_map(&(lock)->dep_map, #key, key, sub)
302 #define lockdep_set_subclass(lock, sub) \ argument
303 lockdep_init_map(&(lock)->dep_map, #lock, \
[all …]
seqlock.h
403 spinlock_t lock; member
413 .lock = __SPIN_LOCK_UNLOCKED(lockname) \
419 spin_lock_init(&(x)->lock); \
445 spin_lock(&sl->lock); in write_seqlock()
452 spin_unlock(&sl->lock); in write_sequnlock()
457 spin_lock_bh(&sl->lock); in write_seqlock_bh()
464 spin_unlock_bh(&sl->lock); in write_sequnlock_bh()
469 spin_lock_irq(&sl->lock); in write_seqlock_irq()
476 spin_unlock_irq(&sl->lock); in write_sequnlock_irq()
483 spin_lock_irqsave(&sl->lock, flags); in __write_seqlock_irqsave()
[all …]
ww_mutex.h
85 static inline void ww_mutex_init(struct ww_mutex *lock, in ww_mutex_init() argument
88 __mutex_init(&lock->base, ww_class->mutex_name, &ww_class->mutex_key); in ww_mutex_init()
89 lock->ctx = NULL; in ww_mutex_init()
91 lock->ww_class = ww_class; in ww_mutex_init()
189 extern int __must_check __ww_mutex_lock(struct ww_mutex *lock,
191 extern int __must_check __ww_mutex_lock_interruptible(struct ww_mutex *lock,
223 static inline int ww_mutex_lock(struct ww_mutex *lock, struct ww_acquire_ctx *ctx) in ww_mutex_lock() argument
226 return __ww_mutex_lock(lock, ctx); in ww_mutex_lock()
228 mutex_lock(&lock->base); in ww_mutex_lock()
262 static inline int __must_check ww_mutex_lock_interruptible(struct ww_mutex *lock, in ww_mutex_lock_interruptible() argument
[all …]
rtmutex.h
84 static inline int rt_mutex_is_locked(struct rt_mutex *lock) in rt_mutex_is_locked() argument
86 return lock->owner != NULL; in rt_mutex_is_locked()
89 extern void __rt_mutex_init(struct rt_mutex *lock, const char *name);
90 extern void rt_mutex_destroy(struct rt_mutex *lock);
92 extern void rt_mutex_lock(struct rt_mutex *lock);
93 extern int rt_mutex_lock_interruptible(struct rt_mutex *lock);
94 extern int rt_mutex_timed_lock(struct rt_mutex *lock,
97 extern int rt_mutex_trylock(struct rt_mutex *lock);
99 extern void rt_mutex_unlock(struct rt_mutex *lock);
osq_lock.h
27 static inline void osq_lock_init(struct optimistic_spin_queue *lock) in osq_lock_init() argument
29 atomic_set(&lock->tail, OSQ_UNLOCKED_VAL); in osq_lock_init()
32 extern bool osq_lock(struct optimistic_spin_queue *lock);
33 extern void osq_unlock(struct optimistic_spin_queue *lock);
35 static inline bool osq_is_locked(struct optimistic_spin_queue *lock) in osq_is_locked() argument
37 return atomic_read(&lock->tail) != OSQ_UNLOCKED_VAL; in osq_is_locked()
/linux-4.4.14/drivers/staging/lustre/lustre/ldlm/
ldlm_lock.c
165 struct ldlm_lock *ldlm_lock_get(struct ldlm_lock *lock) in ldlm_lock_get() argument
167 atomic_inc(&lock->l_refc); in ldlm_lock_get()
168 return lock; in ldlm_lock_get()
177 void ldlm_lock_put(struct ldlm_lock *lock) in ldlm_lock_put() argument
179 LASSERT(lock->l_resource != LP_POISON); in ldlm_lock_put()
180 LASSERT(atomic_read(&lock->l_refc) > 0); in ldlm_lock_put()
181 if (atomic_dec_and_test(&lock->l_refc)) { in ldlm_lock_put()
184 LDLM_DEBUG(lock, in ldlm_lock_put()
187 res = lock->l_resource; in ldlm_lock_put()
188 LASSERT(lock->l_flags & LDLM_FL_DESTROYED); in ldlm_lock_put()
[all …]
ldlm_request.c
93 struct ldlm_lock *lock = lwd->lwd_lock; in ldlm_expired_completion_wait() local
97 if (lock->l_conn_export == NULL) { in ldlm_expired_completion_wait()
101 (s64)lock->l_last_activity, in ldlm_expired_completion_wait()
103 lock->l_last_activity)); in ldlm_expired_completion_wait()
104 …LDLM_DEBUG(lock, "lock timed out (enqueued at %lld, %llds ago); not entering recovery in server co… in ldlm_expired_completion_wait()
105 (s64)lock->l_last_activity, in ldlm_expired_completion_wait()
107 lock->l_last_activity)); in ldlm_expired_completion_wait()
112 ldlm_lock_to_ns(lock)); in ldlm_expired_completion_wait()
119 obd = lock->l_conn_export->exp_obd; in ldlm_expired_completion_wait()
122 LDLM_ERROR(lock, "lock timed out (enqueued at %lld, %llds ago), entering recovery for %s@%s", in ldlm_expired_completion_wait()
[all …]
ldlm_flock.c
78 ldlm_same_flock_owner(struct ldlm_lock *lock, struct ldlm_lock *new) in ldlm_same_flock_owner() argument
81 lock->l_policy_data.l_flock.owner) && in ldlm_same_flock_owner()
82 (new->l_export == lock->l_export)); in ldlm_same_flock_owner()
86 ldlm_flocks_overlap(struct ldlm_lock *lock, struct ldlm_lock *new) in ldlm_flocks_overlap() argument
89 lock->l_policy_data.l_flock.end) && in ldlm_flocks_overlap()
91 lock->l_policy_data.l_flock.start)); in ldlm_flocks_overlap()
95 ldlm_flock_destroy(struct ldlm_lock *lock, ldlm_mode_t mode, __u64 flags) in ldlm_flock_destroy() argument
97 LDLM_DEBUG(lock, "ldlm_flock_destroy(mode: %d, flags: 0x%llx)", in ldlm_flock_destroy()
101 LASSERT(hlist_unhashed(&lock->l_exp_flock_hash)); in ldlm_flock_destroy()
103 list_del_init(&lock->l_res_link); in ldlm_flock_destroy()
[all …]
ldlm_lockd.c
120 struct ldlm_lock_desc *ld, struct ldlm_lock *lock) in ldlm_handle_bl_callback() argument
124 LDLM_DEBUG(lock, "client blocking AST callback handler"); in ldlm_handle_bl_callback()
126 lock_res_and_lock(lock); in ldlm_handle_bl_callback()
127 lock->l_flags |= LDLM_FL_CBPENDING; in ldlm_handle_bl_callback()
129 if (lock->l_flags & LDLM_FL_CANCEL_ON_BLOCK) in ldlm_handle_bl_callback()
130 lock->l_flags |= LDLM_FL_CANCEL; in ldlm_handle_bl_callback()
132 do_ast = !lock->l_readers && !lock->l_writers; in ldlm_handle_bl_callback()
133 unlock_res_and_lock(lock); in ldlm_handle_bl_callback()
137 "Lock %p already unused, calling callback (%p)\n", lock, in ldlm_handle_bl_callback()
138 lock->l_blocking_ast); in ldlm_handle_bl_callback()
[all …]
ldlm_extent.c
66 __u64 ldlm_extent_shift_kms(struct ldlm_lock *lock, __u64 old_kms) in ldlm_extent_shift_kms() argument
68 struct ldlm_resource *res = lock->l_resource; in ldlm_extent_shift_kms()
76 lock->l_flags |= LDLM_FL_KMS_IGNORE; in ldlm_extent_shift_kms()
110 struct ldlm_interval *ldlm_interval_alloc(struct ldlm_lock *lock) in ldlm_interval_alloc() argument
114 LASSERT(lock->l_resource->lr_type == LDLM_EXTENT); in ldlm_interval_alloc()
120 ldlm_interval_attach(node, lock); in ldlm_interval_alloc()
161 struct ldlm_lock *lock) in ldlm_extent_add_lock() argument
168 LASSERT(lock->l_granted_mode == lock->l_req_mode); in ldlm_extent_add_lock()
170 node = lock->l_tree_node; in ldlm_extent_add_lock()
174 idx = lock_mode_to_index(lock->l_granted_mode); in ldlm_extent_add_lock()
[all …]
l_lock.c
51 struct ldlm_resource *lock_res_and_lock(struct ldlm_lock *lock) in lock_res_and_lock() argument
53 spin_lock(&lock->l_lock); in lock_res_and_lock()
55 lock_res(lock->l_resource); in lock_res_and_lock()
57 lock->l_flags |= LDLM_FL_RES_LOCKED; in lock_res_and_lock()
58 return lock->l_resource; in lock_res_and_lock()
65 void unlock_res_and_lock(struct ldlm_lock *lock) in unlock_res_and_lock() argument
68 lock->l_flags &= ~LDLM_FL_RES_LOCKED; in unlock_res_and_lock()
70 unlock_res(lock->l_resource); in unlock_res_and_lock()
71 spin_unlock(&lock->l_lock); in unlock_res_and_lock()
ldlm_internal.h
131 void ldlm_grant_lock(struct ldlm_lock *lock, struct list_head *work_list);
132 int ldlm_fill_lvb(struct ldlm_lock *lock, struct req_capsule *pill,
147 int ldlm_lock_remove_from_lru(struct ldlm_lock *lock);
148 int ldlm_lock_remove_from_lru_nolock(struct ldlm_lock *lock);
149 void ldlm_lock_destroy_nolock(struct ldlm_lock *lock);
153 struct ldlm_lock *lock);
160 struct ldlm_lock_desc *ld, struct ldlm_lock *lock);
169 void ldlm_extent_add_lock(struct ldlm_resource *res, struct ldlm_lock *lock);
170 void ldlm_extent_unlink_lock(struct ldlm_lock *lock);
194 struct ldlm_interval *ldlm_interval_alloc(struct ldlm_lock *lock);
[all …]
/linux-4.4.14/kernel/locking/
spinlock_debug.c
16 void __raw_spin_lock_init(raw_spinlock_t *lock, const char *name, in __raw_spin_lock_init() argument
23 debug_check_no_locks_freed((void *)lock, sizeof(*lock)); in __raw_spin_lock_init()
24 lockdep_init_map(&lock->dep_map, name, key, 0); in __raw_spin_lock_init()
26 lock->raw_lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED; in __raw_spin_lock_init()
27 lock->magic = SPINLOCK_MAGIC; in __raw_spin_lock_init()
28 lock->owner = SPINLOCK_OWNER_INIT; in __raw_spin_lock_init()
29 lock->owner_cpu = -1; in __raw_spin_lock_init()
34 void __rwlock_init(rwlock_t *lock, const char *name, in __rwlock_init() argument
41 debug_check_no_locks_freed((void *)lock, sizeof(*lock)); in __rwlock_init()
42 lockdep_init_map(&lock->dep_map, name, key, 0); in __rwlock_init()
[all …]
mutex.c
50 __mutex_init(struct mutex *lock, const char *name, struct lock_class_key *key) in __mutex_init() argument
52 atomic_set(&lock->count, 1); in __mutex_init()
53 spin_lock_init(&lock->wait_lock); in __mutex_init()
54 INIT_LIST_HEAD(&lock->wait_list); in __mutex_init()
55 mutex_clear_owner(lock); in __mutex_init()
57 osq_lock_init(&lock->osq); in __mutex_init()
60 debug_mutex_init(lock, name, key); in __mutex_init()
95 void __sched mutex_lock(struct mutex *lock) in mutex_lock() argument
102 __mutex_fastpath_lock(&lock->count, __mutex_lock_slowpath); in mutex_lock()
103 mutex_set_owner(lock); in mutex_lock()
[all …]
spinlock.c
61 void __lockfunc __raw_##op##_lock(locktype##_t *lock) \
65 if (likely(do_raw_##op##_trylock(lock))) \
69 if (!(lock)->break_lock) \
70 (lock)->break_lock = 1; \
71 while (!raw_##op##_can_lock(lock) && (lock)->break_lock)\
72 arch_##op##_relax(&lock->raw_lock); \
74 (lock)->break_lock = 0; \
77 unsigned long __lockfunc __raw_##op##_lock_irqsave(locktype##_t *lock) \
84 if (likely(do_raw_##op##_trylock(lock))) \
89 if (!(lock)->break_lock) \
[all …]
rtmutex.c
50 rt_mutex_set_owner(struct rt_mutex *lock, struct task_struct *owner) in rt_mutex_set_owner() argument
54 if (rt_mutex_has_waiters(lock)) in rt_mutex_set_owner()
57 lock->owner = (struct task_struct *)val; in rt_mutex_set_owner()
60 static inline void clear_rt_mutex_waiters(struct rt_mutex *lock) in clear_rt_mutex_waiters() argument
62 lock->owner = (struct task_struct *) in clear_rt_mutex_waiters()
63 ((unsigned long)lock->owner & ~RT_MUTEX_HAS_WAITERS); in clear_rt_mutex_waiters()
66 static void fixup_rt_mutex_waiters(struct rt_mutex *lock) in fixup_rt_mutex_waiters() argument
68 if (!rt_mutex_has_waiters(lock)) in fixup_rt_mutex_waiters()
69 clear_rt_mutex_waiters(lock); in fixup_rt_mutex_waiters()
86 static inline void mark_rt_mutex_waiters(struct rt_mutex *lock) in mark_rt_mutex_waiters() argument
[all …]
mutex-debug.c
30 void debug_mutex_lock_common(struct mutex *lock, struct mutex_waiter *waiter) in debug_mutex_lock_common() argument
37 void debug_mutex_wake_waiter(struct mutex *lock, struct mutex_waiter *waiter) in debug_mutex_wake_waiter() argument
39 SMP_DEBUG_LOCKS_WARN_ON(!spin_is_locked(&lock->wait_lock)); in debug_mutex_wake_waiter()
40 DEBUG_LOCKS_WARN_ON(list_empty(&lock->wait_list)); in debug_mutex_wake_waiter()
51 void debug_mutex_add_waiter(struct mutex *lock, struct mutex_waiter *waiter, in debug_mutex_add_waiter() argument
54 SMP_DEBUG_LOCKS_WARN_ON(!spin_is_locked(&lock->wait_lock)); in debug_mutex_add_waiter()
60 void mutex_remove_waiter(struct mutex *lock, struct mutex_waiter *waiter, in mutex_remove_waiter() argument
72 void debug_mutex_unlock(struct mutex *lock) in debug_mutex_unlock() argument
75 DEBUG_LOCKS_WARN_ON(lock->magic != lock); in debug_mutex_unlock()
77 if (!lock->owner) in debug_mutex_unlock()
[all …]
lglock.c
21 arch_spinlock_t *lock; in lg_local_lock() local
25 lock = this_cpu_ptr(lg->lock); in lg_local_lock()
26 arch_spin_lock(lock); in lg_local_lock()
32 arch_spinlock_t *lock; in lg_local_unlock() local
35 lock = this_cpu_ptr(lg->lock); in lg_local_unlock()
36 arch_spin_unlock(lock); in lg_local_unlock()
43 arch_spinlock_t *lock; in lg_local_lock_cpu() local
47 lock = per_cpu_ptr(lg->lock, cpu); in lg_local_lock_cpu()
48 arch_spin_lock(lock); in lg_local_lock_cpu()
54 arch_spinlock_t *lock; in lg_local_unlock_cpu() local
[all …]
rtmutex-debug.c
41 static void printk_lock(struct rt_mutex *lock, int print_owner) in printk_lock() argument
43 if (lock->name) in printk_lock()
45 lock, lock->name); in printk_lock()
48 lock, lock->file, lock->line); in printk_lock()
50 if (print_owner && rt_mutex_owner(lock)) { in printk_lock()
51 printk(".. ->owner: %p\n", lock->owner); in printk_lock()
53 printk_task(rt_mutex_owner(lock)); in printk_lock()
71 struct rt_mutex *lock) in debug_rt_mutex_deadlock() argument
78 task = rt_mutex_owner(act_waiter->lock); in debug_rt_mutex_deadlock()
81 act_waiter->deadlock_lock = lock; in debug_rt_mutex_deadlock()
[all …]
mutex.h
12 #define spin_lock_mutex(lock, flags) \ argument
13 do { spin_lock(lock); (void)(flags); } while (0)
14 #define spin_unlock_mutex(lock, flags) \ argument
15 do { spin_unlock(lock); (void)(flags); } while (0)
16 #define mutex_remove_waiter(lock, waiter, ti) \ argument
20 static inline void mutex_set_owner(struct mutex *lock) in mutex_set_owner() argument
22 lock->owner = current; in mutex_set_owner()
25 static inline void mutex_clear_owner(struct mutex *lock) in mutex_clear_owner() argument
27 lock->owner = NULL; in mutex_clear_owner()
30 static inline void mutex_set_owner(struct mutex *lock) in mutex_set_owner() argument
[all …]
qrwlock.c
42 arch_spinlock_t lock; member
54 rspin_until_writer_unlock(struct qrwlock *lock, u32 cnts) in rspin_until_writer_unlock() argument
58 cnts = atomic_read_acquire(&lock->cnts); in rspin_until_writer_unlock()
67 void queued_read_lock_slowpath(struct qrwlock *lock, u32 cnts) in queued_read_lock_slowpath() argument
81 rspin_until_writer_unlock(lock, cnts); in queued_read_lock_slowpath()
84 atomic_sub(_QR_BIAS, &lock->cnts); in queued_read_lock_slowpath()
89 arch_spin_lock(&lock->wait_lock); in queued_read_lock_slowpath()
96 cnts = atomic_add_return_acquire(_QR_BIAS, &lock->cnts) - _QR_BIAS; in queued_read_lock_slowpath()
97 rspin_until_writer_unlock(lock, cnts); in queued_read_lock_slowpath()
102 arch_spin_unlock(&lock->wait_lock); in queued_read_lock_slowpath()
[all …]
mutex-debug.h
16 extern void debug_mutex_lock_common(struct mutex *lock,
18 extern void debug_mutex_wake_waiter(struct mutex *lock,
21 extern void debug_mutex_add_waiter(struct mutex *lock,
24 extern void mutex_remove_waiter(struct mutex *lock, struct mutex_waiter *waiter,
26 extern void debug_mutex_unlock(struct mutex *lock);
27 extern void debug_mutex_init(struct mutex *lock, const char *name,
30 static inline void mutex_set_owner(struct mutex *lock) in mutex_set_owner() argument
32 lock->owner = current; in mutex_set_owner()
35 static inline void mutex_clear_owner(struct mutex *lock) in mutex_clear_owner() argument
37 lock->owner = NULL; in mutex_clear_owner()
[all …]
qspinlock.c
158 static __always_inline void clear_pending_set_locked(struct qspinlock *lock) in clear_pending_set_locked() argument
160 struct __qspinlock *l = (void *)lock; in clear_pending_set_locked()
175 static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail) in xchg_tail() argument
177 struct __qspinlock *l = (void *)lock; in xchg_tail()
190 static __always_inline void clear_pending_set_locked(struct qspinlock *lock) in clear_pending_set_locked() argument
192 atomic_add(-_Q_PENDING_VAL + _Q_LOCKED_VAL, &lock->val); in clear_pending_set_locked()
205 static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail) in xchg_tail() argument
207 u32 old, new, val = atomic_read(&lock->val); in xchg_tail()
211 old = atomic_cmpxchg(&lock->val, val, new); in xchg_tail()
227 static __always_inline void set_locked(struct qspinlock *lock) in set_locked() argument
[all …]
rtmutex_common.h
29 struct rt_mutex *lock; member
41 static inline int rt_mutex_has_waiters(struct rt_mutex *lock) in rt_mutex_has_waiters() argument
43 return !RB_EMPTY_ROOT(&lock->waiters); in rt_mutex_has_waiters()
47 rt_mutex_top_waiter(struct rt_mutex *lock) in rt_mutex_top_waiter() argument
51 w = rb_entry(lock->waiters_leftmost, struct rt_mutex_waiter, in rt_mutex_top_waiter()
53 BUG_ON(w->lock != lock); in rt_mutex_top_waiter()
76 static inline struct task_struct *rt_mutex_owner(struct rt_mutex *lock) in rt_mutex_owner() argument
79 ((unsigned long)lock->owner & ~RT_MUTEX_OWNER_MASKALL); in rt_mutex_owner()
100 extern struct task_struct *rt_mutex_next_owner(struct rt_mutex *lock);
101 extern void rt_mutex_init_proxy_locked(struct rt_mutex *lock,
[all …]
qspinlock_paravirt.h
60 struct qspinlock *lock; member
99 static struct qspinlock **pv_hash(struct qspinlock *lock, struct pv_node *node) in pv_hash() argument
101 unsigned long offset, hash = hash_ptr(lock, pv_lock_hash_bits); in pv_hash()
105 if (!cmpxchg(&he->lock, NULL, lock)) { in pv_hash()
107 return &he->lock; in pv_hash()
123 static struct pv_node *pv_unhash(struct qspinlock *lock) in pv_unhash() argument
125 unsigned long offset, hash = hash_ptr(lock, pv_lock_hash_bits); in pv_unhash()
130 if (READ_ONCE(he->lock) == lock) { in pv_unhash()
132 WRITE_ONCE(he->lock, NULL); in pv_unhash()
218 static void pv_kick_node(struct qspinlock *lock, struct mcs_spinlock *node) in pv_kick_node() argument
[all …]
semaphore.c
57 raw_spin_lock_irqsave(&sem->lock, flags); in down()
62 raw_spin_unlock_irqrestore(&sem->lock, flags); in down()
80 raw_spin_lock_irqsave(&sem->lock, flags); in down_interruptible()
85 raw_spin_unlock_irqrestore(&sem->lock, flags); in down_interruptible()
106 raw_spin_lock_irqsave(&sem->lock, flags); in down_killable()
111 raw_spin_unlock_irqrestore(&sem->lock, flags); in down_killable()
135 raw_spin_lock_irqsave(&sem->lock, flags); in down_trylock()
139 raw_spin_unlock_irqrestore(&sem->lock, flags); in down_trylock()
160 raw_spin_lock_irqsave(&sem->lock, flags); in down_timeout()
165 raw_spin_unlock_irqrestore(&sem->lock, flags); in down_timeout()
[all …]
rtmutex-debug.h
13 rt_mutex_deadlock_account_lock(struct rt_mutex *lock, struct task_struct *task);
17 extern void debug_rt_mutex_init(struct rt_mutex *lock, const char *name);
18 extern void debug_rt_mutex_lock(struct rt_mutex *lock);
19 extern void debug_rt_mutex_unlock(struct rt_mutex *lock);
20 extern void debug_rt_mutex_proxy_lock(struct rt_mutex *lock,
22 extern void debug_rt_mutex_proxy_unlock(struct rt_mutex *lock);
25 struct rt_mutex *lock);
osq_lock.c
36 osq_wait_next(struct optimistic_spin_queue *lock, in osq_wait_next() argument
52 if (atomic_read(&lock->tail) == curr && in osq_wait_next()
53 atomic_cmpxchg_acquire(&lock->tail, curr, old) == curr) { in osq_wait_next()
84 bool osq_lock(struct optimistic_spin_queue *lock) in osq_lock() argument
101 old = atomic_xchg(&lock->tail, curr); in osq_lock()
167 next = osq_wait_next(lock, node, prev); in osq_lock()
185 void osq_unlock(struct optimistic_spin_queue *lock) in osq_unlock() argument
193 if (likely(atomic_cmpxchg_release(&lock->tail, curr, in osq_unlock()
207 next = osq_wait_next(lock, node, NULL); in osq_unlock()
/linux-4.4.14/drivers/staging/lustre/lustre/obdclass/
cl_lock.c
77 const struct cl_lock *lock) in cl_lock_invariant_trusted() argument
79 return ergo(lock->cll_state == CLS_FREEING, lock->cll_holds == 0) && in cl_lock_invariant_trusted()
80 atomic_read(&lock->cll_ref) >= lock->cll_holds && in cl_lock_invariant_trusted()
81 lock->cll_holds >= lock->cll_users && in cl_lock_invariant_trusted()
82 lock->cll_holds >= 0 && in cl_lock_invariant_trusted()
83 lock->cll_users >= 0 && in cl_lock_invariant_trusted()
84 lock->cll_depth >= 0; in cl_lock_invariant_trusted()
93 const struct cl_lock *lock) in cl_lock_invariant() argument
97 result = atomic_read(&lock->cll_ref) > 0 && in cl_lock_invariant()
98 cl_lock_invariant_trusted(env, lock); in cl_lock_invariant()
[all …]
/linux-4.4.14/arch/alpha/include/asm/
spinlock.h
14 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
15 #define arch_spin_is_locked(x) ((x)->lock != 0)
17 do { cpu_relax(); } while ((x)->lock)
19 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() argument
21 return lock.lock == 0; in arch_spin_value_unlocked()
24 static inline void arch_spin_unlock(arch_spinlock_t * lock) in arch_spin_unlock() argument
27 lock->lock = 0; in arch_spin_unlock()
30 static inline void arch_spin_lock(arch_spinlock_t * lock) in arch_spin_lock() argument
46 : "=&r" (tmp), "=m" (lock->lock) in arch_spin_lock()
47 : "m"(lock->lock) : "memory"); in arch_spin_lock()
[all …]
/linux-4.4.14/arch/hexagon/include/asm/
spinlock.h
40 static inline void arch_read_lock(arch_rwlock_t *lock) in arch_read_lock() argument
49 : "r" (&lock->lock) in arch_read_lock()
55 static inline void arch_read_unlock(arch_rwlock_t *lock) in arch_read_unlock() argument
63 : "r" (&lock->lock) in arch_read_unlock()
70 static inline int arch_read_trylock(arch_rwlock_t *lock) in arch_read_trylock() argument
81 : "r" (&lock->lock) in arch_read_trylock()
89 return rwlock->lock == 0; in arch_read_can_lock()
94 return rwlock->lock == 0; in arch_write_can_lock()
98 static inline void arch_write_lock(arch_rwlock_t *lock) in arch_write_lock() argument
107 : "r" (&lock->lock) in arch_write_lock()
[all …]
/linux-4.4.14/arch/mips/include/asm/
spinlock.h
38 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() argument
40 u32 counters = ACCESS_ONCE(lock->lock); in arch_spin_is_locked()
45 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() argument
47 return lock.h.serving_now == lock.h.ticket; in arch_spin_value_unlocked()
50 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
54 static inline int arch_spin_is_contended(arch_spinlock_t *lock) in arch_spin_is_contended() argument
56 u32 counters = ACCESS_ONCE(lock->lock); in arch_spin_is_contended()
62 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
97 : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), in arch_spin_lock()
98 [serving_now_ptr] "+m" (lock->h.serving_now), in arch_spin_lock()
[all …]
/linux-4.4.14/arch/ia64/include/asm/
spinlock.h
19 #define arch_spin_lock_init(x) ((x)->lock = 0)
40 static __always_inline void __ticket_spin_lock(arch_spinlock_t *lock) in __ticket_spin_lock() argument
42 int *p = (int *)&lock->lock, ticket, serve; in __ticket_spin_lock()
60 static __always_inline int __ticket_spin_trylock(arch_spinlock_t *lock) in __ticket_spin_trylock() argument
62 int tmp = ACCESS_ONCE(lock->lock); in __ticket_spin_trylock()
65 return ia64_cmpxchg(acq, &lock->lock, tmp, tmp + 1, sizeof (tmp)) == tmp; in __ticket_spin_trylock()
69 static __always_inline void __ticket_spin_unlock(arch_spinlock_t *lock) in __ticket_spin_unlock() argument
71 unsigned short *p = (unsigned short *)&lock->lock + 1, tmp; in __ticket_spin_unlock()
77 static __always_inline void __ticket_spin_unlock_wait(arch_spinlock_t *lock) in __ticket_spin_unlock_wait() argument
79 int *p = (int *)&lock->lock, ticket; in __ticket_spin_unlock_wait()
[all …]
acenv.h
23 ia64_acpi_acquire_global_lock(unsigned int *lock) in ia64_acpi_acquire_global_lock() argument
27 old = *lock; in ia64_acpi_acquire_global_lock()
29 val = ia64_cmpxchg4_acq(lock, new, old); in ia64_acpi_acquire_global_lock()
35 ia64_acpi_release_global_lock(unsigned int *lock) in ia64_acpi_release_global_lock() argument
39 old = *lock; in ia64_acpi_release_global_lock()
41 val = ia64_cmpxchg4_acq(lock, new, old); in ia64_acpi_release_global_lock()
/linux-4.4.14/fs/ocfs2/dlm/
dlmast.c
52 struct dlm_lock *lock);
53 static int dlm_should_cancel_bast(struct dlm_ctxt *dlm, struct dlm_lock *lock);
64 static int dlm_should_cancel_bast(struct dlm_ctxt *dlm, struct dlm_lock *lock) in dlm_should_cancel_bast() argument
67 assert_spin_locked(&lock->spinlock); in dlm_should_cancel_bast()
69 if (lock->ml.highest_blocked == LKM_IVMODE) in dlm_should_cancel_bast()
71 BUG_ON(lock->ml.highest_blocked == LKM_NLMODE); in dlm_should_cancel_bast()
73 if (lock->bast_pending && in dlm_should_cancel_bast()
74 list_empty(&lock->bast_list)) in dlm_should_cancel_bast()
78 if (lock->ml.type == LKM_EXMODE) in dlm_should_cancel_bast()
81 else if (lock->ml.type == LKM_NLMODE) in dlm_should_cancel_bast()
[all …]
dlmlock.c
62 struct dlm_lock *lock, int flags);
66 static void dlm_lock_detach_lockres(struct dlm_lock *lock);
92 struct dlm_lock *lock) in dlm_can_grant_new_lock() argument
97 if (!dlm_lock_compatible(tmplock->ml.type, lock->ml.type)) in dlm_can_grant_new_lock()
102 if (!dlm_lock_compatible(tmplock->ml.type, lock->ml.type)) in dlm_can_grant_new_lock()
105 lock->ml.type)) in dlm_can_grant_new_lock()
121 struct dlm_lock *lock, int flags) in dlmlock_master() argument
126 mlog(0, "type=%d\n", lock->ml.type); in dlmlock_master()
133 lock->ml.node != dlm->node_num) { in dlmlock_master()
142 if (dlm_can_grant_new_lock(res, lock)) { in dlmlock_master()
[all …]
dlmconvert.c
60 struct dlm_lock *lock, int flags,
65 struct dlm_lock *lock, int flags, int type);
78 struct dlm_lock *lock, int flags, int type) in dlmconvert_master() argument
89 status = __dlmconvert_master(dlm, res, lock, flags, type, in dlmconvert_master()
100 dlm_queue_ast(dlm, lock); in dlmconvert_master()
121 struct dlm_lock *lock, int flags, in __dlmconvert_master() argument
131 lock->ml.type, lock->ml.convert_type, type); in __dlmconvert_master()
133 spin_lock(&lock->spinlock); in __dlmconvert_master()
136 if (lock->ml.convert_type != LKM_IVMODE) { in __dlmconvert_master()
144 if (!dlm_lock_on_list(&res->granted, lock)) { in __dlmconvert_master()
[all …]
dlmunlock.c
60 struct dlm_lock *lock,
65 struct dlm_lock *lock,
71 struct dlm_lock *lock,
100 struct dlm_lock *lock, in dlmunlock_common() argument
121 in_use = !list_empty(&lock->ast_list); in dlmunlock_common()
141 spin_lock(&lock->spinlock); in dlmunlock_common()
156 status = dlm_get_cancel_actions(dlm, res, lock, lksb, &actions); in dlmunlock_common()
158 status = dlm_get_unlock_actions(dlm, res, lock, lksb, &actions); in dlmunlock_common()
177 lock->cancel_pending = 1; in dlmunlock_common()
179 lock->unlock_pending = 1; in dlmunlock_common()
[all …]
dlmthread.c
57 #define dlm_lock_is_remote(dlm, lock) ((lock)->ml.node != (dlm)->node_num) argument
301 struct dlm_lock *lock, *target; in dlm_shuffle_lists() local
328 list_for_each_entry(lock, &res->granted, list) { in dlm_shuffle_lists()
329 if (lock==target) in dlm_shuffle_lists()
331 if (!dlm_lock_compatible(lock->ml.type, in dlm_shuffle_lists()
335 if (lock->ml.highest_blocked == LKM_IVMODE) { in dlm_shuffle_lists()
337 __dlm_queue_bast(dlm, lock); in dlm_shuffle_lists()
340 if (lock->ml.highest_blocked < target->ml.convert_type) in dlm_shuffle_lists()
341 lock->ml.highest_blocked = in dlm_shuffle_lists()
346 list_for_each_entry(lock, &res->converting, list) { in dlm_shuffle_lists()
[all …]
dlmdebug.c
74 static void __dlm_print_lock(struct dlm_lock *lock) in __dlm_print_lock() argument
76 spin_lock(&lock->spinlock); in __dlm_print_lock()
81 lock->ml.type, lock->ml.convert_type, lock->ml.node, in __dlm_print_lock()
82 dlm_get_lock_cookie_node(be64_to_cpu(lock->ml.cookie)), in __dlm_print_lock()
83 dlm_get_lock_cookie_seq(be64_to_cpu(lock->ml.cookie)), in __dlm_print_lock()
84 atomic_read(&lock->lock_refs.refcount), in __dlm_print_lock()
85 (list_empty(&lock->ast_list) ? 'y' : 'n'), in __dlm_print_lock()
86 (lock->ast_pending ? 'y' : 'n'), in __dlm_print_lock()
87 (list_empty(&lock->bast_list) ? 'y' : 'n'), in __dlm_print_lock()
88 (lock->bast_pending ? 'y' : 'n'), in __dlm_print_lock()
[all …]
/linux-4.4.14/drivers/md/persistent-data/
dm-block-manager.c
38 spinlock_t lock; member
55 static unsigned __find_holder(struct block_lock *lock, in __find_holder() argument
61 if (lock->holders[i] == task) in __find_holder()
69 static void __add_holder(struct block_lock *lock, struct task_struct *task) in __add_holder() argument
71 unsigned h = __find_holder(lock, NULL); in __add_holder()
77 lock->holders[h] = task; in __add_holder()
80 t = lock->traces + h; in __add_holder()
83 t->entries = lock->entries[h]; in __add_holder()
90 static void __del_holder(struct block_lock *lock, struct task_struct *task) in __del_holder() argument
92 unsigned h = __find_holder(lock, task); in __del_holder()
[all …]
/linux-4.4.14/arch/arm64/include/asm/
spinlock.h
30 #define arch_spin_unlock_wait(lock) \ argument
31 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
33 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
35 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
71 : "=&r" (lockval), "=&r" (newval), "=&r" (tmp), "+Q" (*lock) in arch_spin_lock()
72 : "Q" (lock->owner), "I" (1 << TICKET_SHIFT) in arch_spin_lock()
76 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
100 : "=&r" (lockval), "=&r" (tmp), "+Q" (*lock) in arch_spin_trylock()
107 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
120 : "=Q" (lock->owner), "=&r" (tmp) in arch_spin_unlock()
[all …]
/linux-4.4.14/arch/metag/include/asm/
spinlock_lock1.h
7 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() argument
12 ret = lock->lock; in arch_spin_is_locked()
17 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
24 if (lock->lock == 0) { in arch_spin_lock()
26 lock->lock = 1; in arch_spin_lock()
32 WARN_ON(lock->lock != 1); in arch_spin_lock()
36 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
42 ret = lock->lock; in arch_spin_trylock()
45 lock->lock = 1; in arch_spin_trylock()
51 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
[all …]
spinlock_lnkget.h
10 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() argument
19 : "da" (&lock->lock) in arch_spin_is_locked()
24 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
38 : "da" (&lock->lock) in arch_spin_lock()
45 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
60 : "da" (&lock->lock) in arch_spin_trylock()
68 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
74 : "da" (&lock->lock), "da" (0) in arch_spin_unlock()
100 : "da" (&rw->lock), "bd" (0x80000000) in arch_write_lock()
121 : "da" (&rw->lock), "bd" (0x80000000) in arch_write_trylock()
[all …]
spinlock.h
10 #define arch_spin_unlock_wait(lock) \ argument
11 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
13 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
15 #define arch_read_lock_flags(lock, flags) arch_read_lock(lock) argument
16 #define arch_write_lock_flags(lock, flags) arch_write_lock(lock) argument
18 #define arch_spin_relax(lock) cpu_relax() argument
19 #define arch_read_relax(lock) cpu_relax() argument
20 #define arch_write_relax(lock) cpu_relax() argument
/linux-4.4.14/arch/blackfin/include/asm/
spinlock.h
27 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() argument
29 return __raw_spin_is_locked_asm(&lock->lock); in arch_spin_is_locked()
32 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
34 __raw_spin_lock_asm(&lock->lock); in arch_spin_lock()
37 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
39 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
41 return __raw_spin_trylock_asm(&lock->lock); in arch_spin_trylock()
44 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
46 __raw_spin_unlock_asm(&lock->lock); in arch_spin_unlock()
49 static inline void arch_spin_unlock_wait(arch_spinlock_t *lock) in arch_spin_unlock_wait() argument
[all …]
/linux-4.4.14/arch/powerpc/include/asm/
spinlock.h
55 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() argument
57 return lock.slock == 0; in arch_spin_value_unlocked()
60 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() argument
63 return !arch_spin_value_unlocked(*lock); in arch_spin_is_locked()
70 static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock) in __arch_spin_trylock() argument
84 : "r" (token), "r" (&lock->slock) in __arch_spin_trylock()
90 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
93 return __arch_spin_trylock(lock) == 0; in arch_spin_trylock()
113 extern void __spin_yield(arch_spinlock_t *lock);
114 extern void __rw_yield(arch_rwlock_t *lock);
[all …]
/linux-4.4.14/arch/arm/include/asm/
spinlock.h
53 #define arch_spin_unlock_wait(lock) \ argument
54 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
56 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
58 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
64 prefetchw(&lock->slock); in arch_spin_lock()
72 : "r" (&lock->slock), "I" (1 << TICKET_SHIFT) in arch_spin_lock()
77 lockval.tickets.owner = ACCESS_ONCE(lock->tickets.owner); in arch_spin_lock()
83 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
88 prefetchw(&lock->slock); in arch_spin_trylock()
97 : "r" (&lock->slock), "I" (1 << TICKET_SHIFT) in arch_spin_trylock()
[all …]
/linux-4.4.14/drivers/gpu/drm/
drm_lock.c
58 struct drm_lock *lock = data; in drm_legacy_lock() local
67 if (lock->context == DRM_KERNEL_CONTEXT) { in drm_legacy_lock()
69 task_pid_nr(current), lock->context); in drm_legacy_lock()
74 lock->context, task_pid_nr(current), in drm_legacy_lock()
75 master->lock.hw_lock->lock, lock->flags); in drm_legacy_lock()
77 add_wait_queue(&master->lock.lock_queue, &entry); in drm_legacy_lock()
78 spin_lock_bh(&master->lock.spinlock); in drm_legacy_lock()
79 master->lock.user_waiters++; in drm_legacy_lock()
80 spin_unlock_bh(&master->lock.spinlock); in drm_legacy_lock()
84 if (!master->lock.hw_lock) { in drm_legacy_lock()
[all …]
drm_modeset_lock.c
291 struct drm_modeset_lock *lock; in drm_modeset_drop_locks() local
293 lock = list_first_entry(&ctx->locked, in drm_modeset_drop_locks()
296 drm_modeset_unlock(lock); in drm_modeset_drop_locks()
301 static inline int modeset_lock(struct drm_modeset_lock *lock, in modeset_lock() argument
312 if (!ww_mutex_trylock(&lock->mutex)) in modeset_lock()
317 ret = ww_mutex_lock_slow_interruptible(&lock->mutex, &ctx->ww_ctx); in modeset_lock()
319 ret = ww_mutex_lock_interruptible(&lock->mutex, &ctx->ww_ctx); in modeset_lock()
321 ww_mutex_lock_slow(&lock->mutex, &ctx->ww_ctx); in modeset_lock()
324 ret = ww_mutex_lock(&lock->mutex, &ctx->ww_ctx); in modeset_lock()
327 WARN_ON(!list_empty(&lock->head)); in modeset_lock()
[all …]
/linux-4.4.14/drivers/acpi/acpica/
utlock.c
62 acpi_status acpi_ut_create_rw_lock(struct acpi_rw_lock *lock) in acpi_ut_create_rw_lock() argument
66 lock->num_readers = 0; in acpi_ut_create_rw_lock()
67 status = acpi_os_create_mutex(&lock->reader_mutex); in acpi_ut_create_rw_lock()
72 status = acpi_os_create_mutex(&lock->writer_mutex); in acpi_ut_create_rw_lock()
76 void acpi_ut_delete_rw_lock(struct acpi_rw_lock *lock) in acpi_ut_delete_rw_lock() argument
79 acpi_os_delete_mutex(lock->reader_mutex); in acpi_ut_delete_rw_lock()
80 acpi_os_delete_mutex(lock->writer_mutex); in acpi_ut_delete_rw_lock()
82 lock->num_readers = 0; in acpi_ut_delete_rw_lock()
83 lock->reader_mutex = NULL; in acpi_ut_delete_rw_lock()
84 lock->writer_mutex = NULL; in acpi_ut_delete_rw_lock()
[all …]
/linux-4.4.14/drivers/staging/lustre/lustre/include/linux/
obd.h
61 spinlock_t lock; member
69 static inline void __client_obd_list_lock(client_obd_lock_t *lock, in __client_obd_list_lock() argument
75 if (spin_trylock(&lock->lock)) { in __client_obd_list_lock()
76 LASSERT(lock->task == NULL); in __client_obd_list_lock()
77 lock->task = current; in __client_obd_list_lock()
78 lock->func = func; in __client_obd_list_lock()
79 lock->line = line; in __client_obd_list_lock()
80 lock->time = jiffies; in __client_obd_list_lock()
85 time_before(lock->time + 5 * HZ, jiffies)) { in __client_obd_list_lock()
86 struct task_struct *task = lock->task; in __client_obd_list_lock()
[all …]
/linux-4.4.14/arch/sparc/include/asm/
spinlock_32.h
14 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) argument
16 #define arch_spin_unlock_wait(lock) \ argument
17 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
19 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
35 : "r" (lock) in arch_spin_lock()
39 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
44 : "r" (lock) in arch_spin_trylock()
49 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
51 __asm__ __volatile__("stb %%g0, [%0]" : : "r" (lock) : "memory"); in arch_spin_unlock()
93 #define arch_read_lock(lock) \ argument
[all …]
spinlock_64.h
24 #define arch_spin_is_locked(lp) ((lp)->lock != 0)
28 } while((lp)->lock)
30 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
45 : "r" (lock) in arch_spin_lock()
49 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
56 : "r" (lock) in arch_spin_trylock()
62 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
67 : "r" (lock) in arch_spin_unlock()
71 static inline void arch_spin_lock_flags(arch_spinlock_t *lock, unsigned long flags) in arch_spin_lock_flags() argument
89 : "r"(lock), "r"(flags) in arch_spin_lock_flags()
[all …]
/linux-4.4.14/arch/sh/include/asm/
spinlock.h
26 #define arch_spin_is_locked(x) ((x)->lock <= 0)
27 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
37 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
52 : "r" (&lock->lock) in arch_spin_lock()
57 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
65 : "r" (&lock->lock) in arch_spin_unlock()
70 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
83 : "r" (&lock->lock) in arch_spin_trylock()
103 #define arch_read_can_lock(x) ((x)->lock > 0)
109 #define arch_write_can_lock(x) ((x)->lock == RW_LOCK_BIAS)
[all …]
/linux-4.4.14/arch/x86/include/asm/
spinlock.h
51 static inline void __ticket_enter_slowpath(arch_spinlock_t *lock) in __ticket_enter_slowpath() argument
53 set_bit(0, (volatile unsigned long *)&lock->tickets.head); in __ticket_enter_slowpath()
57 static __always_inline void __ticket_lock_spinning(arch_spinlock_t *lock, in __ticket_lock_spinning() argument
61 static inline void __ticket_unlock_kick(arch_spinlock_t *lock, in __ticket_unlock_kick() argument
72 static inline void __ticket_check_and_clear_slowpath(arch_spinlock_t *lock, in __ticket_check_and_clear_slowpath() argument
84 cmpxchg(&lock->head_tail, old.head_tail, new.head_tail); in __ticket_check_and_clear_slowpath()
88 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() argument
90 return __tickets_equal(lock.tickets.head, lock.tickets.tail); in arch_spin_value_unlocked()
106 static __always_inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
110 inc = xadd(&lock->tickets, inc); in arch_spin_lock()
[all …]
qspinlock.h
15 static inline void native_queued_spin_unlock(struct qspinlock *lock) in native_queued_spin_unlock() argument
17 smp_store_release((u8 *)lock, 0); in native_queued_spin_unlock()
21 extern void native_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
23 extern void __pv_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
24 extern void __raw_callee_save___pv_queued_spin_unlock(struct qspinlock *lock);
26 static inline void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val) in queued_spin_lock_slowpath() argument
28 pv_queued_spin_lock_slowpath(lock, val); in queued_spin_lock_slowpath()
31 static inline void queued_spin_unlock(struct qspinlock *lock) in queued_spin_unlock() argument
33 pv_queued_spin_unlock(lock); in queued_spin_unlock()
36 static inline void queued_spin_unlock(struct qspinlock *lock) in queued_spin_unlock() argument
[all …]
cmpxchg.h
40 #define __xchg_op(ptr, arg, op, lock) \ argument
45 asm volatile (lock #op "b %b0, %1\n" \
50 asm volatile (lock #op "w %w0, %1\n" \
55 asm volatile (lock #op "l %0, %1\n" \
60 asm volatile (lock #op "q %q0, %1\n" \
83 #define __raw_cmpxchg(ptr, old, new, size, lock) \ argument
92 asm volatile(lock "cmpxchgb %2,%1" \
101 asm volatile(lock "cmpxchgw %2,%1" \
110 asm volatile(lock "cmpxchgl %2,%1" \
119 asm volatile(lock "cmpxchgq %2,%1" \
[all …]
/linux-4.4.14/arch/tile/include/asm/
spinlock_64.h
47 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() argument
50 u32 val = READ_ONCE(lock->lock); in arch_spin_is_locked()
55 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
58 __insn_fetchadd4(&lock->lock, 1U << __ARCH_SPIN_CURRENT_SHIFT); in arch_spin_unlock()
61 void arch_spin_unlock_wait(arch_spinlock_t *lock);
63 void arch_spin_lock_slow(arch_spinlock_t *lock, u32 val);
69 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
71 u32 val = __insn_fetchadd4(&lock->lock, 1); in arch_spin_lock()
74 arch_spin_lock_slow(lock, ticket); in arch_spin_lock()
78 int arch_spin_trylock(arch_spinlock_t *lock);
[all …]
spinlock_32.h
36 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() argument
46 int curr = READ_ONCE(lock->current_ticket); in arch_spin_is_locked()
47 int next = READ_ONCE(lock->next_ticket); in arch_spin_is_locked()
52 void arch_spin_lock(arch_spinlock_t *lock);
55 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
57 int arch_spin_trylock(arch_spinlock_t *lock);
59 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
62 int old_ticket = lock->current_ticket; in arch_spin_unlock()
64 lock->current_ticket = old_ticket + TICKET_QUANTUM; in arch_spin_unlock()
67 void arch_spin_unlock_wait(arch_spinlock_t *lock);
[all …]
atomic_32.h
246 int *lock, int o, int n);
247 extern struct __get_user __atomic_xchg(volatile int *p, int *lock, int n);
248 extern struct __get_user __atomic_xchg_add(volatile int *p, int *lock, int n);
250 int *lock, int o, int n);
251 extern struct __get_user __atomic_or(volatile int *p, int *lock, int n);
252 extern struct __get_user __atomic_and(volatile int *p, int *lock, int n);
253 extern struct __get_user __atomic_andn(volatile int *p, int *lock, int n);
254 extern struct __get_user __atomic_xor(volatile int *p, int *lock, int n);
255 extern long long __atomic64_cmpxchg(volatile long long *p, int *lock,
257 extern long long __atomic64_xchg(volatile long long *p, int *lock, long long n);
[all …]
/linux-4.4.14/tools/lib/lockdep/include/liblockdep/
rwlock.h
20 static inline int __rwlock_init(liblockdep_pthread_rwlock_t *lock, in __rwlock_init() argument
25 lockdep_init_map(&lock->dep_map, name, key, 0); in __rwlock_init()
27 return pthread_rwlock_init(&lock->rwlock, attr); in __rwlock_init()
30 #define liblockdep_pthread_rwlock_init(lock, attr) \ argument
34 __rwlock_init((lock), #lock, &__key, (attr)); \
37 static inline int liblockdep_pthread_rwlock_rdlock(liblockdep_pthread_rwlock_t *lock) in liblockdep_pthread_rwlock_rdlock() argument
39 lock_acquire(&lock->dep_map, 0, 0, 2, 1, NULL, (unsigned long)_RET_IP_); in liblockdep_pthread_rwlock_rdlock()
40 return pthread_rwlock_rdlock(&lock->rwlock); in liblockdep_pthread_rwlock_rdlock()
44 static inline int liblockdep_pthread_rwlock_unlock(liblockdep_pthread_rwlock_t *lock) in liblockdep_pthread_rwlock_unlock() argument
46 lock_release(&lock->dep_map, 0, (unsigned long)_RET_IP_); in liblockdep_pthread_rwlock_unlock()
[all …]
mutex.h
20 static inline int __mutex_init(liblockdep_pthread_mutex_t *lock, in __mutex_init() argument
25 lockdep_init_map(&lock->dep_map, name, key, 0); in __mutex_init()
26 return pthread_mutex_init(&lock->mutex, __mutexattr); in __mutex_init()
36 static inline int liblockdep_pthread_mutex_lock(liblockdep_pthread_mutex_t *lock) in liblockdep_pthread_mutex_lock() argument
38 lock_acquire(&lock->dep_map, 0, 0, 0, 1, NULL, (unsigned long)_RET_IP_); in liblockdep_pthread_mutex_lock()
39 return pthread_mutex_lock(&lock->mutex); in liblockdep_pthread_mutex_lock()
42 static inline int liblockdep_pthread_mutex_unlock(liblockdep_pthread_mutex_t *lock) in liblockdep_pthread_mutex_unlock() argument
44 lock_release(&lock->dep_map, 0, (unsigned long)_RET_IP_); in liblockdep_pthread_mutex_unlock()
45 return pthread_mutex_unlock(&lock->mutex); in liblockdep_pthread_mutex_unlock()
48 static inline int liblockdep_pthread_mutex_trylock(liblockdep_pthread_mutex_t *lock) in liblockdep_pthread_mutex_trylock() argument
[all …]
/linux-4.4.14/arch/s390/include/asm/
spinlock.h
19 _raw_compare_and_swap(unsigned int *lock, unsigned int old, unsigned int new) in _raw_compare_and_swap() argument
21 return __sync_bool_compare_and_swap(lock, old, new); in _raw_compare_and_swap()
39 static inline void arch_spin_relax(arch_spinlock_t *lock) in arch_spin_relax() argument
41 arch_lock_relax(lock->lock); in arch_spin_relax()
49 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() argument
51 return lock.lock == 0; in arch_spin_value_unlocked()
56 return ACCESS_ONCE(lp->lock) != 0; in arch_spin_is_locked()
63 _raw_compare_and_swap(&lp->lock, 0, SPINLOCK_LOCKVAL)); in arch_spin_trylock_once()
88 typecheck(unsigned int, lp->lock); in arch_spin_unlock()
91 : "+Q" (lp->lock) in arch_spin_unlock()
[all …]
/linux-4.4.14/arch/m32r/include/asm/
spinlock.h
29 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
40 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
63 : "r" (&lock->slock) in arch_spin_trylock()
73 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
107 : "r" (&lock->slock) in arch_spin_lock()
115 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
118 lock->slock = 1; in arch_spin_unlock()
144 #define arch_read_can_lock(x) ((int)(x)->lock > 0)
150 #define arch_write_can_lock(x) ((x)->lock == RW_LOCK_BIAS)
195 : "r" (&rw->lock) in arch_read_lock()
[all …]
/linux-4.4.14/include/trace/events/
lock.h
2 #define TRACE_SYSTEM lock
14 TP_PROTO(struct lockdep_map *lock, unsigned int subclass,
18 TP_ARGS(lock, subclass, trylock, read, check, next_lock, ip),
22 __string(name, lock->name)
28 __assign_str(name, lock->name);
29 __entry->lockdep_addr = lock;
38 DECLARE_EVENT_CLASS(lock,
40 TP_PROTO(struct lockdep_map *lock, unsigned long ip),
42 TP_ARGS(lock, ip),
45 __string( name, lock->name )
[all …]
/linux-4.4.14/include/asm-generic/
qrwlock.h
39 extern void queued_read_lock_slowpath(struct qrwlock *lock, u32 cnts);
40 extern void queued_write_lock_slowpath(struct qrwlock *lock);
46 static inline int queued_read_can_lock(struct qrwlock *lock) in queued_read_can_lock() argument
48 return !(atomic_read(&lock->cnts) & _QW_WMASK); in queued_read_can_lock()
55 static inline int queued_write_can_lock(struct qrwlock *lock) in queued_write_can_lock() argument
57 return !atomic_read(&lock->cnts); in queued_write_can_lock()
65 static inline int queued_read_trylock(struct qrwlock *lock) in queued_read_trylock() argument
69 cnts = atomic_read(&lock->cnts); in queued_read_trylock()
71 cnts = (u32)atomic_add_return_acquire(_QR_BIAS, &lock->cnts); in queued_read_trylock()
74 atomic_sub(_QR_BIAS, &lock->cnts); in queued_read_trylock()
[all …]
qspinlock.h
28 static __always_inline int queued_spin_is_locked(struct qspinlock *lock) in queued_spin_is_locked() argument
53 return atomic_read(&lock->val) & _Q_LOCKED_MASK; in queued_spin_is_locked()
66 static __always_inline int queued_spin_value_unlocked(struct qspinlock lock) in queued_spin_value_unlocked() argument
68 return !atomic_read(&lock.val); in queued_spin_value_unlocked()
76 static __always_inline int queued_spin_is_contended(struct qspinlock *lock) in queued_spin_is_contended() argument
78 return atomic_read(&lock->val) & ~_Q_LOCKED_MASK; in queued_spin_is_contended()
85 static __always_inline int queued_spin_trylock(struct qspinlock *lock) in queued_spin_trylock() argument
87 if (!atomic_read(&lock->val) && in queued_spin_trylock()
88 (atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL) == 0)) in queued_spin_trylock()
93 extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
[all …]
/linux-4.4.14/arch/tile/lib/
spinlock_32.c
22 void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
28 while ((my_ticket = __insn_tns((void *)&lock->next_ticket)) & 1) in arch_spin_lock()
32 lock->next_ticket = my_ticket + TICKET_QUANTUM; in arch_spin_lock()
35 while ((delta = my_ticket - lock->current_ticket) != 0) in arch_spin_lock()
40 int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
47 int my_ticket = __insn_tns((void *)&lock->next_ticket); in arch_spin_trylock()
49 if (my_ticket == lock->current_ticket) { in arch_spin_trylock()
51 lock->next_ticket = my_ticket + TICKET_QUANTUM; in arch_spin_trylock()
58 lock->next_ticket = my_ticket; in arch_spin_trylock()
65 void arch_spin_unlock_wait(arch_spinlock_t *lock) in arch_spin_unlock_wait() argument
[all …]
spinlock_64.c
26 static inline u32 arch_spin_read_noalloc(void *lock) in arch_spin_read_noalloc() argument
28 return atomic_cmpxchg((atomic_t *)lock, -1, -1); in arch_spin_read_noalloc()
35 void arch_spin_lock_slow(arch_spinlock_t *lock, u32 my_ticket) in arch_spin_lock_slow() argument
38 __insn_fetchand4(&lock->lock, ~__ARCH_SPIN_NEXT_OVERFLOW); in arch_spin_lock_slow()
43 u32 val = arch_spin_read_noalloc(lock); in arch_spin_lock_slow()
55 int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
57 u32 val = arch_spin_read_noalloc(lock); in arch_spin_trylock()
60 return cmpxchg(&lock->lock, val, (val + 1) & ~__ARCH_SPIN_NEXT_OVERFLOW) in arch_spin_trylock()
65 void arch_spin_unlock_wait(arch_spinlock_t *lock) in arch_spin_unlock_wait() argument
68 u32 val = READ_ONCE(lock->lock); in arch_spin_unlock_wait()
[all …]
/linux-4.4.14/arch/xtensa/include/asm/
spinlock.h
32 #define arch_spin_unlock_wait(lock) \ argument
33 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
35 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
37 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
48 : "a" (&lock->slock) in arch_spin_lock()
54 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
64 : "a" (&lock->slock) in arch_spin_trylock()
70 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
78 : "a" (&lock->slock) in arch_spin_unlock()
99 #define arch_write_can_lock(x) ((x)->lock == 0)
[all …]
/linux-4.4.14/include/drm/ttm/
Dttm_lock.h72 spinlock_t lock; member
87 extern void ttm_lock_init(struct ttm_lock *lock);
96 extern void ttm_read_unlock(struct ttm_lock *lock);
108 extern int ttm_read_lock(struct ttm_lock *lock, bool interruptible);
125 extern int ttm_read_trylock(struct ttm_lock *lock, bool interruptible);
134 extern void ttm_write_unlock(struct ttm_lock *lock);
146 extern int ttm_write_lock(struct ttm_lock *lock, bool interruptible);
155 extern void ttm_lock_downgrade(struct ttm_lock *lock);
164 extern void ttm_suspend_lock(struct ttm_lock *lock);
173 extern void ttm_suspend_unlock(struct ttm_lock *lock);
[all …]
/linux-4.4.14/lib/
Datomic64.c32 raw_spinlock_t lock; member
36 .lock = __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
46 return &atomic64_lock[addr & (NR_LOCKS - 1)].lock; in lock_addr()
52 raw_spinlock_t *lock = lock_addr(v); in atomic64_read() local
55 raw_spin_lock_irqsave(lock, flags); in atomic64_read()
57 raw_spin_unlock_irqrestore(lock, flags); in atomic64_read()
65 raw_spinlock_t *lock = lock_addr(v); in atomic64_set() local
67 raw_spin_lock_irqsave(lock, flags); in atomic64_set()
69 raw_spin_unlock_irqrestore(lock, flags); in atomic64_set()
77 raw_spinlock_t *lock = lock_addr(v); \
[all …]
Dlockref.c14 while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
48 spin_lock(&lockref->lock); in lockref_get()
50 spin_unlock(&lockref->lock); in lockref_get()
71 spin_lock(&lockref->lock); in lockref_get_not_zero()
77 spin_unlock(&lockref->lock); in lockref_get_not_zero()
98 spin_lock(&lockref->lock); in lockref_get_or_lock()
102 spin_unlock(&lockref->lock); in lockref_get_or_lock()
142 spin_lock(&lockref->lock); in lockref_put_or_lock()
146 spin_unlock(&lockref->lock); in lockref_put_or_lock()
157 assert_spin_locked(&lockref->lock); in lockref_mark_dead()
[all …]
Dpercpu_ida.c35 spinlock_t lock; member
84 spin_lock(&remote->lock); in steal_tags()
95 spin_unlock(&remote->lock); in steal_tags()
118 spin_lock(&tags->lock); in alloc_local_tag()
121 spin_unlock(&tags->lock); in alloc_local_tag()
162 spin_lock(&pool->lock); in percpu_ida_alloc()
186 spin_unlock(&pool->lock); in percpu_ida_alloc()
227 spin_lock(&tags->lock); in percpu_ida_free()
231 spin_unlock(&tags->lock); in percpu_ida_free()
240 spin_lock(&pool->lock); in percpu_ida_free()
[all …]
/linux-4.4.14/arch/mn10300/include/asm/
Dspinlock.h28 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
33 : "a"(&lock->slock) in arch_spin_unlock()
37 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
48 : "a"(&lock->slock) in arch_spin_trylock()
54 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
60 : "a"(&lock->slock) in arch_spin_lock()
64 static inline void arch_spin_lock_flags(arch_spinlock_t *lock, in arch_spin_lock_flags() argument
83 : "d" (flags), "a"(&lock->slock), "i"(EPSW_IE | MN10300_CLI_LEVEL) in arch_spin_lock_flags()
104 #define arch_read_can_lock(x) ((int)(x)->lock > 0)
110 #define arch_write_can_lock(x) ((x)->lock == RW_LOCK_BIAS)
[all …]
/linux-4.4.14/arch/s390/lib/
Dspinlock.c35 static inline void _raw_compare_and_delay(unsigned int *lock, unsigned int old) in _raw_compare_and_delay() argument
37 asm(".insn rsy,0xeb0000000022,%0,0,%1" : : "d" (old), "Q" (*lock)); in _raw_compare_and_delay()
47 owner = ACCESS_ONCE(lp->lock); in arch_spin_lock_wait()
50 if (_raw_compare_and_swap(&lp->lock, 0, cpu)) in arch_spin_lock_wait()
63 _raw_compare_and_delay(&lp->lock, owner); in arch_spin_lock_wait()
64 owner = ACCESS_ONCE(lp->lock); in arch_spin_lock_wait()
86 owner = ACCESS_ONCE(lp->lock); in arch_spin_lock_wait_flags()
90 if (_raw_compare_and_swap(&lp->lock, 0, cpu)) in arch_spin_lock_wait_flags()
103 _raw_compare_and_delay(&lp->lock, owner); in arch_spin_lock_wait_flags()
104 owner = ACCESS_ONCE(lp->lock); in arch_spin_lock_wait_flags()
[all …]
/linux-4.4.14/fs/lockd/
Dsvclock.c46 static int nlmsvc_setgrantargs(struct nlm_rqst *call, struct nlm_lock *lock);
144 nlmsvc_lookup_block(struct nlm_file *file, struct nlm_lock *lock) in nlmsvc_lookup_block() argument
150 file, lock->fl.fl_pid, in nlmsvc_lookup_block()
151 (long long)lock->fl.fl_start, in nlmsvc_lookup_block()
152 (long long)lock->fl.fl_end, lock->fl.fl_type); in nlmsvc_lookup_block()
154 fl = &block->b_call->a_args.lock.fl; in nlmsvc_lookup_block()
160 if (block->b_file == file && nlm_compare_locks(fl, &lock->fl)) { in nlmsvc_lookup_block()
216 struct nlm_file *file, struct nlm_lock *lock, in nlmsvc_create_block() argument
234 if (!nlmsvc_setgrantargs(call, lock)) in nlmsvc_create_block()
238 call->a_args.lock.fl.fl_flags |= FL_SLEEP; in nlmsvc_create_block()
[all …]
Dxdr4.c106 nlm4_decode_lock(__be32 *p, struct nlm_lock *lock) in nlm4_decode_lock() argument
108 struct file_lock *fl = &lock->fl; in nlm4_decode_lock()
112 if (!(p = xdr_decode_string_inplace(p, &lock->caller, in nlm4_decode_lock()
113 &lock->len, NLM_MAXSTRLEN)) in nlm4_decode_lock()
114 || !(p = nlm4_decode_fh(p, &lock->fh)) in nlm4_decode_lock()
115 || !(p = nlm4_decode_oh(p, &lock->oh))) in nlm4_decode_lock()
117 lock->svid = ntohl(*p++); in nlm4_decode_lock()
121 fl->fl_pid = (pid_t)lock->svid; in nlm4_decode_lock()
151 struct file_lock *fl = &resp->lock.fl; in nlm4_encode_testres()
154 *p++ = htonl(resp->lock.svid); in nlm4_encode_testres()
[all …]
Dxdr.c114 nlm_decode_lock(__be32 *p, struct nlm_lock *lock) in nlm_decode_lock() argument
116 struct file_lock *fl = &lock->fl; in nlm_decode_lock()
119 if (!(p = xdr_decode_string_inplace(p, &lock->caller, in nlm_decode_lock()
120 &lock->len, in nlm_decode_lock()
122 || !(p = nlm_decode_fh(p, &lock->fh)) in nlm_decode_lock()
123 || !(p = nlm_decode_oh(p, &lock->oh))) in nlm_decode_lock()
125 lock->svid = ntohl(*p++); in nlm_decode_lock()
129 fl->fl_pid = (pid_t)lock->svid; in nlm_decode_lock()
158 struct file_lock *fl = &resp->lock.fl; in nlm_encode_testres()
161 *p++ = htonl(resp->lock.svid); in nlm_encode_testres()
[all …]
Dclntxdr.c60 static void nlm_compute_offsets(const struct nlm_lock *lock, in nlm_compute_offsets() argument
63 const struct file_lock *fl = &lock->fl; in nlm_compute_offsets()
259 const struct nlm_lock *lock = &result->lock; in encode_nlm_holder() local
263 encode_bool(xdr, lock->fl.fl_type == F_RDLCK); in encode_nlm_holder()
264 encode_int32(xdr, lock->svid); in encode_nlm_holder()
265 encode_netobj(xdr, lock->oh.data, lock->oh.len); in encode_nlm_holder()
268 nlm_compute_offsets(lock, &l_offset, &l_len); in encode_nlm_holder()
275 struct nlm_lock *lock = &result->lock; in decode_nlm_holder() local
276 struct file_lock *fl = &lock->fl; in decode_nlm_holder()
282 memset(lock, 0, sizeof(*lock)); in decode_nlm_holder()
[all …]
Dclnt4xdr.c64 static void nlm4_compute_offsets(const struct nlm_lock *lock, in nlm4_compute_offsets() argument
67 const struct file_lock *fl = &lock->fl; in nlm4_compute_offsets()
264 const struct nlm_lock *lock = &result->lock; in encode_nlm4_holder() local
268 encode_bool(xdr, lock->fl.fl_type == F_RDLCK); in encode_nlm4_holder()
269 encode_int32(xdr, lock->svid); in encode_nlm4_holder()
270 encode_netobj(xdr, lock->oh.data, lock->oh.len); in encode_nlm4_holder()
273 nlm4_compute_offsets(lock, &l_offset, &l_len); in encode_nlm4_holder()
280 struct nlm_lock *lock = &result->lock; in decode_nlm4_holder() local
281 struct file_lock *fl = &lock->fl; in decode_nlm4_holder()
288 memset(lock, 0, sizeof(*lock)); in decode_nlm4_holder()
[all …]
/linux-4.4.14/drivers/staging/lustre/lustre/osc/
Dosc_lock.c79 struct ldlm_lock *lock; in osc_handle_ptr() local
81 lock = ldlm_handle2lock(handle); in osc_handle_ptr()
82 if (lock != NULL) in osc_handle_ptr()
83 LDLM_LOCK_PUT(lock); in osc_handle_ptr()
84 return lock; in osc_handle_ptr()
92 struct ldlm_lock *lock = osc_handle_ptr(&ols->ols_handle); in osc_lock_invariant() local
111 ergo(lock != NULL && olock != NULL, lock == olock) && in osc_lock_invariant()
112 ergo(lock == NULL, olock == NULL))) in osc_lock_invariant()
258 const struct cl_lock *lock, in osc_lock_build_policy() argument
261 const struct cl_lock_descr *d = &lock->cll_descr; in osc_lock_build_policy()
[all …]
/linux-4.4.14/drivers/clk/mmp/
Dclk-apbc.c32 spinlock_t *lock; member
45 if (apbc->lock) in clk_apbc_prepare()
46 spin_lock_irqsave(apbc->lock, flags); in clk_apbc_prepare()
54 if (apbc->lock) in clk_apbc_prepare()
55 spin_unlock_irqrestore(apbc->lock, flags); in clk_apbc_prepare()
59 if (apbc->lock) in clk_apbc_prepare()
60 spin_lock_irqsave(apbc->lock, flags); in clk_apbc_prepare()
66 if (apbc->lock) in clk_apbc_prepare()
67 spin_unlock_irqrestore(apbc->lock, flags); in clk_apbc_prepare()
72 if (apbc->lock) in clk_apbc_prepare()
[all …]
Dclk-gate.c34 if (gate->lock) in mmp_clk_gate_enable()
35 spin_lock_irqsave(gate->lock, flags); in mmp_clk_gate_enable()
42 if (gate->lock) in mmp_clk_gate_enable()
43 spin_unlock_irqrestore(gate->lock, flags); in mmp_clk_gate_enable()
60 if (gate->lock) in mmp_clk_gate_disable()
61 spin_lock_irqsave(gate->lock, flags); in mmp_clk_gate_disable()
68 if (gate->lock) in mmp_clk_gate_disable()
69 spin_unlock_irqrestore(gate->lock, flags); in mmp_clk_gate_disable()
78 if (gate->lock) in mmp_clk_gate_is_enabled()
79 spin_lock_irqsave(gate->lock, flags); in mmp_clk_gate_is_enabled()
[all …]
Dclk-apmu.c26 spinlock_t *lock; member
35 if (apmu->lock) in clk_apmu_enable()
36 spin_lock_irqsave(apmu->lock, flags); in clk_apmu_enable()
41 if (apmu->lock) in clk_apmu_enable()
42 spin_unlock_irqrestore(apmu->lock, flags); in clk_apmu_enable()
53 if (apmu->lock) in clk_apmu_disable()
54 spin_lock_irqsave(apmu->lock, flags); in clk_apmu_disable()
59 if (apmu->lock) in clk_apmu_disable()
60 spin_unlock_irqrestore(apmu->lock, flags); in clk_apmu_disable()
69 void __iomem *base, u32 enable_mask, spinlock_t *lock) in mmp_clk_register_apmu() argument
[all …]
/linux-4.4.14/Documentation/locking/
Dlockstat.txt10 Because things like lock contention can severely impact performance.
14 Lockdep already has hooks in the lock functions and maps lock instances to
15 lock classes. We build on that (see Documentation/locking/lockdep-design.txt).
16 The graph below shows the relation between the lock functions and the various
21 lock _____
39 lock, unlock - the regular lock functions
45 con-bounces - number of lock contentions that involved x-cpu data
46 contentions - number of lock acquisitions that had to wait
47 wait time min - shortest (non-0) time we ever had to wait for a lock
48 max - longest time we ever had to wait for a lock
[all …]
Dlockdep-design.txt14 tens of thousands of) instantiations. For example a lock in the inode
16 lock class.
18 The validator tracks the 'state' of lock-classes, and it tracks
19 dependencies between different lock-classes. The validator maintains a
22 Unlike a lock instantiation, the lock-class itself never goes away: when
23 a lock-class is used for the first time after bootup it gets registered,
24 and all subsequent uses of that lock-class will be attached to this
25 lock-class.
30 The validator tracks lock-class usage history into 4n + 1 separate state bits:
47 modprobe/2287 is trying to acquire lock:
[all …]
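A minimal sketch of the class/instance distinction described above (the structure and function names are hypothetical; only kzalloc() and spin_lock_init() are real kernel APIs):

#include <linux/slab.h>
#include <linux/spinlock.h>

struct my_node {			/* hypothetical structure */
	spinlock_t lock;		/* one lock instance per node */
	int data;
};

static struct my_node *my_node_alloc(void)
{
	struct my_node *n = kzalloc(sizeof(*n), GFP_KERNEL);

	if (!n)
		return NULL;
	/*
	 * Every node carries its own spinlock instance, but all of them
	 * are initialized from this single call site, so lockdep registers
	 * and tracks them as one lock-class.
	 */
	spin_lock_init(&n->lock);
	return n;
}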
Dmutex-design.txt26 transitions that can occur during the lifetime of a lock:
34 a pointer to the lock task owner (->owner) as well as a spinner MCS
35 lock (->osq), both described below in (ii).
38 taken, depending on the state of the lock:
40 (i) fastpath: tries to atomically acquire the lock by decrementing the
46 e21: f0 ff 0b lock decl (%rbx)
52 bc8: f0 ff 07 lock incl (%rdi)
57 while the lock owner is running and there are no other tasks ready
59 that if the lock owner is running, it is likely to release the lock
60 soon. The mutex spinners are queued up using MCS lock so that only
[all …]
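For reference, the uncontended usage that exercises the fastpath shown in the disassembly above looks like the following sketch (the device structure is hypothetical; mutex_init()/mutex_lock()/mutex_unlock() are the standard API):

#include <linux/mutex.h>

struct my_dev {				/* hypothetical */
	struct mutex lock;
	unsigned long count;
};

static void my_dev_init(struct my_dev *dev)
{
	mutex_init(&dev->lock);		/* starts unlocked, no waiters */
}

static void my_dev_bump(struct my_dev *dev)
{
	/* With no contention this stays on the atomic fastpath. */
	mutex_lock(&dev->lock);
	dev->count++;
	mutex_unlock(&dev->lock);
}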
/linux-4.4.14/drivers/staging/lustre/lustre/include/
Dlustre_dlm.h270 typedef int (*ldlm_cancel_for_recovery)(struct ldlm_lock *lock);
289 int (*lvbo_size)(struct ldlm_lock *lock);
291 int (*lvbo_fill)(struct ldlm_lock *lock, void *buf, int buflen);
491 typedef int (*ldlm_blocking_callback)(struct ldlm_lock *lock,
495 typedef int (*ldlm_completion_callback)(struct ldlm_lock *lock, __u64 flags,
498 typedef int (*ldlm_glimpse_callback)(struct ldlm_lock *lock, void *data);
886 static inline bool ldlm_has_layout(struct ldlm_lock *lock) in ldlm_has_layout() argument
888 return lock->l_resource->lr_type == LDLM_IBITS && in ldlm_has_layout()
889 lock->l_policy_data.l_inodebits.bits & MDS_INODELOCK_LAYOUT; in ldlm_has_layout()
905 ldlm_lock_to_ns(struct ldlm_lock *lock) in ldlm_lock_to_ns() argument
[all …]
/linux-4.4.14/drivers/clk/berlin/
Dberlin2-div.c68 spinlock_t *lock; member
81 if (div->lock) in berlin2_div_is_enabled()
82 spin_lock(div->lock); in berlin2_div_is_enabled()
87 if (div->lock) in berlin2_div_is_enabled()
88 spin_unlock(div->lock); in berlin2_div_is_enabled()
99 if (div->lock) in berlin2_div_enable()
100 spin_lock(div->lock); in berlin2_div_enable()
106 if (div->lock) in berlin2_div_enable()
107 spin_unlock(div->lock); in berlin2_div_enable()
118 if (div->lock) in berlin2_div_disable()
[all …]
/linux-4.4.14/drivers/gpu/drm/via/
Dvia_video.c40 XVMCLOCKPTR(dev_priv->sarea_priv, i)->lock = 0; in via_init_futex()
51 volatile int *lock; in via_release_futex() local
57 lock = (volatile int *)XVMCLOCKPTR(dev_priv->sarea_priv, i); in via_release_futex()
58 if ((_DRM_LOCKING_CONTEXT(*lock) == context)) { in via_release_futex()
59 if (_DRM_LOCK_IS_HELD(*lock) in via_release_futex()
60 && (*lock & _DRM_LOCK_CONT)) { in via_release_futex()
63 *lock = 0; in via_release_futex()
71 volatile int *lock; in via_decoder_futex() local
78 if (fx->lock >= VIA_NR_XVMC_LOCKS) in via_decoder_futex()
81 lock = (volatile int *)XVMCLOCKPTR(sAPriv, fx->lock); in via_decoder_futex()
[all …]
/linux-4.4.14/arch/parisc/include/asm/
Dspinlock.h15 #define arch_spin_lock(lock) arch_spin_lock_flags(lock, 0) argument
77 arch_spin_lock_flags(&rw->lock, flags); in arch_read_lock()
79 arch_spin_unlock(&rw->lock); in arch_read_lock()
89 arch_spin_lock_flags(&rw->lock, flags); in arch_read_unlock()
91 arch_spin_unlock(&rw->lock); in arch_read_unlock()
102 if (arch_spin_trylock(&rw->lock)) { in arch_read_trylock()
104 arch_spin_unlock(&rw->lock); in arch_read_trylock()
115 while (arch_spin_is_locked(&rw->lock) && rw->counter >= 0) in arch_read_trylock()
128 arch_spin_lock_flags(&rw->lock, flags); in arch_write_lock()
131 arch_spin_unlock(&rw->lock); in arch_write_lock()
[all …]
/linux-4.4.14/tools/perf/Documentation/
Dperf-lock.txt1 perf-lock(1)
6 perf-lock - Analyze lock events
11 'perf lock' {record|report|script|info}
15 You can analyze various lock behaviours
16 and statistics with this 'perf lock' command.
18 'perf lock record <command>' records lock events
21 results of lock events.
23 'perf lock report' reports statistical data.
25 'perf lock script' shows raw lock events.
27 'perf lock info' shows metadata like threads or addresses
[all …]
/linux-4.4.14/arch/arc/include/asm/
Dspinlock.h17 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) argument
28 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
41 : [slock] "r" (&(lock->slock)), in arch_spin_lock()
49 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
65 : [slock] "r" (&(lock->slock)), in arch_spin_trylock()
74 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
78 lock->slock = __ARCH_SPIN_LOCK_UNLOCKED__; in arch_spin_unlock()
271 static inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
289 : [slock] "r" (&(lock->slock)), in arch_spin_lock()
297 static inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
[all …]
/linux-4.4.14/Documentation/vm/
Dsplit_page_table_lock1 Split page table lock
6 multi-threaded applications due to high contention on the lock. To improve
7 scalability, split page table lock was introduced.
9 With split page table lock we have a separate per-table lock to serialize
10 access to the table. At the moment we use split lock for PTE and PMD
13 There are helpers to lock/unlock a table and other accessor functions:
15 maps pte and takes PTE table lock, returns pointer to the taken
16 lock;
20 allocates PTE table if needed and takes the lock, returns pointer
21 to the taken lock or NULL if allocation failed;
[all …]
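A typical use of the helpers listed above, as a sketch (assumes the range [addr, end) is covered by the single PTE table under pmd; walk_one_pte_table() is a hypothetical name):

#include <linux/mm.h>

static void walk_one_pte_table(struct mm_struct *mm, pmd_t *pmd,
			       unsigned long addr, unsigned long end)
{
	pte_t *start_pte, *pte;
	spinlock_t *ptl;

	/* Maps the PTE table and takes its per-table lock. */
	start_pte = pte_offset_map_lock(mm, pmd, addr, &ptl);
	for (pte = start_pte; addr < end; addr += PAGE_SIZE, pte++) {
		if (pte_present(*pte))
			; /* inspect or modify the entry here */
	}
	/* Drops the per-table lock and unmaps the table. */
	pte_unmap_unlock(start_pte, ptl);
}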
/linux-4.4.14/arch/mips/vr41xx/common/
Dicu.c162 raw_spin_lock_irqsave(&desc->lock, flags); in vr41xx_enable_piuint()
164 raw_spin_unlock_irqrestore(&desc->lock, flags); in vr41xx_enable_piuint()
177 raw_spin_lock_irqsave(&desc->lock, flags); in vr41xx_disable_piuint()
179 raw_spin_unlock_irqrestore(&desc->lock, flags); in vr41xx_disable_piuint()
192 raw_spin_lock_irqsave(&desc->lock, flags); in vr41xx_enable_aiuint()
194 raw_spin_unlock_irqrestore(&desc->lock, flags); in vr41xx_enable_aiuint()
207 raw_spin_lock_irqsave(&desc->lock, flags); in vr41xx_disable_aiuint()
209 raw_spin_unlock_irqrestore(&desc->lock, flags); in vr41xx_disable_aiuint()
222 raw_spin_lock_irqsave(&desc->lock, flags); in vr41xx_enable_kiuint()
224 raw_spin_unlock_irqrestore(&desc->lock, flags); in vr41xx_enable_kiuint()
[all …]
/linux-4.4.14/arch/mips/include/asm/octeon/
Dcvmx-spinlock.h63 static inline void cvmx_spinlock_init(cvmx_spinlock_t *lock) in cvmx_spinlock_init() argument
65 lock->value = CVMX_SPINLOCK_UNLOCKED_VAL; in cvmx_spinlock_init()
74 static inline int cvmx_spinlock_locked(cvmx_spinlock_t *lock) in cvmx_spinlock_locked() argument
76 return lock->value != CVMX_SPINLOCK_UNLOCKED_VAL; in cvmx_spinlock_locked()
84 static inline void cvmx_spinlock_unlock(cvmx_spinlock_t *lock) in cvmx_spinlock_unlock() argument
87 lock->value = 0; in cvmx_spinlock_unlock()
103 static inline unsigned int cvmx_spinlock_trylock(cvmx_spinlock_t *lock) in cvmx_spinlock_trylock() argument
117 [val] "+m"(lock->value), [tmp] "=&r"(tmp) in cvmx_spinlock_trylock()
128 static inline void cvmx_spinlock_lock(cvmx_spinlock_t *lock) in cvmx_spinlock_lock() argument
140 [val] "+m"(lock->value), [tmp] "=&r"(tmp) in cvmx_spinlock_lock()
/linux-4.4.14/drivers/base/
Dmap.c26 int (*lock)(dev_t, void *); member
29 struct mutex *lock; member
34 int (*lock)(dev_t, void *), void *data) in kobj_map()
51 p->lock = lock; in kobj_map()
56 mutex_lock(domain->lock); in kobj_map()
64 mutex_unlock(domain->lock); in kobj_map()
78 mutex_lock(domain->lock); in kobj_unmap()
91 mutex_unlock(domain->lock); in kobj_unmap()
102 mutex_lock(domain->lock); in kobj_lookup()
119 if (p->lock && p->lock(dev, data) < 0) { in kobj_lookup()
[all …]
/linux-4.4.14/drivers/base/power/
Druntime.c202 spin_lock_irq(&dev->power.lock); in pm_runtime_set_memalloc_noio()
205 spin_unlock_irq(&dev->power.lock); in pm_runtime_set_memalloc_noio()
267 __releases(&dev->power.lock) __acquires(&dev->power.lock) in __rpm_callback()
272 spin_unlock(&dev->power.lock); in __rpm_callback()
274 spin_unlock_irq(&dev->power.lock); in __rpm_callback()
279 spin_lock(&dev->power.lock); in __rpm_callback()
281 spin_lock_irq(&dev->power.lock); in __rpm_callback()
416 __releases(&dev->power.lock) __acquires(&dev->power.lock) in rpm_suspend()
475 spin_unlock(&dev->power.lock); in rpm_suspend()
479 spin_lock(&dev->power.lock); in rpm_suspend()
[all …]
/linux-4.4.14/include/drm/
Ddrm_modeset_lock.h98 static inline void drm_modeset_lock_init(struct drm_modeset_lock *lock) in drm_modeset_lock_init() argument
100 ww_mutex_init(&lock->mutex, &crtc_ww_class); in drm_modeset_lock_init()
101 INIT_LIST_HEAD(&lock->head); in drm_modeset_lock_init()
108 static inline void drm_modeset_lock_fini(struct drm_modeset_lock *lock) in drm_modeset_lock_fini() argument
110 WARN_ON(!list_empty(&lock->head)); in drm_modeset_lock_fini()
117 static inline bool drm_modeset_is_locked(struct drm_modeset_lock *lock) in drm_modeset_is_locked() argument
119 return ww_mutex_is_locked(&lock->mutex); in drm_modeset_is_locked()
122 int drm_modeset_lock(struct drm_modeset_lock *lock,
124 int drm_modeset_lock_interruptible(struct drm_modeset_lock *lock,
126 void drm_modeset_unlock(struct drm_modeset_lock *lock);
/linux-4.4.14/sound/core/seq/
Dseq_timer.c61 spin_lock_init(&tmr->lock); in snd_seq_timer_new()
95 spin_lock_irqsave(&tmr->lock, flags); in snd_seq_timer_defaults()
111 spin_unlock_irqrestore(&tmr->lock, flags); in snd_seq_timer_defaults()
128 spin_lock_irqsave(&tmr->lock, flags); in snd_seq_timer_reset()
130 spin_unlock_irqrestore(&tmr->lock, flags); in snd_seq_timer_reset()
148 spin_lock_irqsave(&tmr->lock, flags); in snd_seq_timer_interrupt()
150 spin_unlock_irqrestore(&tmr->lock, flags); in snd_seq_timer_interrupt()
170 spin_unlock_irqrestore(&tmr->lock, flags); in snd_seq_timer_interrupt()
185 spin_lock_irqsave(&tmr->lock, flags); in snd_seq_timer_set_tempo()
190 spin_unlock_irqrestore(&tmr->lock, flags); in snd_seq_timer_set_tempo()
[all …]
Dseq_fifo.c50 spin_lock_init(&f->lock); in snd_seq_fifo_new()
102 spin_lock_irqsave(&f->lock, flags); in snd_seq_fifo_clear()
107 spin_unlock_irqrestore(&f->lock, flags); in snd_seq_fifo_clear()
132 spin_lock_irqsave(&f->lock, flags); in snd_seq_fifo_event_in()
139 spin_unlock_irqrestore(&f->lock, flags); in snd_seq_fifo_event_in()
183 spin_lock_irqsave(&f->lock, flags); in snd_seq_fifo_cell_out()
187 spin_unlock_irqrestore(&f->lock, flags); in snd_seq_fifo_cell_out()
192 spin_unlock_irq(&f->lock); in snd_seq_fifo_cell_out()
194 spin_lock_irq(&f->lock); in snd_seq_fifo_cell_out()
197 spin_unlock_irqrestore(&f->lock, flags); in snd_seq_fifo_cell_out()
[all …]
/linux-4.4.14/drivers/usb/gadget/function/
Du_ether_configfs.h38 mutex_lock(&opts->lock); \
40 mutex_unlock(&opts->lock); \
51 mutex_lock(&opts->lock); \
53 mutex_unlock(&opts->lock); \
58 mutex_unlock(&opts->lock); \
73 mutex_lock(&opts->lock); \
75 mutex_unlock(&opts->lock); \
86 mutex_lock(&opts->lock); \
88 mutex_unlock(&opts->lock); \
93 mutex_unlock(&opts->lock); \
[all …]
/linux-4.4.14/drivers/usb/usbip/
Dvhci_sysfs.c38 spin_lock(&the_controller->lock); in status_show()
56 spin_lock(&vdev->ud.lock); in status_show()
70 spin_unlock(&vdev->ud.lock); in status_show()
73 spin_unlock(&the_controller->lock); in status_show()
87 spin_lock(&the_controller->lock); in vhci_port_disconnect()
91 spin_lock(&vdev->ud.lock); in vhci_port_disconnect()
96 spin_unlock(&vdev->ud.lock); in vhci_port_disconnect()
97 spin_unlock(&the_controller->lock); in vhci_port_disconnect()
103 spin_unlock(&vdev->ud.lock); in vhci_port_disconnect()
104 spin_unlock(&the_controller->lock); in vhci_port_disconnect()
[all …]
Dvhci_hcd.c126 spin_lock(&the_controller->lock); in rh_port_connect()
142 spin_unlock(&the_controller->lock); in rh_port_connect()
151 spin_lock(&the_controller->lock); in rh_port_disconnect()
157 spin_unlock(&the_controller->lock); in rh_port_disconnect()
191 spin_lock(&vhci->lock); in vhci_hub_status()
212 spin_unlock(&vhci->lock); in vhci_hub_status()
252 spin_lock(&dum->lock); in vhci_hub_control()
406 spin_unlock(&dum->lock); in vhci_hub_control()
471 spin_lock(&the_controller->lock); in vhci_urb_enqueue()
475 spin_unlock(&the_controller->lock); in vhci_urb_enqueue()
[all …]
/linux-4.4.14/drivers/misc/mic/scif/
Dscif_epd.c53 spin_lock(&ep->lock); in scif_teardown_ep()
55 spin_unlock(&ep->lock); in scif_teardown_ep()
69 spin_lock(&ep->lock); in scif_add_epd_to_zombie_list()
71 spin_unlock(&ep->lock); in scif_add_epd_to_zombie_list()
143 spin_lock(&ep->lock); in scif_cnctreq()
147 spin_unlock(&ep->lock); in scif_cnctreq()
155 spin_unlock(&ep->lock); in scif_cnctreq()
178 spin_lock(&ep->lock); in scif_cnctgnt()
188 spin_unlock(&ep->lock); in scif_cnctgnt()
204 spin_lock(&ep->lock); in scif_cnctgnt_ack()
[all …]
/linux-4.4.14/Documentation/
Drobust-futex-ABI.txt54 pointer to a single linked list of 'lock entries', one per lock,
56 to itself, 'head'. The last 'lock entry' points back to the 'head'.
59 address of the associated 'lock entry', plus or minus, of what will
60 be called the 'lock word', from that 'lock entry'. The 'lock word'
61 is always a 32 bit word, unlike the other words above. The 'lock
63 of the thread holding the lock in the bottom 29 bits. See further
67 the address of the 'lock entry', during list insertion and removal,
71 Each 'lock entry' on the single linked list starting at 'head' consists
72 of just a single word, pointing to the next 'lock entry', or back to
73 'head' if there are no more entries. In addition, nearby to each 'lock
[all …]
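To make the 'head'/'lock entry'/offset wording above concrete, the userspace-visible layout is along these lines (cf. the robust_list structures in the futex uapi header; the comments paraphrase the text above):

struct robust_list {
	struct robust_list __user *next;	/* next 'lock entry', or back to 'head' */
};

struct robust_list_head {
	struct robust_list list;		/* the 'head' of the per-thread list */
	long futex_offset;			/* offset from a 'lock entry' to its 'lock word' */
	struct robust_list __user *list_op_pending; /* entry currently being inserted or removed */
};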
/linux-4.4.14/drivers/infiniband/core/
Diwcm.c233 spin_lock_init(&cm_id_priv->lock); in iw_create_cm_id()
293 spin_lock_irqsave(&cm_id_priv->lock, flags); in iw_cm_disconnect()
323 spin_unlock_irqrestore(&cm_id_priv->lock, flags); in iw_cm_disconnect()
361 spin_lock_irqsave(&cm_id_priv->lock, flags); in destroy_cm_id()
365 spin_unlock_irqrestore(&cm_id_priv->lock, flags); in destroy_cm_id()
368 spin_lock_irqsave(&cm_id_priv->lock, flags); in destroy_cm_id()
372 spin_unlock_irqrestore(&cm_id_priv->lock, flags); in destroy_cm_id()
375 spin_lock_irqsave(&cm_id_priv->lock, flags); in destroy_cm_id()
389 spin_unlock_irqrestore(&cm_id_priv->lock, flags); in destroy_cm_id()
391 spin_lock_irqsave(&cm_id_priv->lock, flags); in destroy_cm_id()
[all …]
/linux-4.4.14/fs/
Dfs_struct.c18 spin_lock(&fs->lock); in set_fs_root()
23 spin_unlock(&fs->lock); in set_fs_root()
37 spin_lock(&fs->lock); in set_fs_pwd()
42 spin_unlock(&fs->lock); in set_fs_pwd()
68 spin_lock(&fs->lock); in chroot_fs_refs()
77 spin_unlock(&fs->lock); in chroot_fs_refs()
100 spin_lock(&fs->lock); in exit_fs()
103 spin_unlock(&fs->lock); in exit_fs()
117 spin_lock_init(&fs->lock); in copy_fs_struct()
121 spin_lock(&old->lock); in copy_fs_struct()
[all …]
/linux-4.4.14/drivers/nfc/
Dnfcsim.c38 struct mutex lock; member
69 mutex_lock(&dev->lock); in nfcsim_cleanup_dev()
77 mutex_unlock(&dev->lock); in nfcsim_cleanup_dev()
103 mutex_lock(&dev->lock); in nfcsim_dev_up()
107 mutex_unlock(&dev->lock); in nfcsim_dev_up()
118 mutex_lock(&dev->lock); in nfcsim_dev_down()
122 mutex_unlock(&dev->lock); in nfcsim_dev_down()
139 mutex_lock(&peer->lock); in nfcsim_dep_link_up()
148 mutex_unlock(&peer->lock); in nfcsim_dep_link_up()
152 mutex_unlock(&peer->lock); in nfcsim_dep_link_up()
[all …]
/linux-4.4.14/drivers/net/wireless/cw1200/
Dqueue.c105 spin_lock_bh(&stats->lock); in __cw1200_queue_gc()
109 spin_unlock_bh(&stats->lock); in __cw1200_queue_gc()
139 spin_lock_bh(&queue->lock); in cw1200_queue_gc()
141 spin_unlock_bh(&queue->lock); in cw1200_queue_gc()
154 spin_lock_init(&stats->lock); in cw1200_queue_stats_init()
181 spin_lock_init(&queue->lock); in cw1200_queue_init()
210 spin_lock_bh(&queue->lock); in cw1200_queue_clear()
222 spin_lock_bh(&stats->lock); in cw1200_queue_clear()
228 spin_unlock_bh(&stats->lock); in cw1200_queue_clear()
233 spin_unlock_bh(&queue->lock); in cw1200_queue_clear()
[all …]
/linux-4.4.14/scripts/coccinelle/locks/
Ddouble_lock.cocci3 /// an intervening function call that releases the lock.
43 identifier lock,unlock;
51 lock(E1@p,...)
67 identifier lock;
71 lock@p1 (E1@p,...);
74 lock@p2 (E1,...);
79 lock << r.lock;
82 cocci.print_main(lock,p1)
83 cocci.print_secs("second lock",p2)
88 lock << r.lock;
[all …]
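The bug pattern this semantic patch reports looks like the following contrived fragment (hypothetical function and state; the second spin_lock() on the same lock with no intervening unlock is what the lock@p1/lock@p2 pair above matches):

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(dev_lock);
static int dev_count;			/* hypothetical shared state */

static void buggy_double_lock(void)
{
	spin_lock(&dev_lock);		/* matches lock@p1 */
	dev_count++;
	/* missing spin_unlock(&dev_lock) here ... */
	spin_lock(&dev_lock);		/* matches lock@p2: self-deadlock, reported */
	dev_count--;
	spin_unlock(&dev_lock);
}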
Dmini_lock.cocci2 /// where the unlock is missing from an if branch, and there is a lock
5 /// supposed to exit with the lock held, or where there is some preceding
6 /// function call that releases the lock.
66 identifier lock,unlock;
69 *lock(E1@p,...);
80 lock << err.lock;
85 cocci.print_main(lock,p)
90 lock << err.lock;
95 msg = "preceding lock on line %s" % (p[0].line)
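The pattern mini_lock.cocci flags is a branch that exits with the lock still held, roughly as in this hypothetical fragment:

#include <linux/slab.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(obj_lock);

static int buggy_error_path(void)
{
	void *buf;

	spin_lock(&obj_lock);		/* matches *lock(E1@p,...) */
	buf = kmalloc(64, GFP_ATOMIC);
	if (!buf)
		return -ENOMEM;		/* returns without spin_unlock(): reported */
	/* ... use buf under the lock ... */
	kfree(buf);
	spin_unlock(&obj_lock);
	return 0;
}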
/linux-4.4.14/drivers/clk/
Dclk-fractional-divider.c30 if (fd->lock) in clk_fd_recalc_rate()
31 spin_lock_irqsave(fd->lock, flags); in clk_fd_recalc_rate()
33 __acquire(fd->lock); in clk_fd_recalc_rate()
37 if (fd->lock) in clk_fd_recalc_rate()
38 spin_unlock_irqrestore(fd->lock, flags); in clk_fd_recalc_rate()
40 __release(fd->lock); in clk_fd_recalc_rate()
96 if (fd->lock) in clk_fd_set_rate()
97 spin_lock_irqsave(fd->lock, flags); in clk_fd_set_rate()
99 __acquire(fd->lock); in clk_fd_set_rate()
106 if (fd->lock) in clk_fd_set_rate()
[all …]
/linux-4.4.14/drivers/gpu/ipu-v3/
Dipu-smfc.c31 spinlock_t lock; member
48 spin_lock_irqsave(&priv->lock, flags); in ipu_smfc_set_burstsize()
56 spin_unlock_irqrestore(&priv->lock, flags); in ipu_smfc_set_burstsize()
68 spin_lock_irqsave(&priv->lock, flags); in ipu_smfc_map_channel()
76 spin_unlock_irqrestore(&priv->lock, flags); in ipu_smfc_map_channel()
88 spin_lock_irqsave(&priv->lock, flags); in ipu_smfc_set_watermark()
96 spin_unlock_irqrestore(&priv->lock, flags); in ipu_smfc_set_watermark()
107 spin_lock_irqsave(&priv->lock, flags); in ipu_smfc_enable()
114 spin_unlock_irqrestore(&priv->lock, flags); in ipu_smfc_enable()
125 spin_lock_irqsave(&priv->lock, flags); in ipu_smfc_disable()
[all …]
/linux-4.4.14/fs/btrfs/
Dlocking.c50 write_unlock(&eb->lock); in btrfs_set_lock_blocking_rw()
57 read_unlock(&eb->lock); in btrfs_set_lock_blocking_rw()
79 write_lock(&eb->lock); in btrfs_clear_lock_blocking_rw()
90 read_lock(&eb->lock); in btrfs_clear_lock_blocking_rw()
112 read_lock(&eb->lock); in btrfs_tree_read_lock()
123 read_unlock(&eb->lock); in btrfs_tree_read_lock()
127 read_unlock(&eb->lock); in btrfs_tree_read_lock()
146 read_lock(&eb->lock); in btrfs_tree_read_lock_atomic()
148 read_unlock(&eb->lock); in btrfs_tree_read_lock_atomic()
165 if (!read_trylock(&eb->lock)) in btrfs_try_tree_read_lock()
[all …]
/linux-4.4.14/fs/nilfs2/
Dalloc.c141 spinlock_t *lock) in nilfs_palloc_group_desc_nfrees() argument
145 spin_lock(lock); in nilfs_palloc_group_desc_nfrees()
147 spin_unlock(lock); in nilfs_palloc_group_desc_nfrees()
159 spinlock_t *lock, u32 n) in nilfs_palloc_group_desc_add_entries() argument
163 spin_lock(lock); in nilfs_palloc_group_desc_add_entries()
166 spin_unlock(lock); in nilfs_palloc_group_desc_add_entries()
213 spinlock_t *lock) in nilfs_palloc_get_block() argument
217 spin_lock(lock); in nilfs_palloc_get_block()
221 spin_unlock(lock); in nilfs_palloc_get_block()
224 spin_unlock(lock); in nilfs_palloc_get_block()
[all …]
/linux-4.4.14/kernel/sched/
Dsched.h187 raw_spinlock_t lock; member
223 raw_spinlock_t lock; member
561 raw_spinlock_t lock; member
717 lockdep_assert_held(&rq->lock); in rq_clock()
723 lockdep_assert_held(&rq->lock); in rq_clock_task()
732 lockdep_assert_held(&rq->lock); in rq_clock_skip_update()
770 lockdep_assert_held(&rq->lock); in queue_balance_callback()
1085 rq->lock.owner = current; in finish_lock_switch()
1092 spin_acquire(&rq->lock.dep_map, 0, 0, _THIS_IP_); in finish_lock_switch()
1094 raw_spin_unlock_irq(&rq->lock); in finish_lock_switch()
[all …]
Dcompletion.c33 spin_lock_irqsave(&x->wait.lock, flags); in complete()
36 spin_unlock_irqrestore(&x->wait.lock, flags); in complete()
53 spin_lock_irqsave(&x->wait.lock, flags); in complete_all()
56 spin_unlock_irqrestore(&x->wait.lock, flags); in complete_all()
74 spin_unlock_irq(&x->wait.lock); in do_wait_for_common()
76 spin_lock_irq(&x->wait.lock); in do_wait_for_common()
92 spin_lock_irq(&x->wait.lock); in __wait_for_common()
94 spin_unlock_irq(&x->wait.lock); in __wait_for_common()
280 spin_lock_irqsave(&x->wait.lock, flags); in try_wait_for_completion()
285 spin_unlock_irqrestore(&x->wait.lock, flags); in try_wait_for_completion()
[all …]
/linux-4.4.14/drivers/usb/serial/
Dcypress_m8.c94 spinlock_t lock; /* private lock */ member
367 spin_lock_irqsave(&priv->lock, flags); in cypress_serial_control()
370 spin_unlock_irqrestore(&priv->lock, flags); in cypress_serial_control()
405 spin_lock_irqsave(&priv->lock, flags); in cypress_serial_control()
410 spin_unlock_irqrestore(&priv->lock, flags); in cypress_serial_control()
413 spin_lock_irqsave(&priv->lock, flags); in cypress_serial_control()
415 spin_unlock_irqrestore(&priv->lock, flags); in cypress_serial_control()
427 spin_lock_irqsave(&priv->lock, flags); in cypress_set_dead()
429 spin_unlock_irqrestore(&priv->lock, flags); in cypress_set_dead()
433 spin_unlock_irqrestore(&priv->lock, flags); in cypress_set_dead()
[all …]
Dgeneric.c85 spin_lock_irqsave(&port->lock, flags); in usb_serial_generic_open()
88 spin_unlock_irqrestore(&port->lock, flags); in usb_serial_generic_open()
106 spin_lock_irqsave(&port->lock, flags); in usb_serial_generic_close()
108 spin_unlock_irqrestore(&port->lock, flags); in usb_serial_generic_close()
120 return kfifo_out_locked(&port->write_fifo, dest, size, &port->lock); in usb_serial_generic_prepare_write_buffer()
143 spin_lock_irqsave(&port->lock, flags); in usb_serial_generic_write_start()
146 spin_unlock_irqrestore(&port->lock, flags); in usb_serial_generic_write_start()
151 spin_unlock_irqrestore(&port->lock, flags); in usb_serial_generic_write_start()
159 spin_lock_irqsave(&port->lock, flags); in usb_serial_generic_write_start()
161 spin_unlock_irqrestore(&port->lock, flags); in usb_serial_generic_write_start()
[all …]
Doti6858.c170 spinlock_t lock; member
226 spin_lock_irqsave(&priv->lock, flags); in setup_line()
232 spin_unlock_irqrestore(&priv->lock, flags); in setup_line()
241 spin_unlock_irqrestore(&priv->lock, flags); in setup_line()
246 spin_lock_irqsave(&priv->lock, flags); in setup_line()
250 spin_unlock_irqrestore(&priv->lock, flags); in setup_line()
269 spin_lock_irqsave(&priv->lock, flags); in send_data()
271 spin_unlock_irqrestore(&priv->lock, flags); in send_data()
277 spin_unlock_irqrestore(&priv->lock, flags); in send_data()
279 spin_lock_irqsave(&port->lock, flags); in send_data()
[all …]
Dopticon.c43 spinlock_t lock; /* protects the following flags */ member
63 spin_lock_irqsave(&priv->lock, flags); in opticon_process_status_packet()
68 spin_unlock_irqrestore(&priv->lock, flags); in opticon_process_status_packet()
134 spin_lock_irqsave(&priv->lock, flags); in opticon_open()
136 spin_unlock_irqrestore(&priv->lock, flags); in opticon_open()
173 spin_lock_irqsave(&priv->lock, flags); in opticon_write_control_callback()
175 spin_unlock_irqrestore(&priv->lock, flags); in opticon_write_control_callback()
191 spin_lock_irqsave(&priv->lock, flags); in opticon_write()
193 spin_unlock_irqrestore(&priv->lock, flags); in opticon_write()
198 spin_unlock_irqrestore(&priv->lock, flags); in opticon_write()
[all …]
/linux-4.4.14/drivers/gpu/drm/i915/
Dintel_frontbuffer.c91 mutex_lock(&dev_priv->fb_tracking.lock); in intel_fb_obj_invalidate()
96 mutex_unlock(&dev_priv->fb_tracking.lock); in intel_fb_obj_invalidate()
123 mutex_lock(&dev_priv->fb_tracking.lock); in intel_frontbuffer_flush()
125 mutex_unlock(&dev_priv->fb_tracking.lock); in intel_frontbuffer_flush()
160 mutex_lock(&dev_priv->fb_tracking.lock); in intel_fb_obj_flush()
165 mutex_unlock(&dev_priv->fb_tracking.lock); in intel_fb_obj_flush()
188 mutex_lock(&dev_priv->fb_tracking.lock); in intel_frontbuffer_flip_prepare()
192 mutex_unlock(&dev_priv->fb_tracking.lock); in intel_frontbuffer_flip_prepare()
212 mutex_lock(&dev_priv->fb_tracking.lock); in intel_frontbuffer_flip_complete()
216 mutex_unlock(&dev_priv->fb_tracking.lock); in intel_frontbuffer_flip_complete()
[all …]
/linux-4.4.14/drivers/rtc/
Drtc-tx4939.c22 spinlock_t lock; member
58 spin_lock_irq(&pdata->lock); in tx4939_rtc_set_mmss()
65 spin_unlock_irq(&pdata->lock); in tx4939_rtc_set_mmss()
77 spin_lock_irq(&pdata->lock); in tx4939_rtc_read_time()
82 spin_unlock_irq(&pdata->lock); in tx4939_rtc_read_time()
88 spin_unlock_irq(&pdata->lock); in tx4939_rtc_read_time()
116 spin_lock_irq(&pdata->lock); in tx4939_rtc_set_alarm()
122 spin_unlock_irq(&pdata->lock); in tx4939_rtc_set_alarm()
135 spin_lock_irq(&pdata->lock); in tx4939_rtc_read_alarm()
140 spin_unlock_irq(&pdata->lock); in tx4939_rtc_read_alarm()
[all …]
/linux-4.4.14/fs/afs/
Dvnode.c220 spin_lock(&vnode->lock); in afs_vnode_finalise_status_update()
225 spin_unlock(&vnode->lock); in afs_vnode_finalise_status_update()
239 spin_lock(&vnode->lock); in afs_vnode_status_update_failed()
251 spin_unlock(&vnode->lock); in afs_vnode_status_update_failed()
292 spin_lock(&vnode->lock); in afs_vnode_fetch_status()
296 spin_unlock(&vnode->lock); in afs_vnode_fetch_status()
327 spin_unlock(&vnode->lock); in afs_vnode_fetch_status()
332 spin_lock(&vnode->lock); in afs_vnode_fetch_status()
336 spin_unlock(&vnode->lock); in afs_vnode_fetch_status()
347 spin_unlock(&vnode->lock); in afs_vnode_fetch_status()
[all …]
/linux-4.4.14/arch/powerpc/lib/
Dlocks.c26 void __spin_yield(arch_spinlock_t *lock) in __spin_yield() argument
30 lock_value = lock->slock; in __spin_yield()
39 if (lock->slock != lock_value) in __spin_yield()
56 lock_value = rw->lock; in __rw_yield()
65 if (rw->lock != lock_value) in __rw_yield()
72 void arch_spin_unlock_wait(arch_spinlock_t *lock) in arch_spin_unlock_wait() argument
76 while (lock->slock) { in arch_spin_unlock_wait()
79 __spin_yield(lock); in arch_spin_unlock_wait()
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb3/
Dl2t.c135 spin_lock_bh(&e->lock); in t3_l2t_send_slow()
138 spin_unlock_bh(&e->lock); in t3_l2t_send_slow()
142 spin_lock_bh(&e->lock); in t3_l2t_send_slow()
145 spin_unlock_bh(&e->lock); in t3_l2t_send_slow()
149 spin_unlock_bh(&e->lock); in t3_l2t_send_slow()
165 spin_lock_bh(&e->lock); in t3_l2t_send_slow()
170 spin_unlock_bh(&e->lock); in t3_l2t_send_slow()
184 spin_lock_bh(&e->lock); in t3_l2t_send_event()
188 spin_unlock_bh(&e->lock); in t3_l2t_send_event()
193 spin_lock_bh(&e->lock); in t3_l2t_send_event()
[all …]
/linux-4.4.14/drivers/leds/
Dleds-bcm6328.c70 spinlock_t *lock; member
125 spin_lock_irqsave(led->lock, flags); in bcm6328_led_set()
132 spin_unlock_irqrestore(led->lock, flags); in bcm6328_led_set()
163 spin_lock_irqsave(led->lock, flags); in bcm6328_blink_set()
179 spin_unlock_irqrestore(led->lock, flags); in bcm6328_blink_set()
181 spin_unlock_irqrestore(led->lock, flags); in bcm6328_blink_set()
191 void __iomem *mem, spinlock_t *lock) in bcm6328_hwled() argument
196 spin_lock_irqsave(lock, flags); in bcm6328_hwled()
200 spin_unlock_irqrestore(lock, flags); in bcm6328_hwled()
225 spin_lock_irqsave(lock, flags); in bcm6328_hwled()
[all …]
Dleds-bcm6358.c45 spinlock_t *lock; member
93 spin_lock_irqsave(led->lock, flags); in bcm6358_led_set()
95 spin_unlock_irqrestore(led->lock, flags); in bcm6358_led_set()
99 void __iomem *mem, spinlock_t *lock) in bcm6358_led() argument
112 led->lock = lock; in bcm6358_led()
122 spin_lock_irqsave(lock, flags); in bcm6358_led()
145 spin_unlock_irqrestore(lock, flags); in bcm6358_led()
165 spinlock_t *lock; /* memory lock */ in bcm6358_leds_probe() local
177 lock = devm_kzalloc(dev, sizeof(*lock), GFP_KERNEL); in bcm6358_leds_probe()
178 if (!lock) in bcm6358_leds_probe()
[all …]
/linux-4.4.14/drivers/clk/tegra/
Dclk-pll-out.c47 if (pll_out->lock) in clk_pll_out_enable()
48 spin_lock_irqsave(pll_out->lock, flags); in clk_pll_out_enable()
57 if (pll_out->lock) in clk_pll_out_enable()
58 spin_unlock_irqrestore(pll_out->lock, flags); in clk_pll_out_enable()
69 if (pll_out->lock) in clk_pll_out_disable()
70 spin_lock_irqsave(pll_out->lock, flags); in clk_pll_out_disable()
79 if (pll_out->lock) in clk_pll_out_disable()
80 spin_unlock_irqrestore(pll_out->lock, flags); in clk_pll_out_disable()
92 spinlock_t *lock) in tegra_clk_register_pll_out() argument
112 pll_out->lock = lock; in tegra_clk_register_pll_out()
/linux-4.4.14/kernel/irq/
Dautoprobe.c47 raw_spin_lock_irq(&desc->lock); in probe_irq_on()
58 raw_spin_unlock_irq(&desc->lock); in probe_irq_on()
70 raw_spin_lock_irq(&desc->lock); in probe_irq_on()
76 raw_spin_unlock_irq(&desc->lock); in probe_irq_on()
88 raw_spin_lock_irq(&desc->lock); in probe_irq_on()
99 raw_spin_unlock_irq(&desc->lock); in probe_irq_on()
125 raw_spin_lock_irq(&desc->lock); in probe_irq_mask()
133 raw_spin_unlock_irq(&desc->lock); in probe_irq_mask()
164 raw_spin_lock_irq(&desc->lock); in probe_irq_off()
175 raw_spin_unlock_irq(&desc->lock); in probe_irq_off()
/linux-4.4.14/sound/pci/asihpi/
Dhpios.h97 spinlock_t lock; /* SEE hpios_spinlock */ member
114 spin_lock(&((l)->lock)); in cond_lock()
117 spin_lock_bh(&((l)->lock)); in cond_lock()
125 spin_unlock_bh(&((l)->lock)); in cond_unlock()
127 spin_unlock(&((l)->lock)); in cond_unlock()
130 #define hpios_msgxlock_init(obj) spin_lock_init(&(obj)->lock)
134 #define hpios_dsplock_init(obj) spin_lock_init(&(obj)->dsp_lock.lock)
143 #define hpios_alistlock_init(obj) spin_lock_init(&((obj)->list_lock.lock))
144 #define hpios_alistlock_lock(obj) spin_lock(&((obj)->list_lock.lock))
145 #define hpios_alistlock_unlock(obj) spin_unlock(&((obj)->list_lock.lock))
/linux-4.4.14/drivers/infiniband/hw/usnic/
Dusnic_fwd.c48 lockdep_assert_held(&ufdev->lock); in usnic_fwd_devcmd_locked()
77 spin_lock(&ufdev->lock); in usnic_fwd_devcmd()
79 spin_unlock(&ufdev->lock); in usnic_fwd_devcmd()
94 spin_lock_init(&ufdev->lock); in usnic_fwd_dev_alloc()
108 spin_lock(&ufdev->lock); in usnic_fwd_set_mac()
110 spin_unlock(&ufdev->lock); in usnic_fwd_set_mac()
117 spin_lock(&ufdev->lock); in usnic_fwd_add_ipaddr()
124 spin_unlock(&ufdev->lock); in usnic_fwd_add_ipaddr()
131 spin_lock(&ufdev->lock); in usnic_fwd_del_ipaddr()
133 spin_unlock(&ufdev->lock); in usnic_fwd_del_ipaddr()
[all …]
/linux-4.4.14/fs/xfs/
Dxfs_mru_cache.c107 spinlock_t lock; /* Lock to protect this struct. */ member
241 __releases(mru->lock) __acquires(mru->lock) in _xfs_mru_cache_clear_reap_list()
258 spin_unlock(&mru->lock); in _xfs_mru_cache_clear_reap_list()
265 spin_lock(&mru->lock); in _xfs_mru_cache_clear_reap_list()
287 spin_lock(&mru->lock); in _xfs_mru_cache_reap()
301 spin_unlock(&mru->lock); in _xfs_mru_cache_reap()
367 spin_lock_init(&mru->lock); in xfs_mru_cache_create()
397 spin_lock(&mru->lock); in xfs_mru_cache_flush()
399 spin_unlock(&mru->lock); in xfs_mru_cache_flush()
401 spin_lock(&mru->lock); in xfs_mru_cache_flush()
[all …]
/linux-4.4.14/drivers/tty/hvc/
Dhvcs.c265 spinlock_t lock; member
358 spin_lock_irqsave(&hvcsd->lock, flags); in hvcs_partner_vtys_show()
360 spin_unlock_irqrestore(&hvcsd->lock, flags); in hvcs_partner_vtys_show()
372 spin_lock_irqsave(&hvcsd->lock, flags); in hvcs_partner_clcs_show()
374 spin_unlock_irqrestore(&hvcsd->lock, flags); in hvcs_partner_clcs_show()
397 spin_lock_irqsave(&hvcsd->lock, flags); in hvcs_current_vty_show()
399 spin_unlock_irqrestore(&hvcsd->lock, flags); in hvcs_current_vty_show()
417 spin_lock_irqsave(&hvcsd->lock, flags); in hvcs_vterm_state_store()
420 spin_unlock_irqrestore(&hvcsd->lock, flags); in hvcs_vterm_state_store()
427 spin_unlock_irqrestore(&hvcsd->lock, flags); in hvcs_vterm_state_store()
[all …]
/linux-4.4.14/drivers/gpu/drm/vmwgfx/
Dvmwgfx_marker.c42 spin_lock_init(&queue->lock); in vmw_marker_queue_init()
49 spin_lock(&queue->lock); in vmw_marker_queue_takedown()
53 spin_unlock(&queue->lock); in vmw_marker_queue_takedown()
66 spin_lock(&queue->lock); in vmw_marker_push()
68 spin_unlock(&queue->lock); in vmw_marker_push()
80 spin_lock(&queue->lock); in vmw_marker_pull()
102 spin_unlock(&queue->lock); in vmw_marker_pull()
111 spin_lock(&queue->lock); in vmw_fifo_lag()
115 spin_unlock(&queue->lock); in vmw_fifo_lag()
136 spin_lock(&queue->lock); in vmw_wait_lag()
[all …]
Dvmwgfx_gmrid_manager.c40 spinlock_t lock; member
59 spin_lock(&gman->lock); in vmw_gmrid_man_get_node()
68 spin_unlock(&gman->lock); in vmw_gmrid_man_get_node()
73 spin_lock(&gman->lock); in vmw_gmrid_man_get_node()
90 spin_unlock(&gman->lock); in vmw_gmrid_man_get_node()
94 spin_lock(&gman->lock); in vmw_gmrid_man_get_node()
97 spin_unlock(&gman->lock); in vmw_gmrid_man_get_node()
108 spin_lock(&gman->lock); in vmw_gmrid_man_put_node()
111 spin_unlock(&gman->lock); in vmw_gmrid_man_put_node()
127 spin_lock_init(&gman->lock); in vmw_gmrid_man_init()
/linux-4.4.14/drivers/s390/scsi/
Dzfcp_reqlist.h22 spinlock_t lock; member
46 spin_lock_init(&rl->lock); in zfcp_reqlist_alloc()
109 spin_lock_irqsave(&rl->lock, flags); in zfcp_reqlist_find()
111 spin_unlock_irqrestore(&rl->lock, flags); in zfcp_reqlist_find()
134 spin_lock_irqsave(&rl->lock, flags); in zfcp_reqlist_find_rm()
138 spin_unlock_irqrestore(&rl->lock, flags); in zfcp_reqlist_find_rm()
161 spin_lock_irqsave(&rl->lock, flags); in zfcp_reqlist_add()
163 spin_unlock_irqrestore(&rl->lock, flags); in zfcp_reqlist_add()
177 spin_lock_irqsave(&rl->lock, flags); in zfcp_reqlist_move()
180 spin_unlock_irqrestore(&rl->lock, flags); in zfcp_reqlist_move()
/linux-4.4.14/drivers/clk/spear/
Dclk-vco-pll.c131 if (pll->vco->lock) in clk_pll_recalc_rate()
132 spin_lock_irqsave(pll->vco->lock, flags); in clk_pll_recalc_rate()
136 if (pll->vco->lock) in clk_pll_recalc_rate()
137 spin_unlock_irqrestore(pll->vco->lock, flags); in clk_pll_recalc_rate()
154 if (pll->vco->lock) in clk_pll_set_rate()
155 spin_lock_irqsave(pll->vco->lock, flags); in clk_pll_set_rate()
162 if (pll->vco->lock) in clk_pll_set_rate()
163 spin_unlock_irqrestore(pll->vco->lock, flags); in clk_pll_set_rate()
199 if (vco->lock) in clk_vco_recalc_rate()
200 spin_lock_irqsave(vco->lock, flags); in clk_vco_recalc_rate()
[all …]
Dclk-frac-synth.c75 if (frac->lock) in clk_frac_recalc_rate()
76 spin_lock_irqsave(frac->lock, flags); in clk_frac_recalc_rate()
80 if (frac->lock) in clk_frac_recalc_rate()
81 spin_unlock_irqrestore(frac->lock, flags); in clk_frac_recalc_rate()
106 if (frac->lock) in clk_frac_set_rate()
107 spin_lock_irqsave(frac->lock, flags); in clk_frac_set_rate()
113 if (frac->lock) in clk_frac_set_rate()
114 spin_unlock_irqrestore(frac->lock, flags); in clk_frac_set_rate()
127 struct frac_rate_tbl *rtbl, u8 rtbl_cnt, spinlock_t *lock) in clk_register_frac() argument
148 frac->lock = lock; in clk_register_frac()
Dclk-gpt-synth.c62 if (gpt->lock) in clk_gpt_recalc_rate()
63 spin_lock_irqsave(gpt->lock, flags); in clk_gpt_recalc_rate()
67 if (gpt->lock) in clk_gpt_recalc_rate()
68 spin_unlock_irqrestore(gpt->lock, flags); in clk_gpt_recalc_rate()
91 if (gpt->lock) in clk_gpt_set_rate()
92 spin_lock_irqsave(gpt->lock, flags); in clk_gpt_set_rate()
102 if (gpt->lock) in clk_gpt_set_rate()
103 spin_unlock_irqrestore(gpt->lock, flags); in clk_gpt_set_rate()
116 rtbl_cnt, spinlock_t *lock) in clk_register_gpt() argument
137 gpt->lock = lock; in clk_register_gpt()
Dclk.h55 spinlock_t *lock; member
68 spinlock_t *lock; member
82 spinlock_t *lock; member
99 spinlock_t *lock; member
106 spinlock_t *lock; member
116 u8 rtbl_cnt, spinlock_t *lock, struct clk **gate_clk);
119 struct frac_rate_tbl *rtbl, u8 rtbl_cnt, spinlock_t *lock);
122 rtbl_cnt, spinlock_t *lock);
127 spinlock_t *lock, struct clk **pll_clk,
/linux-4.4.14/drivers/mfd/
Dmcp-core.c67 spin_lock_irqsave(&mcp->lock, flags); in mcp_set_telecom_divisor()
69 spin_unlock_irqrestore(&mcp->lock, flags); in mcp_set_telecom_divisor()
84 spin_lock_irqsave(&mcp->lock, flags); in mcp_set_audio_divisor()
86 spin_unlock_irqrestore(&mcp->lock, flags); in mcp_set_audio_divisor()
103 spin_lock_irqsave(&mcp->lock, flags); in mcp_reg_write()
105 spin_unlock_irqrestore(&mcp->lock, flags); in mcp_reg_write()
122 spin_lock_irqsave(&mcp->lock, flags); in mcp_reg_read()
124 spin_unlock_irqrestore(&mcp->lock, flags); in mcp_reg_read()
141 spin_lock_irqsave(&mcp->lock, flags); in mcp_enable()
144 spin_unlock_irqrestore(&mcp->lock, flags); in mcp_enable()
[all …]
/linux-4.4.14/virt/lib/
Dirqbypass.c30 static DEFINE_MUTEX(lock);
97 mutex_lock(&lock); in irq_bypass_register_producer()
101 mutex_unlock(&lock); in irq_bypass_register_producer()
111 mutex_unlock(&lock); in irq_bypass_register_producer()
121 mutex_unlock(&lock); in irq_bypass_register_producer()
144 mutex_lock(&lock); in irq_bypass_unregister_producer()
162 mutex_unlock(&lock); in irq_bypass_unregister_producer()
188 mutex_lock(&lock); in irq_bypass_register_consumer()
192 mutex_unlock(&lock); in irq_bypass_register_consumer()
202 mutex_unlock(&lock); in irq_bypass_register_consumer()
[all …]
/linux-4.4.14/drivers/hsi/clients/
Dssi_protocol.c147 spinlock_t lock; member
231 spin_lock_bh(&ssi->lock); in ssip_release_cmd()
233 spin_unlock_bh(&ssi->lock); in ssip_release_cmd()
242 spin_lock_bh(&ssi->lock); in ssip_claim_cmd()
245 spin_unlock_bh(&ssi->lock); in ssip_claim_cmd()
356 spin_lock_bh(&ssi->lock); in ssip_slave_start_tx()
361 spin_unlock_bh(&ssi->lock); in ssip_slave_start_tx()
375 spin_lock_bh(&ssi->lock); in ssip_slave_stop_tx()
381 spin_unlock_bh(&ssi->lock); in ssip_slave_stop_tx()
405 spin_lock_bh(&ssi->lock); in ssip_reset()
[all …]
Dcmt_speech.c59 spinlock_t lock; member
131 spinlock_t lock; member
148 spin_lock(&cs_char_data.lock); in cs_notify()
151 spin_unlock(&cs_char_data.lock); in cs_notify()
159 spin_unlock(&cs_char_data.lock); in cs_notify()
166 spin_unlock(&cs_char_data.lock); in cs_notify()
197 spin_lock(&cs_char_data.lock); in cs_notify_data()
207 spin_unlock(&cs_char_data.lock); in cs_notify_data()
233 spin_lock(&hi->lock); in cs_cmd_destructor()
249 spin_unlock(&hi->lock); in cs_cmd_destructor()
[all …]
/linux-4.4.14/drivers/clk/zynq/
Dpll.c39 spinlock_t *lock; member
113 spin_lock_irqsave(clk->lock, flags); in zynq_pll_is_enabled()
117 spin_unlock_irqrestore(clk->lock, flags); in zynq_pll_is_enabled()
139 spin_lock_irqsave(clk->lock, flags); in zynq_pll_enable()
147 spin_unlock_irqrestore(clk->lock, flags); in zynq_pll_enable()
169 spin_lock_irqsave(clk->lock, flags); in zynq_pll_disable()
175 spin_unlock_irqrestore(clk->lock, flags); in zynq_pll_disable()
198 spinlock_t *lock) in clk_register_zynq_pll() argument
222 pll->lock = lock; in clk_register_zynq_pll()
224 spin_lock_irqsave(pll->lock, flags); in clk_register_zynq_pll()
[all …]
/linux-4.4.14/drivers/isdn/mISDN/
Dtimerdev.c40 spinlock_t lock; /* protect lists */ member
63 spin_lock_init(&dev->lock); in mISDN_open()
80 spin_lock_irq(&dev->lock); in mISDN_close()
83 spin_unlock_irq(&dev->lock); in mISDN_close()
85 spin_lock_irq(&dev->lock); in mISDN_close()
90 spin_unlock_irq(&dev->lock); in mISDN_close()
114 spin_lock_irq(&dev->lock); in mISDN_read()
116 spin_unlock_irq(&dev->lock); in mISDN_read()
123 spin_lock_irq(&dev->lock); in mISDN_read()
130 spin_unlock_irq(&dev->lock); in mISDN_read()
[all …]
/linux-4.4.14/drivers/infiniband/hw/cxgb4/
Dresource.c103 mutex_lock(&uctx->lock); in c4iw_get_cqid()
114 mutex_lock(&rdev->stats.lock); in c4iw_get_cqid()
116 mutex_unlock(&rdev->stats.lock); in c4iw_get_cqid()
143 mutex_unlock(&uctx->lock); in c4iw_get_cqid()
145 mutex_lock(&rdev->stats.lock); in c4iw_get_cqid()
148 mutex_unlock(&rdev->stats.lock); in c4iw_get_cqid()
162 mutex_lock(&uctx->lock); in c4iw_put_cqid()
164 mutex_unlock(&uctx->lock); in c4iw_put_cqid()
173 mutex_lock(&uctx->lock); in c4iw_get_qpid()
183 mutex_lock(&rdev->stats.lock); in c4iw_get_qpid()
[all …]
/linux-4.4.14/drivers/tty/ipwireless/
Dnetwork.c49 spinlock_t lock; member
71 spin_lock_irqsave(&network->lock, flags); in notify_packet_sent()
76 spin_unlock_irqrestore(&network->lock, flags); in notify_packet_sent()
82 spin_unlock_irqrestore(&network->lock, flags); in notify_packet_sent()
84 spin_unlock_irqrestore(&network->lock, flags); in notify_packet_sent()
96 spin_lock_irqsave(&network->lock, flags); in ipwireless_ppp_start_xmit()
106 spin_unlock_irqrestore(&network->lock, flags); in ipwireless_ppp_start_xmit()
147 spin_unlock_irqrestore(&network->lock, flags); in ipwireless_ppp_start_xmit()
254 spin_lock_irqsave(&network->lock, flags); in do_go_online()
258 spin_unlock_irqrestore(&network->lock, flags); in do_go_online()
[all …]
/linux-4.4.14/drivers/gpio/
Dgpio-cs5535.c50 spinlock_t lock; member
98 spin_lock_irqsave(&chip->lock, flags); in cs5535_gpio_set()
100 spin_unlock_irqrestore(&chip->lock, flags); in cs5535_gpio_set()
120 spin_lock_irqsave(&chip->lock, flags); in cs5535_gpio_clear()
122 spin_unlock_irqrestore(&chip->lock, flags); in cs5535_gpio_clear()
132 spin_lock_irqsave(&chip->lock, flags); in cs5535_gpio_isset()
141 spin_unlock_irqrestore(&chip->lock, flags); in cs5535_gpio_isset()
180 spin_lock_irqsave(&chip->lock, flags); in cs5535_gpio_setup_event()
194 spin_unlock_irqrestore(&chip->lock, flags); in cs5535_gpio_setup_event()
208 spin_lock_irqsave(&chip->lock, flags); in chip_gpio_request()
[all …]
Dgpio-mpc8xxx.c35 raw_spinlock_t lock; member
98 raw_spin_lock_irqsave(&mpc8xxx_gc->lock, flags); in mpc8xxx_gpio_set()
107 raw_spin_unlock_irqrestore(&mpc8xxx_gc->lock, flags); in mpc8xxx_gpio_set()
118 raw_spin_lock_irqsave(&mpc8xxx_gc->lock, flags); in mpc8xxx_gpio_set_multiple()
133 raw_spin_unlock_irqrestore(&mpc8xxx_gc->lock, flags); in mpc8xxx_gpio_set_multiple()
142 raw_spin_lock_irqsave(&mpc8xxx_gc->lock, flags); in mpc8xxx_gpio_dir_in()
146 raw_spin_unlock_irqrestore(&mpc8xxx_gc->lock, flags); in mpc8xxx_gpio_dir_in()
159 raw_spin_lock_irqsave(&mpc8xxx_gc->lock, flags); in mpc8xxx_gpio_dir_out()
163 raw_spin_unlock_irqrestore(&mpc8xxx_gc->lock, flags); in mpc8xxx_gpio_dir_out()
218 raw_spin_lock_irqsave(&mpc8xxx_gc->lock, flags); in mpc8xxx_irq_unmask()
[all …]
Dgpio-amdpt.c31 spinlock_t lock; member
44 spin_lock_irqsave(&pt_gpio->lock, flags); in pt_gpio_request()
50 spin_unlock_irqrestore(&pt_gpio->lock, flags); in pt_gpio_request()
56 spin_unlock_irqrestore(&pt_gpio->lock, flags); in pt_gpio_request()
67 spin_lock_irqsave(&pt_gpio->lock, flags); in pt_gpio_free()
73 spin_unlock_irqrestore(&pt_gpio->lock, flags); in pt_gpio_free()
87 spin_lock_irqsave(&pt_gpio->lock, flags); in pt_gpio_set_value()
95 spin_unlock_irqrestore(&pt_gpio->lock, flags); in pt_gpio_set_value()
104 spin_lock_irqsave(&pt_gpio->lock, flags); in pt_gpio_get_value()
114 spin_unlock_irqrestore(&pt_gpio->lock, flags); in pt_gpio_get_value()
[all …]
/linux-4.4.14/drivers/net/ethernet/ti/
Ddavinci_cpdma.c92 spinlock_t lock; member
108 spinlock_t lock; member
118 spinlock_t lock; member
165 spin_lock_init(&pool->lock); in cpdma_desc_pool_create()
231 spin_lock_irqsave(&pool->lock, flags); in cpdma_desc_alloc()
249 spin_unlock_irqrestore(&pool->lock, flags); in cpdma_desc_alloc()
260 spin_lock_irqsave(&pool->lock, flags); in cpdma_desc_free()
263 spin_unlock_irqrestore(&pool->lock, flags); in cpdma_desc_free()
277 spin_lock_init(&ctlr->lock); in cpdma_ctlr_create()
298 spin_lock_irqsave(&ctlr->lock, flags); in cpdma_ctlr_start()
[all …]
/linux-4.4.14/drivers/gpu/drm/radeon/
Dradeon_irq_kms.c125 spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_driver_irq_preinstall_kms()
138 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); in radeon_driver_irq_preinstall_kms()
179 spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_driver_irq_uninstall_kms()
192 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); in radeon_driver_irq_uninstall_kms()
289 spin_lock_init(&rdev->irq.lock); in radeon_irq_kms_init()
358 spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_irq_kms_sw_irq_get()
360 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); in radeon_irq_kms_sw_irq_get()
397 spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_irq_kms_sw_irq_put()
399 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); in radeon_irq_kms_sw_irq_put()
423 spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_irq_kms_pflip_irq_get()
[all …]
/linux-4.4.14/include/asm-generic/bitops/
Dext2-atomic.h8 #define ext2_set_bit_atomic(lock, nr, addr) \ argument
11 spin_lock(lock); \
13 spin_unlock(lock); \
17 #define ext2_clear_bit_atomic(lock, nr, addr) \ argument
20 spin_lock(lock); \
22 spin_unlock(lock); \
/linux-4.4.14/drivers/firmware/efi/
Dvars.c460 spin_lock_irq(&__efivars->lock); in efivar_init()
476 spin_unlock_irq(&__efivars->lock); in efivar_init()
494 spin_lock_irq(&__efivars->lock); in efivar_init()
505 spin_lock_irq(&__efivars->lock); in efivar_init()
519 spin_unlock_irq(&__efivars->lock); in efivar_init()
534 spin_lock_irq(&__efivars->lock); in efivar_entry_add()
536 spin_unlock_irq(&__efivars->lock); in efivar_entry_add()
546 spin_lock_irq(&__efivars->lock); in efivar_entry_remove()
548 spin_unlock_irq(&__efivars->lock); in efivar_entry_remove()
565 lockdep_assert_held(&__efivars->lock); in efivar_entry_list_del_unlock()
[all …]
/linux-4.4.14/drivers/staging/unisys/visorbus/
Dperiodic_work.c27 rwlock_t lock; member
58 rwlock_init(&pw->lock); in visor_periodic_work_create()
83 write_lock(&pw->lock); in visor_periodic_work_nextperiod()
97 write_unlock(&pw->lock); in visor_periodic_work_nextperiod()
110 write_lock(&pw->lock); in visor_periodic_work_start()
128 write_unlock(&pw->lock); in visor_periodic_work_start()
171 write_lock(&pw->lock); in visor_periodic_work_stop()
193 write_unlock(&pw->lock); in visor_periodic_work_stop()
195 write_lock(&pw->lock); in visor_periodic_work_stop()
200 write_unlock(&pw->lock); in visor_periodic_work_stop()
/linux-4.4.14/drivers/clk/hisilicon/
Dclkgate-separated.c43 spinlock_t *lock; member
53 if (sclk->lock) in clkgate_separated_enable()
54 spin_lock_irqsave(sclk->lock, flags); in clkgate_separated_enable()
58 if (sclk->lock) in clkgate_separated_enable()
59 spin_unlock_irqrestore(sclk->lock, flags); in clkgate_separated_enable()
70 if (sclk->lock) in clkgate_separated_disable()
71 spin_lock_irqsave(sclk->lock, flags); in clkgate_separated_disable()
75 if (sclk->lock) in clkgate_separated_disable()
76 spin_unlock_irqrestore(sclk->lock, flags); in clkgate_separated_disable()
101 u8 clk_gate_flags, spinlock_t *lock) in hisi_register_clkgate_sep() argument
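
The hisilicon gate above (and the keystone gate entries further down) illustrate a common clock-driver idiom: the gate is registered with an optional spinlock pointer, and its register access is wrapped in spin_lock_irqsave()/spin_unlock_irqrestore() only when that pointer is non-NULL. A hedged sketch of the enable path; the structure and the read-modify-write register layout are illustrative, not the driver's actual registers:

#include <linux/bitops.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct sketch_gate {
	void __iomem	*reg;		/* enable register (illustrative) */
	u8		bit_idx;	/* gate bit inside that register */
	spinlock_t	*lock;		/* optional; may be NULL */
};

static void sketch_gate_enable(struct sketch_gate *gate)
{
	unsigned long flags = 0;
	u32 val;

	if (gate->lock)
		spin_lock_irqsave(gate->lock, flags);

	val = readl(gate->reg);
	val |= BIT(gate->bit_idx);
	writel(val, gate->reg);

	if (gate->lock)
		spin_unlock_irqrestore(gate->lock, flags);
}
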
/linux-4.4.14/drivers/usb/misc/
Drio500.c71 struct mutex lock; /* general race avoidance */ member
83 mutex_lock(&(rio->lock)); in open_rio()
86 mutex_unlock(&(rio->lock)); in open_rio()
94 mutex_unlock(&(rio->lock)); in open_rio()
122 mutex_lock(&(rio->lock)); in ioctl_rio()
266 mutex_unlock(&(rio->lock)); in ioctl_rio()
286 intr = mutex_lock_interruptible(&(rio->lock)); in write_rio()
291 mutex_unlock(&(rio->lock)); in write_rio()
314 mutex_unlock(&(rio->lock)); in write_rio()
352 mutex_unlock(&(rio->lock)); in write_rio()
[all …]
/linux-4.4.14/drivers/staging/android/ion/
Dion.c57 struct rw_semaphore lock; member
88 struct mutex lock; member
243 mutex_init(&buffer->lock); in ion_buffer_create()
310 mutex_lock(&buffer->lock); in ion_buffer_add_to_handle()
312 mutex_unlock(&buffer->lock); in ion_buffer_add_to_handle()
326 mutex_lock(&buffer->lock); in ion_buffer_remove_from_handle()
336 mutex_unlock(&buffer->lock); in ion_buffer_remove_from_handle()
365 mutex_lock(&buffer->lock); in ion_handle_destroy()
368 mutex_unlock(&buffer->lock); in ion_handle_destroy()
395 mutex_lock(&client->lock); in ion_handle_put()
[all …]
/linux-4.4.14/drivers/clk/keystone/
Dgate.c68 spinlock_t *lock; member
123 if (psc->lock) in keystone_clk_enable()
124 spin_lock_irqsave(psc->lock, flags); in keystone_clk_enable()
129 if (psc->lock) in keystone_clk_enable()
130 spin_unlock_irqrestore(psc->lock, flags); in keystone_clk_enable()
141 if (psc->lock) in keystone_clk_disable()
142 spin_lock_irqsave(psc->lock, flags); in keystone_clk_disable()
147 if (psc->lock) in keystone_clk_disable()
148 spin_unlock_irqrestore(psc->lock, flags); in keystone_clk_disable()
169 spinlock_t *lock) in clk_register_psc() argument
[all …]
/linux-4.4.14/drivers/xen/xen-pciback/
Dpassthrough.c16 struct mutex lock; member
28 mutex_lock(&dev_data->lock); in __xen_pcibk_get_pci_dev()
39 mutex_unlock(&dev_data->lock); in __xen_pcibk_get_pci_dev()
58 mutex_lock(&dev_data->lock); in __xen_pcibk_add_pci_dev()
60 mutex_unlock(&dev_data->lock); in __xen_pcibk_add_pci_dev()
72 struct pci_dev *dev, bool lock) in __xen_pcibk_release_pci_dev() argument
78 mutex_lock(&dev_data->lock); in __xen_pcibk_release_pci_dev()
88 mutex_unlock(&dev_data->lock); in __xen_pcibk_release_pci_dev()
91 if (lock) in __xen_pcibk_release_pci_dev()
94 if (lock) in __xen_pcibk_release_pci_dev()
[all …]
/linux-4.4.14/drivers/staging/lustre/lustre/lclient/
Dglimpse.c93 struct cl_lock *lock; in cl_glimpse_lock() local
128 lock = cl_lock_request(env, io, descr, "glimpse", in cl_glimpse_lock()
132 if (!lock) in cl_glimpse_lock()
135 if (IS_ERR(lock)) in cl_glimpse_lock()
136 return PTR_ERR(lock); in cl_glimpse_lock()
139 result = cl_wait(env, lock); in cl_glimpse_lock()
152 cl_unuse(env, lock); in cl_glimpse_lock()
154 cl_lock_release(env, lock, "glimpse", current); in cl_glimpse_lock()
236 struct cl_lock *lock; in cl_local_size() local
257 lock = cl_lock_peek(env, io, descr, "localsize", current); in cl_local_size()
[all …]
/linux-4.4.14/Documentation/filesystems/
Ddirectory-locking19 the parent and finds source and target. If target already exists, lock
20 it. If source is a non-directory, lock it. If that means we need to
21 lock both, lock them in inode pointer order.
24 * lock parent
26 * lock source
31 * lock the filesystem
32 * lock parents in "ancestors first" order.
38 * If target exists, lock it. If source is a non-directory, lock
39 it. In case that means we need to lock both source and target,
56 (1) if object removal or non-cross-directory rename holds lock on A and
[all …]
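
These fragments are from Documentation/filesystems/directory-locking and spell out the VFS deadlock-avoidance rules for rename: take parents before children ("ancestors first"), and when two non-directory victims must both be locked, take them in inode pointer order so that two concurrent renames cannot each hold one and wait for the other. The in-tree helper lock_two_nondirectories() follows the same rule; the sketch below shows just the pointer-ordering decision, using the i_mutex field as in 4.4 and leaving out the S_ISDIR checks and lockdep subclass details:

#include <linux/fs.h>
#include <linux/kernel.h>
#include <linux/lockdep.h>
#include <linux/mutex.h>

/* Sketch: lock two inodes without risking an ABBA deadlock. */
static void sketch_lock_two_inodes(struct inode *a, struct inode *b)
{
	if (!b || a == b) {
		mutex_lock(&a->i_mutex);
		return;
	}

	if (a > b)
		swap(a, b);	/* always take the lower address first */

	mutex_lock(&a->i_mutex);
	mutex_lock_nested(&b->i_mutex, SINGLE_DEPTH_NESTING);
}
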
/linux-4.4.14/drivers/usb/gadget/legacy/
Dinode.c117 spinlock_t lock; member
174 spin_lock_init (&dev->lock); in dev_new()
191 struct mutex lock; member
294 if (!mutex_trylock(&epdata->lock)) in get_ready_ep()
298 mutex_unlock(&epdata->lock); in get_ready_ep()
306 val = mutex_lock_interruptible(&epdata->lock); in get_ready_ep()
324 mutex_unlock(&epdata->lock); in get_ready_ep()
334 spin_lock_irq (&epdata->dev->lock); in ep_io()
345 spin_unlock_irq (&epdata->dev->lock); in ep_io()
350 spin_lock_irq (&epdata->dev->lock); in ep_io()
[all …]
/linux-4.4.14/drivers/hwspinlock/
Dhwspinlock_internal.h37 int (*trylock)(struct hwspinlock *lock);
38 void (*unlock)(struct hwspinlock *lock);
39 void (*relax)(struct hwspinlock *lock);
50 spinlock_t lock; member
67 struct hwspinlock lock[0]; member
72 int local_id = hwlock - &hwlock->bank->lock[0]; in hwlock_to_id()
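
The hwspinlock fragments show how the core finds a lock's identity without storing it: each bank embeds a flexible array of struct hwspinlock, so subtracting &bank->lock[0] from the lock's own address yields its index within the bank, which hwlock_to_id() then offsets by the bank's base id. A trimmed-down sketch of that computation with illustrative types:

/* Sketch types: only the fields the id computation needs. */
struct sketch_hwlock_bank;

struct sketch_hwlock {
	struct sketch_hwlock_bank *bank;	/* owning bank */
	/* ... driver-private lock state ... */
};

struct sketch_hwlock_bank {
	unsigned int base_id;			/* global id of lock[0] */
	struct sketch_hwlock lock[0];		/* one entry per hardware lock */
};

static int sketch_hwlock_to_id(struct sketch_hwlock *hwlock)
{
	int local_id = hwlock - &hwlock->bank->lock[0];	/* pointer arithmetic */

	return hwlock->bank->base_id + local_id;
}
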
/linux-4.4.14/sound/firewire/oxfw/
Doxfw-hwdep.c26 spin_lock_irq(&oxfw->lock); in hwdep_read()
30 spin_unlock_irq(&oxfw->lock); in hwdep_read()
35 spin_lock_irq(&oxfw->lock); in hwdep_read()
47 spin_unlock_irq(&oxfw->lock); in hwdep_read()
63 spin_lock_irq(&oxfw->lock); in hwdep_poll()
68 spin_unlock_irq(&oxfw->lock); in hwdep_poll()
96 spin_lock_irq(&oxfw->lock); in hwdep_lock()
105 spin_unlock_irq(&oxfw->lock); in hwdep_lock()
114 spin_lock_irq(&oxfw->lock); in hwdep_unlock()
123 spin_unlock_irq(&oxfw->lock); in hwdep_unlock()
[all …]
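
This hwdep block, like the digi00x, dice, bebob and tascam blocks that follow, repeats one small pattern: a per-card spinlock taken with spin_lock_irq() from process context to guard a little piece of ownership state (whether a userspace client has locked the device away from the PCM code). A simplified sketch of the lock/unlock pair; the sketch_card type and the plain boolean are illustrative, the in-tree drivers keep a small counter rather than a flag:

#include <linux/errno.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct sketch_card {
	spinlock_t lock;
	bool locked;		/* has a userspace client locked the device? */
};

static int sketch_hwdep_lock(struct sketch_card *card)
{
	int err = 0;

	spin_lock_irq(&card->lock);	/* process context; IRQ paths take the same lock */
	if (card->locked)
		err = -EBUSY;
	else
		card->locked = true;
	spin_unlock_irq(&card->lock);

	return err;
}

static int sketch_hwdep_unlock(struct sketch_card *card)
{
	int err = 0;

	spin_lock_irq(&card->lock);
	if (card->locked)
		card->locked = false;
	else
		err = -EBADFD;
	spin_unlock_irq(&card->lock);

	return err;
}
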
/linux-4.4.14/sound/firewire/digi00x/
Ddigi00x-hwdep.c27 spin_lock_irq(&dg00x->lock); in hwdep_read()
31 spin_unlock_irq(&dg00x->lock); in hwdep_read()
36 spin_lock_irq(&dg00x->lock); in hwdep_read()
55 spin_unlock_irq(&dg00x->lock); in hwdep_read()
71 spin_lock_irq(&dg00x->lock); in hwdep_poll()
76 spin_unlock_irq(&dg00x->lock); in hwdep_poll()
104 spin_lock_irq(&dg00x->lock); in hwdep_lock()
113 spin_unlock_irq(&dg00x->lock); in hwdep_lock()
122 spin_lock_irq(&dg00x->lock); in hwdep_unlock()
131 spin_unlock_irq(&dg00x->lock); in hwdep_unlock()
[all …]
/linux-4.4.14/net/bridge/
Dbr_stp_timer.c39 spin_lock(&br->lock); in br_hello_timer_expired()
47 spin_unlock(&br->lock); in br_hello_timer_expired()
69 spin_lock(&br->lock); in br_message_age_timer_expired()
80 spin_unlock(&br->lock); in br_message_age_timer_expired()
90 spin_lock(&br->lock); in br_forward_delay_timer_expired()
105 spin_unlock(&br->lock); in br_forward_delay_timer_expired()
113 spin_lock(&br->lock); in br_tcn_timer_expired()
119 spin_unlock(&br->lock); in br_tcn_timer_expired()
127 spin_lock(&br->lock); in br_topology_change_timer_expired()
130 spin_unlock(&br->lock); in br_topology_change_timer_expired()
[all …]
/linux-4.4.14/sound/firewire/dice/
Ddice-hwdep.c19 spin_lock_irq(&dice->lock); in hwdep_read()
23 spin_unlock_irq(&dice->lock); in hwdep_read()
28 spin_lock_irq(&dice->lock); in hwdep_read()
47 spin_unlock_irq(&dice->lock); in hwdep_read()
63 spin_lock_irq(&dice->lock); in hwdep_poll()
68 spin_unlock_irq(&dice->lock); in hwdep_poll()
96 spin_lock_irq(&dice->lock); in hwdep_lock()
105 spin_unlock_irq(&dice->lock); in hwdep_lock()
114 spin_lock_irq(&dice->lock); in hwdep_unlock()
123 spin_unlock_irq(&dice->lock); in hwdep_unlock()
[all …]
/linux-4.4.14/sound/firewire/bebob/
Dbebob_hwdep.c27 spin_lock_irq(&bebob->lock); in hwdep_read()
31 spin_unlock_irq(&bebob->lock); in hwdep_read()
36 spin_lock_irq(&bebob->lock); in hwdep_read()
48 spin_unlock_irq(&bebob->lock); in hwdep_read()
64 spin_lock_irq(&bebob->lock); in hwdep_poll()
69 spin_unlock_irq(&bebob->lock); in hwdep_poll()
99 spin_lock_irq(&bebob->lock); in hwdep_lock()
108 spin_unlock_irq(&bebob->lock); in hwdep_lock()
118 spin_lock_irq(&bebob->lock); in hwdep_unlock()
127 spin_unlock_irq(&bebob->lock); in hwdep_unlock()
[all …]
/linux-4.4.14/sound/firewire/tascam/
Dtascam-hwdep.c45 spin_lock_irq(&tscm->lock); in hwdep_read()
49 spin_unlock_irq(&tscm->lock); in hwdep_read()
54 spin_lock_irq(&tscm->lock); in hwdep_read()
59 spin_unlock_irq(&tscm->lock); in hwdep_read()
72 spin_lock_irq(&tscm->lock); in hwdep_poll()
77 spin_unlock_irq(&tscm->lock); in hwdep_poll()
105 spin_lock_irq(&tscm->lock); in hwdep_lock()
114 spin_unlock_irq(&tscm->lock); in hwdep_lock()
123 spin_lock_irq(&tscm->lock); in hwdep_unlock()
132 spin_unlock_irq(&tscm->lock); in hwdep_unlock()
[all …]
/linux-4.4.14/drivers/md/
Ddm-bio-prison.c20 spinlock_t lock; member
40 spin_lock_init(&prison->lock); in dm_bio_prison_create()
153 spin_lock_irqsave(&prison->lock, flags); in bio_detain()
155 spin_unlock_irqrestore(&prison->lock, flags); in bio_detain()
201 spin_lock_irqsave(&prison->lock, flags); in dm_cell_release()
203 spin_unlock_irqrestore(&prison->lock, flags); in dm_cell_release()
224 spin_lock_irqsave(&prison->lock, flags); in dm_cell_release_no_holder()
226 spin_unlock_irqrestore(&prison->lock, flags); in dm_cell_release_no_holder()
253 spin_lock_irqsave(&prison->lock, flags); in dm_cell_visit_release()
256 spin_unlock_irqrestore(&prison->lock, flags); in dm_cell_visit_release()
[all …]
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb4/
Dclip_tbl.c88 read_lock_bh(&ctbl->lock); in cxgb4_clip_get()
98 read_unlock_bh(&ctbl->lock); in cxgb4_clip_get()
102 read_unlock_bh(&ctbl->lock); in cxgb4_clip_get()
104 write_lock_bh(&ctbl->lock); in cxgb4_clip_get()
110 spin_lock_init(&ce->lock); in cxgb4_clip_get()
120 write_unlock_bh(&ctbl->lock); in cxgb4_clip_get()
129 write_unlock_bh(&ctbl->lock); in cxgb4_clip_get()
132 write_unlock_bh(&ctbl->lock); in cxgb4_clip_get()
151 read_lock_bh(&ctbl->lock); in cxgb4_clip_release()
161 read_unlock_bh(&ctbl->lock); in cxgb4_clip_release()
[all …]
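
The clip_tbl fragments show the reader/writer-lock pattern with bottom halves disabled: lookups take read_lock_bh() so several can proceed in parallel, and only insertion or removal upgrades to write_lock_bh(). A minimal sketch of that split; the table and entry types are illustrative:

#include <linux/list.h>
#include <linux/spinlock.h>	/* rwlock_t, read_lock_bh(), write_lock_bh() */
#include <linux/types.h>

struct sketch_entry {
	struct list_head list;
	u32 addr;
};

struct sketch_tbl {
	rwlock_t lock;
	struct list_head entries;
};

static struct sketch_entry *sketch_lookup(struct sketch_tbl *t, u32 addr)
{
	struct sketch_entry *e, *found = NULL;

	read_lock_bh(&t->lock);		/* many readers may search at once */
	list_for_each_entry(e, &t->entries, list) {
		if (e->addr == addr) {
			found = e;
			break;
		}
	}
	read_unlock_bh(&t->lock);

	return found;
}

static void sketch_insert(struct sketch_tbl *t, struct sketch_entry *e)
{
	write_lock_bh(&t->lock);	/* exclusive access for modification */
	list_add_tail(&e->list, &t->entries);
	write_unlock_bh(&t->lock);
}
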
/linux-4.4.14/arch/sparc/kernel/
Debus.c75 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq()
78 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_irq()
134 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq_enable()
138 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_irq_enable()
140 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq_enable()
144 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_irq_enable()
161 spin_lock_irqsave(&p->lock, flags); in ebus_dma_unregister()
168 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_unregister()
184 spin_lock_irqsave(&p->lock, flags); in ebus_dma_request()
198 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_request()
[all …]
/linux-4.4.14/drivers/s390/block/
Ddasd_alias.c49 .lock = __SPIN_LOCK_UNLOCKED(aliastree.lock),
155 spin_lock_init(&lcu->lock); in _allocate_lcu()
197 spin_lock_irqsave(&aliastree.lock, flags); in dasd_alias_make_device_known_to_lcu()
200 spin_unlock_irqrestore(&aliastree.lock, flags); in dasd_alias_make_device_known_to_lcu()
204 spin_lock_irqsave(&aliastree.lock, flags); in dasd_alias_make_device_known_to_lcu()
217 spin_unlock_irqrestore(&aliastree.lock, flags); in dasd_alias_make_device_known_to_lcu()
221 spin_lock_irqsave(&aliastree.lock, flags); in dasd_alias_make_device_known_to_lcu()
231 spin_lock(&lcu->lock); in dasd_alias_make_device_known_to_lcu()
234 spin_unlock(&lcu->lock); in dasd_alias_make_device_known_to_lcu()
235 spin_unlock_irqrestore(&aliastree.lock, flags); in dasd_alias_make_device_known_to_lcu()
[all …]
/linux-4.4.14/drivers/usb/chipidea/
Dudc.c621 __releases(hwep->lock) in _ep_nuke()
622 __acquires(hwep->lock) in _ep_nuke()
647 spin_unlock(hwep->lock); in _ep_nuke()
649 spin_lock(hwep->lock); in _ep_nuke()
671 spin_lock_irqsave(hwep->lock, flags); in _ep_set_halt()
676 spin_unlock_irqrestore(hwep->lock, flags); in _ep_set_halt()
692 spin_unlock_irqrestore(hwep->lock, flags); in _ep_set_halt()
709 spin_lock_irqsave(&ci->lock, flags); in _gadget_stop_activity()
713 spin_unlock_irqrestore(&ci->lock, flags); in _gadget_stop_activity()
745 __releases(ci->lock) in isr_reset_handler()
[all …]
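
The chipidea fragments show the sparse lock annotations in action: _ep_nuke() is marked __releases(hwep->lock)/__acquires(hwep->lock) because it is entered with the endpoint lock held, drops it in the middle (the spin_unlock/spin_lock pair visible above, in this driver around completing queued requests), and returns with it held again. A hedged sketch of that shape; the request and endpoint types and the completion callback are illustrative:

#include <linux/compiler.h>
#include <linux/list.h>
#include <linux/spinlock.h>

struct sketch_req {
	struct list_head queue;
	void (*complete)(struct sketch_req *req);	/* illustrative callback */
};

struct sketch_ep {
	spinlock_t *lock;		/* points at the controller-wide lock */
	struct list_head queue;		/* pending requests, protected by *lock */
};

/* Called with ep->lock held; temporarily releases it around callbacks. */
static void sketch_ep_nuke(struct sketch_ep *ep)
__releases(ep->lock)
__acquires(ep->lock)
{
	while (!list_empty(&ep->queue)) {
		struct sketch_req *req;

		req = list_first_entry(&ep->queue, struct sketch_req, queue);
		list_del_init(&req->queue);

		/* the callback may re-enter the driver, so drop the lock */
		spin_unlock(ep->lock);
		req->complete(req);
		spin_lock(ep->lock);
	}
}
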
/linux-4.4.14/drivers/usb/host/
Dehci-hub.c80 spin_lock_irq(&ehci->lock); in ehci_handover_companion_ports()
93 spin_unlock_irq(&ehci->lock); in ehci_handover_companion_ports()
97 spin_lock_irq(&ehci->lock); in ehci_handover_companion_ports()
101 spin_unlock_irq(&ehci->lock); in ehci_handover_companion_ports()
107 spin_lock_irq(&ehci->lock); in ehci_handover_companion_ports()
111 spin_unlock_irq(&ehci->lock); in ehci_handover_companion_ports()
115 spin_lock_irq(&ehci->lock); in ehci_handover_companion_ports()
134 spin_unlock_irq(&ehci->lock); in ehci_handover_companion_ports()
172 spin_lock_irq(&ehci->lock); in ehci_adjust_port_wakeup_flags()
183 spin_unlock_irq(&ehci->lock); in ehci_adjust_port_wakeup_flags()
[all …]
/linux-4.4.14/drivers/net/wireless/brcm80211/brcmsmac/
Dmac80211_if.c410 spin_lock_bh(&wl->lock); in brcms_ops_tx()
419 spin_unlock_bh(&wl->lock); in brcms_ops_tx()
435 spin_lock_bh(&wl->lock); in brcms_ops_start()
437 spin_unlock_bh(&wl->lock); in brcms_ops_start()
441 spin_lock_bh(&wl->lock); in brcms_ops_start()
452 spin_unlock_bh(&wl->lock); in brcms_ops_start()
472 spin_lock_bh(&wl->lock); in brcms_ops_stop()
474 spin_unlock_bh(&wl->lock); in brcms_ops_stop()
484 spin_lock_bh(&wl->lock); in brcms_ops_stop()
486 spin_unlock_bh(&wl->lock); in brcms_ops_stop()
[all …]
/linux-4.4.14/drivers/misc/ti-st/
Dst_core.c214 spin_lock_irqsave(&st_gdata->lock, flags); in st_wakeup_ack()
223 spin_unlock_irqrestore(&st_gdata->lock, flags); in st_wakeup_ack()
260 spin_lock_irqsave(&st_gdata->lock, flags); in st_int_recv()
324 spin_unlock_irqrestore(&st_gdata->lock, flags); in st_int_recv()
327 spin_lock_irqsave(&st_gdata->lock, flags); in st_int_recv()
335 spin_unlock_irqrestore(&st_gdata->lock, flags); in st_int_recv()
338 spin_lock_irqsave(&st_gdata->lock, flags); in st_int_recv()
388 spin_unlock_irqrestore(&st_gdata->lock, flags); in st_int_recv()
426 spin_lock_irqsave(&st_gdata->lock, flags); in st_int_enqueue()
452 spin_unlock_irqrestore(&st_gdata->lock, flags); in st_int_enqueue()
[all …]
/linux-4.4.14/net/ipv4/
Dinet_hashtables.c111 spin_lock(&head->lock); in __inet_put_port()
118 spin_unlock(&head->lock); in __inet_put_port()
138 spin_lock(&head->lock); in __inet_inherit_port()
141 spin_unlock(&head->lock); in __inet_inherit_port()
159 spin_unlock(&head->lock); in __inet_inherit_port()
165 spin_unlock(&head->lock); in __inet_inherit_port()
348 spinlock_t *lock = inet_ehash_lockp(hinfo, hash); in __inet_check_established() local
353 spin_lock(lock); in __inet_check_established()
382 spin_unlock(lock); in __inet_check_established()
394 spin_unlock(lock); in __inet_check_established()
[all …]
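
The inet_hashtables fragments show per-bucket locking rather than one table-wide lock: __inet_check_established() asks inet_ehash_lockp() to map the connection hash to one spinlock out of an array, so updates to unrelated buckets never contend. A minimal sketch of that hash-to-lock mapping; the table size and names are illustrative:

#include <linux/spinlock.h>

#define SKETCH_NR_LOCKS	256		/* must be a power of two */

struct sketch_hashtable {
	spinlock_t locks[SKETCH_NR_LOCKS];
	/* ... bucket heads ... */
};

static void sketch_hashtable_init(struct sketch_hashtable *ht)
{
	int i;

	for (i = 0; i < SKETCH_NR_LOCKS; i++)
		spin_lock_init(&ht->locks[i]);
}

static spinlock_t *sketch_ehash_lockp(struct sketch_hashtable *ht,
				      unsigned int hash)
{
	return &ht->locks[hash & (SKETCH_NR_LOCKS - 1)];
}

static void sketch_insert(struct sketch_hashtable *ht, unsigned int hash)
{
	spinlock_t *lock = sketch_ehash_lockp(ht, hash);

	spin_lock(lock);
	/* ... link the entry into bucket 'hash' ... */
	spin_unlock(lock);
}
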
/linux-4.4.14/drivers/hwmon/
Dadcxx.c54 struct mutex lock; member
71 if (mutex_lock_interruptible(&adc->lock)) in adcxx_read()
93 mutex_unlock(&adc->lock); in adcxx_read()
111 if (mutex_lock_interruptible(&adc->lock)) in adcxx_show_max()
116 mutex_unlock(&adc->lock); in adcxx_show_max()
131 if (mutex_lock_interruptible(&adc->lock)) in adcxx_set_max()
136 mutex_unlock(&adc->lock); in adcxx_set_max()
178 mutex_init(&adc->lock); in adcxx_probe()
180 mutex_lock(&adc->lock); in adcxx_probe()
199 mutex_unlock(&adc->lock); in adcxx_probe()
[all …]
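
The adcxx fragments show the interruptible flavour of mutex locking used in its sysfs handlers: mutex_lock_interruptible() lets a pending signal abort the wait instead of blocking the task uninterruptibly, and the handler then backs out with an error. A small sketch of that shape; the device structure, the cached field and the -ERESTARTSYS choice are illustrative:

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/mutex.h>
#include <linux/types.h>

struct sketch_adc {
	struct mutex lock;
	unsigned int cached_max;	/* protected by lock */
};

static ssize_t sketch_show_max(struct sketch_adc *adc, char *buf)
{
	unsigned int max;

	if (mutex_lock_interruptible(&adc->lock))
		return -ERESTARTSYS;	/* a signal interrupted the wait */

	max = adc->cached_max;
	mutex_unlock(&adc->lock);

	return sprintf(buf, "%u\n", max);
}
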
/linux-4.4.14/drivers/staging/gdm72xx/
Dgdm_usb.c168 spin_lock_irqsave(&tx->lock, flags); in release_usb()
185 spin_unlock_irqrestore(&tx->lock, flags); in release_usb()
187 spin_lock_irqsave(&rx->lock, flags); in release_usb()
199 spin_unlock_irqrestore(&rx->lock, flags); in release_usb()
221 spin_lock_init(&tx->lock); in init_usb()
222 spin_lock_init(&rx->lock); in init_usb()
224 spin_lock_irqsave(&tx->lock, flags); in init_usb()
228 spin_unlock_irqrestore(&tx->lock, flags); in init_usb()
234 spin_unlock_irqrestore(&tx->lock, flags); in init_usb()
242 spin_lock_irqsave(&rx->lock, flags); in init_usb()
[all …]
/linux-4.4.14/drivers/misc/vmw_vmci/
Dvmci_context.c39 spinlock_t lock; /* Spinlock for context list operations */ member
42 .lock = __SPIN_LOCK_UNLOCKED(ctx_list.lock),
75 spin_lock(&context->lock); in vmci_ctx_check_signal_notify()
78 spin_unlock(&context->lock); in vmci_ctx_check_signal_notify()
119 spin_lock_init(&context->lock); in vmci_ctx_create()
161 spin_lock(&ctx_list.lock); in vmci_ctx_create()
172 spin_unlock(&ctx_list.lock); in vmci_ctx_create()
191 spin_lock(&ctx_list.lock); in vmci_ctx_destroy()
193 spin_unlock(&ctx_list.lock); in vmci_ctx_destroy()
285 spin_lock(&context->lock); in vmci_ctx_pending_datagrams()
[all …]
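
The vmci fragments combine two initialization styles: the file-scope context list is protected by a spinlock initialized statically with __SPIN_LOCK_UNLOCKED(), so it is valid before any init code runs, while each context gets its own lock via spin_lock_init() at creation time. A sketch of both forms; the element type and names are illustrative:

#include <linux/list.h>
#include <linux/spinlock.h>

/* File-scope list usable before any init function runs. */
static struct {
	struct list_head head;
	spinlock_t lock;
} sketch_ctx_list = {
	.head = LIST_HEAD_INIT(sketch_ctx_list.head),
	.lock = __SPIN_LOCK_UNLOCKED(sketch_ctx_list.lock),
};

struct sketch_ctx {
	struct list_head node;
	spinlock_t lock;	/* per-context lock, runtime-initialized */
};

static void sketch_ctx_register(struct sketch_ctx *ctx)
{
	spin_lock_init(&ctx->lock);

	spin_lock(&sketch_ctx_list.lock);
	list_add_tail(&ctx->node, &sketch_ctx_list.head);
	spin_unlock(&sketch_ctx_list.lock);
}
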
/linux-4.4.14/arch/powerpc/sysdev/qe_lib/
Dgpio.c28 spinlock_t lock; member
77 spin_lock_irqsave(&qe_gc->lock, flags); in qe_gpio_set()
86 spin_unlock_irqrestore(&qe_gc->lock, flags); in qe_gpio_set()
95 spin_lock_irqsave(&qe_gc->lock, flags); in qe_gpio_dir_in()
99 spin_unlock_irqrestore(&qe_gc->lock, flags); in qe_gpio_dir_in()
112 spin_lock_irqsave(&qe_gc->lock, flags); in qe_gpio_dir_out()
116 spin_unlock_irqrestore(&qe_gc->lock, flags); in qe_gpio_dir_out()
170 spin_lock_irqsave(&qe_gc->lock, flags); in qe_pin_request()
181 spin_unlock_irqrestore(&qe_gc->lock, flags); in qe_pin_request()
206 spin_lock_irqsave(&qe_gc->lock, flags); in qe_pin_free()
[all …]
/linux-4.4.14/net/atm/
Daddr.c53 spin_lock_irqsave(&dev->lock, flags); in atm_reset_addr()
62 spin_unlock_irqrestore(&dev->lock, flags); in atm_reset_addr()
78 spin_lock_irqsave(&dev->lock, flags); in atm_add_addr()
85 spin_unlock_irqrestore(&dev->lock, flags); in atm_add_addr()
91 spin_unlock_irqrestore(&dev->lock, flags); in atm_add_addr()
96 spin_unlock_irqrestore(&dev->lock, flags); in atm_add_addr()
113 spin_lock_irqsave(&dev->lock, flags); in atm_del_addr()
121 spin_unlock_irqrestore(&dev->lock, flags); in atm_del_addr()
128 spin_unlock_irqrestore(&dev->lock, flags); in atm_del_addr()
141 spin_lock_irqsave(&dev->lock, flags); in atm_get_addr()
[all …]
/linux-4.4.14/drivers/net/wireless/iwlwifi/pcie/
Drx.c182 lockdep_assert_held(&rxq->lock); in iwl_pcie_rxq_inc_wr_ptr()
212 spin_lock(&rxq->lock); in iwl_pcie_rxq_check_wrptr()
221 spin_unlock(&rxq->lock); in iwl_pcie_rxq_check_wrptr()
252 spin_lock(&rxq->lock); in iwl_pcie_rxq_restock()
269 spin_unlock(&rxq->lock); in iwl_pcie_rxq_restock()
274 spin_lock(&rxq->lock); in iwl_pcie_rxq_restock()
276 spin_unlock(&rxq->lock); in iwl_pcie_rxq_restock()
336 spin_lock(&rxq->lock); in iwl_pcie_rxq_alloc_rbs()
338 spin_unlock(&rxq->lock); in iwl_pcie_rxq_alloc_rbs()
341 spin_unlock(&rxq->lock); in iwl_pcie_rxq_alloc_rbs()
[all …]
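
The iwlwifi rx fragments show lockdep_assert_held() being used to split responsibility: iwl_pcie_rxq_inc_wr_ptr() only asserts that rxq->lock is held, leaving the actual spin_lock()/spin_unlock() to its callers, so the helper stays cheap on release kernels while debug kernels catch any caller that forgets the lock. A small sketch of that split; the queue fields are illustrative:

#include <linux/lockdep.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct sketch_rxq {
	spinlock_t lock;
	unsigned int write;	/* write pointer, protected by lock */
	bool need_update;	/* protected by lock */
};

/* Low-level helper: requires rxq->lock, enforced on debug kernels. */
static void sketch_rxq_inc_wr_ptr(struct sketch_rxq *rxq)
{
	lockdep_assert_held(&rxq->lock);

	rxq->write = (rxq->write + 1) % 256;
	rxq->need_update = false;
}

static void sketch_rxq_restock(struct sketch_rxq *rxq)
{
	spin_lock(&rxq->lock);
	sketch_rxq_inc_wr_ptr(rxq);
	spin_unlock(&rxq->lock);
}
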
/linux-4.4.14/drivers/pinctrl/
Dpinctrl-adi2.c132 spinlock_t lock; member
186 spinlock_t lock; member
262 spin_lock_irqsave(&port->lock, flags); in adi_gpio_ack_irq()
263 spin_lock(&port->pint->lock); in adi_gpio_ack_irq()
274 spin_unlock(&port->pint->lock); in adi_gpio_ack_irq()
275 spin_unlock_irqrestore(&port->lock, flags); in adi_gpio_ack_irq()
285 spin_lock_irqsave(&port->lock, flags); in adi_gpio_mask_ack_irq()
286 spin_lock(&port->pint->lock); in adi_gpio_mask_ack_irq()
298 spin_unlock(&port->pint->lock); in adi_gpio_mask_ack_irq()
299 spin_unlock_irqrestore(&port->lock, flags); in adi_gpio_mask_ack_irq()
[all …]
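
The pinctrl-adi2 fragments show a fixed two-level lock order: the per-port lock is taken first with spin_lock_irqsave(), then the pint lock shared between ports is nested inside with plain spin_lock() (interrupts are already disabled by the outer acquisition), and both are dropped in reverse order. A sketch of that nesting with illustrative types:

#include <linux/spinlock.h>
#include <linux/types.h>

struct sketch_pint {
	spinlock_t lock;
	u32 mask;		/* shared mask cache, protected by lock */
};

struct sketch_port {
	spinlock_t lock;
	struct sketch_pint *pint;	/* shared between several ports */
	u32 pending;			/* per-port state, protected by lock */
};

static void sketch_port_mask_irq(struct sketch_port *port, u32 bit)
{
	unsigned long flags;

	spin_lock_irqsave(&port->lock, flags);	/* outer: per-port, IRQs off */
	spin_lock(&port->pint->lock);		/* inner: shared pint lock */

	port->pending &= ~bit;
	port->pint->mask &= ~bit;

	spin_unlock(&port->pint->lock);
	spin_unlock_irqrestore(&port->lock, flags);
}
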
