Searched refs:queue_lock (Results 1 - 90 of 90) sorted by relevance

/linux-4.4.14/drivers/connector/
cn_queue.c
85 spin_lock_bh(&dev->queue_lock); cn_queue_add_callback()
94 spin_unlock_bh(&dev->queue_lock); cn_queue_add_callback()
112 spin_lock_bh(&dev->queue_lock); cn_queue_del_callback()
120 spin_unlock_bh(&dev->queue_lock); cn_queue_del_callback()
137 spin_lock_init(&dev->queue_lock); cn_queue_alloc_dev()
148 spin_lock_bh(&dev->queue_lock); cn_queue_free_dev()
151 spin_unlock_bh(&dev->queue_lock); cn_queue_free_dev()
connector.c
88 spin_lock_bh(&dev->cbdev->queue_lock); cn_netlink_send_mult()
97 spin_unlock_bh(&dev->cbdev->queue_lock); cn_netlink_send_mult()
157 spin_lock_bh(&dev->cbdev->queue_lock); cn_call_callback()
165 spin_unlock_bh(&dev->cbdev->queue_lock); cn_call_callback()
249 spin_lock_bh(&dev->queue_lock); cn_proc_show()
258 spin_unlock_bh(&dev->queue_lock); cn_proc_show()
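
The connector hits above all take dev->queue_lock with the _bh variant, because the callback queue is walked from the netlink softirq path as well as from process context. Below is a minimal sketch of that pattern; the names (cb_entry, cb_queue_lock, cb_add) are hypothetical and this is not the connector code itself.

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct cb_entry {
	struct list_head node;
	void (*fn)(void *data);
};

static LIST_HEAD(cb_list);
static DEFINE_SPINLOCK(cb_queue_lock);	/* hypothetical stand-in for dev->queue_lock */

/* Registration runs in process context; the receive path runs in softirq
 * context, so the _bh lock variant is enough to exclude both. */
static int cb_add(void (*fn)(void *data))
{
	struct cb_entry *e = kzalloc(sizeof(*e), GFP_KERNEL);

	if (!e)
		return -ENOMEM;
	e->fn = fn;

	spin_lock_bh(&cb_queue_lock);
	list_add_tail(&e->node, &cb_list);
	spin_unlock_bh(&cb_queue_lock);
	return 0;
}
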
/linux-4.4.14/drivers/scsi/arm/
queue.c
63 spin_lock_init(&queue->queue_lock); queue_initialise()
113 spin_lock_irqsave(&queue->queue_lock, flags); __queue_add()
133 spin_unlock_irqrestore(&queue->queue_lock, flags); __queue_add()
167 spin_lock_irqsave(&queue->queue_lock, flags); queue_remove_exclude()
176 spin_unlock_irqrestore(&queue->queue_lock, flags); queue_remove_exclude()
192 spin_lock_irqsave(&queue->queue_lock, flags); queue_remove()
195 spin_unlock_irqrestore(&queue->queue_lock, flags); queue_remove()
216 spin_lock_irqsave(&queue->queue_lock, flags); queue_remove_tgtluntag()
225 spin_unlock_irqrestore(&queue->queue_lock, flags); queue_remove_tgtluntag()
242 spin_lock_irqsave(&queue->queue_lock, flags); queue_remove_all_target()
248 spin_unlock_irqrestore(&queue->queue_lock, flags); queue_remove_all_target()
266 spin_lock_irqsave(&queue->queue_lock, flags); queue_probetgtlun()
274 spin_unlock_irqrestore(&queue->queue_lock, flags); queue_probetgtlun()
292 spin_lock_irqsave(&queue->queue_lock, flags); queue_remove_cmd()
301 spin_unlock_irqrestore(&queue->queue_lock, flags); queue_remove_cmd()
queue.h
16 spinlock_t queue_lock; member in struct:__anon9179
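
Here queue.c pairs the embedded spinlock_t queue_lock member declared in queue.h with spin_lock_irqsave() throughout, since the queue is manipulated from both interrupt handlers and process context. A hedged sketch of the same shape, with illustrative names rather than the ARM SCSI code:

#include <linux/list.h>
#include <linux/spinlock.h>

struct my_queue {
	struct list_head head;
	spinlock_t queue_lock;		/* protects head */
};

static void my_queue_init(struct my_queue *q)
{
	INIT_LIST_HEAD(&q->head);
	spin_lock_init(&q->queue_lock);
}

/* Usable from interrupt or process context; irqsave preserves the
 * caller's interrupt state. */
static void my_queue_add(struct my_queue *q, struct list_head *entry)
{
	unsigned long flags;

	spin_lock_irqsave(&q->queue_lock, flags);
	list_add_tail(entry, &q->head);
	spin_unlock_irqrestore(&q->queue_lock, flags);
}
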
/linux-4.4.14/drivers/isdn/i4l/
isdn_net.h
84 spin_lock_irqsave(&nd->queue_lock, flags); isdn_net_get_locked_lp()
95 spin_unlock_irqrestore(&nd->queue_lock, flags); isdn_net_get_locked_lp()
100 spin_unlock_irqrestore(&nd->queue_lock, flags); isdn_net_get_locked_lp()
112 spin_lock_irqsave(&nd->queue_lock, flags); isdn_net_add_to_bundle()
123 spin_unlock_irqrestore(&nd->queue_lock, flags); isdn_net_add_to_bundle()
138 spin_lock_irqsave(&master_lp->netdev->queue_lock, flags); isdn_net_rm_from_bundle()
150 spin_unlock_irqrestore(&master_lp->netdev->queue_lock, flags); isdn_net_rm_from_bundle()
isdn_net.c
128 spin_lock_irqsave(&nd->queue_lock, flags); isdn_net_device_busy()
132 spin_unlock_irqrestore(&nd->queue_lock, flags); isdn_net_device_busy()
137 spin_unlock_irqrestore(&nd->queue_lock, flags); isdn_net_device_busy()
2593 spin_lock_init(&netdev->queue_lock); isdn_net_new()
/linux-4.4.14/block/
blk-ioc.c
60 lockdep_assert_held(q->queue_lock); ioc_destroy_icq()
68 * under queue_lock. If it's not pointing to @icq now, it never ioc_destroy_icq()
107 if (spin_trylock(q->queue_lock)) { ioc_release_fn()
109 spin_unlock(q->queue_lock); ioc_release_fn()
141 * already be holding a queue_lock. Do it asynchronously from wq. put_io_context()
185 if (spin_trylock(icq->q->queue_lock)) { put_io_context_active()
187 spin_unlock(icq->q->queue_lock); put_io_context_active()
221 lockdep_assert_held(q->queue_lock); ioc_clear_queue()
315 * with @q->queue_lock held.
321 lockdep_assert_held(q->queue_lock); ioc_lookup_icq()
380 spin_lock_irq(q->queue_lock); ioc_create_icq()
396 spin_unlock_irq(q->queue_lock); ioc_create_icq()
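
Several of the blk-ioc.c hits are lockdep_assert_held(q->queue_lock): functions that never take the lock themselves but require their caller to hold it. A small sketch of that convention (destroy_entry_locked is a hypothetical helper, not block-layer code):

#include <linux/list.h>
#include <linux/lockdep.h>
#include <linux/spinlock.h>

/*
 * The caller is required to hold *lock; with lockdep enabled a violation
 * becomes a loud warning rather than a silent race.
 */
static void destroy_entry_locked(spinlock_t *lock, struct list_head *entry)
{
	lockdep_assert_held(lock);
	list_del_init(entry);
}
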
blk-cgroup.c
149 * hint can only be updated under queue_lock as otherwise @blkg blkg_lookup_slowpath()
151 * responsible for grabbing queue_lock if @update_hint. blkg_lookup_slowpath()
156 lockdep_assert_held(q->queue_lock); blkg_lookup_slowpath()
179 lockdep_assert_held(q->queue_lock); blkg_create()
264 * should be called under RCU read lock and @q->queue_lock.
276 lockdep_assert_held(q->queue_lock); blkg_lookup_create()
314 lockdep_assert_held(blkg->q->queue_lock); blkg_destroy()
341 * under queue_lock. If it's not pointing to @blkg now, it never blkg_destroy()
364 lockdep_assert_held(q->queue_lock); blkg_destroy_all()
506 spin_lock_irq(blkg->q->queue_lock); blkcg_print_blkgs()
509 spin_unlock_irq(blkg->q->queue_lock); blkcg_print_blkgs()
705 lockdep_assert_held(blkg->q->queue_lock); blkg_stat_recursive_sum()
748 lockdep_assert_held(blkg->q->queue_lock); blkg_rwstat_recursive_sum()
787 __acquires(rcu) __acquires(disk->queue->queue_lock) __acquires()
812 spin_lock_irq(disk->queue->queue_lock); __acquires()
822 spin_unlock_irq(disk->queue->queue_lock); __acquires()
852 __releases(ctx->disk->queue->queue_lock) __releases(rcu) __releases()
854 spin_unlock_irq(ctx->disk->queue->queue_lock); __releases()
876 spin_lock_irq(blkg->q->queue_lock); blkcg_print_stat()
888 spin_unlock_irq(blkg->q->queue_lock); blkcg_print_stat()
938 if (spin_trylock(q->queue_lock)) { blkcg_css_offline()
940 spin_unlock(q->queue_lock); blkcg_css_offline()
1064 spin_lock_irq(q->queue_lock); blkcg_init_queue()
1066 spin_unlock_irq(q->queue_lock); blkcg_init_queue()
1082 spin_lock_irq(q->queue_lock); blkcg_init_queue()
1084 spin_unlock_irq(q->queue_lock); blkcg_init_queue()
1097 lockdep_assert_held(q->queue_lock); blkcg_drain_queue()
1117 spin_lock_irq(q->queue_lock); blkcg_exit_queue()
1119 spin_unlock_irq(q->queue_lock); blkcg_exit_queue()
1226 spin_lock_irq(q->queue_lock); blkcg_activate_policy()
1238 spin_unlock_irq(q->queue_lock); blkcg_activate_policy()
1252 spin_unlock_irq(q->queue_lock); blkcg_activate_policy()
1278 spin_lock_irq(q->queue_lock); blkcg_deactivate_policy()
1296 spin_unlock_irq(q->queue_lock); blkcg_deactivate_policy()
noop-iosched.c
80 spin_lock_irq(q->queue_lock); noop_init_queue()
82 spin_unlock_irq(q->queue_lock); noop_init_queue()
blk-exec.c
72 spin_lock_irq(q->queue_lock); blk_execute_rq_nowait()
78 spin_unlock_irq(q->queue_lock); blk_execute_rq_nowait()
84 spin_unlock_irq(q->queue_lock); blk_execute_rq_nowait()
blk-sysfs.c
214 spin_lock_irq(q->queue_lock); queue_max_sectors_store()
216 spin_unlock_irq(q->queue_lock); queue_max_sectors_store()
247 spin_lock_irq(q->queue_lock); \
252 spin_unlock_irq(q->queue_lock); \
276 spin_lock_irq(q->queue_lock); queue_nomerges_store()
283 spin_unlock_irq(q->queue_lock); queue_nomerges_store()
307 spin_lock_irq(q->queue_lock); queue_rq_affinity_store()
318 spin_unlock_irq(q->queue_lock); queue_rq_affinity_store()
341 spin_lock_irq(q->queue_lock); queue_poll_store()
346 spin_unlock_irq(q->queue_lock); queue_poll_store()
585 spin_lock_irq(q->queue_lock); blk_release_queue()
587 spin_unlock_irq(q->queue_lock); blk_release_queue()
blk-timeout.c
60 spin_lock_irq(q->queue_lock); part_timeout_store()
65 spin_unlock_irq(q->queue_lock); part_timeout_store()
137 spin_lock_irqsave(q->queue_lock, flags); blk_rq_timed_out_timer()
145 spin_unlock_irqrestore(q->queue_lock, flags); blk_rq_timed_out_timer()
bsg-lib.c
177 spin_unlock_irq(q->queue_lock); bsg_request_fn()
183 spin_lock_irq(q->queue_lock); bsg_request_fn()
189 spin_lock_irq(q->queue_lock); bsg_request_fn()
194 spin_unlock_irq(q->queue_lock); bsg_request_fn()
196 spin_lock_irq(q->queue_lock); bsg_request_fn()
blk-core.c
186 spin_lock_irq(q->queue_lock); blk_delay_work()
188 spin_unlock_irq(q->queue_lock); blk_delay_work()
373 spin_lock_irqsave(q->queue_lock, flags); blk_run_queue()
375 spin_unlock_irqrestore(q->queue_lock, flags); blk_run_queue()
395 __releases(q->queue_lock)
396 __acquires(q->queue_lock)
400 lockdep_assert_held(q->queue_lock);
446 spin_unlock_irq(q->queue_lock);
450 spin_lock_irq(q->queue_lock);
479 spin_lock_irq(q->queue_lock); blk_queue_bypass_start()
482 spin_unlock_irq(q->queue_lock); blk_queue_bypass_start()
490 spin_lock_irq(q->queue_lock); blk_queue_bypass_start()
492 spin_unlock_irq(q->queue_lock); blk_queue_bypass_start()
508 spin_lock_irq(q->queue_lock); blk_queue_bypass_end()
512 spin_unlock_irq(q->queue_lock); blk_queue_bypass_end()
544 spinlock_t *lock = q->queue_lock; blk_cleanup_queue()
592 if (q->queue_lock != &q->__queue_lock) blk_cleanup_queue()
593 q->queue_lock = &q->__queue_lock; blk_cleanup_queue()
728 * By default initialize queue_lock to internal lock and driver can blk_alloc_queue_node()
731 q->queue_lock = &q->__queue_lock; blk_alloc_queue_node()
851 q->queue_lock = lock; blk_init_allocated_queue()
952 * congestion status, wake up any waiters. Called under q->queue_lock.
975 spin_lock_irq(q->queue_lock); blk_update_nr_requests()
1007 spin_unlock_irq(q->queue_lock);
1056 * Must be called with @q->queue_lock held and,
1057 * Returns ERR_PTR on failure, with @q->queue_lock held.
1058 * Returns request pointer on success, with @q->queue_lock *not held*.
1123 * Also, lookup icq while holding queue_lock. If it doesn't exist, __get_request()
1124 * it will be created after releasing queue_lock. __get_request()
1135 spin_unlock_irq(q->queue_lock); __get_request()
1189 spin_lock_irq(q->queue_lock); __get_request()
1191 spin_unlock_irq(q->queue_lock); __get_request()
1202 spin_lock_irq(q->queue_lock); __get_request()
1228 * Must be called with @q->queue_lock held and,
1229 * Returns ERR_PTR on failure, with @q->queue_lock held.
1230 * Returns request pointer on success, with @q->queue_lock *not held*.
1257 spin_unlock_irq(q->queue_lock); get_request()
1267 spin_lock_irq(q->queue_lock); get_request()
1283 spin_lock_irq(q->queue_lock); blk_old_get_request()
1286 spin_unlock_irq(q->queue_lock); blk_old_get_request()
1287 /* q->queue_lock is unlocked at this point */ blk_old_get_request()
1505 spin_lock_irqsave(q->queue_lock, flags); blk_put_request()
1507 spin_unlock_irqrestore(q->queue_lock, flags); blk_put_request()
1603 * going through @q->queue_lock. As such it's more of an issuing mechanism
1724 spin_lock_irq(q->queue_lock); blk_queue_bio()
1739 spin_lock_irq(q->queue_lock); blk_queue_bio()
1807 spin_lock_irq(q->queue_lock); blk_queue_bio()
1811 spin_unlock_irq(q->queue_lock); blk_queue_bio()
2196 spin_lock_irqsave(q->queue_lock, flags); blk_insert_cloned_request()
2198 spin_unlock_irqrestore(q->queue_lock, flags); blk_insert_cloned_request()
2214 spin_unlock_irqrestore(q->queue_lock, flags); blk_insert_cloned_request()
2234 * queue_lock must be held.
2376 * queue_lock must be held.
2499 * queue_lock must be held.
2531 * queue_lock must be held.
2776 spin_lock_irqsave(q->queue_lock, flags); blk_end_bidi_request()
2778 spin_unlock_irqrestore(q->queue_lock, flags); blk_end_bidi_request()
3193 __releases(q->queue_lock)
3201 spin_unlock(q->queue_lock);
3286 spin_lock(q->queue_lock); blk_flush_plug_list()
3427 spin_lock_irq(q->queue_lock); blk_pre_runtime_suspend()
3434 spin_unlock_irq(q->queue_lock); blk_pre_runtime_suspend()
3457 spin_lock_irq(q->queue_lock); blk_post_runtime_suspend()
3464 spin_unlock_irq(q->queue_lock); blk_post_runtime_suspend()
3484 spin_lock_irq(q->queue_lock); blk_pre_runtime_resume()
3486 spin_unlock_irq(q->queue_lock); blk_pre_runtime_resume()
3509 spin_lock_irq(q->queue_lock); blk_post_runtime_resume()
3518 spin_unlock_irq(q->queue_lock); blk_post_runtime_resume()
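
The blk-core.c hits around __get_request()/get_request() document a recurring contract (see the quoted comments at source lines 1056-1058 and 1228-1230): enter with q->queue_lock held, drop it around work that may sleep, and retake it before returning. A simplified, purely illustrative version of that calling convention, not the real request allocator:

#include <linux/slab.h>
#include <linux/spinlock.h>

/*
 * Entered with *lock held. If the non-sleeping allocation fails, the lock
 * is dropped for a sleeping retry and retaken before returning.
 */
static void *alloc_retry_locked(spinlock_t *lock, struct kmem_cache *cache)
{
	void *obj = kmem_cache_alloc(cache, GFP_NOWAIT);

	if (obj)
		return obj;			/* fast path: lock never released */

	spin_unlock_irq(lock);
	obj = kmem_cache_alloc(cache, GFP_NOIO);	/* may sleep */
	spin_lock_irq(lock);
	return obj;
}
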
blk-throttle.c
1029 spin_lock_irq(q->queue_lock); throtl_pending_timer_fn()
1049 spin_unlock_irq(q->queue_lock); throtl_pending_timer_fn()
1051 spin_lock_irq(q->queue_lock); throtl_pending_timer_fn()
1073 spin_unlock_irq(q->queue_lock); throtl_pending_timer_fn()
1097 spin_lock_irq(q->queue_lock); blk_throtl_dispatch_work_fn()
1101 spin_unlock_irq(q->queue_lock); blk_throtl_dispatch_work_fn()
1169 * We're already holding queue_lock and know @tg is valid. Let's tg_conf_updated()
1411 spin_lock_irq(q->queue_lock); blk_throtl_bio()
1479 spin_unlock_irq(q->queue_lock); blk_throtl_bio()
1520 __releases(q->queue_lock) __acquires(q->queue_lock)
1544 spin_unlock_irq(q->queue_lock);
1552 spin_lock_irq(q->queue_lock);
elevator.c
582 lockdep_assert_held(q->queue_lock); elv_drain_elevator()
678 spin_lock_irqsave(q->queue_lock, flags); elv_add_request()
680 spin_unlock_irqrestore(q->queue_lock, flags); elv_add_request()
913 spin_lock_irq(q->queue_lock); elevator_switch()
915 spin_unlock_irq(q->queue_lock); elevator_switch()
deadline-iosched.c
366 spin_lock_irq(q->queue_lock); deadline_init_queue()
368 spin_unlock_irq(q->queue_lock); deadline_init_queue()
blk-flush.c
161 * spin_lock_irq(q->queue_lock or fq->mq_flush_lock)
285 * spin_lock_irq(q->queue_lock or fq->mq_flush_lock)
382 * spin_lock_irq(q->queue_lock) in !mq case
cfq-iosched.c
484 /* This should be called with the queue_lock held. */ cfqg_stats_update_group_wait_time()
499 /* This should be called with the queue_lock held. */ cfqg_stats_set_start_group_wait_time()
513 /* This should be called with the queue_lock held. */ cfqg_stats_end_empty_time()
749 lockdep_assert_held(cfqg_to_blkg(cfqg)->q->queue_lock); cfqg_stats_xfer_dead()
3021 * Must be called with the queue_lock held.
4359 spin_lock_irq(q->queue_lock); cfq_set_request()
4397 spin_unlock_irq(q->queue_lock); cfq_set_request()
4407 spin_lock_irq(q->queue_lock); cfq_kick_queue()
4409 spin_unlock_irq(q->queue_lock); cfq_kick_queue()
4424 spin_lock_irqsave(cfqd->queue->queue_lock, flags); cfq_idle_slice_timer()
4465 spin_unlock_irqrestore(cfqd->queue->queue_lock, flags); cfq_idle_slice_timer()
4481 spin_lock_irq(q->queue_lock); cfq_exit_queue()
4486 spin_unlock_irq(q->queue_lock); cfq_exit_queue()
4517 spin_lock_irq(q->queue_lock); cfq_init_queue()
4519 spin_unlock_irq(q->queue_lock); cfq_init_queue()
4561 spin_lock_irq(q->queue_lock); cfq_init_queue()
4564 spin_unlock_irq(q->queue_lock); cfq_init_queue()
/linux-4.4.14/drivers/ide/
ide-pm.c
54 spin_lock_irq(q->queue_lock); ide_pm_execute_rq()
59 spin_unlock_irq(q->queue_lock); ide_pm_execute_rq()
64 spin_unlock_irq(q->queue_lock); ide_pm_execute_rq()
226 spin_lock_irqsave(q->queue_lock, flags); ide_complete_pm_rq()
231 spin_unlock_irqrestore(q->queue_lock, flags); ide_complete_pm_rq()
274 spin_lock_irqsave(q->queue_lock, flags); ide_check_pm_state()
276 spin_unlock_irqrestore(q->queue_lock, flags); ide_check_pm_state()
ide-io.c
448 spin_lock_irqsave(q->queue_lock, flags); ide_requeue_and_plug()
450 spin_unlock_irqrestore(q->queue_lock, flags); ide_requeue_and_plug()
465 spin_unlock_irq(q->queue_lock); do_ide_request()
511 spin_lock_irq(q->queue_lock); do_ide_request()
519 spin_unlock_irq(q->queue_lock); do_ide_request()
565 spin_lock_irq(q->queue_lock); do_ide_request()
572 spin_lock_irq(q->queue_lock); do_ide_request()
/linux-4.4.14/drivers/mtd/
mtd_blkdevs.c
133 spin_lock_irq(rq->queue_lock); mtd_blktrans_work()
141 spin_unlock_irq(rq->queue_lock); mtd_blktrans_work()
145 spin_lock_irq(rq->queue_lock); mtd_blktrans_work()
156 spin_unlock_irq(rq->queue_lock); mtd_blktrans_work()
162 spin_lock_irq(rq->queue_lock); mtd_blktrans_work()
170 spin_unlock_irq(rq->queue_lock); mtd_blktrans_work()
405 spin_lock_init(&new->queue_lock); add_mtd_blktrans_dev()
406 new->rq = blk_init_queue(mtd_blktrans_request, &new->queue_lock); add_mtd_blktrans_dev()
477 spin_lock_irqsave(&old->queue_lock, flags); del_mtd_blktrans_dev()
480 spin_unlock_irqrestore(&old->queue_lock, flags); del_mtd_blktrans_dev()
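
mtd_blkdevs.c (source lines 405-406) and DAC960.c (line 2536, further down) show the classic pre-blk-mq setup: the driver embeds its own spinlock and hands it to blk_init_queue(), after which q->queue_lock points at the driver's lock instead of the internal __queue_lock. A sketch under that assumption, using the linux-4.4-era single-queue API and illustrative names:

#include <linux/blkdev.h>
#include <linux/errno.h>
#include <linux/spinlock.h>

struct my_blkdev {
	spinlock_t queue_lock;		/* becomes q->queue_lock below */
	struct request_queue *rq;
};

static void my_request_fn(struct request_queue *q)
{
	/* invoked by the block layer with q->queue_lock already held */
}

static int my_blkdev_setup(struct my_blkdev *dev)
{
	spin_lock_init(&dev->queue_lock);
	dev->rq = blk_init_queue(my_request_fn, &dev->queue_lock);
	if (!dev->rq)
		return -ENOMEM;
	dev->rq->queuedata = dev;
	return 0;
}
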
/linux-4.4.14/drivers/media/platform/omap3isp/
ispvideo.c
575 mutex_lock(&video->queue_lock); omap3isp_video_resume()
577 mutex_unlock(&video->queue_lock); omap3isp_video_resume()
830 mutex_lock(&video->queue_lock); isp_video_reqbufs()
832 mutex_unlock(&video->queue_lock); isp_video_reqbufs()
844 mutex_lock(&video->queue_lock); isp_video_querybuf()
846 mutex_unlock(&video->queue_lock); isp_video_querybuf()
858 mutex_lock(&video->queue_lock); isp_video_qbuf()
860 mutex_unlock(&video->queue_lock); isp_video_qbuf()
872 mutex_lock(&video->queue_lock); isp_video_dqbuf()
874 mutex_unlock(&video->queue_lock); isp_video_dqbuf()
1074 mutex_lock(&video->queue_lock); isp_video_streamon()
1076 mutex_unlock(&video->queue_lock); isp_video_streamon()
1099 mutex_lock(&video->queue_lock); isp_video_streamon()
1101 mutex_unlock(&video->queue_lock); isp_video_streamon()
1136 mutex_lock(&video->queue_lock); isp_video_streamoff()
1138 mutex_unlock(&video->queue_lock); isp_video_streamoff()
1159 mutex_lock(&video->queue_lock); isp_video_streamoff()
1161 mutex_unlock(&video->queue_lock); isp_video_streamoff()
1293 mutex_lock(&video->queue_lock); isp_video_release()
1295 mutex_unlock(&video->queue_lock); isp_video_release()
1315 mutex_lock(&video->queue_lock); isp_video_poll()
1317 mutex_unlock(&video->queue_lock); isp_video_poll()
1382 mutex_init(&video->queue_lock); omap3isp_video_init()
1406 mutex_destroy(&video->queue_lock); omap3isp_video_cleanup()
ispvideo.h
176 struct mutex queue_lock; /* protects the queue */ member in struct:isp_video
/linux-4.4.14/drivers/mmc/card/
queue.c
61 spin_lock_irq(q->queue_lock); mmc_queue_thread()
65 spin_unlock_irq(q->queue_lock); mmc_queue_thread()
335 spin_lock_irqsave(q->queue_lock, flags); mmc_cleanup_queue()
338 spin_unlock_irqrestore(q->queue_lock, flags); mmc_cleanup_queue()
421 spin_lock_irqsave(q->queue_lock, flags); mmc_queue_suspend()
423 spin_unlock_irqrestore(q->queue_lock, flags); mmc_queue_suspend()
443 spin_lock_irqsave(q->queue_lock, flags); mmc_queue_resume()
445 spin_unlock_irqrestore(q->queue_lock, flags); mmc_queue_resume()
block.c
1694 spin_lock_irq(q->queue_lock); mmc_blk_prep_packed_list()
1696 spin_unlock_irq(q->queue_lock); mmc_blk_prep_packed_list()
1731 spin_lock_irq(q->queue_lock); mmc_blk_prep_packed_list()
1733 spin_unlock_irq(q->queue_lock); mmc_blk_prep_packed_list()
1922 spin_lock_irq(q->queue_lock); mmc_blk_revert_packed_req()
1924 spin_unlock_irq(q->queue_lock); mmc_blk_revert_packed_req()
/linux-4.4.14/drivers/block/
nbd.c
48 spinlock_t queue_lock; member in struct:nbd_device
122 spin_lock_irqsave(q->queue_lock, flags); nbd_end_request()
124 spin_unlock_irqrestore(q->queue_lock, flags); nbd_end_request()
309 spin_lock(&nbd->queue_lock); nbd_find_request()
314 spin_unlock(&nbd->queue_lock); nbd_find_request()
317 spin_unlock(&nbd->queue_lock); nbd_find_request()
527 spin_lock(&nbd->queue_lock); nbd_handle_req()
529 spin_unlock(&nbd->queue_lock); nbd_handle_req()
575 spin_lock_irq(&nbd->queue_lock); nbd_thread_send()
579 spin_unlock_irq(&nbd->queue_lock); nbd_thread_send()
604 __releases(q->queue_lock) __acquires(q->queue_lock)
611 spin_unlock_irq(q->queue_lock);
625 spin_lock_irq(q->queue_lock);
629 spin_lock_irq(&nbd->queue_lock);
631 spin_unlock_irq(&nbd->queue_lock);
635 spin_lock_irq(q->queue_lock);
1070 spin_lock_init(&nbd_dev[i].queue_lock); nbd_init()
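
The nbd.c request function at source line 604 carries the sparse annotations __releases(q->queue_lock) __acquires(q->queue_lock): it is entered with the queue lock held, drops it for work that may sleep, and retakes it before returning (bsg_request_fn and do_ide_request above follow the same shape). A minimal hypothetical request_fn annotated the same way; this is a sketch of the convention, not the nbd driver:

#include <linux/blkdev.h>

static void my_request_fn(struct request_queue *q)
	__releases(q->queue_lock) __acquires(q->queue_lock)
{
	struct request *req;

	while ((req = blk_fetch_request(q)) != NULL) {
		spin_unlock_irq(q->queue_lock);	/* let submitters in while we work */

		/* ... hand the request off; this part may sleep ... */

		spin_lock_irq(q->queue_lock);	/* contract: return with the lock held */
		__blk_end_request_all(req, 0);
	}
}
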
DAC960.c
528 spin_unlock_irq(&Controller->queue_lock); DAC960_WaitForCommand()
530 spin_lock_irq(&Controller->queue_lock); DAC960_WaitForCommand()
790 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_ExecuteCommand()
792 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_ExecuteCommand()
2043 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_V1_ReadDeviceConfiguration()
2045 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_V1_ReadDeviceConfiguration()
2088 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_V1_ReadDeviceConfiguration()
2090 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_V1_ReadDeviceConfiguration()
2536 RequestQueue = blk_init_queue(DAC960_RequestFunction,&Controller->queue_lock); DAC960_RegisterBlockDevice()
2780 spin_lock_init(&Controller->queue_lock); DAC960_DetectController()
3119 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_FinalizeController()
3121 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_FinalizeController()
5271 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_GEM_InterruptHandler()
5295 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_GEM_InterruptHandler()
5312 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_BA_InterruptHandler()
5336 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_BA_InterruptHandler()
5354 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_LP_InterruptHandler()
5378 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_LP_InterruptHandler()
5396 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_LA_InterruptHandler()
5416 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_LA_InterruptHandler()
5434 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_PG_InterruptHandler()
5454 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_PG_InterruptHandler()
5471 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_PD_InterruptHandler()
5488 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_PD_InterruptHandler()
5509 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_P_InterruptHandler()
5561 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_P_InterruptHandler()
5628 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_MonitoringTimerFunction()
5636 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_MonitoringTimerFunction()
5686 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_MonitoringTimerFunction()
5694 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_MonitoringTimerFunction()
5971 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_V1_ExecuteUserCommand()
5974 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_V1_ExecuteUserCommand()
6165 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_V1_ExecuteUserCommand()
6167 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_V1_ExecuteUserCommand()
6234 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_V2_ExecuteUserCommand()
6237 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_V2_ExecuteUserCommand()
6428 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_V2_ExecuteUserCommand()
6430 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_V2_ExecuteUserCommand()
6765 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6771 spin_unlock_irq(&Controller->queue_lock); DAC960_gam_ioctl()
6775 spin_lock_irq(&Controller->queue_lock); DAC960_gam_ioctl()
6779 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6790 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6793 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6804 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6806 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6911 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6914 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6962 spin_lock_irqsave(&Controller->queue_lock, flags); DAC960_gam_ioctl()
6964 spin_unlock_irqrestore(&Controller->queue_lock, flags); DAC960_gam_ioctl()
hd.c
540 spin_lock_irq(hd_queue->queue_lock); hd_times_out()
551 spin_unlock_irq(hd_queue->queue_lock); hd_times_out()
680 spin_lock(hd_queue->queue_lock); hd_interrupt()
688 spin_unlock(hd_queue->queue_lock); hd_interrupt()
null_blk.c
244 spin_lock_irqsave(q->queue_lock, flags); end_cmd()
246 spin_unlock_irqrestore(q->queue_lock, flags); end_cmd()
348 spin_unlock_irq(q->queue_lock); null_request_fn()
350 spin_lock_irq(q->queue_lock); null_request_fn()
floppy.c
2251 spin_lock_irqsave(q->queue_lock, flags); request_done()
2253 spin_unlock_irqrestore(q->queue_lock, flags); request_done()
2265 spin_lock_irqsave(q->queue_lock, flags); request_done()
2267 spin_unlock_irqrestore(q->queue_lock, flags); request_done()
umem.c
900 card->queue->queue_lock = &card->lock; mm_pci_probe()
pktcdvd.c
2220 spin_lock_irq(q->queue_lock); pkt_open_dev()
2222 spin_unlock_irq(q->queue_lock); pkt_open_dev()
cciss.c
3330 spin_unlock_irq(q->queue_lock); do_cciss_request()
3444 spin_lock_irq(q->queue_lock); do_cciss_request()
DAC960.h
2325 spinlock_t queue_lock; member in struct:DAC960_Controller
/linux-4.4.14/drivers/block/rsxx/
dma.c
282 spin_lock_bh(&ctrl->queue_lock); rsxx_requeue_dma()
285 spin_unlock_bh(&ctrl->queue_lock); rsxx_requeue_dma()
383 spin_lock(&ctrl->queue_lock); dma_engine_stalled()
385 spin_unlock(&ctrl->queue_lock); dma_engine_stalled()
411 spin_lock_bh(&ctrl->queue_lock); rsxx_issue_dmas()
413 spin_unlock_bh(&ctrl->queue_lock); rsxx_issue_dmas()
416 spin_unlock_bh(&ctrl->queue_lock); rsxx_issue_dmas()
422 spin_lock_bh(&ctrl->queue_lock); rsxx_issue_dmas()
426 spin_unlock_bh(&ctrl->queue_lock); rsxx_issue_dmas()
586 spin_lock_bh(&ctrl->queue_lock); rsxx_dma_done()
589 spin_unlock_bh(&ctrl->queue_lock); rsxx_dma_done()
756 spin_lock_bh(&card->ctrl[i].queue_lock);
759 spin_unlock_bh(&card->ctrl[i].queue_lock);
838 spin_lock_init(&ctrl->queue_lock); rsxx_dma_ctrl_init()
1020 spin_lock_bh(&ctrl->queue_lock); rsxx_dma_destroy()
1022 spin_unlock_bh(&ctrl->queue_lock); rsxx_dma_destroy()
1076 spin_lock_bh(&card->ctrl[i].queue_lock); rsxx_eeh_save_issued_dmas()
1082 spin_unlock_bh(&card->ctrl[i].queue_lock); rsxx_eeh_save_issued_dmas()
core.c
606 spin_lock_bh(&card->ctrl[i].queue_lock); rsxx_eeh_failure()
610 spin_unlock_bh(&card->ctrl[i].queue_lock); rsxx_eeh_failure()
713 spin_lock(&card->ctrl[i].queue_lock); rsxx_slot_reset()
715 spin_unlock(&card->ctrl[i].queue_lock); rsxx_slot_reset()
718 spin_unlock(&card->ctrl[i].queue_lock); rsxx_slot_reset()
rsxx_priv.h
109 spinlock_t queue_lock; member in struct:rsxx_dma_ctrl
/linux-4.4.14/net/sunrpc/
cache.c
732 static DEFINE_SPINLOCK(queue_lock);
777 spin_lock(&queue_lock); cache_read()
786 spin_unlock(&queue_lock); cache_read()
795 spin_unlock(&queue_lock); cache_read()
806 spin_lock(&queue_lock); cache_read()
808 spin_unlock(&queue_lock); cache_read()
818 spin_lock(&queue_lock); cache_read()
820 spin_unlock(&queue_lock); cache_read()
827 spin_lock(&queue_lock); cache_read()
832 spin_unlock(&queue_lock); cache_read()
837 spin_unlock(&queue_lock); cache_read()
936 spin_lock(&queue_lock); cache_poll()
944 spin_unlock(&queue_lock); cache_poll()
959 spin_lock(&queue_lock); cache_ioctl()
972 spin_unlock(&queue_lock); cache_ioctl()
994 spin_lock(&queue_lock); cache_open()
996 spin_unlock(&queue_lock); cache_open()
1008 spin_lock(&queue_lock); cache_release()
1021 spin_unlock(&queue_lock); cache_release()
1042 spin_lock(&queue_lock); cache_dequeue()
1055 spin_unlock(&queue_lock); cache_dequeue()
1188 spin_lock(&queue_lock); sunrpc_cache_pipe_upcall()
1195 spin_unlock(&queue_lock); sunrpc_cache_pipe_upcall()
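
Unlike the per-device locks elsewhere in these results, net/sunrpc/cache.c uses a single file-scope lock, static DEFINE_SPINLOCK(queue_lock) at source line 732, taken with plain spin_lock() because its users run in process context. A sketch of that style with a hypothetical global queue (upcall_queue_lock and upcall_enqueue are illustrative names):

#include <linux/list.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(upcall_queue_lock);	/* hypothetical file-scope lock */
static LIST_HEAD(upcall_queue);

/* Callers are assumed to be in process context, so the plain spin_lock()
 * variant is sufficient here. */
static void upcall_enqueue(struct list_head *item)
{
	spin_lock(&upcall_queue_lock);
	list_add_tail(item, &upcall_queue);
	spin_unlock(&upcall_queue_lock);
}
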
/linux-4.4.14/drivers/scsi/esas2r/
esas2r_int.c
218 spin_lock_irqsave(&a->queue_lock, flags); esas2r_get_outbound_responses()
228 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_get_outbound_responses()
235 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_get_outbound_responses()
295 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_get_outbound_responses()
344 spin_lock_irqsave(&a->queue_lock, flags); esas2r_do_deferred_processes()
376 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_do_deferred_processes()
399 spin_lock_irqsave(&a->queue_lock, flags); esas2r_process_adapter_reset()
440 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_process_adapter_reset()
458 spin_lock_irqsave(&a->queue_lock, flags); esas2r_process_bus_reset()
467 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_process_bus_reset()
esas2r_disc.c
475 spin_lock_irqsave(&a->queue_lock, flags); esas2r_disc_start_request()
483 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_disc_start_request()
1167 spin_lock_irqsave(&a->queue_lock, flags); esas2r_disc_fix_curr_requests()
1183 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_disc_fix_curr_requests()
esas2r_io.c
92 spin_lock_irqsave(&a->queue_lock, flags); esas2r_start_request()
102 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_start_request()
779 spin_lock_irqsave(&a->queue_lock, flags); esas2r_send_task_mgmt()
828 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_send_task_mgmt()
esas2r_init.c
845 spin_lock_init(&a->queue_lock); esas2r_init_adapter_struct()
1331 spin_lock_irqsave(&a->queue_lock, flags); esas2r_init_msgs()
1333 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_init_msgs()
esas2r.h
851 spinlock_t queue_lock; member in struct:esas2r_adapter
1400 spin_lock_irqsave(&a->queue_lock, flags); esas2r_start_ae_request()
1402 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_start_ae_request()
esas2r_main.c
1059 spin_lock_irqsave(&a->queue_lock, flags); esas2r_eh_abort()
1073 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_eh_abort()
1080 spin_unlock_irqrestore(&a->queue_lock, flags); esas2r_eh_abort()
/linux-4.4.14/drivers/usb/host/
u132-hcd.c
166 struct u132_spin queue_lock; member in struct:u132_endp
518 spin_lock_irqsave(&endp->queue_lock.slock, irqs); u132_hcd_giveback_urb()
523 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_hcd_giveback_urb()
532 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_hcd_giveback_urb()
557 spin_lock_irqsave(&endp->queue_lock.slock, irqs); u132_hcd_abandon_urb()
562 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_hcd_abandon_urb()
571 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_hcd_abandon_urb()
1872 spin_lock_init(&endp->queue_lock.slock); create_endpoint_and_queue_int()
1873 spin_lock_irqsave(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_int()
1876 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_int()
1927 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_int()
1971 spin_lock_init(&endp->queue_lock.slock); create_endpoint_and_queue_bulk()
1972 spin_lock_irqsave(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_bulk()
1975 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_bulk()
2026 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_bulk()
2067 spin_lock_init(&endp->queue_lock.slock); create_endpoint_and_queue_control()
2068 spin_lock_irqsave(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_control()
2071 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_control()
2115 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_control()
2136 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); create_endpoint_and_queue_control()
2272 spin_lock_irqsave(&endp->queue_lock.slock, u132_urb_enqueue()
2285 spin_unlock_irqrestore(&endp->queue_lock.slock, u132_urb_enqueue()
2314 spin_lock_irqsave(&endp->queue_lock.slock, u132_urb_enqueue()
2327 spin_unlock_irqrestore(&endp->queue_lock.slock, u132_urb_enqueue()
2363 spin_lock_irqsave(&endp->queue_lock.slock, u132_urb_enqueue()
2375 spin_unlock_irqrestore(&endp->queue_lock.slock, u132_urb_enqueue()
2426 spin_lock_irqsave(&endp->queue_lock.slock, irqs); u132_endp_urb_dequeue()
2429 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_endp_urb_dequeue()
2438 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_endp_urb_dequeue()
2446 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_endp_urb_dequeue()
2449 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_endp_urb_dequeue()
2479 spin_unlock_irqrestore(&endp->queue_lock.slock, u132_endp_urb_dequeue()
2487 spin_unlock_irqrestore(&endp->queue_lock.slock, u132_endp_urb_dequeue()
2502 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_endp_urb_dequeue()
2510 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); u132_endp_urb_dequeue()
/linux-4.4.14/drivers/media/usb/gspca/
gspca.c
1173 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) vidioc_s_fmt_vid_cap()
1199 mutex_unlock(&gspca_dev->queue_lock); vidioc_s_fmt_vid_cap()
1294 /* Needed for gspca_stream_off, always lock before queue_lock! */ dev_close()
1298 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) { dev_close()
1310 mutex_unlock(&gspca_dev->queue_lock); dev_close()
1385 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) vidioc_reqbufs()
1433 mutex_unlock(&gspca_dev->queue_lock); vidioc_reqbufs()
1460 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) vidioc_streamon()
1484 mutex_unlock(&gspca_dev->queue_lock); vidioc_streamon()
1497 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) vidioc_streamoff()
1524 mutex_unlock(&gspca_dev->queue_lock); vidioc_streamoff()
1594 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) dev_mmap()
1646 mutex_unlock(&gspca_dev->queue_lock); dev_mmap()
1668 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) frame_ready()
1671 mutex_unlock(&gspca_dev->queue_lock); frame_ready()
1689 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) vidioc_dqbuf()
1699 mutex_unlock(&gspca_dev->queue_lock); vidioc_dqbuf()
1713 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) vidioc_dqbuf()
1737 mutex_unlock(&gspca_dev->queue_lock); vidioc_dqbuf()
1765 if (mutex_lock_interruptible(&gspca_dev->queue_lock)) vidioc_qbuf()
1804 mutex_unlock(&gspca_dev->queue_lock); vidioc_qbuf()
1879 if (mutex_lock_interruptible(&gspca_dev->queue_lock) != 0) { dev_poll()
1885 mutex_unlock(&gspca_dev->queue_lock); dev_poll()
2073 mutex_init(&gspca_dev->queue_lock); gspca_dev_probe2()
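
gspca protects its buffer queue with a mutex rather than a spinlock, and every ioctl path above takes it with mutex_lock_interruptible() so a blocked caller can be broken out by a signal; the dev_close comment at source line 1294 also records a strict ordering against another lock. A hedged sketch of the interruptible pattern, using a hypothetical device structure rather than the gspca code:

#include <linux/errno.h>
#include <linux/mutex.h>

struct my_video_dev {
	struct mutex queue_lock;	/* protects the buffer queue, may sleep */
	unsigned int queued;
};

static int my_queue_buffer(struct my_video_dev *dev)
{
	/* mutex_init(&dev->queue_lock) is assumed to happen at probe time. */
	if (mutex_lock_interruptible(&dev->queue_lock))
		return -ERESTARTSYS;	/* a pending signal interrupts the wait */

	dev->queued++;

	mutex_unlock(&dev->queue_lock);
	return 0;
}
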
gspca.h
151 /* protected by queue_lock */
197 struct mutex queue_lock; /* ISOC queue protection */ member in struct:gspca_dev
211 /* (*) These variables are proteced by both usb_lock and queue_lock,
/linux-4.4.14/drivers/media/platform/
timblogiw.c
69 spinlock_t queue_lock; /* mutual exclusion */ member in struct:timblogiw_fh
124 spin_lock(&fh->queue_lock); timblogiw_dma_cb()
146 spin_unlock(&fh->queue_lock); timblogiw_dma_cb()
564 spin_unlock_irq(&fh->queue_lock); buffer_queue()
570 spin_lock_irq(&fh->queue_lock); buffer_queue()
581 spin_lock_irq(&fh->queue_lock); buffer_queue()
662 spin_lock_init(&fh->queue_lock); timblogiw_open()
680 &timblogiw_video_qops, lw->dev, &fh->queue_lock, timblogiw_open()
/linux-4.4.14/include/linux/
connector.h
40 spinlock_t queue_lock; member in struct:cn_queue_dev
blk-cgroup.h
264 * holding @q->queue_lock and lookup hint is updated on success.
422 * under queue_lock. This function is guaranteed to return non-%NULL
461 * queue_lock.
498 * Should be used under queue_lock.
705 spin_lock_irq(q->queue_lock); blkcg_bio_issue_check()
709 spin_unlock_irq(q->queue_lock); blkcg_bio_issue_check()
srcu.h
50 spinlock_t queue_lock; /* protect ->batch_queue, ->running */ member in struct:srcu_struct
91 .queue_lock = __SPIN_LOCK_UNLOCKED(name.queue_lock), \
blkdev.h
359 * ->queue_lock.
362 spinlock_t *queue_lock; member in struct:request_queue
405 * queue_lock internally, e.g. scsi_request_fn().
504 if (q->queue_lock) queue_lockdep_assert_held()
505 lockdep_assert_held(q->queue_lock); queue_lockdep_assert_held()
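
blkdev.h (source lines 359-362 and 504-505) is the definition that most of the block-layer hits refer back to: queue_lock is a spinlock_t pointer, and queue_lockdep_assert_held() only asserts once the pointer has been set. A paraphrased sketch of that NULL-tolerant assert; the struct and helper names here are hypothetical, not the verbatim header:

#include <linux/lockdep.h>
#include <linux/spinlock.h>

struct my_request_queue {
	spinlock_t *queue_lock;		/* may point at a driver lock, or be NULL early in init */
	spinlock_t __queue_lock;	/* internal default target */
};

static inline void my_queue_lockdep_assert_held(struct my_request_queue *q)
{
	if (q->queue_lock)
		lockdep_assert_held(q->queue_lock);
}
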
isdn.h
252 spinlock_t queue_lock; /* lock to protect queue */ member in struct:isdn_net_dev_s
/linux-4.4.14/drivers/spi/
spi.c
1049 spin_lock_irqsave(&master->queue_lock, flags); __spi_pump_messages()
1053 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_pump_messages()
1060 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_pump_messages()
1067 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_pump_messages()
1075 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_pump_messages()
1081 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_pump_messages()
1097 spin_lock_irqsave(&master->queue_lock, flags); __spi_pump_messages()
1099 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_pump_messages()
1112 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_pump_messages()
1228 spin_lock_irqsave(&master->queue_lock, flags); spi_get_next_queued_message()
1231 spin_unlock_irqrestore(&master->queue_lock, flags); spi_get_next_queued_message()
1250 spin_lock_irqsave(&master->queue_lock, flags); spi_finalize_current_message()
1252 spin_unlock_irqrestore(&master->queue_lock, flags); spi_finalize_current_message()
1264 spin_lock_irqsave(&master->queue_lock, flags); spi_finalize_current_message()
1268 spin_unlock_irqrestore(&master->queue_lock, flags); spi_finalize_current_message()
1282 spin_lock_irqsave(&master->queue_lock, flags); spi_start_queue()
1285 spin_unlock_irqrestore(&master->queue_lock, flags); spi_start_queue()
1291 spin_unlock_irqrestore(&master->queue_lock, flags); spi_start_queue()
1304 spin_lock_irqsave(&master->queue_lock, flags); spi_stop_queue()
1313 spin_unlock_irqrestore(&master->queue_lock, flags); spi_stop_queue()
1315 spin_lock_irqsave(&master->queue_lock, flags); spi_stop_queue()
1323 spin_unlock_irqrestore(&master->queue_lock, flags); spi_stop_queue()
1363 spin_lock_irqsave(&master->queue_lock, flags); __spi_queued_transfer()
1366 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_queued_transfer()
1376 spin_unlock_irqrestore(&master->queue_lock, flags); __spi_queued_transfer()
1812 spin_lock_init(&master->queue_lock); spi_register_master()
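
spi.c takes master->queue_lock with spin_lock_irqsave() around every look at the message queue, since __spi_queued_transfer() can be called from atomic contexts while the pump worker runs in process context. A small sketch of queueing under such a lock; my_controller and its fields are illustrative assumptions, not the SPI core:

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct my_controller {
	spinlock_t queue_lock;		/* protects queue and running */
	struct list_head queue;
	bool running;
};

static int my_queued_transfer(struct my_controller *ctlr, struct list_head *msg)
{
	unsigned long flags;

	spin_lock_irqsave(&ctlr->queue_lock, flags);
	if (!ctlr->running) {
		spin_unlock_irqrestore(&ctlr->queue_lock, flags);
		return -ESHUTDOWN;	/* queue already stopped */
	}
	list_add_tail(msg, &ctlr->queue);
	spin_unlock_irqrestore(&ctlr->queue_lock, flags);
	return 0;
}
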
spi-pl022.c
352 * @queue_lock: spinlock to syncronise access to message queue
/linux-4.4.14/kernel/rcu/
srcu.c
102 spin_lock_init(&sp->queue_lock); init_srcu_struct_fields()
394 spin_lock_irqsave(&sp->queue_lock, flags); call_srcu()
400 spin_unlock_irqrestore(&sp->queue_lock, flags); call_srcu()
427 spin_lock_irq(&sp->queue_lock); __synchronize_srcu()
432 spin_unlock_irq(&sp->queue_lock); __synchronize_srcu()
444 spin_unlock_irq(&sp->queue_lock); __synchronize_srcu()
547 spin_lock_irq(&sp->queue_lock); srcu_collect_new()
549 spin_unlock_irq(&sp->queue_lock); srcu_collect_new()
645 spin_lock_irq(&sp->queue_lock); srcu_reschedule()
653 spin_unlock_irq(&sp->queue_lock); srcu_reschedule()
/linux-4.4.14/drivers/s390/block/
dasd_ioctl.c
161 spin_lock(&block->queue_lock); dasd_ioctl_abortio()
167 spin_unlock(&block->queue_lock); dasd_ioctl_abortio()
169 spin_lock(&block->queue_lock); dasd_ioctl_abortio()
172 spin_unlock(&block->queue_lock); dasd_ioctl_abortio()
dasd.c
163 spin_lock_init(&block->queue_lock); dasd_alloc_block()
1802 spin_lock_bh(&block->queue_lock); list_for_each_safe()
1824 spin_unlock_bh(&block->queue_lock); list_for_each_safe()
2782 spin_lock(&block->queue_lock); dasd_block_tasklet()
2785 spin_unlock(&block->queue_lock); dasd_block_tasklet()
2793 spin_lock(&block->queue_lock); dasd_block_tasklet()
2798 spin_unlock(&block->queue_lock); dasd_block_tasklet()
2822 spin_lock_irqsave(&block->queue_lock, flags); _dasd_requeue_request()
2825 spin_unlock_irqrestore(&block->queue_lock, flags); _dasd_requeue_request()
2842 spin_lock_bh(&block->queue_lock); dasd_flush_block_queue()
2863 spin_unlock_bh(&block->queue_lock); dasd_flush_block_queue()
2870 spin_lock_bh(&block->queue_lock); dasd_flush_block_queue()
2872 spin_unlock_bh(&block->queue_lock); dasd_flush_block_queue()
2915 spin_lock(&block->queue_lock); do_dasd_request()
2920 spin_unlock(&block->queue_lock); do_dasd_request()
2950 spin_lock(&block->queue_lock); dasd_times_out()
2994 spin_unlock(&block->queue_lock); dasd_times_out()
dasd_int.h
499 spinlock_t queue_lock; member in struct:dasd_block
/linux-4.4.14/include/linux/mtd/
blktrans.h
50 spinlock_t queue_lock; member in struct:mtd_blktrans_dev
/linux-4.4.14/drivers/net/wireless/mwifiex/
init.c
332 spin_lock_irqsave(&adapter->queue_lock, dev_queue_flags); mwifiex_wake_up_net_dev_queue()
341 spin_unlock_irqrestore(&adapter->queue_lock, dev_queue_flags); mwifiex_wake_up_net_dev_queue()
353 spin_lock_irqsave(&adapter->queue_lock, dev_queue_flags); mwifiex_stop_net_dev_queue()
362 spin_unlock_irqrestore(&adapter->queue_lock, dev_queue_flags); mwifiex_stop_net_dev_queue()
460 spin_lock_init(&adapter->queue_lock); mwifiex_init_lock_list()
main.h
958 spinlock_t queue_lock; /* lock for tx queues */ member in struct:mwifiex_adapter
/linux-4.4.14/drivers/scsi/
scsi_lib.c
171 spin_lock_irqsave(q->queue_lock, flags); __scsi_queue_insert()
174 spin_unlock_irqrestore(q->queue_lock, flags); __scsi_queue_insert()
430 * drops the queue_lock and can add us back to the scsi_starved_list_run()
535 spin_lock_irqsave(q->queue_lock, flags); scsi_requeue_command()
540 spin_unlock_irqrestore(q->queue_lock, flags); scsi_requeue_command()
729 spin_lock_irqsave(q->queue_lock, flags); scsi_end_request()
731 spin_unlock_irqrestore(q->queue_lock, flags); scsi_end_request()
1405 * Called with the queue_lock held.
1767 __releases(q->queue_lock)
1768 __acquires(q->queue_lock)
1807 spin_unlock_irq(q->queue_lock);
1859 spin_lock_irq(q->queue_lock);
1862 spin_lock_irq(q->queue_lock);
1873 * must return with queue_lock held.
1879 spin_lock_irq(q->queue_lock);
2974 spin_lock_irqsave(q->queue_lock, flags); scsi_internal_device_block()
2976 spin_unlock_irqrestore(q->queue_lock, flags); scsi_internal_device_block()
3026 spin_lock_irqsave(q->queue_lock, flags); scsi_internal_device_unblock()
3028 spin_unlock_irqrestore(q->queue_lock, flags); scsi_internal_device_unblock()
scsi_dh.c
297 spin_lock_irqsave(q->queue_lock, flags); get_sdev_from_queue()
301 spin_unlock_irqrestore(q->queue_lock, flags); get_sdev_from_queue()
scsi_transport_srp.c
420 spin_lock_irq(q->queue_lock); shost_for_each_device()
422 spin_unlock_irq(q->queue_lock); shost_for_each_device()
scsi_transport_fc.c
3954 spin_unlock_irq(q->queue_lock); fc_bsg_request_handler()
3956 spin_lock_irq(q->queue_lock); fc_bsg_request_handler()
3960 spin_unlock_irq(q->queue_lock); fc_bsg_request_handler()
3966 spin_lock_irq(q->queue_lock); fc_bsg_request_handler()
3979 spin_lock_irq(q->queue_lock); fc_bsg_request_handler()
3983 /* the dispatch routines will unlock the queue_lock */ fc_bsg_request_handler()
3995 spin_lock_irq(q->queue_lock); fc_bsg_request_handler()
3998 spin_unlock_irq(q->queue_lock); fc_bsg_request_handler()
4000 spin_lock_irq(q->queue_lock); fc_bsg_request_handler()
scsi_transport_sas.c
178 spin_unlock_irq(q->queue_lock); sas_smp_request()
186 spin_lock_irq(q->queue_lock); sas_smp_request()
/linux-4.4.14/drivers/media/usb/usbvision/
usbvision-video.c
760 spin_lock_irqsave(&usbvision->queue_lock, lock_flags); vidioc_qbuf()
762 spin_unlock_irqrestore(&usbvision->queue_lock, lock_flags); vidioc_qbuf()
784 spin_lock_irqsave(&usbvision->queue_lock, lock_flags); vidioc_dqbuf()
788 spin_unlock_irqrestore(&usbvision->queue_lock, lock_flags); vidioc_dqbuf()
962 spin_lock_irqsave(&usbvision->queue_lock, lock_flags); usbvision_read()
964 spin_unlock_irqrestore(&usbvision->queue_lock, usbvision_read()
981 spin_lock_irqsave(&usbvision->queue_lock, lock_flags); usbvision_read()
985 spin_unlock_irqrestore(&usbvision->queue_lock, lock_flags); usbvision_read()
1537 spin_lock_init(&usbvision->queue_lock); usbvision_probe()
usbvision.h
410 spinlock_t queue_lock; /* spinlock for protecting mods on inqueue and outqueue */ member in struct:usb_usbvision
usbvision-core.c
1175 spin_lock_irqsave(&usbvision->queue_lock, lock_flags); usbvision_parse_data()
1178 spin_unlock_irqrestore(&usbvision->queue_lock, lock_flags); usbvision_parse_data()
/linux-4.4.14/drivers/gpu/drm/amd/scheduler/
gpu_scheduler.c
133 spin_lock_init(&entity->queue_lock); amd_sched_entity_init()
295 spin_lock(&entity->queue_lock); amd_sched_entity_in()
302 spin_unlock(&entity->queue_lock); amd_sched_entity_in()
gpu_scheduler.h
49 spinlock_t queue_lock; member in struct:amd_sched_entity
/linux-4.4.14/drivers/media/usb/go7007/
go7007-priv.h
233 struct mutex queue_lock; member in struct:go7007
go7007-usb.c
1326 mutex_lock(&go->queue_lock); go7007_usb_disconnect()
1336 mutex_unlock(&go->queue_lock); go7007_usb_disconnect()
go7007-v4l2.c
1110 mutex_init(&go->queue_lock); go7007_v4l2_init()
1120 go->vidq.lock = &go->queue_lock; go7007_v4l2_init()
/linux-4.4.14/drivers/net/ethernet/qlogic/qlcnic/
qlcnic_83xx_hw.c
3853 spin_lock(&mbx->queue_lock); qlcnic_83xx_flush_mbx_queue()
3864 spin_unlock(&mbx->queue_lock); qlcnic_83xx_flush_mbx_queue()
3900 spin_lock(&mbx->queue_lock); qlcnic_83xx_dequeue_mbx_cmd()
3905 spin_unlock(&mbx->queue_lock); qlcnic_83xx_dequeue_mbx_cmd()
3970 spin_lock(&mbx->queue_lock); qlcnic_83xx_enqueue_mbx_cmd()
3978 spin_unlock(&mbx->queue_lock); qlcnic_83xx_enqueue_mbx_cmd()
4074 spin_lock(&mbx->queue_lock); qlcnic_83xx_mailbox_worker()
4077 spin_unlock(&mbx->queue_lock); qlcnic_83xx_mailbox_worker()
4082 spin_unlock(&mbx->queue_lock); qlcnic_83xx_mailbox_worker()
4128 spin_lock_init(&mbx->queue_lock); qlcnic_83xx_init_mailbox_work()
qlcnic.h
1101 spinlock_t queue_lock; /* Mailbox queue lock */ member in struct:qlcnic_mailbox
/linux-4.4.14/drivers/scsi/libsas/
sas_ata.c
597 spin_lock_irqsave(q->queue_lock, flags); sas_ata_task_abort()
599 spin_unlock_irqrestore(q->queue_lock, flags); sas_ata_task_abort()
sas_scsi_host.c
947 spin_lock_irqsave(q->queue_lock, flags); sas_task_abort()
949 spin_unlock_irqrestore(q->queue_lock, flags); sas_task_abort()
/linux-4.4.14/arch/x86/platform/uv/
tlb_uv.c
717 spin_lock(&hmaster->queue_lock); destination_plugged()
719 spin_unlock(&hmaster->queue_lock); destination_plugged()
739 spin_lock(&hmaster->queue_lock); destination_timeout()
741 spin_unlock(&hmaster->queue_lock); destination_timeout()
1904 spin_lock_init(&bcp->queue_lock); for_each_present_cpu()
/linux-4.4.14/drivers/md/
dm.c
1202 spin_lock_irqsave(q->queue_lock, flags); old_requeue_request()
1205 spin_unlock_irqrestore(q->queue_lock, flags); old_requeue_request()
1233 spin_lock_irqsave(q->queue_lock, flags); old_stop_queue()
1235 spin_unlock_irqrestore(q->queue_lock, flags); old_stop_queue()
1250 spin_lock_irqsave(q->queue_lock, flags); old_start_queue()
1253 spin_unlock_irqrestore(q->queue_lock, flags); old_start_queue()
2090 * ->pending within a single queue_lock not to increment the dm_request_fn()
dm-table.c
1697 spin_lock_irqsave(queue->queue_lock, flags); dm_table_run_md_queue_async()
1699 spin_unlock_irqrestore(queue->queue_lock, flags); dm_table_run_md_queue_async()
/linux-4.4.14/drivers/nvme/host/
pci.c
606 spin_lock_irqsave(req->q->queue_lock, flags); req_completion()
609 spin_unlock_irqrestore(req->q->queue_lock, flags); req_completion()
2937 spin_lock_irq(ns->queue->queue_lock); nvme_freeze_queues()
2939 spin_unlock_irq(ns->queue->queue_lock); nvme_freeze_queues()
/linux-4.4.14/arch/x86/include/asm/uv/
uv_bau.h
619 spinlock_t queue_lock; member in struct:bau_control
/linux-4.4.14/include/linux/spi/
spi.h
321 * @queue_lock: spinlock to syncronise access to message queue
489 spinlock_t queue_lock; member in struct:spi_master
/linux-4.4.14/kernel/
futex.c
164 * Refer to the comment in queue_lock().
1860 static inline struct futex_hash_bucket *queue_lock(struct futex_q *q)
1870 * waiting for the spinlock. This is safe as all queue_lock()
2250 *hb = queue_lock(q); futex_wait_setup()
2404 hb = queue_lock(&q); futex_lock_pi()
/linux-4.4.14/drivers/ata/
libata-eh.c
985 spin_lock_irqsave(q->queue_lock, flags); ata_qc_schedule_eh()
987 spin_unlock_irqrestore(q->queue_lock, flags); ata_qc_schedule_eh()
/linux-4.4.14/drivers/block/mtip32xx/
mtip32xx.c
3009 spin_lock(dd->queue->queue_lock); mtip_service_thread()
3012 spin_unlock(dd->queue->queue_lock); mtip_service_thread()
/linux-4.4.14/drivers/block/drbd/
drbd_main.c
2777 q->queue_lock = &resource->req_lock; drbd_create_device()

Completed in 3178 milliseconds