rq_lock            79 drivers/gpu/drm/scheduler/sched_entity.c 	spin_lock_init(&entity->rq_lock);
rq_lock           198 drivers/gpu/drm/scheduler/sched_entity.c 		spin_lock(&entity->rq_lock);
rq_lock           201 drivers/gpu/drm/scheduler/sched_entity.c 		spin_unlock(&entity->rq_lock);
rq_lock           374 drivers/gpu/drm/scheduler/sched_entity.c 	spin_lock(&entity->rq_lock);
rq_lock           385 drivers/gpu/drm/scheduler/sched_entity.c 	spin_unlock(&entity->rq_lock);
rq_lock           500 drivers/gpu/drm/scheduler/sched_entity.c 	spin_lock(&entity->rq_lock);
rq_lock           503 drivers/gpu/drm/scheduler/sched_entity.c 	spin_unlock(&entity->rq_lock);
rq_lock           531 drivers/gpu/drm/scheduler/sched_entity.c 		spin_lock(&entity->rq_lock);
rq_lock           533 drivers/gpu/drm/scheduler/sched_entity.c 			spin_unlock(&entity->rq_lock);
rq_lock           539 drivers/gpu/drm/scheduler/sched_entity.c 		spin_unlock(&entity->rq_lock);
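
All of the sched_entity.c hits above guard entity->rq_lock, the per-entity spinlock declared in include/drm/gpu_scheduler.h (listed further down, line 84); it serializes changes to the entity's run-queue selection against the scheduler side using it. A minimal sketch of that pattern, assuming process context as the plain spin_lock() calls in the listing imply; the function name and the body of the critical section are illustrative only:

#include <linux/spinlock.h>
#include <drm/gpu_scheduler.h>

/* Illustrative: switch an entity to a new run queue under rq_lock. */
static void example_entity_set_rq(struct drm_sched_entity *entity,
				  struct drm_sched_rq *rq)
{
	spin_lock(&entity->rq_lock);	/* cf. sched_entity.c:198/374/500/531 */
	entity->rq = rq;		/* hypothetical critical section */
	spin_unlock(&entity->rq_lock);
}
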
rq_lock          1182 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	spin_lock_init(&qp->rq_lock);
rq_lock          2466 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	spin_lock_irqsave(&qp->rq_lock, flags);
rq_lock          2509 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	spin_unlock_irqrestore(&qp->rq_lock, flags);
rq_lock            85 drivers/infiniband/hw/bnxt_re/ib_verbs.h 	spinlock_t		rq_lock;	/* protect rq */
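
In bnxt_re, rq_lock is a per-QP spinlock protecting the receive queue (ib_verbs.h:85); the post-receive path at ib_verbs.c:2466/2509 holds it with interrupts saved. A hedged sketch of that shape; struct example_qp and the function name are stand-ins, not the driver's real types:

#include <linux/spinlock.h>

struct example_qp {
	spinlock_t rq_lock;		/* protect rq, as in ib_verbs.h:85 */
};

static void example_post_recv(struct example_qp *qp)
{
	unsigned long flags;

	spin_lock_irqsave(&qp->rq_lock, flags);		/* ib_verbs.c:2466 */
	/* ... queue receive work requests onto the RQ ... */
	spin_unlock_irqrestore(&qp->rq_lock, flags);	/* ib_verbs.c:2509 */
}
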
rq_lock           464 drivers/infiniband/sw/siw/siw.h 	spinlock_t rq_lock;
rq_lock           379 drivers/infiniband/sw/siw/siw_verbs.c 	spin_lock_init(&qp->rq_lock);
rq_lock          1042 drivers/infiniband/sw/siw/siw_verbs.c 	spin_lock_irqsave(&qp->rq_lock, flags);
rq_lock          1070 drivers/infiniband/sw/siw/siw_verbs.c 	spin_unlock_irqrestore(&qp->rq_lock, flags);
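
The soft-iWARP driver follows the same init-then-irqsave pattern: the lock is set up at QP creation (siw_verbs.c:379) and taken around receive posting (siw_verbs.c:1042/1070). Sketch with stand-in names; only the field and the locking calls come from the listing:

#include <linux/spinlock.h>

struct example_siw_qp {
	spinlock_t rq_lock;		/* as declared in siw.h:464 */
};

static void example_siw_qp_init(struct example_siw_qp *qp)
{
	spin_lock_init(&qp->rq_lock);			/* siw_verbs.c:379 */
}

static void example_siw_post_recv(struct example_siw_qp *qp)
{
	unsigned long flags;

	spin_lock_irqsave(&qp->rq_lock, flags);		/* siw_verbs.c:1042 */
	/* ... copy the work request into the software RQ ... */
	spin_unlock_irqrestore(&qp->rq_lock, flags);	/* siw_verbs.c:1070 */
}
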
rq_lock           525 drivers/net/ethernet/intel/ice/ice_controlq.c 	mutex_lock(&cq->rq_lock);
rq_lock           547 drivers/net/ethernet/intel/ice/ice_controlq.c 	mutex_unlock(&cq->rq_lock);
rq_lock           676 drivers/net/ethernet/intel/ice/ice_controlq.c 	mutex_init(&cq->rq_lock);
rq_lock           757 drivers/net/ethernet/intel/ice/ice_controlq.c 	mutex_destroy(&cq->rq_lock);
rq_lock          1051 drivers/net/ethernet/intel/ice/ice_controlq.c 	mutex_lock(&cq->rq_lock);
rq_lock          1122 drivers/net/ethernet/intel/ice/ice_controlq.c 	mutex_unlock(&cq->rq_lock);
rq_lock            93 drivers/net/ethernet/intel/ice/ice_controlq.h 	struct mutex rq_lock;		/* Receive queue lock */
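
ice is the one entry here where rq_lock is a mutex rather than a spinlock: the control queue's receive side is only touched from sleepable context, so ice_controlq.c brackets the queue's lifetime with mutex_init()/mutex_destroy() and its processing with mutex_lock()/mutex_unlock(). A sketch of that lifetime with invented struct and function names:

#include <linux/mutex.h>

struct example_ctl_q {
	struct mutex rq_lock;		/* Receive queue lock, ice_controlq.h:93 */
};

static void example_ctlq_init(struct example_ctl_q *cq)
{
	mutex_init(&cq->rq_lock);		/* ice_controlq.c:676 */
}

static void example_ctlq_process_rq(struct example_ctl_q *cq)
{
	mutex_lock(&cq->rq_lock);		/* ice_controlq.c:525, 1051 */
	/* ... clean or receive control queue descriptors ... */
	mutex_unlock(&cq->rq_lock);		/* ice_controlq.c:547, 1122 */
}

static void example_ctlq_destroy(struct example_ctl_q *cq)
{
	mutex_destroy(&cq->rq_lock);		/* ice_controlq.c:757 */
}
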
rq_lock            84 include/drm/gpu_scheduler.h 	spinlock_t			rq_lock;
rq_lock           298 include/linux/sunrpc/svc.h 	spinlock_t		rq_lock;	/* per-request lock */
rq_lock           244 kernel/sched/core.c 	rq_lock(rq, &rf);
rq_lock           269 kernel/sched/core.c 	rq_lock(rq, &rf);
rq_lock          1496 kernel/sched/core.c 	rq_lock(rq, rf);
rq_lock          1557 kernel/sched/core.c 	rq_lock(rq, &rf);
rq_lock          2403 kernel/sched/core.c 	rq_lock(rq, &rf);
rq_lock          3594 kernel/sched/core.c 	rq_lock(rq, &rf);
rq_lock          4025 kernel/sched/core.c 	rq_lock(rq, &rf);
rq_lock          7476 kernel/sched/fair.c 	rq_lock(rq, &rf);
rq_lock          7492 kernel/sched/fair.c 	rq_lock(env->dst_rq, &rf);
rq_lock          10008 kernel/sched/fair.c 	rq_lock(rq, &rf);
rq_lock          1265 kernel/sched/sched.h 	rq_lock(rq, rf);
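
The kernel/sched hits are a different symbol altogether: rq_lock() is the runqueue locking helper defined in kernel/sched/sched.h, which acquires rq->lock and pins the runqueue through a struct rq_flags cookie that the matching rq_unlock() consumes. Every core.c and fair.c caller above brackets its critical section the same way. An illustrative caller, buildable only inside kernel/sched/ since sched.h is internal; the body is a typical but invented critical section:

#include "sched.h"		/* kernel/sched/sched.h (in-tree only) */

static void example_with_rq_locked(struct rq *rq)
{
	struct rq_flags rf;

	rq_lock(rq, &rf);		/* take rq->lock, pin it via rf */
	update_rq_clock(rq);		/* typical first step in these callers */
	/* ... operate on the runqueue ... */
	rq_unlock(rq, &rf);
}
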
rq_lock           613 net/sunrpc/svc.c 	spin_lock_init(&rqstp->rq_lock);
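
Finally, the SUNRPC rq_lock is the per-request spinlock in struct svc_rqst (svc.h:298), initialized when the request structure is set up at svc.c:613; the header comment only calls it a per-request lock, so the guarded update below is purely hypothetical:

#include <linux/spinlock.h>
#include <linux/sunrpc/svc.h>

static void example_rqst_init(struct svc_rqst *rqstp)
{
	spin_lock_init(&rqstp->rq_lock);	/* svc.c:613 */
}

static void example_rqst_touch_state(struct svc_rqst *rqstp)
{
	spin_lock(&rqstp->rq_lock);
	/* ... update state shared with other users of this rqstp ... */
	spin_unlock(&rqstp->rq_lock);
}
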