Lines matching refs:rdma — cross-reference hits for the identifier rdma; each entry gives the source line number, the matching code, and the containing function (a trailing "local" or "argument" notes how rdma is declared there).
147 struct svcxprt_rdma *rdma = in svc_rdma_bc_free() local
152 kfree(rdma); in svc_rdma_bc_free()
693 struct svcxprt_rdma *rdma = in rdma_cma_handler() local
701 clear_bit(RDMAXPRT_CONN_PENDING, &rdma->sc_flags); in rdma_cma_handler()
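The rdma_cma_handler() hit above clears RDMAXPRT_CONN_PENDING once the connection-manager event arrives. As a rough user-space analogue of that kind of atomic flag update (CONN_PENDING, xprt_flags and the helpers below are invented for illustration; the kernel uses clear_bit()/test_bit() on sc_flags):

#include <stdatomic.h>
#include <stdbool.h>

/* Illustrative connection flags; CONN_PENDING mirrors RDMAXPRT_CONN_PENDING. */
enum { CONN_PENDING = 1u << 0 };

static _Atomic unsigned int xprt_flags;

/* On a "connection established" event, atomically drop the pending bit,
 * as the clear_bit() call in rdma_cma_handler() does. */
static void on_established(void)
{
	atomic_fetch_and(&xprt_flags, ~CONN_PENDING);
}

/* Query helper, analogous to test_bit(). */
static bool conn_pending(void)
{
	return atomic_load(&xprt_flags) & CONN_PENDING;
}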
839 struct svc_rdma_fastreg_mr *svc_rdma_get_frmr(struct svcxprt_rdma *rdma) in svc_rdma_get_frmr() argument
843 spin_lock_bh(&rdma->sc_frmr_q_lock); in svc_rdma_get_frmr()
844 if (!list_empty(&rdma->sc_frmr_q)) { in svc_rdma_get_frmr()
845 frmr = list_entry(rdma->sc_frmr_q.next, in svc_rdma_get_frmr()
850 spin_unlock_bh(&rdma->sc_frmr_q_lock); in svc_rdma_get_frmr()
854 return rdma_alloc_frmr(rdma); in svc_rdma_get_frmr()
857 void svc_rdma_put_frmr(struct svcxprt_rdma *rdma, in svc_rdma_put_frmr() argument
861 ib_dma_unmap_sg(rdma->sc_cm_id->device, in svc_rdma_put_frmr()
863 atomic_dec(&rdma->sc_dma_used); in svc_rdma_put_frmr()
864 spin_lock_bh(&rdma->sc_frmr_q_lock); in svc_rdma_put_frmr()
866 list_add(&frmr->frmr_list, &rdma->sc_frmr_q); in svc_rdma_put_frmr()
867 spin_unlock_bh(&rdma->sc_frmr_q_lock); in svc_rdma_put_frmr()
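Taken together, the svc_rdma_get_frmr()/svc_rdma_put_frmr() hits show a locked free-list cache: get pops a cached fast-registration MR under sc_frmr_q_lock and falls back to allocating a fresh one, while put DMA-unmaps the scatterlist and pushes the entry back onto sc_frmr_q. A minimal user-space sketch of the same pattern, with a pthread mutex standing in for the spinlock and a plain singly linked list for the kernel list_head (struct frmr, frmr_cache_get() and frmr_cache_put() are illustrative names, not kernel API):

#include <pthread.h>
#include <stdlib.h>

/* Stand-in for svc_rdma_fastreg_mr: just a node that can be cached. */
struct frmr {
	struct frmr *next;
	/* ... registration state would live here ... */
};

struct frmr_cache {
	pthread_mutex_t lock;   /* plays the role of sc_frmr_q_lock */
	struct frmr *free_list; /* plays the role of sc_frmr_q */
};

/* Pop a cached entry if one exists, otherwise allocate a new one. */
static struct frmr *frmr_cache_get(struct frmr_cache *c)
{
	struct frmr *f = NULL;

	pthread_mutex_lock(&c->lock);
	if (c->free_list) {
		f = c->free_list;
		c->free_list = f->next;
	}
	pthread_mutex_unlock(&c->lock);

	return f ? f : calloc(1, sizeof(*f));
}

/* Return an entry to the cache; the kernel code also DMA-unmaps the
 * scatterlist and decrements sc_dma_used before re-queuing. */
static void frmr_cache_put(struct frmr_cache *c, struct frmr *f)
{
	pthread_mutex_lock(&c->lock);
	f->next = c->free_list;
	c->free_list = f;
	pthread_mutex_unlock(&c->lock);
}

The lock only covers the list manipulation; allocation happens outside it, which keeps the critical section short, matching the shape of the kernel code above.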
1151 struct svcxprt_rdma *rdma = in svc_rdma_detach() local
1156 rdma_disconnect(rdma->sc_cm_id); in svc_rdma_detach()
1161 struct svcxprt_rdma *rdma = in __svc_rdma_free() local
1163 dprintk("svcrdma: svc_rdma_free(%p)\n", rdma); in __svc_rdma_free()
1166 if (atomic_read(&rdma->sc_xprt.xpt_ref.refcount) != 0) in __svc_rdma_free()
1168 atomic_read(&rdma->sc_xprt.xpt_ref.refcount)); in __svc_rdma_free()
1176 while (!list_empty(&rdma->sc_read_complete_q)) { in __svc_rdma_free()
1178 ctxt = list_entry(rdma->sc_read_complete_q.next, in __svc_rdma_free()
1186 while (!list_empty(&rdma->sc_rq_dto_q)) { in __svc_rdma_free()
1188 ctxt = list_entry(rdma->sc_rq_dto_q.next, in __svc_rdma_free()
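The two while (!list_empty(...)) hits above drain whatever is still queued on sc_read_complete_q and sc_rq_dto_q before the transport memory goes away. A small sketch of that drain-at-teardown idea, assuming a trivial singly linked queue (the kernel hands each context back through the transport's own put helper rather than freeing it directly):

#include <stdlib.h>

/* Minimal queue of pending contexts, standing in for the list_head
 * queues sc_read_complete_q and sc_rq_dto_q. */
struct ctxt {
	struct ctxt *next;
};

/* Pop and release everything still queued, as the while(!list_empty())
 * loops in __svc_rdma_free() do before the transport is freed. */
static void drain_queue(struct ctxt **head)
{
	while (*head) {
		struct ctxt *c = *head;

		*head = c->next;   /* unlink, like list_del_init() */
		free(c);           /* sketch only: the kernel releases contexts
		                    * through its own context helpers */
	}
}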
1196 if (atomic_read(&rdma->sc_ctxt_used) != 0) in __svc_rdma_free()
1198 atomic_read(&rdma->sc_ctxt_used)); in __svc_rdma_free()
1199 if (atomic_read(&rdma->sc_dma_used) != 0) in __svc_rdma_free()
1201 atomic_read(&rdma->sc_dma_used)); in __svc_rdma_free()
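The atomic_read() checks on sc_ctxt_used and sc_dma_used (like the earlier one on xpt_ref) are leak checks: if either counter has not dropped back to zero by teardown time, the code complains rather than crashing. A tiny C11-atomics sketch of the same idea (ctxt_used, dma_used and warn_if_leaked() are illustrative names):

#include <stdatomic.h>
#include <stdio.h>

/* Illustrative counters standing in for sc_ctxt_used / sc_dma_used. */
static atomic_int ctxt_used;
static atomic_int dma_used;

/* Warn (but do not abort) if anything is still outstanding at teardown,
 * mirroring the checks in __svc_rdma_free(). */
static void warn_if_leaked(void)
{
	int c = atomic_load(&ctxt_used);
	int d = atomic_load(&dma_used);

	if (c != 0)
		fprintf(stderr, "teardown: %d contexts still in use\n", c);
	if (d != 0)
		fprintf(stderr, "teardown: %d DMA mappings still active\n", d);
}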
1204 rdma_dealloc_frmr_q(rdma); in __svc_rdma_free()
1207 if (rdma->sc_qp && !IS_ERR(rdma->sc_qp)) in __svc_rdma_free()
1208 ib_destroy_qp(rdma->sc_qp); in __svc_rdma_free()
1210 if (rdma->sc_sq_cq && !IS_ERR(rdma->sc_sq_cq)) in __svc_rdma_free()
1211 ib_destroy_cq(rdma->sc_sq_cq); in __svc_rdma_free()
1213 if (rdma->sc_rq_cq && !IS_ERR(rdma->sc_rq_cq)) in __svc_rdma_free()
1214 ib_destroy_cq(rdma->sc_rq_cq); in __svc_rdma_free()
1216 if (rdma->sc_phys_mr && !IS_ERR(rdma->sc_phys_mr)) in __svc_rdma_free()
1217 ib_dereg_mr(rdma->sc_phys_mr); in __svc_rdma_free()
1219 if (rdma->sc_pd && !IS_ERR(rdma->sc_pd)) in __svc_rdma_free()
1220 ib_dealloc_pd(rdma->sc_pd); in __svc_rdma_free()
1223 rdma_destroy_id(rdma->sc_cm_id); in __svc_rdma_free()
1225 kfree(rdma); in __svc_rdma_free()
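The remaining __svc_rdma_free() hits tear the verbs objects down in dependency order: the queue pair before the completion queues it uses, the memory region before its protection domain, and the CM ID last, with each step skipped if the object was never created or is an IS_ERR() value. A generic sketch of that ordered, guarded teardown, with xprt_teardown() and destroy_res() as made-up stand-ins for the ib_destroy_*/ib_dereg_*/ib_dealloc_*/rdma_destroy_id calls:

#include <stdlib.h>

/* Illustrative resource bundle; fields stand in for sc_qp, sc_sq_cq, etc. */
struct xprt_res {
	void *qp, *sq_cq, *rq_cq, *mr, *pd, *cm_id;
};

/* Hypothetical destructor; here it just frees the stand-in allocation. */
static void destroy_res(void *r)
{
	free(r);
}

/* Tear down in reverse dependency order, skipping anything never set up. */
static void xprt_teardown(struct xprt_res *x)
{
	if (x->qp)    destroy_res(x->qp);    /* QP first: it references the CQs */
	if (x->sq_cq) destroy_res(x->sq_cq);
	if (x->rq_cq) destroy_res(x->rq_cq);
	if (x->mr)    destroy_res(x->mr);    /* MR before the PD it was created on */
	if (x->pd)    destroy_res(x->pd);
	if (x->cm_id) destroy_res(x->cm_id); /* connection-manager ID last */
	free(x);                             /* finally the transport itself, like kfree(rdma) */
}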
1230 struct svcxprt_rdma *rdma = in svc_rdma_free() local
1232 INIT_WORK(&rdma->sc_work, __svc_rdma_free); in svc_rdma_free()
1233 queue_work(svc_rdma_wq, &rdma->sc_work); in svc_rdma_free()
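svc_rdma_free() does not tear the transport down inline; it packages the work with INIT_WORK() and queues it on svc_rdma_wq so __svc_rdma_free() runs later in workqueue context. A user-space approximation of that deferred-destruction pattern using a detached thread instead of a workqueue (struct rdma_xprt and the function names are invented for the sketch):

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* Illustrative transport object; the real one is struct svcxprt_rdma. */
struct rdma_xprt {
	int id;
};

/* Heavyweight teardown, standing in for __svc_rdma_free(). */
static void rdma_xprt_teardown(struct rdma_xprt *xprt)
{
	printf("tearing down transport %d\n", xprt->id);
	free(xprt);
}

static void *teardown_worker(void *arg)
{
	rdma_xprt_teardown(arg);
	return NULL;
}

/* Defer the teardown to another context, as svc_rdma_free() defers
 * __svc_rdma_free() to svc_rdma_wq. */
static void rdma_xprt_free(struct rdma_xprt *xprt)
{
	pthread_t t;

	if (pthread_create(&t, NULL, teardown_worker, xprt) == 0)
		pthread_detach(t);        /* fire and forget, like a queued work item */
	else
		rdma_xprt_teardown(xprt); /* fall back to synchronous teardown */
}

Deferring to another context matters because the free path may be entered from a context where sleeping destructors (QP/CQ/PD teardown) are not welcome; the workqueue gives it a safe process context.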
1238 struct svcxprt_rdma *rdma = in svc_rdma_has_wspace() local
1245 if (waitqueue_active(&rdma->sc_send_wait)) in svc_rdma_has_wspace()
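The final hit is svc_rdma_has_wspace() consulting waitqueue_active(&rdma->sc_send_wait): if senders are already sleeping for send-queue space, the transport presumably reports that it has none to offer. A rough user-space analogue that tracks the waiter count explicitly (struct sendq and sendq_has_wspace() are invented for the sketch; the kernel just checks the wait queue):

#include <pthread.h>
#include <stdbool.h>

/* Illustrative send-queue state; nwaiters plays the role that
 * waitqueue_active(&rdma->sc_send_wait) plays in the kernel check. */
struct sendq {
	pthread_mutex_t lock;
	pthread_cond_t  space;     /* senders would sleep here for SQ slots */
	int             nwaiters;  /* senders currently blocked for space */
	int             sq_avail;  /* free send-queue slots */
};

/* Report write space only if nobody is already queued up waiting for it. */
static bool sendq_has_wspace(struct sendq *q)
{
	bool ok;

	pthread_mutex_lock(&q->lock);
	ok = (q->nwaiters == 0) && (q->sq_avail > 0);
	pthread_mutex_unlock(&q->lock);
	return ok;
}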