/linux-4.4.14/net/rds/

  iw_send.c
      143  send->s_send_wr.num_sge = 1;  in rds_iw_send_init_ring()
      261  send->s_send_wr.num_sge = 1;  in rds_iw_send_cq_comp_handler()
      451  send->s_send_wr.num_sge = 2;  in rds_iw_xmit_populate_wr()
      466  send->s_send_wr.num_sge = 1;  in rds_iw_xmit_populate_wr()
      682  &send->s_send_wr, send->s_send_wr.num_sge, send->s_send_wr.next);  in rds_iw_xmit()
      776  send->s_reg_wr.wr.num_sge = 0;  in rds_iw_build_send_reg()
      803  int num_sge;  in rds_iw_xmit_rdma() local
      857  num_sge = op->op_count;  in rds_iw_xmit_rdma()
      886  if (num_sge > rds_iwdev->max_sge) {  in rds_iw_xmit_rdma()
      887  send->s_rdma_wr.wr.num_sge = rds_iwdev->max_sge;  in rds_iw_xmit_rdma()
      [all …]

  ib_send.c
      629  send->s_wr.num_sge = 1;  in rds_ib_xmit()
      645  send->s_wr.num_sge = 2;  in rds_ib_xmit()
      672  &send->s_wr, send->s_wr.num_sge, send->s_wr.next);  in rds_ib_xmit()
      793  send->s_atomic_wr.wr.num_sge = 1;  in rds_ib_xmit_atomic()
      860  int num_sge;  in rds_ib_xmit_rdma() local
      897  num_sge = op->op_count;  in rds_ib_xmit_rdma()
      910  if (num_sge > max_sge) {  in rds_ib_xmit_rdma()
      911  send->s_rdma_wr.wr.num_sge = max_sge;  in rds_ib_xmit_rdma()
      912  num_sge -= max_sge;  in rds_ib_xmit_rdma()
      914  send->s_rdma_wr.wr.num_sge = num_sge;  in rds_ib_xmit_rdma()
      [all …]

  iw_recv.c
      96   recv->r_wr.num_sge = RDS_IW_RECV_SGE;  in rds_iw_recv_init_ring()
      357  wr->num_sge = 1;  in rds_iw_recv_init_ack()

  ib_recv.c
      60   recv->r_wr.num_sge = RDS_IB_RECV_SGE;  in rds_ib_recv_init_ring()
      570  wr->num_sge = 1;  in rds_ib_recv_init_ack()

  iw_rdma.c
      676  reg_wr.wr.num_sge = 0;

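The RDS entries above all follow the same basic verbs pattern: describe each local buffer with a struct ib_sge, point the work request's sg_list at that array, set num_sge to the number of valid entries, and post the request. Below is a minimal sketch of that pattern against the in-kernel verbs API; the qp, DMA address, lkey, and wr_id parameters are placeholders supplied by the caller, not RDS's actual variables.

    #include <linux/string.h>
    #include <rdma/ib_verbs.h>

    /*
     * Minimal sketch: post a single SEND carrying one registered buffer.
     * RDS builds its real WRs in rds_ib_xmit()/rds_iw_xmit(); the names
     * here are illustrative only.
     */
    static int example_post_send(struct ib_qp *qp, u64 dma_addr, u32 len,
                                 u32 lkey, u64 wr_id)
    {
            struct ib_sge sge = {
                    .addr   = dma_addr,     /* DMA-mapped local buffer */
                    .length = len,
                    .lkey   = lkey,         /* lkey of the MR covering it */
            };
            struct ib_send_wr wr, *bad_wr;

            memset(&wr, 0, sizeof(wr));
            wr.wr_id      = wr_id;          /* cookie returned in the completion */
            wr.sg_list    = &sge;
            wr.num_sge    = 1;              /* one gather element */
            wr.opcode     = IB_WR_SEND;
            wr.send_flags = IB_SEND_SIGNALED;

            /* on failure, *bad_wr points at the WR that could not be posted */
            return ib_post_send(qp, &wr, &bad_wr);
    }

The provider copies sg_list into its work queue entry during the post, so a stack-allocated ib_sge is sufficient here; only the memory the SGE points at must stay valid until the completion.
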
/linux-4.4.14/drivers/staging/rdma/hfi1/

  srq.c
      78   if ((unsigned) wr->num_sge > srq->rq.max_sge) {  in hfi1_post_srq_receive()
      98   wqe->num_sge = wr->num_sge;  in hfi1_post_srq_receive()
      99   for (i = 0; i < wr->num_sge; i++)  in hfi1_post_srq_receive()
      304  p->num_sge = wqe->num_sge;  in hfi1_modify_srq()
      305  for (i = 0; i < wqe->num_sge; i++)  in hfi1_modify_srq()

  ruc.c
      113  for (i = j = 0; i < wqe->num_sge; i++) {  in init_sge()
      123  ss->num_sge = j;  in init_sge()
      134  ss->num_sge = 0;  in init_sge()
      452  sqp->s_sge.num_sge = wqe->wr.num_sge;  in ruc_loopback()
      489  qp->r_sge.num_sge = 1;  in ruc_loopback()
      503  sqp->s_sge.num_sge = 1;  in ruc_loopback()
      506  qp->r_sge.num_sge = wqe->wr.num_sge;  in ruc_loopback()
      528  qp->r_sge.num_sge = 0;  in ruc_loopback()
      552  if (--sqp->s_sge.num_sge)  in ruc_loopback()
      907  for (i = 0; i < wqe->wr.num_sge; i++) {  in hfi1_send_complete()

  uc.c
      119  qp->s_sge.num_sge = wqe->wr.num_sge;  in hfi1_make_uc_req()
      332  qp->r_sge.num_sge = 0;  in hfi1_uc_rcv()
      495  qp->r_sge.num_sge = 1;  in hfi1_uc_rcv()
      497  qp->r_sge.num_sge = 0;  in hfi1_uc_rcv()
      576  qp->r_sge.num_sge = 0;  in hfi1_uc_rcv()

  verbs.c
      298  if (--ss->num_sge)  in hfi1_copy_sge()
      339  if (--ss->num_sge)  in hfi1_skip_sge()
      375  if (unlikely(wr->num_sge > qp->s_max_sge))  in post_one_send()
      400  (wr->num_sge == 0 ||  in post_one_send()
      433  if (wr->num_sge) {  in post_one_send()
      436  for (i = 0; i < wr->num_sge; i++) {  in post_one_send()
      449  wqe->wr.num_sge = j;  in post_one_send()
      548  if ((unsigned) wr->num_sge > qp->r_rq.max_sge) {  in post_receive()
      567  wqe->num_sge = wr->num_sge;  in post_receive()
      568  for (i = 0; i < wr->num_sge; i++)  in post_receive()
      [all …]

  ud.c
      196  ssge.num_sge = swqe->wr.num_sge;  in ud_loopback()
      211  if (--ssge.num_sge)  in ud_loopback()
      352  qp->s_sge.num_sge = wqe->wr.num_sge;  in hfi1_make_ud_req()

  rc.c
      71    ss->num_sge = wqe->wr.num_sge;  in restart_sge()
      159   qp->s_ack_rdma_sge.num_sge = 1;  in make_rc_ack()
      519   qp->s_sge.num_sge = wqe->wr.num_sge;  in hfi1_make_rc_req()
      1028  for (i = 0; i < wqe->wr.num_sge; i++) {  in hfi1_rc_send_complete()
      1085  for (i = 0; i < wqe->wr.num_sge; i++) {  in do_rc_completion()
      1584  WARN_ON(qp->s_rdma_read_sge.num_sge);  in rc_rcv_resp()
      2164  qp->r_sge.num_sge = 1;  in hfi1_rc_rcv()
      2166  qp->r_sge.num_sge = 0;  in hfi1_rc_rcv()
      2302  qp->r_sge.num_sge = 0;  in hfi1_rc_rcv()

  verbs.h
      371   u8 num_sge;  member
      409   u8 num_sge;  member
      1052  while (ss->num_sge) {  in hfi1_put_ss()
      1054  if (--ss->num_sge)  in hfi1_put_ss()

  qp.c
      400  qp->r_sge.num_sge = 0;  in reset_qp()
      417  for (i = 0; i < wqe->wr.num_sge; i++) {  in clear_mr_refs()

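In hfi1 (and in the qib and ipath drivers listed below, which share the same design), num_sge also appears as a u8 member of the driver's software gather/scatter state; the repeated "if (--ss->num_sge)" hits are the point where a copy routine finishes one segment and steps to the next. The following is a simplified, driver-neutral model of that walk, using hypothetical type and field names rather than the drivers' real structures (the real code also tracks memory-region references and map offsets, which are omitted here).

    #include <linux/kernel.h>
    #include <linux/string.h>
    #include <linux/types.h>

    /* Hypothetical stand-ins for the drivers' sge-state structures. */
    struct ex_sge {
            u8  *vaddr;             /* current position in this segment */
            u32  length;            /* bytes left in this segment */
    };

    struct ex_sge_state {
            struct ex_sge *sg_list; /* segments not yet started */
            struct ex_sge  sge;     /* segment currently being consumed */
            u8             num_sge; /* segments left, including 'sge' */
    };

    /*
     * Copy 'length' bytes into the gather list, consuming segments as they
     * fill up; mirrors the shape of hfi1_copy_sge()/qib_copy_sge().  The
     * caller must have loaded ss->sge with the first segment and must not
     * copy more bytes than the list can hold.
     */
    static void ex_copy_sge(struct ex_sge_state *ss, const u8 *data, u32 length)
    {
            while (length && ss->num_sge) {
                    u32 len = min(length, ss->sge.length);

                    memcpy(ss->sge.vaddr, data, len);
                    ss->sge.vaddr  += len;
                    ss->sge.length -= len;
                    data   += len;
                    length -= len;

                    /* current segment exhausted: step to the next one, if any */
                    if (!ss->sge.length && --ss->num_sge)
                            ss->sge = *ss->sg_list++;
            }
    }
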
/linux-4.4.14/drivers/infiniband/hw/qib/

  qib_srq.c
      61   if ((unsigned) wr->num_sge > srq->rq.max_sge) {  in qib_post_srq_receive()
      81   wqe->num_sge = wr->num_sge;  in qib_post_srq_receive()
      82   for (i = 0; i < wr->num_sge; i++)  in qib_post_srq_receive()
      287  p->num_sge = wqe->num_sge;  in qib_modify_srq()
      288  for (i = 0; i < wqe->num_sge; i++)  in qib_modify_srq()

  qib_ruc.c
      95   for (i = j = 0; i < wqe->num_sge; i++) {  in qib_init_sge()
      105  ss->num_sge = j;  in qib_init_sge()
      116  ss->num_sge = 0;  in qib_init_sge()
      430  sqp->s_sge.num_sge = wqe->wr.num_sge;  in qib_ruc_loopback()
      467  qp->r_sge.num_sge = 1;  in qib_ruc_loopback()
      481  sqp->s_sge.num_sge = 1;  in qib_ruc_loopback()
      484  qp->r_sge.num_sge = wqe->wr.num_sge;  in qib_ruc_loopback()
      506  qp->r_sge.num_sge = 0;  in qib_ruc_loopback()
      530  if (--sqp->s_sge.num_sge)  in qib_ruc_loopback()
      780  for (i = 0; i < wqe->wr.num_sge; i++) {  in qib_send_complete()

  qib_uc.c
      101  qp->s_sge.num_sge = wqe->wr.num_sge;  in qib_make_uc_req()
      282  qp->r_sge.num_sge = 0;  in qib_uc_rcv()
      445  qp->r_sge.num_sge = 1;  in qib_uc_rcv()
      447  qp->r_sge.num_sge = 0;  in qib_uc_rcv()
      527  qp->r_sge.num_sge = 0;  in qib_uc_rcv()

  qib_ud.c
      174  ssge.num_sge = swqe->wr.num_sge;  in qib_ud_loopback()
      189  if (--ssge.num_sge)  in qib_ud_loopback()
      322  qp->s_sge.num_sge = wqe->wr.num_sge;  in qib_make_ud_req()

  qib_verbs.c
      188  if (--ss->num_sge)  in qib_copy_sge()
      229  if (--ss->num_sge)  in qib_skip_sge()
      255  u8 num_sge = ss->num_sge;  in qib_count_sge() local
      276  if (--num_sge)  in qib_count_sge()
      314  if (--ss->num_sge)  in qib_copy_from_sge()
      357  if (wr->num_sge > qp->s_max_sge)  in qib_post_one_send()
      382  (wr->num_sge == 0 ||  in qib_post_one_send()
      419  if (wr->num_sge) {  in qib_post_one_send()
      422  for (i = 0; i < wr->num_sge; i++) {  in qib_post_one_send()
      435  wqe->wr.num_sge = j;  in qib_post_one_send()
      [all …]

  qib_rc.c
      51    ss->num_sge = wqe->wr.num_sge;  in restart_sge()
      138   qp->s_ack_rdma_sge.num_sge = 1;  in qib_make_rc_ack()
      491   qp->s_sge.num_sge = wqe->wr.num_sge;  in qib_make_rc_req()
      1014  for (i = 0; i < wqe->wr.num_sge; i++) {  in qib_rc_send_complete()
      1070  for (i = 0; i < wqe->wr.num_sge; i++) {  in do_rc_completion()
      1582  WARN_ON(qp->s_rdma_read_sge.num_sge);  in qib_rc_rcv_resp()
      2076  qp->r_sge.num_sge = 1;  in qib_rc_rcv()
      2078  qp->r_sge.num_sge = 0;  in qib_rc_rcv()
      2212  qp->r_sge.num_sge = 0;  in qib_rc_rcv()

  qib_verbs.h
      364   u8 num_sge;  member
      402   u8 num_sge;  member
      1080  while (ss->num_sge) {  in qib_put_ss()
      1082  if (--ss->num_sge)  in qib_put_ss()

  qib_qp.c
      415  qp->r_sge.num_sge = 0;  in qib_reset_qp()
      432  for (i = 0; i < wqe->wr.num_sge; i++) {  in clear_mr_refs()

  qib_sdma.c
      621  if (--ss->num_sge)  in qib_sdma_verbs_send()

/linux-4.4.14/drivers/staging/rdma/ipath/

  ipath_srq.c
      61   if ((unsigned) wr->num_sge > srq->rq.max_sge) {  in ipath_post_srq_receive()
      81   wqe->num_sge = wr->num_sge;  in ipath_post_srq_receive()
      82   for (i = 0; i < wr->num_sge; i++)  in ipath_post_srq_receive()
      291  p->num_sge = wqe->num_sge;  in ipath_modify_srq()
      292  for (i = 0; i < wqe->num_sge; i++)  in ipath_modify_srq()

  ipath_ruc.c
      129  for (i = j = 0; i < wqe->num_sge; i++) {  in ipath_init_sge()
      139  ss->num_sge = j;  in ipath_init_sge()
      329  sqp->s_sge.num_sge = wqe->wr.num_sge;  in ipath_ruc_loopback()
      371  qp->r_sge.num_sge = wqe->wr.num_sge;  in ipath_ruc_loopback()
      412  if (--sqp->s_sge.num_sge)  in ipath_ruc_loopback()

  ipath_verbs.c
      188  if (--ss->num_sge)  in ipath_copy_sge()
      227  if (--ss->num_sge)  in ipath_skip_sge()
      253  u8 num_sge = ss->num_sge;  in ipath_count_sge() local
      274  if (--num_sge)  in ipath_count_sge()
      313  if (--ss->num_sge)  in ipath_copy_from_sge()
      360  if (wr->num_sge > qp->s_max_sge)  in ipath_post_one_send()
      382  (wr->num_sge == 0 ||  in ipath_post_one_send()
      413  if (wr->num_sge) {  in ipath_post_one_send()
      416  for (i = 0, j = 0; i < wr->num_sge; i++) {  in ipath_post_one_send()
      429  wqe->wr.num_sge = j;  in ipath_post_one_send()
      [all …]

  ipath_keys.c
      227  ss->num_sge = 1;  in ipath_rkey_ok()
      264  ss->num_sge = 1;  in ipath_rkey_ok()

  ipath_ud.c
      197  if (--swqe->wr.num_sge)  in ipath_ud_loopback()
      324  qp->s_sge.num_sge = wqe->ud_wr.wr.num_sge;  in ipath_make_ud_req()

  ipath_rc.c
      50    ss->num_sge = wqe->wr.num_sge;  in restart_sge()
      463   qp->s_sge.num_sge = wqe->wr.num_sge;  in ipath_make_rc_req()
      1453  e->rdma_sge.num_sge = 0;  in ipath_rc_rcv_error()
      1840  e->rdma_sge.num_sge = 0;  in ipath_rc_rcv()

  ipath_uc.c
      100  qp->s_sge.num_sge = wqe->wr.num_sge;  in ipath_make_uc_req()

  ipath_verbs.h
      301  u8 num_sge;  member
      337  u8 num_sge;  member

  ipath_sdma.c
      763  if (--ss->num_sge)  in ipath_sdma_verbs_send()

/linux-4.4.14/drivers/staging/rdma/ehca/

  ehca_reqs.c
      76   if (unlikely((recv_wr->num_sge < 0) ||  in ehca_write_rwqe()
      77   (recv_wr->num_sge > ipz_rqueue->act_nr_of_sg))) {  in ehca_write_rwqe()
      80   recv_wr->num_sge, ipz_rqueue->act_nr_of_sg);  in ehca_write_rwqe()
      88   wqe_p->nr_of_data_seg = recv_wr->num_sge;  in ehca_write_rwqe()
      90   for (cnt_ds = 0; cnt_ds < recv_wr->num_sge; cnt_ds++) {  in ehca_write_rwqe()
      122  ud_wr->wr.num_sge, ud_wr->wr.send_flags,  in trace_ud_wr()
      137  for (j = 0; j < ud_wr->wr.num_sge; j++) {  in trace_ud_wr()
      166  if (unlikely((send_wr->num_sge < 0) ||  in ehca_write_swqe()
      167  (send_wr->num_sge > qp->ipz_squeue.act_nr_of_sg))) {  in ehca_write_swqe()
      170  send_wr->num_sge, qp->ipz_squeue.act_nr_of_sg);  in ehca_write_swqe()
      [all …]

/linux-4.4.14/drivers/staging/rdma/amso1100/

  c2_qp.c
      836  sizeof(struct c2_data_addr) * ib_wr->num_sge;  in c2_post_send()
      837  if (ib_wr->num_sge > qp->send_sgl_depth) {  in c2_post_send()
      846  ib_wr->num_sge,  in c2_post_send()
      854  (sizeof(struct c2_data_addr) * ib_wr->num_sge);  in c2_post_send()
      855  if (ib_wr->num_sge > qp->rdma_write_sgl_depth) {  in c2_post_send()
      869  ib_wr->num_sge,  in c2_post_send()
      879  if (ib_wr->num_sge > 1) {  in c2_post_send()
      965  if (ib_wr->num_sge > qp->recv_sgl_depth) {  in c2_post_receive()
      978  BUG_ON(ib_wr->num_sge >= 256);  in c2_post_receive()
      981  ib_wr->num_sge, &tot_len, &actual_sge_count);  in c2_post_receive()

/linux-4.4.14/drivers/infiniband/hw/cxgb3/

  iwch_qp.c
      66   if (wr->num_sge > T3_MAX_SGE)  in build_rdma_send()
      72   for (i = 0; i < wr->num_sge; i++) {  in build_rdma_send()
      81   wqe->send.num_sgle = cpu_to_be32(wr->num_sge);  in build_rdma_send()
      82   *flit_cnt = 4 + ((wr->num_sge) << 1);  in build_rdma_send()
      92   if (wr->num_sge > T3_MAX_SGE)  in build_rdma_write()
      109  for (i = 0; i < wr->num_sge; i++) {  in build_rdma_write()
      121  wqe->write.num_sgle = cpu_to_be32(wr->num_sge);  in build_rdma_write()
      122  *flit_cnt = 5 + ((wr->num_sge) << 1);  in build_rdma_write()
      131  if (wr->num_sge > 1)  in build_rdma_read()
      255  err = iwch_sgl2pbl_map(qhp->rhp, wr->sg_list, wr->num_sge, pbl_addr,  in build_rdma_recv()
      [all …]

/linux-4.4.14/drivers/infiniband/hw/cxgb4/

  qp.c
      397  for (i = 0; i < wr->num_sge; i++) {  in build_immd()
      429  int num_sge, u32 *plenp)  in build_isgl() argument
      436  for (i = 0; i < num_sge; i++) {  in build_isgl()
      451  isglp->nsge = cpu_to_be16(num_sge);  in build_isgl()
      465  if (wr->num_sge > T4_MAX_SEND_SGE)  in build_rdma_send()
      494  if (wr->num_sge) {  in build_rdma_send()
      506  wr->sg_list, wr->num_sge, &plen);  in build_rdma_send()
      510  wr->num_sge * sizeof(struct fw_ri_sge);  in build_rdma_send()
      532  if (wr->num_sge > T4_MAX_SEND_SGE)  in build_rdma_write()
      537  if (wr->num_sge) {  in build_rdma_write()
      [all …]

/linux-4.4.14/drivers/infiniband/ulp/iser/

  iser_initiator.c
      154  iser_task->desc.num_sge = 2;  in iser_prepare_write_cmd()
      171  tx_desc->num_sge = 1;  in iser_create_send_desc()
      480  tx_desc->num_sge = 2;  in iser_send_data_out()
      542  mdesc->num_sge = 2;  in iser_send_control()

  iser_memory.c
      425  inv_wr->num_sge = 0;  in iser_inv_rkey()
      457  wr->wr.num_sge = 1;  in iser_reg_sig_mr()
      506  wr->wr.num_sge = 0;  in iser_fast_reg_mr()

  iser_verbs.c
      1059  rx_wr.num_sge = 1;  in iser_post_recvl()
      1083  rx_wr->num_sge = 1;  in iser_post_recvm()
      1120  wr->num_sge = tx_desc->num_sge;  in iser_post_send()

  iscsi_iser.h
      295  int num_sge;  member

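iser stages the SGE count in its own tx descriptor (the "int num_sge; member" hit in iscsi_iser.h) and copies it into the verbs work request only at post time (iser_post_send()), while its receive side simply posts one-SGE ib_recv_wr entries (iser_post_recvl()/iser_post_recvm()); isert, below, does the same. A minimal sketch of the receive half of that pattern follows; the qp, DMA address, lkey, and wr_id parameters are illustrative placeholders, not iser's real variables.

    #include <linux/string.h>
    #include <rdma/ib_verbs.h>

    /*
     * Minimal sketch: replenish the receive queue with a one-SGE buffer.
     * The ULP must have DMA-mapped the buffer and must keep it valid
     * until the corresponding receive completion arrives.
     */
    static int example_post_recv(struct ib_qp *qp, u64 dma_addr, u32 len,
                                 u32 lkey, u64 wr_id)
    {
            struct ib_sge sge = {
                    .addr   = dma_addr,
                    .length = len,
                    .lkey   = lkey,
            };
            struct ib_recv_wr wr, *bad_wr;

            memset(&wr, 0, sizeof(wr));
            wr.wr_id   = wr_id;
            wr.sg_list = &sge;
            wr.num_sge = 1;         /* one scatter element per receive */

            return ib_post_recv(qp, &wr, &bad_wr);
    }
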
/linux-4.4.14/drivers/infiniband/ulp/isert/

  ib_isert.c
      989   rx_wr->num_sge = 1;  in isert_post_recvm()
      1011  rx_wr.num_sge = 1;  in isert_post_recv()
      1034  send_wr.num_sge = tx_desc->num_sge;  in isert_post_send()
      1059  tx_desc->num_sge = 1;  in isert_create_send_desc()
      1105  send_wr->num_sge = isert_cmd->tx_desc.num_sge;  in isert_init_send_wr()
      1127  rx_wr.num_sge = 1;  in isert_rdma_post_recvl()
      1167  tx_desc->num_sge = 2;  in isert_put_login_tx()
      2188  isert_cmd->tx_desc.num_sge = 2;  in isert_put_response()
      2316  isert_cmd->tx_desc.num_sge = 2;  in isert_put_reject()
      2357  isert_cmd->tx_desc.num_sge = 2;  in isert_put_text_rsp()
      [all …]

  ib_isert.h
      74  int num_sge;  member

/linux-4.4.14/drivers/infiniband/hw/mthca/

  mthca_srq.c
      514  if (unlikely(wr->num_sge > srq->max_gs)) {  in mthca_tavor_post_srq_recv()
      521  for (i = 0; i < wr->num_sge; ++i) {  in mthca_tavor_post_srq_recv()
      607  if (unlikely(wr->num_sge > srq->max_gs)) {  in mthca_arbel_post_srq_recv()
      613  for (i = 0; i < wr->num_sge; ++i) {  in mthca_arbel_post_srq_recv()

  mthca_qp.c
      1729  if (wr->num_sge > qp->sq.max_gs) {  in mthca_tavor_post_send()
      1736  for (i = 0; i < wr->num_sge; ++i) {  in mthca_tavor_post_send()
      1855  if (unlikely(wr->num_sge > qp->rq.max_gs)) {  in mthca_tavor_post_receive()
      1861  for (i = 0; i < wr->num_sge; ++i) {  in mthca_tavor_post_receive()
      2070  if (wr->num_sge > qp->sq.max_gs) {  in mthca_arbel_post_send()
      2077  for (i = 0; i < wr->num_sge; ++i) {  in mthca_arbel_post_send()
      2191  if (unlikely(wr->num_sge > qp->rq.max_gs)) {  in mthca_arbel_post_receive()
      2197  for (i = 0; i < wr->num_sge; ++i) {  in mthca_arbel_post_receive()

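On the provider side (mthca here, and most of the hardware drivers in this listing), posting first rejects any request whose num_sge exceeds the queue's max_gs and then copies each ib_sge into a device-specific data segment in the work queue entry. A stripped-down sketch of that check-and-copy step is shown below; the segment layout and byte-order handling are illustrative, not mthca's actual WQE format.

    #include <linux/errno.h>
    #include <linux/types.h>
    #include <rdma/ib_verbs.h>

    /* Hypothetical hardware data-segment layout (big-endian on the wire). */
    struct ex_hw_data_seg {
            __be32 byte_count;
            __be32 lkey;
            __be64 addr;
    };

    static int ex_write_data_segs(struct ex_hw_data_seg *seg,
                                  const struct ib_recv_wr *wr, int max_gs)
    {
            int i;

            /* reject WRs with more scatter entries than the queue supports */
            if (unlikely(wr->num_sge > max_gs))
                    return -EINVAL;

            for (i = 0; i < wr->num_sge; ++i) {
                    seg[i].byte_count = cpu_to_be32(wr->sg_list[i].length);
                    seg[i].lkey       = cpu_to_be32(wr->sg_list[i].lkey);
                    seg[i].addr       = cpu_to_be64(wr->sg_list[i].addr);
            }
            return 0;
    }
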
/linux-4.4.14/drivers/infiniband/hw/ocrdma/

  ocrdma_verbs.c
      2014  struct ocrdma_sge *sge, int num_sge,  in ocrdma_build_sges() argument
      2019  for (i = 0; i < num_sge; i++) {  in ocrdma_build_sges()
      2026  if (num_sge == 0)  in ocrdma_build_sges()
      2030  static inline uint32_t ocrdma_sglist_len(struct ib_sge *sg_list, int num_sge)  in ocrdma_sglist_len() argument
      2034  for (i = 0; i < num_sge; i++)  in ocrdma_sglist_len()
      2049  hdr->total_len = ocrdma_sglist_len(wr->sg_list, wr->num_sge);  in ocrdma_build_inline_sges()
      2057  for (i = 0; i < wr->num_sge; i++) {  in ocrdma_build_inline_sges()
      2069  ocrdma_build_sges(hdr, sge, wr->num_sge, wr->sg_list);  in ocrdma_build_inline_sges()
      2070  if (wr->num_sge)  in ocrdma_build_inline_sges()
      2071  wqe_size += (wr->num_sge * sizeof(struct ocrdma_sge));  in ocrdma_build_inline_sges()
      [all …]

/linux-4.4.14/drivers/infiniband/core/

  uverbs_cmd.c
      2432  static void *alloc_wr(size_t wr_size, __u32 num_sge)  in alloc_wr() argument
      2435  num_sge * sizeof (struct ib_sge), GFP_KERNEL);  in alloc_wr()
      2482  if (user_wr->num_sge + sg_ind > cmd.sge_count) {  in ib_uverbs_post_send()
      2497  ud = alloc_wr(next_size, user_wr->num_sge);  in ib_uverbs_post_send()
      2519  rdma = alloc_wr(next_size, user_wr->num_sge);  in ib_uverbs_post_send()
      2534  atomic = alloc_wr(next_size, user_wr->num_sge);  in ib_uverbs_post_send()
      2550  next = alloc_wr(next_size, user_wr->num_sge);  in ib_uverbs_post_send()
      2576  next->num_sge = user_wr->num_sge;  in ib_uverbs_post_send()
      2580  if (next->num_sge) {  in ib_uverbs_post_send()
      2587  next->num_sge * sizeof (struct ib_sge))) {  in ib_uverbs_post_send()
      [all …]

  mad.c
      1044  mad_send_wr->send_wr.wr.num_sge = 2;  in ib_create_send_mad()
      2888  recv_wr.num_sge = 1;  in ib_mad_post_receive_mads()

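ib_uverbs_post_send() allocates each kernel-side work request with room for its SGE array appended after the WR structure (the alloc_wr() hits above), then points sg_list into that tail and copies the user's SGEs in. The snippet below is a rough illustration of that over-allocate-and-append idea under those assumptions, not the exact uverbs code; the function name is made up for the example.

    #include <linux/slab.h>
    #include <rdma/ib_verbs.h>

    /*
     * Rough illustration only: allocate a send WR with 'num_sge' ib_sge
     * entries appended, and point sg_list at the appended array.  The
     * caller fills in the individual SGEs afterwards.
     */
    static struct ib_send_wr *ex_alloc_send_wr(u32 num_sge)
    {
            struct ib_send_wr *wr;

            wr = kzalloc(sizeof(*wr) + num_sge * sizeof(struct ib_sge),
                         GFP_KERNEL);
            if (!wr)
                    return NULL;

            wr->sg_list = (struct ib_sge *)(wr + 1); /* array right after the WR */
            wr->num_sge = num_sge;
            return wr;
    }
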
/linux-4.4.14/drivers/infiniband/hw/mlx5/

  qp.c
      1979  if (!wr->num_sge)  in set_reg_umr_segment()
      2091  for (i = 0; i < wr->num_sge; i++) {  in set_data_inl_seg()
      2366  if (unlikely(wr->wr.num_sge != 1) ||  in set_sig_umr_wr()
      2603  int num_sge;  in mlx5_ib_post_send() local
      2621  num_sge = wr->num_sge;  in mlx5_ib_post_send()
      2622  if (unlikely(num_sge > qp->sq.max_gs)) {  in mlx5_ib_post_send()
      2667  num_sge = 0;  in mlx5_ib_post_send()
      2679  num_sge = 0;  in mlx5_ib_post_send()
      2746  num_sge = 0;  in mlx5_ib_post_send()
      2803  if (wr->send_flags & IB_SEND_INLINE && num_sge) {  in mlx5_ib_post_send()
      [all …]

  srq.c
      439  if (unlikely(wr->num_sge > srq->msrq.max_gs)) {  in mlx5_ib_post_srq_recv()
      457  for (i = 0; i < wr->num_sge; i++) {  in mlx5_ib_post_srq_recv()

  mr.c
      712  wr->num_sge = 1;  in prep_umr_reg_wqe()
      714  wr->num_sge = 0;  in prep_umr_reg_wqe()
      943  wr.wr.num_sge = 1;  in mlx5_ib_update_mtt()

/linux-4.4.14/drivers/scsi/lpfc/

  lpfc_scsi.c
      2260  int i = 0, num_sge = 0, status;  in lpfc_bg_setup_sgl() local
      2324  num_sge++;  in lpfc_bg_setup_sgl()
      2344  num_sge++;  in lpfc_bg_setup_sgl()
      2348  return num_sge;  in lpfc_bg_setup_sgl()
      2412  int num_sge = 0;  in lpfc_bg_setup_sgl_prot() local
      2445  if (num_sge >= (phba->cfg_total_seg_cnt - 2))  in lpfc_bg_setup_sgl_prot()
      2446  return num_sge + 3;  in lpfc_bg_setup_sgl_prot()
      2493  num_sge++;  in lpfc_bg_setup_sgl_prot()
      2524  num_sge++;  in lpfc_bg_setup_sgl_prot()
      2531  if (num_sge >= phba->cfg_total_seg_cnt)  in lpfc_bg_setup_sgl_prot()
      [all …]

/linux-4.4.14/drivers/infiniband/hw/mlx4/

  srq.c
      335  if (unlikely(wr->num_sge > srq->msrq.max_gs)) {  in mlx4_ib_post_srq_recv()
      353  for (i = 0; i < wr->num_sge; ++i) {  in mlx4_ib_post_srq_recv()

  qp.c
      2163  for (i = 0; i < wr->wr.num_sge; ++i)  in build_sriov_qp0_header()
      2286  for (i = 0; i < wr->wr.num_sge; ++i)  in build_mlx_header()
      2716  wr->wr.num_sge > qp->sq.max_gs - (halign >> 4)))  in build_lso_seg()
      2789  if (unlikely(wr->num_sge > qp->sq.max_gs)) {  in mlx4_ib_post_send()
      2977  dseg += wr->num_sge - 1;  in mlx4_ib_post_send()
      2978  size += wr->num_sge * (sizeof (struct mlx4_wqe_data_seg) / 16);  in mlx4_ib_post_send()
      2989  for (i = wr->num_sge - 1; i >= 0; --i, --dseg)  in mlx4_ib_post_send()
      3099  if (unlikely(wr->num_sge > qp->rq.max_gs)) {  in mlx4_ib_post_recv()
      3122  for (i = 0; i < wr->num_sge; ++i)  in mlx4_ib_post_recv()

  mad.c
      593   wr.wr.num_sge = 1;  in mlx4_ib_send_to_slave()
      1152  recv_wr.num_sge = 1;  in mlx4_ib_post_pv_qp_buf()
      1270  wr.wr.num_sge = 1;  in mlx4_ib_send_to_wire()

/linux-4.4.14/net/sunrpc/xprtrdma/

  svc_rdma_recvfrom.c
      190  read_wr.wr.num_sge = pages_needed;  in rdma_read_chunk_lcl()
      305  reg_wr.wr.num_sge = 0;  in rdma_read_chunk_frmr()
      317  read_wr.wr.num_sge = 1;  in rdma_read_chunk_frmr()

  verbs.c
      1273  send_wr.num_sge = req->rl_niovs;  in rpcrdma_ep_post()
      1276  for (i = 0; i < send_wr.num_sge; i++)  in rpcrdma_ep_post()
      1280  __func__, send_wr.num_sge);  in rpcrdma_ep_post()
      1311  recv_wr.num_sge = 1;  in rpcrdma_ep_post_recv()

  svc_rdma_sendto.c
      287  write_wr.wr.num_sge = sge_no;  in send_write()
      549  send_wr.num_sge = sge_no;  in send_reply()

  frwr_ops.c
      387  reg_wr.wr.num_sge = 0;  in frwr_op_map()

  svc_rdma_transport.c
      580   recv_wr.num_sge = ctxt->count;  in svc_rdma_post_recv()
      1352  err_wr.num_sge = 1;  in svc_rdma_send_error()

/linux-4.4.14/include/uapi/rdma/

  ib_user_verbs.h
      684  __u32 num_sge;  member
      728  __u32 num_sge;  member

/linux-4.4.14/net/9p/

  trans_rdma.c
      419  wr.num_sge = 1;  in post_recv()
      512  wr.num_sge = 1;  in rdma_request()

/linux-4.4.14/drivers/infiniband/ulp/ipoib/

  ipoib_verbs.c
      231  priv->rx_wr.num_sge = 1;  in ipoib_transport_dev_init()

  ipoib.h
      531  priv->tx_wr.wr.num_sge = nr_frags + off;  in ipoib_build_sge()

  ipoib_cm.c
      343  wr->num_sge = priv->cm.num_frags;  in ipoib_cm_init_rx_wr()

/linux-4.4.14/drivers/infiniband/hw/nes/

  nes_verbs.c
      3231  for (sge_index = 0; sge_index < ib_wr->num_sge; sge_index++) {  in fill_wqe_sg_send()
      3316  if (ib_wr->num_sge > nesdev->nesadapter->max_sge) {  in nes_post_send()
      3339  if (ib_wr->num_sge > nesdev->nesadapter->max_sge) {  in nes_post_send()
      3341  ib_wr->num_sge, nesdev->nesadapter->max_sge);  in nes_post_send()
      3372  if (ib_wr->num_sge > 1) {  in nes_post_send()
      3374  ib_wr->num_sge);  in nes_post_send()
      3433  " ib_wr=%u, max=1\n", ib_wr->num_sge);  in nes_post_send()
      3548  if (ib_wr->num_sge > nesdev->nesadapter->max_sge) {  in nes_post_recv()
      3558  nes_debug(NES_DBG_IW_RX, "ibwr sge count = %u.\n", ib_wr->num_sge);  in nes_post_recv()
      3568  for (sge_index=0; sge_index < ib_wr->num_sge; sge_index++) {  in nes_post_recv()

/linux-4.4.14/drivers/infiniband/ulp/srpt/

  ib_srpt.c
      789   wr.num_sge = 1;  in srpt_post_recv()
      825   wr.num_sge = 1;  in srpt_post_send()
      2810  wr.wr.num_sge = riu->sge_cnt;  in srpt_perform_rdmas()
      2826  wr.wr.num_sge = 0;  in srpt_perform_rdmas()

/linux-4.4.14/drivers/scsi/be2iscsi/

  be_main.h
      296  unsigned int num_sge;  member

/linux-4.4.14/drivers/infiniband/ulp/srp/

  ib_srp.c
      1051  .num_sge = 0,  in srp_inv_rkey()
      1355  wr.wr.num_sge = 0;  in srp_map_finish_fr()
      1755  wr.num_sge = 1;  in srp_post_send()
      1775  wr.num_sge = 1;  in srp_post_recv()

/linux-4.4.14/drivers/message/fusion/

  mptbase.c
      4299  int scale, num_sge, numSGE;  in initChainBuffers() local
      4337  num_sge = scale + (ioc->req_sz - 60) / ioc->SGE_size;  in initChainBuffers()
      4339  num_sge = 1 + scale + (ioc->req_sz - 64) / ioc->SGE_size;  in initChainBuffers()
      4349  ioc->name, num_sge, numSGE));  in initChainBuffers()
      4360  while (numSGE - num_sge > 0) {  in initChainBuffers()
      4362  num_sge += (scale - 1);  in initChainBuffers()
      4367  ioc->name, numSGE, num_sge, num_chain));  in initChainBuffers()

/linux-4.4.14/include/rdma/

  ib_verbs.h
      1086  int num_sge;  member
      1180  int num_sge;  member

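The two hits in ib_verbs.h are the canonical definitions that every entry above ultimately refers to: num_sge is the count of valid entries in the sg_list array of a send or receive work request, and struct ib_sge describes one local buffer fragment. An abridged paraphrase of the relevant declarations is given below for reference; fields not related to num_sge (such as the send WR's immediate-data/invalidate union) are elided.

    /* include/rdma/ib_verbs.h (linux-4.4), abridged */
    struct ib_sge {
            u64     addr;           /* DMA address of the fragment */
            u32     length;         /* length in bytes */
            u32     lkey;           /* local key of the covering MR */
    };

    struct ib_send_wr {
            struct ib_send_wr      *next;
            u64                     wr_id;
            struct ib_sge          *sg_list;
            int                     num_sge;   /* valid entries in sg_list */
            enum ib_wr_opcode       opcode;
            int                     send_flags;
            /* ... remaining members elided ... */
    };

    struct ib_recv_wr {
            struct ib_recv_wr      *next;
            u64                     wr_id;
            struct ib_sge          *sg_list;
            int                     num_sge;   /* valid entries in sg_list */
    };
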
/linux-4.4.14/drivers/staging/lustre/lnet/klnds/o2iblnd/

  o2iblnd_cb.c
      170   rx->rx_wrq.num_sge = 1;  in kiblnd_post_rx()
      1037  wrq->wr.num_sge = 1;  in kiblnd_init_tx_msg()
      1102  wrq->wr.num_sge = 1;  in kiblnd_init_rdma()