Cross-reference listing: source lines matching references to `wqe` (work queue entry), taken from the Linux kernel's Chelsio cxgb4 iWARP driver QP code (functions such as build_rdma_send(), c4iw_post_send(), rdma_init()). Each entry shows the original file's line number, the matching line fragment, and the enclosing function. This is tool-generated search output, not compilable source.

458 static int build_rdma_send(struct t4_sq *sq, union t4_wr *wqe,  in build_rdma_send()  argument
470 wqe->send.sendop_pkd = cpu_to_be32( in build_rdma_send()
473 wqe->send.sendop_pkd = cpu_to_be32( in build_rdma_send()
475 wqe->send.stag_inv = 0; in build_rdma_send()
479 wqe->send.sendop_pkd = cpu_to_be32( in build_rdma_send()
482 wqe->send.sendop_pkd = cpu_to_be32( in build_rdma_send()
484 wqe->send.stag_inv = cpu_to_be32(wr->ex.invalidate_rkey); in build_rdma_send()
490 wqe->send.r3 = 0; in build_rdma_send()
491 wqe->send.r4 = 0; in build_rdma_send()
496 ret = build_immd(sq, wqe->send.u.immd_src, wr, in build_rdma_send()
500 size = sizeof wqe->send + sizeof(struct fw_ri_immd) + in build_rdma_send()
505 wqe->send.u.isgl_src, in build_rdma_send()
509 size = sizeof wqe->send + sizeof(struct fw_ri_isgl) + in build_rdma_send()
513 wqe->send.u.immd_src[0].op = FW_RI_DATA_IMMD; in build_rdma_send()
514 wqe->send.u.immd_src[0].r1 = 0; in build_rdma_send()
515 wqe->send.u.immd_src[0].r2 = 0; in build_rdma_send()
516 wqe->send.u.immd_src[0].immdlen = 0; in build_rdma_send()
517 size = sizeof wqe->send + sizeof(struct fw_ri_immd); in build_rdma_send()
521 wqe->send.plen = cpu_to_be32(plen); in build_rdma_send()
525 static int build_rdma_write(struct t4_sq *sq, union t4_wr *wqe, in build_rdma_write() argument
534 wqe->write.r2 = 0; in build_rdma_write()
535 wqe->write.stag_sink = cpu_to_be32(rdma_wr(wr)->rkey); in build_rdma_write()
536 wqe->write.to_sink = cpu_to_be64(rdma_wr(wr)->remote_addr); in build_rdma_write()
539 ret = build_immd(sq, wqe->write.u.immd_src, wr, in build_rdma_write()
543 size = sizeof wqe->write + sizeof(struct fw_ri_immd) + in build_rdma_write()
548 wqe->write.u.isgl_src, in build_rdma_write()
552 size = sizeof wqe->write + sizeof(struct fw_ri_isgl) + in build_rdma_write()
556 wqe->write.u.immd_src[0].op = FW_RI_DATA_IMMD; in build_rdma_write()
557 wqe->write.u.immd_src[0].r1 = 0; in build_rdma_write()
558 wqe->write.u.immd_src[0].r2 = 0; in build_rdma_write()
559 wqe->write.u.immd_src[0].immdlen = 0; in build_rdma_write()
560 size = sizeof wqe->write + sizeof(struct fw_ri_immd); in build_rdma_write()
564 wqe->write.plen = cpu_to_be32(plen); in build_rdma_write()
568 static int build_rdma_read(union t4_wr *wqe, struct ib_send_wr *wr, u8 *len16) in build_rdma_read() argument
573 wqe->read.stag_src = cpu_to_be32(rdma_wr(wr)->rkey); in build_rdma_read()
574 wqe->read.to_src_hi = cpu_to_be32((u32)(rdma_wr(wr)->remote_addr in build_rdma_read()
576 wqe->read.to_src_lo = cpu_to_be32((u32)rdma_wr(wr)->remote_addr); in build_rdma_read()
577 wqe->read.stag_sink = cpu_to_be32(wr->sg_list[0].lkey); in build_rdma_read()
578 wqe->read.plen = cpu_to_be32(wr->sg_list[0].length); in build_rdma_read()
579 wqe->read.to_sink_hi = cpu_to_be32((u32)(wr->sg_list[0].addr in build_rdma_read()
581 wqe->read.to_sink_lo = cpu_to_be32((u32)(wr->sg_list[0].addr)); in build_rdma_read()
583 wqe->read.stag_src = cpu_to_be32(2); in build_rdma_read()
584 wqe->read.to_src_hi = 0; in build_rdma_read()
585 wqe->read.to_src_lo = 0; in build_rdma_read()
586 wqe->read.stag_sink = cpu_to_be32(2); in build_rdma_read()
587 wqe->read.plen = 0; in build_rdma_read()
588 wqe->read.to_sink_hi = 0; in build_rdma_read()
589 wqe->read.to_sink_lo = 0; in build_rdma_read()
591 wqe->read.r2 = 0; in build_rdma_read()
592 wqe->read.r5 = 0; in build_rdma_read()
593 *len16 = DIV_ROUND_UP(sizeof wqe->read, 16); in build_rdma_read()
597 static int build_rdma_recv(struct c4iw_qp *qhp, union t4_recv_wr *wqe, in build_rdma_recv() argument
604 &wqe->recv.isgl, wr->sg_list, wr->num_sge, NULL); in build_rdma_recv()
607 *len16 = DIV_ROUND_UP(sizeof wqe->recv + in build_rdma_recv()
612 static int build_memreg(struct t4_sq *sq, union t4_wr *wqe, in build_memreg() argument
625 wqe->fr.qpbinde_to_dcacpu = 0; in build_memreg()
626 wqe->fr.pgsz_shift = ilog2(wr->mr->page_size) - 12; in build_memreg()
627 wqe->fr.addr_type = FW_RI_VA_BASED_TO; in build_memreg()
628 wqe->fr.mem_perms = c4iw_ib_to_tpt_access(wr->access); in build_memreg()
629 wqe->fr.len_hi = 0; in build_memreg()
630 wqe->fr.len_lo = cpu_to_be32(mhp->ibmr.length); in build_memreg()
631 wqe->fr.stag = cpu_to_be32(wr->key); in build_memreg()
632 wqe->fr.va_hi = cpu_to_be32(mhp->ibmr.iova >> 32); in build_memreg()
633 wqe->fr.va_lo_fbo = cpu_to_be32(mhp->ibmr.iova & in build_memreg()
642 sglp = (struct fw_ri_dsgl *)(&wqe->fr + 1); in build_memreg()
649 *len16 = DIV_ROUND_UP(sizeof(wqe->fr) + sizeof(*sglp), 16); in build_memreg()
651 imdp = (struct fw_ri_immd *)(&wqe->fr + 1); in build_memreg()
671 *len16 = DIV_ROUND_UP(sizeof(wqe->fr) + sizeof(*imdp) in build_memreg()
677 static int build_inv_stag(union t4_wr *wqe, struct ib_send_wr *wr, in build_inv_stag() argument
680 wqe->inv.stag_inv = cpu_to_be32(wr->ex.invalidate_rkey); in build_inv_stag()
681 wqe->inv.r2 = 0; in build_inv_stag()
682 *len16 = DIV_ROUND_UP(sizeof wqe->inv, 16); in build_inv_stag()
747 union t4_wr *wqe = NULL; in c4iw_post_send() local
770 wqe = (union t4_wr *)((u8 *)qhp->wq.sq.queue + in c4iw_post_send()
789 err = build_rdma_send(&qhp->wq.sq, wqe, wr, &len16); in c4iw_post_send()
794 err = build_rdma_write(&qhp->wq.sq, wqe, wr, &len16); in c4iw_post_send()
804 err = build_rdma_read(wqe, wr, &len16); in c4iw_post_send()
814 err = build_memreg(&qhp->wq.sq, wqe, reg_wr(wr), &len16, in c4iw_post_send()
824 err = build_inv_stag(wqe, wr, &len16); in c4iw_post_send()
847 init_wr_hdr(wqe, qhp->wq.sq.pidx, fw_opcode, fw_flags, len16); in c4iw_post_send()
858 t4_ring_sq_db(&qhp->wq, idx, wqe); in c4iw_post_send()
872 union t4_recv_wr *wqe = NULL; in c4iw_post_receive() local
895 wqe = (union t4_recv_wr *)((u8 *)qhp->wq.rq.queue + in c4iw_post_receive()
899 err = build_rdma_recv(qhp, wqe, wr, &len16); in c4iw_post_receive()
916 wqe->recv.opcode = FW_RI_RECV_WR; in c4iw_post_receive()
917 wqe->recv.r1 = 0; in c4iw_post_receive()
918 wqe->recv.wrid = qhp->wq.rq.pidx; in c4iw_post_receive()
919 wqe->recv.r2[0] = 0; in c4iw_post_receive()
920 wqe->recv.r2[1] = 0; in c4iw_post_receive()
921 wqe->recv.r2[2] = 0; in c4iw_post_receive()
922 wqe->recv.len16 = len16; in c4iw_post_receive()
931 t4_ring_rq_db(&qhp->wq, idx, wqe); in c4iw_post_receive()
1084 struct fw_ri_wr *wqe; in post_terminate() local
1091 skb = alloc_skb(sizeof *wqe, gfp); in post_terminate()
1096 wqe = (struct fw_ri_wr *)__skb_put(skb, sizeof(*wqe)); in post_terminate()
1097 memset(wqe, 0, sizeof *wqe); in post_terminate()
1098 wqe->op_compl = cpu_to_be32(FW_WR_OP_V(FW_RI_INIT_WR)); in post_terminate()
1099 wqe->flowid_len16 = cpu_to_be32( in post_terminate()
1101 FW_WR_LEN16_V(DIV_ROUND_UP(sizeof(*wqe), 16))); in post_terminate()
1103 wqe->u.terminate.type = FW_RI_TYPE_TERMINATE; in post_terminate()
1104 wqe->u.terminate.immdlen = cpu_to_be32(sizeof *term); in post_terminate()
1105 term = (struct terminate_message *)wqe->u.terminate.termmsg; in post_terminate()
1205 struct fw_ri_wr *wqe; in rdma_fini() local
1212 skb = alloc_skb(sizeof *wqe, GFP_KERNEL); in rdma_fini()
1217 wqe = (struct fw_ri_wr *)__skb_put(skb, sizeof(*wqe)); in rdma_fini()
1218 memset(wqe, 0, sizeof *wqe); in rdma_fini()
1219 wqe->op_compl = cpu_to_be32( in rdma_fini()
1222 wqe->flowid_len16 = cpu_to_be32( in rdma_fini()
1224 FW_WR_LEN16_V(DIV_ROUND_UP(sizeof(*wqe), 16))); in rdma_fini()
1225 wqe->cookie = (uintptr_t)&ep->com.wr_wait; in rdma_fini()
1227 wqe->u.fini.type = FW_RI_TYPE_FINI; in rdma_fini()
1266 struct fw_ri_wr *wqe; in rdma_init() local
1273 skb = alloc_skb(sizeof *wqe, GFP_KERNEL); in rdma_init()
1286 wqe = (struct fw_ri_wr *)__skb_put(skb, sizeof(*wqe)); in rdma_init()
1287 memset(wqe, 0, sizeof *wqe); in rdma_init()
1288 wqe->op_compl = cpu_to_be32( in rdma_init()
1291 wqe->flowid_len16 = cpu_to_be32( in rdma_init()
1293 FW_WR_LEN16_V(DIV_ROUND_UP(sizeof(*wqe), 16))); in rdma_init()
1295 wqe->cookie = (uintptr_t)&qhp->ep->com.wr_wait; in rdma_init()
1297 wqe->u.init.type = FW_RI_TYPE_INIT; in rdma_init()
1298 wqe->u.init.mpareqbit_p2ptype = in rdma_init()
1301 wqe->u.init.mpa_attrs = FW_RI_MPA_IETF_ENABLE; in rdma_init()
1303 wqe->u.init.mpa_attrs |= FW_RI_MPA_RX_MARKER_ENABLE; in rdma_init()
1305 wqe->u.init.mpa_attrs |= FW_RI_MPA_TX_MARKER_ENABLE; in rdma_init()
1307 wqe->u.init.mpa_attrs |= FW_RI_MPA_CRC_ENABLE; in rdma_init()
1309 wqe->u.init.qp_caps = FW_RI_QP_RDMA_READ_ENABLE | in rdma_init()
1313 wqe->u.init.qp_caps |= FW_RI_QP_FAST_REGISTER_ENABLE | in rdma_init()
1315 wqe->u.init.nrqe = cpu_to_be16(t4_rqes_posted(&qhp->wq)); in rdma_init()
1316 wqe->u.init.pdid = cpu_to_be32(qhp->attr.pd); in rdma_init()
1317 wqe->u.init.qpid = cpu_to_be32(qhp->wq.sq.qid); in rdma_init()
1318 wqe->u.init.sq_eqid = cpu_to_be32(qhp->wq.sq.qid); in rdma_init()
1319 wqe->u.init.rq_eqid = cpu_to_be32(qhp->wq.rq.qid); in rdma_init()
1320 wqe->u.init.scqid = cpu_to_be32(qhp->attr.scq); in rdma_init()
1321 wqe->u.init.rcqid = cpu_to_be32(qhp->attr.rcq); in rdma_init()
1322 wqe->u.init.ord_max = cpu_to_be32(qhp->attr.max_ord); in rdma_init()
1323 wqe->u.init.ird_max = cpu_to_be32(qhp->attr.max_ird); in rdma_init()
1324 wqe->u.init.iss = cpu_to_be32(qhp->ep->snd_seq); in rdma_init()
1325 wqe->u.init.irs = cpu_to_be32(qhp->ep->rcv_seq); in rdma_init()
1326 wqe->u.init.hwrqsize = cpu_to_be32(qhp->wq.rq.rqt_size); in rdma_init()
1327 wqe->u.init.hwrqaddr = cpu_to_be32(qhp->wq.rq.rqt_hwaddr - in rdma_init()
1330 build_rtr_msg(qhp->attr.mpa_attr.p2p_type, &wqe->u.init); in rdma_init()