Lines matching refs: wqe (identifier cross-reference; all matches below are from the cxgb3 iWARP driver's queue-pair code, iwch_qp.c)
42 static int build_rdma_send(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_send() argument
51 wqe->send.rdmaop = T3_SEND_WITH_SE; in build_rdma_send()
53 wqe->send.rdmaop = T3_SEND; in build_rdma_send()
54 wqe->send.rem_stag = 0; in build_rdma_send()
58 wqe->send.rdmaop = T3_SEND_WITH_SE_INV; in build_rdma_send()
60 wqe->send.rdmaop = T3_SEND_WITH_INV; in build_rdma_send()
61 wqe->send.rem_stag = cpu_to_be32(wr->ex.invalidate_rkey); in build_rdma_send()
68 wqe->send.reserved[0] = 0; in build_rdma_send()
69 wqe->send.reserved[1] = 0; in build_rdma_send()
70 wqe->send.reserved[2] = 0; in build_rdma_send()
77 wqe->send.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_send()
78 wqe->send.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_send()
79 wqe->send.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); in build_rdma_send()
81 wqe->send.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_send()
83 wqe->send.plen = cpu_to_be32(plen); in build_rdma_send()
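
Context for the build_rdma_send() matches above: the routine converts an ib_send_wr into a T3 send WQE. The scatter/gather copy behind the sgl/num_sgle/plen lines follows a pattern repeated throughout this file; a condensed sketch, with the unmatched lines (the payload-overflow check in particular) reconstructed as assumptions from the driver's usual conventions:

    plen = 0;
    for (i = 0; i < wr->num_sge; i++) {
            /* reject u32 wrap of the accumulated payload length */
            if ((plen + wr->sg_list[i].length) < plen)
                    return -EMSGSIZE;
            plen += wr->sg_list[i].length;
            /* everything the adapter sees is big-endian */
            wqe->send.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey);
            wqe->send.sgl[i].len = cpu_to_be32(wr->sg_list[i].length);
            wqe->send.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr);
    }
    wqe->send.num_sgle = cpu_to_be32(wr->num_sge);
    wqe->send.plen = cpu_to_be32(plen);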
87 static int build_rdma_write(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_write() argument
94 wqe->write.rdmaop = T3_RDMA_WRITE; in build_rdma_write()
95 wqe->write.reserved[0] = 0; in build_rdma_write()
96 wqe->write.reserved[1] = 0; in build_rdma_write()
97 wqe->write.reserved[2] = 0; in build_rdma_write()
98 wqe->write.stag_sink = cpu_to_be32(rdma_wr(wr)->rkey); in build_rdma_write()
99 wqe->write.to_sink = cpu_to_be64(rdma_wr(wr)->remote_addr); in build_rdma_write()
103 wqe->write.sgl[0].stag = wr->ex.imm_data; in build_rdma_write()
104 wqe->write.sgl[0].len = cpu_to_be32(0); in build_rdma_write()
105 wqe->write.num_sgle = cpu_to_be32(0); in build_rdma_write()
114 wqe->write.sgl[i].stag = in build_rdma_write()
116 wqe->write.sgl[i].len = in build_rdma_write()
118 wqe->write.sgl[i].to = in build_rdma_write()
121 wqe->write.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_write()
124 wqe->write.plen = cpu_to_be32(plen); in build_rdma_write()
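
The build_rdma_write() matches show the one asymmetry between the two DMA builders: an RDMA write with immediate data carries no SGL at all. The 4-byte immediate is stashed in sgl[0].stag (imm_data is already __be32, which is why line 103 has no cpu_to_be32), and num_sgle is forced to zero. A sketch of that branch, assuming the usual IB_WR_RDMA_WRITE_WITH_IMM test around it:

    if (wr->opcode == IB_WR_RDMA_WRITE_WITH_IMM) {
            plen = 4;                                 /* immediate data length */
            wqe->write.sgl[0].stag = wr->ex.imm_data; /* __be32 already */
            wqe->write.sgl[0].len = cpu_to_be32(0);
            wqe->write.num_sgle = cpu_to_be32(0);
    } else {
            /* otherwise: the same overflow-checked SGL copy as
             * build_rdma_send(), writing into wqe->write.sgl[] */
    }
    wqe->write.plen = cpu_to_be32(plen);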
128 static int build_rdma_read(union t3_wr *wqe, struct ib_send_wr *wr, in build_rdma_read() argument
133 wqe->read.rdmaop = T3_READ_REQ; in build_rdma_read()
135 wqe->read.local_inv = 1; in build_rdma_read()
137 wqe->read.local_inv = 0; in build_rdma_read()
138 wqe->read.reserved[0] = 0; in build_rdma_read()
139 wqe->read.reserved[1] = 0; in build_rdma_read()
140 wqe->read.rem_stag = cpu_to_be32(rdma_wr(wr)->rkey); in build_rdma_read()
141 wqe->read.rem_to = cpu_to_be64(rdma_wr(wr)->remote_addr); in build_rdma_read()
142 wqe->read.local_stag = cpu_to_be32(wr->sg_list[0].lkey); in build_rdma_read()
143 wqe->read.local_len = cpu_to_be32(wr->sg_list[0].length); in build_rdma_read()
144 wqe->read.local_to = cpu_to_be64(wr->sg_list[0].addr); in build_rdma_read()
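
build_rdma_read() is the simplest builder: a T3 read request carries exactly one local SGE, and local_inv selects read-with-invalidate. A sketch of the body as reconstructed from the matches (the single-SGE bounds check is an assumption):

    if (wr->num_sge > 1)
            return -EINVAL;
    wqe->read.rdmaop = T3_READ_REQ;
    wqe->read.local_inv = (wr->opcode == IB_WR_RDMA_READ_WITH_INV) ? 1 : 0;
    wqe->read.reserved[0] = 0;
    wqe->read.reserved[1] = 0;
    wqe->read.rem_stag = cpu_to_be32(rdma_wr(wr)->rkey);
    wqe->read.rem_to = cpu_to_be64(rdma_wr(wr)->remote_addr);
    wqe->read.local_stag = cpu_to_be32(wr->sg_list[0].lkey);
    wqe->read.local_len = cpu_to_be32(wr->sg_list[0].length);
    wqe->read.local_to = cpu_to_be64(wr->sg_list[0].addr);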
149 static int build_memreg(union t3_wr *wqe, struct ib_reg_wr *wr, in build_memreg() argument
159 wqe->fastreg.stag = cpu_to_be32(wr->key); in build_memreg()
160 wqe->fastreg.len = cpu_to_be32(mhp->ibmr.length); in build_memreg()
161 wqe->fastreg.va_base_hi = cpu_to_be32(mhp->ibmr.iova >> 32); in build_memreg()
162 wqe->fastreg.va_base_lo_fbo = in build_memreg()
164 wqe->fastreg.page_type_perms = cpu_to_be32( in build_memreg()
169 p = &wqe->fastreg.pbl_addrs[0]; in build_memreg()
175 wqe = (union t3_wr *)(wq->queue + in build_memreg()
177 build_fw_riwrh((void *)wqe, T3_WR_FASTREG, 0, in build_memreg()
182 p = &wqe->pbl_frag.pbl_addrs[0]; in build_memreg()
192 static int build_inv_stag(union t3_wr *wqe, struct ib_send_wr *wr, in build_inv_stag() argument
195 wqe->local_inv.stag = cpu_to_be32(wr->ex.invalidate_rkey); in build_inv_stag()
196 wqe->local_inv.reserved = 0; in build_inv_stag()
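
build_inv_stag() is short enough to reconstruct in full; only the flit-count line is an assumption (the driver sizes WRs in 8-byte flits elsewhere too):

    static int build_inv_stag(union t3_wr *wqe, struct ib_send_wr *wr,
                              u8 *flit_cnt)
    {
            wqe->local_inv.stag = cpu_to_be32(wr->ex.invalidate_rkey);
            wqe->local_inv.reserved = 0;
            *flit_cnt = sizeof(struct t3_local_inv_wr) >> 3; /* 8-byte flits */
            return 0;
    }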
248 static int build_rdma_recv(struct iwch_qp *qhp, union t3_wr *wqe, in build_rdma_recv() argument
259 wqe->recv.pagesz[0] = page_size[0]; in build_rdma_recv()
260 wqe->recv.pagesz[1] = page_size[1]; in build_rdma_recv()
261 wqe->recv.pagesz[2] = page_size[2]; in build_rdma_recv()
262 wqe->recv.pagesz[3] = page_size[3]; in build_rdma_recv()
263 wqe->recv.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_recv()
265 wqe->recv.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_recv()
266 wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_recv()
269 wqe->recv.sgl[i].to = cpu_to_be64(((u32)wr->sg_list[i].addr) & in build_rdma_recv()
273 wqe->recv.pbl_addr[i] = cpu_to_be32(pbl_addr[i]); in build_rdma_recv()
276 wqe->recv.sgl[i].stag = 0; in build_rdma_recv()
277 wqe->recv.sgl[i].len = 0; in build_rdma_recv()
278 wqe->recv.sgl[i].to = 0; in build_rdma_recv()
279 wqe->recv.pbl_addr[i] = 0; in build_rdma_recv()
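
build_rdma_recv() shows the driver's two-loop recv pattern: fill the SGEs the caller supplied from a PBL mapping (iwch_sgl2pbl_map() produces pbl_addr[] and page_size[]), then zero the remaining T3_MAX_SGE slots so the adapter never sees stale entries. Note line 269: 'to' holds only the offset within the page, because the adapter resolves the page itself through the PBL. A sketch:

    for (i = 0; i < wr->num_sge; i++) {
            wqe->recv.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey);
            wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length);
            /* keep only the in-page offset; the PBL supplies the page */
            wqe->recv.sgl[i].to = cpu_to_be64(((u32)wr->sg_list[i].addr) &
                            ((1UL << (12 + page_size[i])) - 1));
            wqe->recv.pbl_addr[i] = cpu_to_be32(pbl_addr[i]);
    }
    for (; i < T3_MAX_SGE; i++) {
            wqe->recv.sgl[i].stag = 0;
            wqe->recv.sgl[i].len = 0;
            wqe->recv.sgl[i].to = 0;
            wqe->recv.pbl_addr[i] = 0;
    }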
288 static int build_zero_stag_recv(struct iwch_qp *qhp, union t3_wr *wqe, in build_zero_stag_recv() argument
311 wqe->recv.num_sgle = cpu_to_be32(wr->num_sge); in build_zero_stag_recv()
324 wqe->recv.pagesz[i] = T3_STAG0_PAGE_SHIFT; in build_zero_stag_recv()
331 wqe->recv.sgl[i].stag = 0; in build_zero_stag_recv()
332 wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_zero_stag_recv()
333 wqe->recv.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); in build_zero_stag_recv()
334 wqe->recv.pbl_addr[i] = cpu_to_be32(pbl_offset); in build_zero_stag_recv()
338 wqe->recv.pagesz[i] = 0; in build_zero_stag_recv()
339 wqe->recv.sgl[i].stag = 0; in build_zero_stag_recv()
340 wqe->recv.sgl[i].len = 0; in build_zero_stag_recv()
341 wqe->recv.sgl[i].to = 0; in build_zero_stag_recv()
342 wqe->recv.pbl_addr[i] = 0; in build_zero_stag_recv()
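
build_zero_stag_recv() is the same pattern for zero-stag (lkey == 0) receives: every SGE must use stag 0, gets the fixed T3_STAG0_PAGE_SHIFT page size, and consumes two pre-allocated PBL entries. The lkey check and the two-entry stride are assumptions consistent with the matched lines:

    for (i = 0; i < wr->num_sge; i++) {
            if (wr->sg_list[i].lkey != 0)   /* all SGEs must be zero-stag */
                    return -EINVAL;
            wqe->recv.pagesz[i] = T3_STAG0_PAGE_SHIFT;
            wqe->recv.sgl[i].stag = 0;
            wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length);
            wqe->recv.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr);
            wqe->recv.pbl_addr[i] = cpu_to_be32(pbl_offset);
            pbl_offset += 2;                /* two PBL entries per SGE */
    }
    /* then the same zeroing loop as build_rdma_recv() for unused slots */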
360 union t3_wr *wqe; in iwch_post_send() local
386 wqe = (union t3_wr *) (qhp->wq.queue + idx); in iwch_post_send()
400 err = build_rdma_send(wqe, wr, &t3_wr_flit_cnt); in iwch_post_send()
405 err = build_rdma_write(wqe, wr, &t3_wr_flit_cnt); in iwch_post_send()
411 err = build_rdma_read(wqe, wr, &t3_wr_flit_cnt); in iwch_post_send()
414 sqp->read_len = wqe->read.local_len; in iwch_post_send()
420 err = build_memreg(wqe, reg_wr(wr), &t3_wr_flit_cnt, in iwch_post_send()
427 err = build_inv_stag(wqe, wr, &t3_wr_flit_cnt); in iwch_post_send()
436 wqe->send.wrid.id0.hi = qhp->wq.sq_wptr; in iwch_post_send()
443 build_fw_riwrh((void *) wqe, t3_wr_opcode, t3_wr_flags, in iwch_post_send()
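
The iwch_post_send() matches are the dispatch that routes each ib_send_wr opcode to one of the builders above and then stamps the firmware header. A condensed sketch of the switch (fence-flag handling and error paths elided; the T3_WR_* opcode pairings are assumptions consistent with the builders' names):

    switch (wr->opcode) {
    case IB_WR_SEND:
    case IB_WR_SEND_WITH_INV:
            t3_wr_opcode = T3_WR_SEND;
            err = build_rdma_send(wqe, wr, &t3_wr_flit_cnt);
            break;
    case IB_WR_RDMA_WRITE:
    case IB_WR_RDMA_WRITE_WITH_IMM:
            t3_wr_opcode = T3_WR_WRITE;
            err = build_rdma_write(wqe, wr, &t3_wr_flit_cnt);
            break;
    case IB_WR_RDMA_READ:
    case IB_WR_RDMA_READ_WITH_INV:
            t3_wr_opcode = T3_WR_READ;
            err = build_rdma_read(wqe, wr, &t3_wr_flit_cnt);
            if (!err)           /* track the pending read's length */
                    sqp->read_len = wqe->read.local_len;
            break;
    case IB_WR_REG_MR:
            t3_wr_opcode = T3_WR_FASTREG;
            err = build_memreg(wqe, reg_wr(wr), &t3_wr_flit_cnt,
                               &wr_cnt, &qhp->wq);
            break;
    case IB_WR_LOCAL_INV:
            t3_wr_opcode = T3_WR_INV_STAG;
            err = build_inv_stag(wqe, wr, &t3_wr_flit_cnt);
            break;
    default:
            err = -EINVAL;
            break;
    }
    /* ... then the firmware header is written over the WQE: */
    build_fw_riwrh((void *)wqe, t3_wr_opcode, t3_wr_flags,
                   Q_GENBIT(qhp->wq.wptr, qhp->wq.size_log2),
                   0, t3_wr_flit_cnt,
                   (wr_cnt == 1) ? T3_SOPEOP : T3_SOP);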
472 union t3_wr *wqe; in iwch_post_receive() local
496 wqe = (union t3_wr *) (qhp->wq.queue + idx); in iwch_post_receive()
499 err = build_rdma_recv(qhp, wqe, wr); in iwch_post_receive()
501 err = build_zero_stag_recv(qhp, wqe, wr); in iwch_post_receive()
508 build_fw_riwrh((void *) wqe, T3_WR_RCV, T3_COMPLETION_FLAG, in iwch_post_receive()
513 idx, qhp->wq.rq_wptr, qhp->wq.rq_rptr, wqe); in iwch_post_receive()
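
iwch_post_receive() picks a builder by looking at the first SGE's lkey: a nonzero lkey means a normal stag'd receive, zero selects the zero-stag path. A sketch of the per-WR body (queue-full and num_sge checks elided; the flit-count expression is an assumption):

    wqe = (union t3_wr *)(qhp->wq.queue + idx);
    if (wr->sg_list[0].lkey)
            err = build_rdma_recv(qhp, wqe, wr);
    else
            err = build_zero_stag_recv(qhp, wqe, wr);
    if (err)
            break;
    build_fw_riwrh((void *)wqe, T3_WR_RCV, T3_COMPLETION_FLAG,
                   Q_GENBIT(qhp->wq.wptr, qhp->wq.size_log2),
                   0, sizeof(struct t3_receive_wr) >> 3, T3_SOPEOP);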
536 union t3_wr *wqe; in iwch_bind_mw() local
565 wqe = (union t3_wr *) (qhp->wq.queue + idx); in iwch_bind_mw()
574 wqe->bind.reserved = 0; in iwch_bind_mw()
575 wqe->bind.type = TPT_VATO; in iwch_bind_mw()
578 wqe->bind.perms = iwch_ib_to_tpt_bind_access( in iwch_bind_mw()
580 wqe->bind.mr_stag = cpu_to_be32(mw_bind->bind_info.mr->lkey); in iwch_bind_mw()
581 wqe->bind.mw_stag = cpu_to_be32(mw->rkey); in iwch_bind_mw()
582 wqe->bind.mw_len = cpu_to_be32(mw_bind->bind_info.length); in iwch_bind_mw()
583 wqe->bind.mw_va = cpu_to_be64(mw_bind->bind_info.addr); in iwch_bind_mw()
589 wqe->send.wrid.id0.hi = qhp->wq.sq_wptr; in iwch_bind_mw()
596 wqe->bind.mr_pbl_addr = cpu_to_be32(pbl_addr); in iwch_bind_mw()
597 wqe->bind.mr_pagesz = page_size; in iwch_bind_mw()
598 build_fw_riwrh((void *)wqe, T3_WR_BIND, t3_wr_flags, in iwch_bind_mw()
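
iwch_bind_mw() fills a T3 bind WQE from the ib_mw_bind parameters and, like the recv path, maps the underlying MR region through iwch_sgl2pbl_map() before handing the PBL address to the adapter. A sketch of the field assignments; the single-SGE mapping step is an assumption based on the mr_pbl_addr/mr_pagesz matches:

    wqe->bind.reserved = 0;
    wqe->bind.type = TPT_VATO;          /* VA-based addressing */
    wqe->bind.perms = iwch_ib_to_tpt_bind_access(
                    mw_bind->bind_info.mw_access_flags);
    wqe->bind.mr_stag = cpu_to_be32(mw_bind->bind_info.mr->lkey);
    wqe->bind.mw_stag = cpu_to_be32(mw->rkey);
    wqe->bind.mw_len = cpu_to_be32(mw_bind->bind_info.length);
    wqe->bind.mw_va = cpu_to_be64(mw_bind->bind_info.addr);
    /* map the MR's single SGE to a PBL the adapter can use */
    err = iwch_sgl2pbl_map(rhp, &sgl, 1, &pbl_addr, &page_size);
    /* ... on success: */
    wqe->bind.mr_pbl_addr = cpu_to_be32(pbl_addr);
    wqe->bind.mr_pagesz = page_size;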
745 union t3_wr *wqe; in iwch_post_zb_read() local
755 wqe = (union t3_wr *)skb_put(skb, sizeof(struct t3_rdma_read_wr)); in iwch_post_zb_read()
756 memset(wqe, 0, sizeof(struct t3_rdma_read_wr)); in iwch_post_zb_read()
757 wqe->read.rdmaop = T3_READ_REQ; in iwch_post_zb_read()
758 wqe->read.reserved[0] = 0; in iwch_post_zb_read()
759 wqe->read.reserved[1] = 0; in iwch_post_zb_read()
760 wqe->read.rem_stag = cpu_to_be32(1); in iwch_post_zb_read()
761 wqe->read.rem_to = cpu_to_be64(1); in iwch_post_zb_read()
762 wqe->read.local_stag = cpu_to_be32(1); in iwch_post_zb_read()
763 wqe->read.local_len = cpu_to_be32(0); in iwch_post_zb_read()
764 wqe->read.local_to = cpu_to_be64(1); in iwch_post_zb_read()
765 wqe->send.wrh.op_seop_flags = cpu_to_be32(V_FW_RIWR_OP(T3_WR_READ)); in iwch_post_zb_read()
766 wqe->send.wrh.gen_tid_len = cpu_to_be32(V_FW_RIWR_TID(ep->hwtid)| in iwch_post_zb_read()
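
iwch_post_zb_read() builds a zero-byte RDMA read directly in an skb and sends it down the offload path rather than through the work queue; the stag/address fields are dummies (1) because local_len is 0. A sketch; the skb allocation size and the final send call are assumptions consistent with the driver's other offload sends:

    skb = alloc_skb(40, GFP_KERNEL);
    if (!skb)
            return -ENOMEM;
    wqe = (union t3_wr *)skb_put(skb, sizeof(struct t3_rdma_read_wr));
    memset(wqe, 0, sizeof(struct t3_rdma_read_wr));
    wqe->read.rdmaop = T3_READ_REQ;
    wqe->read.rem_stag = cpu_to_be32(1);    /* dummy values: nothing is */
    wqe->read.rem_to = cpu_to_be64(1);      /* actually transferred     */
    wqe->read.local_stag = cpu_to_be32(1);
    wqe->read.local_len = cpu_to_be32(0);   /* zero-byte read */
    wqe->read.local_to = cpu_to_be64(1);
    /* firmware header written by hand instead of build_fw_riwrh() */
    wqe->send.wrh.op_seop_flags = cpu_to_be32(V_FW_RIWR_OP(T3_WR_READ));
    wqe->send.wrh.gen_tid_len = cpu_to_be32(V_FW_RIWR_TID(ep->hwtid) |
                    V_FW_RIWR_LEN(sizeof(struct t3_rdma_read_wr) >> 3));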
777 union t3_wr *wqe; in iwch_post_terminate() local
787 wqe = (union t3_wr *)skb_put(skb, 40); in iwch_post_terminate()
788 memset(wqe, 0, 40); in iwch_post_terminate()
789 wqe->send.rdmaop = T3_TERMINATE; in iwch_post_terminate()
792 wqe->send.plen = htonl(4); in iwch_post_terminate()
795 term = (struct terminate_message *)wqe->send.sgl; in iwch_post_terminate()
797 wqe->send.wrh.op_seop_flags = cpu_to_be32(V_FW_RIWR_OP(T3_WR_SEND) | in iwch_post_terminate()
799 wqe->send.wrh.gen_tid_len = cpu_to_be32(V_FW_RIWR_TID(qhp->ep->hwtid)); in iwch_post_terminate()
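
iwch_post_terminate() uses the same skb-based technique for a TERMINATE message: a T3_TERMINATE send whose 4-byte payload is a struct terminate_message written where the SGL would normally start. A sketch; the build_term_codes() call filling the message is an assumption from context:

    wqe = (union t3_wr *)skb_put(skb, 40);
    memset(wqe, 0, 40);
    wqe->send.rdmaop = T3_TERMINATE;
    wqe->send.plen = htonl(4);              /* immediate data length */
    /* the immediate data overlays the SGL area */
    term = (struct terminate_message *)wqe->send.sgl;
    build_term_codes(rsp_msg, &term->layer_etype, &term->ecode);
    wqe->send.wrh.op_seop_flags = cpu_to_be32(V_FW_RIWR_OP(T3_WR_SEND) |
            V_FW_RIWR_FLAGS(T3_COMPLETION_FLAG | T3_NOTIFY_FLAG));
    wqe->send.wrh.gen_tid_len = cpu_to_be32(V_FW_RIWR_TID(qhp->ep->hwtid));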
885 union t3_wr *wqe = qhp->wq.queue; in iwch_rqes_posted() local
888 while (count < USHRT_MAX && fw_riwrh_opcode((struct fw_riwrh *)wqe) == T3_WR_RCV) { in iwch_rqes_posted()
890 wqe++; in iwch_rqes_posted()
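
iwch_rqes_posted() recovers the number of posted RQ entries by scanning the queue from the start until the firmware opcode stops reading T3_WR_RCV, with USHRT_MAX bounding the scan. The full loop, reconstructed almost entirely from the matches:

    static int iwch_rqes_posted(struct iwch_qp *qhp)
    {
            union t3_wr *wqe = qhp->wq.queue;
            u16 count = 0;

            while (count < USHRT_MAX &&
                   fw_riwrh_opcode((struct fw_riwrh *)wqe) == T3_WR_RCV) {
                    count++;
                    wqe++;
            }
            return count;
    }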