Lines Matching refs:rq

210 return qp->queue.direct.buf + (n << qp->rq.wqe_shift); in get_recv_wqe()
212 return qp->queue.page_list[(n << qp->rq.wqe_shift) >> PAGE_SHIFT].buf + in get_recv_wqe()
213 ((n << qp->rq.wqe_shift) & (PAGE_SIZE - 1)); in get_recv_wqe()
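
These fragments appear to come from the mthca (Mellanox Tavor/Arbel) InfiniBand driver's QP code. Lines 210-213 are the two halves of the receive-WQE address calculation in get_recv_wqe(). A minimal sketch of that math, using simplified stand-in types rather than the driver's struct mthca_qp (the is_direct flag, the buffer structs and the PAGE_SHIFT value below are assumptions of this sketch):

    struct wqe_buf { void *buf; };

    struct rq_sketch {
        int             is_direct;     /* queue held in one contiguous buffer? */
        struct wqe_buf  direct;        /* used when is_direct is set */
        struct wqe_buf *page_list;     /* otherwise: one buffer per page */
        unsigned int    wqe_shift;     /* log2 of the RQ WQE stride */
    };

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)

    /* WQE n sits at byte offset (n << wqe_shift); in the page-list case that
     * offset is split into a page index and an offset within the page. */
    static void *get_recv_wqe_sketch(struct rq_sketch *rq, int n)
    {
        unsigned long off = (unsigned long) n << rq->wqe_shift;

        if (rq->is_direct)
            return (char *) rq->direct.buf + off;

        return (char *) rq->page_list[off >> PAGE_SHIFT].buf +
               (off & (PAGE_SIZE - 1));
    }
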
499 qp_attr->cap.max_recv_wr = qp->rq.max; in mthca_query_qp()
501 qp_attr->cap.max_recv_sge = qp->rq.max_gs; in mthca_query_qp()
598 if (qp->rq.max) in __mthca_modify_qp()
599 qp_context->rq_size_stride = ilog2(qp->rq.max) << 3; in __mthca_modify_qp()
600 qp_context->rq_size_stride |= qp->rq.wqe_shift - 4; in __mthca_modify_qp()
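
Lines 598-600 pack the receive queue geometry into the QP context's rq_size_stride byte: log2 of the queue depth in bits 7:3, and the stride code (wqe_shift - 4, i.e. the WQE size expressed as a power of two times 16 bytes) in bits 2:0. A hedged sketch of that encoding, with a plain loop standing in for the kernel's ilog2():

    static unsigned int ilog2_sketch(unsigned int v)
    {
        unsigned int r = 0;

        while (v >>= 1)
            ++r;
        return r;                     /* floor(log2(v)) for v >= 1 */
    }

    static unsigned char rq_size_stride_sketch(unsigned int rq_max,
                                               unsigned int wqe_shift)
    {
        unsigned char v = 0;

        if (rq_max)                   /* size bits stay 0 for an empty RQ */
            v = ilog2_sketch(rq_max) << 3;
        v |= wqe_shift - 4;           /* 0 => 16-byte WQEs, 1 => 32, 2 => 64, ... */
        return v;
    }
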
761 qp_context->rcv_db_index = cpu_to_be32(qp->rq.db_index); in __mthca_modify_qp()
827 mthca_wq_reset(&qp->rq); in __mthca_modify_qp()
828 qp->rq.last = get_recv_wqe(qp, qp->rq.max - 1); in __mthca_modify_qp()
832 *qp->rq.db = 0; in __mthca_modify_qp()
855 spin_lock(&qp->rq.lock); in mthca_modify_qp()
857 spin_unlock(&qp->rq.lock); in mthca_modify_qp()
957 qp->rq.max_gs = min_t(int, dev->limits.max_sg, in mthca_adjust_qp_caps()
958 (min(dev->limits.max_desc_sz, 1 << qp->rq.wqe_shift) - in mthca_adjust_qp_caps()
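
Lines 957-958 cap the number of scatter entries per receive WQE in mthca_adjust_qp_caps(). The expression is truncated in the listing, but its shape is: take the smaller of the device's maximum descriptor size and the actual WQE stride, subtract the fixed per-WQE header, divide by the size of one scatter/gather entry, and clamp to the device-wide SG limit. A sketch under that reading (header_bytes stands in for the subtracted term the listing cuts off, and the 16-byte data_seg layout is assumed):

    struct data_seg { unsigned int byte_count, lkey; unsigned long long addr; };

    #define MIN(a, b) ((a) < (b) ? (a) : (b))

    static int rq_max_gs_sketch(int dev_max_sg, int dev_max_desc_sz,
                                int wqe_shift, int header_bytes)
    {
        int wqe_bytes = MIN(dev_max_desc_sz, 1 << wqe_shift);

        /* Usable bytes per WQE divided by one scatter entry, never more
         * than the device limit. */
        return MIN(dev_max_sg,
                   (int) ((wqe_bytes - header_bytes) / sizeof(struct data_seg)));
    }
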
978 qp->rq.max_gs * sizeof (struct mthca_data_seg); in mthca_alloc_wqe_buf()
983 for (qp->rq.wqe_shift = 6; 1 << qp->rq.wqe_shift < size; in mthca_alloc_wqe_buf()
984 qp->rq.wqe_shift++) in mthca_alloc_wqe_buf()
1031 qp->send_wqe_offset = ALIGN(qp->rq.max << qp->rq.wqe_shift, in mthca_alloc_wqe_buf()
1045 qp->wrid = kmalloc((qp->rq.max + qp->sq.max) * sizeof (u64), in mthca_alloc_wqe_buf()
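
Lines 978-1045 show the receive-side sizing in mthca_alloc_wqe_buf(): the WQE byte size is a fixed header plus max_gs data segments, rq.wqe_shift is the smallest power of two of at least 64 bytes (shift 6) that holds it, send_wqe_offset places the send ring right after the receive ring, and a single wrid array of (rq.max + sq.max) u64 entries records the caller's wr_id for both queues. A condensed sketch; the header term and the second ALIGN() argument are truncated in the listing, so header_bytes and the alignment to the send WQE stride below are assumptions:

    #include <stddef.h>

    #define ALIGN_UP(x, a) (((x) + (size_t)(a) - 1) & ~((size_t)(a) - 1))

    struct data_seg { unsigned int byte_count, lkey; unsigned long long addr; };

    /* Returns the byte offset of the first send WQE and fills in the RQ
     * WQE stride (as a shift). */
    static size_t layout_queues_sketch(unsigned int max_gs, unsigned int rq_max,
                                       unsigned int header_bytes,
                                       unsigned int sq_wqe_size,
                                       unsigned int *rq_wqe_shift)
    {
        size_t wqe_bytes = header_bytes + max_gs * sizeof(struct data_seg);
        unsigned int shift;

        /* Smallest power-of-two stride that fits, with a 64-byte floor. */
        for (shift = 6; ((size_t) 1 << shift) < wqe_bytes; ++shift)
            ;

        *rq_wqe_shift = shift;
        return ALIGN_UP((size_t) rq_max << shift, sq_wqe_size);
    }

The shared wrid array at line 1045 is indexed as wrid[ind + rq.max] for send work requests (lines 1751 and 2092), so completions from either queue can recover the caller's wr_id from one allocation.
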
1116 qp->rq.db_index = mthca_alloc_db(dev, MTHCA_DB_TYPE_RQ, in mthca_alloc_memfree()
1117 qp->qpn, &qp->rq.db); in mthca_alloc_memfree()
1118 if (qp->rq.db_index < 0) in mthca_alloc_memfree()
1124 mthca_free_db(dev, MTHCA_DB_TYPE_RQ, qp->rq.db_index); in mthca_alloc_memfree()
1137 mthca_free_db(dev, MTHCA_DB_TYPE_RQ, qp->rq.db_index); in mthca_free_memfree()
1160 mthca_wq_reset(&qp->rq); in mthca_alloc_qp_common()
1163 spin_lock_init(&qp->rq.lock); in mthca_alloc_qp_common()
1195 qp->rq.max_gs * sizeof (struct mthca_data_seg)) / 16; in mthca_alloc_qp_common()
1197 for (i = 0; i < qp->rq.max; ++i) { in mthca_alloc_qp_common()
1199 next->nda_op = cpu_to_be32(((i + 1) & (qp->rq.max - 1)) << in mthca_alloc_qp_common()
1200 qp->rq.wqe_shift); in mthca_alloc_qp_common()
1204 (void *) scatter < (void *) next + (1 << qp->rq.wqe_shift); in mthca_alloc_qp_common()
1216 for (i = 0; i < qp->rq.max; ++i) { in mthca_alloc_qp_common()
1218 next->nda_op = htonl((((i + 1) % qp->rq.max) << in mthca_alloc_qp_common()
1219 qp->rq.wqe_shift) | 1); in mthca_alloc_qp_common()
1225 qp->rq.last = get_recv_wqe(qp, qp->rq.max - 1); in mthca_alloc_qp_common()
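
Lines 1195-1225 initialize the receive ring in mthca_alloc_qp_common(): each WQE's next-descriptor field (nda_op) is set to the byte offset of the following WQE, wrapping at the end of the ring, and rq.last is then primed to the final entry. The mem-free branch (line 1199) wraps with a mask, which only works because its queue depth is a power of two; the Tavor branch (lines 1218-1219) uses a modulo and also sets the low bit. A host-byte-order sketch of just the offset math, with the cpu_to_be32()/htonl() conversions left out:

    /* Offset written into nda_op of entry i: where the next WQE starts. */
    static unsigned int next_offset_memfree(int i, int rq_max, int wqe_shift)
    {
        return ((i + 1) & (rq_max - 1)) << wqe_shift;   /* rq_max is a power of two */
    }

    static unsigned int next_offset_tavor(int i, int rq_max, int wqe_shift)
    {
        /* The low bit set here mirrors the "| 1" on the listed Tavor line. */
        return ((((i + 1) % rq_max) << wqe_shift) | 1);
    }
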
1251 qp->rq.max = cap->max_recv_wr ? in mthca_set_qp_size()
1256 qp->rq.max = cap->max_recv_wr; in mthca_set_qp_size()
1260 qp->rq.max_gs = cap->max_recv_sge; in mthca_set_qp_size()
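
Lines 1251-1260 derive the software queue sizes in mthca_set_qp_size() from the caller's requested capabilities; the ternary on line 1251 is truncated in the listing. Because the mem-free paths elsewhere mask indices with (rq.max - 1) (lines 1199 and 2172), rq.max must be a power of two on those HCAs, so the sketch below assumes mem-free rounds the requested depth up to a power of two while Tavor keeps it as given; max_recv_sge is copied over directly either way:

    /* Stand-in for the kernel's roundup_pow_of_two(). */
    static unsigned int roundup_pow2_sketch(unsigned int v)
    {
        unsigned int r = 1;

        while (r < v)
            r <<= 1;
        return r;
    }

    static unsigned int rq_depth_sketch(int is_memfree, unsigned int max_recv_wr)
    {
        if (is_memfree)     /* assumed: power-of-two depth so masks like
                             * head & (max - 1) keep working */
            return max_recv_wr ? roundup_pow2_sketch(max_recv_wr) : 0;
        return max_recv_wr; /* Tavor keeps the requested depth as-is */
    }
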
1751 qp->wrid[ind + qp->rq.max] = wr->wr_id; in mthca_tavor_post_send()
1827 spin_lock_irqsave(&qp->rq.lock, flags); in mthca_tavor_post_receive()
1831 ind = qp->rq.next_ind; in mthca_tavor_post_receive()
1834 if (mthca_wq_overflow(&qp->rq, nreq, qp->ibqp.recv_cq)) { in mthca_tavor_post_receive()
1837 qp->rq.head, qp->rq.tail, in mthca_tavor_post_receive()
1838 qp->rq.max, nreq); in mthca_tavor_post_receive()
1845 prev_wqe = qp->rq.last; in mthca_tavor_post_receive()
1846 qp->rq.last = wqe; in mthca_tavor_post_receive()
1855 if (unlikely(wr->num_sge > qp->rq.max_gs)) { in mthca_tavor_post_receive()
1876 if (unlikely(ind >= qp->rq.max)) in mthca_tavor_post_receive()
1877 ind -= qp->rq.max; in mthca_tavor_post_receive()
1885 mthca_write64((qp->rq.next_ind << qp->rq.wqe_shift) | size0, in mthca_tavor_post_receive()
1889 qp->rq.next_ind = ind; in mthca_tavor_post_receive()
1890 qp->rq.head += MTHCA_TAVOR_MAX_WQES_PER_RECV_DB; in mthca_tavor_post_receive()
1898 mthca_write64((qp->rq.next_ind << qp->rq.wqe_shift) | size0, in mthca_tavor_post_receive()
1903 qp->rq.next_ind = ind; in mthca_tavor_post_receive()
1904 qp->rq.head += nreq; in mthca_tavor_post_receive()
1912 spin_unlock_irqrestore(&qp->rq.lock, flags); in mthca_tavor_post_receive()
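
Lines 1827-1912 are the Tavor receive path: under rq.lock, requests are written starting at rq.next_ind, each one checked for queue overflow and against rq.max_gs, the index wraps at rq.max, and the MMIO doorbell (the mthca_write64() of the first new WQE's offset) is rung once per batch of MTHCA_TAVOR_MAX_WQES_PER_RECV_DB requests plus once for any remainder, advancing rq.next_ind and rq.head at each ring. A sketch of just that batching, with WQE formatting, locking and the actual doorbell write abstracted into a callback (the batch size below is a stand-in value):

    #define MAX_WQES_PER_RECV_DB 256   /* stand-in for MTHCA_TAVOR_MAX_WQES_PER_RECV_DB */

    struct tavor_rq { unsigned int next_ind, head, max; };

    static void tavor_post_sketch(struct tavor_rq *rq, int to_post,
                                  void (*ring)(unsigned int first_ind, int count))
    {
        unsigned int ind = rq->next_ind;
        int nreq = 0;

        while (to_post-- > 0) {
            /* ... format the WQE at index 'ind', link it to the previous one ... */
            if (++ind >= rq->max)
                ind -= rq->max;

            if (++nreq == MAX_WQES_PER_RECV_DB) {
                ring(rq->next_ind, nreq);   /* full batch: ring the doorbell now */
                rq->head    += nreq;
                rq->next_ind = ind;
                nreq = 0;
            }
        }

        if (nreq) {
            ring(rq->next_ind, nreq);       /* tail batch */
            rq->head    += nreq;
            rq->next_ind = ind;
        }
    }
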
2092 qp->wrid[ind + qp->rq.max] = wr->wr_id; in mthca_arbel_post_send()
2168 spin_lock_irqsave(&qp->rq.lock, flags); in mthca_arbel_post_receive()
2172 ind = qp->rq.head & (qp->rq.max - 1); in mthca_arbel_post_receive()
2175 if (mthca_wq_overflow(&qp->rq, nreq, qp->ibqp.recv_cq)) { in mthca_arbel_post_receive()
2178 qp->rq.head, qp->rq.tail, in mthca_arbel_post_receive()
2179 qp->rq.max, nreq); in mthca_arbel_post_receive()
2191 if (unlikely(wr->num_sge > qp->rq.max_gs)) { in mthca_arbel_post_receive()
2202 if (i < qp->rq.max_gs) in mthca_arbel_post_receive()
2208 if (unlikely(ind >= qp->rq.max)) in mthca_arbel_post_receive()
2209 ind -= qp->rq.max; in mthca_arbel_post_receive()
2213 qp->rq.head += nreq; in mthca_arbel_post_receive()
2220 *qp->rq.db = cpu_to_be32(qp->rq.head & 0xffff); in mthca_arbel_post_receive()
2223 spin_unlock_irqrestore(&qp->rq.lock, flags); in mthca_arbel_post_receive()
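
Lines 2168-2223 are the mem-free (Arbel) receive path: the slot index comes from rq.head masked with the power-of-two rq.max, the same overflow and max_gs checks apply, and instead of an MMIO doorbell the low 16 bits of the advanced rq.head are written to the in-memory doorbell record (*qp->rq.db) once the WQEs are in place. A host-order sketch of that bookkeeping, with WQE construction, the cpu_to_be32() conversion and the write barrier left out:

    struct arbel_rq { unsigned int head, max; volatile unsigned int *db; };

    static void arbel_post_sketch(struct arbel_rq *rq, int nreq)
    {
        int i;

        for (i = 0; i < nreq; ++i) {
            unsigned int ind = (rq->head + i) & (rq->max - 1);  /* power-of-two ring */
            /* ... format the WQE at index 'ind' ... */
            (void) ind;
        }

        if (nreq) {
            rq->head += nreq;
            /* Real driver: barrier, then *qp->rq.db = cpu_to_be32(head & 0xffff). */
            *rq->db = rq->head & 0xffff;
        }
    }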