Lines Matching refs:rq

191 return get_wqe(qp, qp->rq.offset + (n << qp->rq.wqe_shift)); in get_recv_wqe()
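
Line 191 locates a receive WQE as the queue's byte offset plus the index scaled by the per-WQE stride. A minimal sketch of that addressing scheme, with illustrative parameter names rather than the driver's structures:

#include <stddef.h>

/* Hedged sketch: WQE n lives at rq_offset + (n << wqe_shift) inside the
 * flat QP buffer, where (1 << wqe_shift) is the per-WQE stride in bytes. */
static inline void *recv_wqe_addr(void *qp_buf, unsigned int rq_offset,
                                  unsigned int wqe_shift, unsigned int n)
{
        return (char *)qp_buf + rq_offset + ((size_t)n << wqe_shift);
}
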
390 qp->rq.wqe_cnt = qp->rq.max_gs = 0; in set_rq_size()
396 qp->rq.wqe_cnt = roundup_pow_of_two(max(1U, cap->max_recv_wr)); in set_rq_size()
397 qp->rq.max_gs = roundup_pow_of_two(max(1U, cap->max_recv_sge)); in set_rq_size()
398 qp->rq.wqe_shift = ilog2(qp->rq.max_gs * sizeof (struct mlx4_wqe_data_seg)); in set_rq_size()
403 cap->max_recv_wr = qp->rq.max_post = qp->rq.wqe_cnt; in set_rq_size()
404 cap->max_recv_sge = qp->rq.max_gs; in set_rq_size()
406 cap->max_recv_wr = qp->rq.max_post = in set_rq_size()
407 min(dev->dev->caps.max_wqes - MLX4_IB_SQ_MAX_SPARE, qp->rq.wqe_cnt); in set_rq_size()
408 cap->max_recv_sge = min(qp->rq.max_gs, in set_rq_size()
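
Lines 390-408 size the receive queue: the requested work-request and SGE counts are rounded up to powers of two, the WQE stride exponent becomes ilog2(max_gs * sizeof(struct mlx4_wqe_data_seg)), and the values reported back in cap are clamped against device limits. A user-space sketch of the same arithmetic, assuming a 16-byte data segment; the helpers below stand in for the kernel's roundup_pow_of_two() and ilog2():

/* Stand-in helpers for the kernel primitives used on lines 396-398. */
static unsigned int rup_pow2(unsigned int v)
{
        unsigned int r = 1;
        while (r < v)
                r <<= 1;
        return r;
}

static unsigned int log2_floor(unsigned int v)
{
        unsigned int r = 0;
        while (v >>= 1)
                r++;
        return r;
}

/* Hedged sketch of the RQ sizing math: wqe_cnt and max_gs become powers of
 * two, and wqe_shift is log2 of the WQE stride in bytes.  The 16 plays the
 * role of sizeof(struct mlx4_wqe_data_seg) and is an assumption made for
 * illustration. */
static void size_rq(unsigned int max_recv_wr, unsigned int max_recv_sge,
                    unsigned int *wqe_cnt, unsigned int *max_gs,
                    unsigned int *wqe_shift)
{
        *wqe_cnt   = rup_pow2(max_recv_wr ? max_recv_wr : 1);
        *max_gs    = rup_pow2(max_recv_sge ? max_recv_sge : 1);
        *wqe_shift = log2_floor(*max_gs * 16);
}
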
510 qp->buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_kernel_sq_size()
512 if (qp->rq.wqe_shift > qp->sq.wqe_shift) { in set_kernel_sq_size()
513 qp->rq.offset = 0; in set_kernel_sq_size()
514 qp->sq.offset = qp->rq.wqe_cnt << qp->rq.wqe_shift; in set_kernel_sq_size()
516 qp->rq.offset = qp->sq.wqe_cnt << qp->sq.wqe_shift; in set_kernel_sq_size()
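
Lines 510-516 lay out the QP buffer: its total size is the RQ bytes plus the SQ bytes, and whichever queue has the larger WQE stride is placed at offset 0, with the other queue following it. A sketch of that layout decision, with illustrative names:

#include <stddef.h>

/* Hedged sketch: compute the buffer size and per-queue offsets as the
 * listing suggests, putting the queue with the larger stride first. */
struct wq_layout {
        size_t buf_size;
        size_t rq_offset;
        size_t sq_offset;
};

static struct wq_layout layout_qp_buf(unsigned int rq_wqe_cnt, unsigned int rq_wqe_shift,
                                      unsigned int sq_wqe_cnt, unsigned int sq_wqe_shift)
{
        struct wq_layout l;
        size_t rq_bytes = (size_t)rq_wqe_cnt << rq_wqe_shift;
        size_t sq_bytes = (size_t)sq_wqe_cnt << sq_wqe_shift;

        l.buf_size = rq_bytes + sq_bytes;
        if (rq_wqe_shift > sq_wqe_shift) {
                l.rq_offset = 0;
                l.sq_offset = rq_bytes;
        } else {
                l.sq_offset = 0;
                l.rq_offset = sq_bytes;
        }
        return l;
}
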
545 qp->buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_user_sq_size()
556 kmalloc(sizeof (struct mlx4_ib_buf) * qp->rq.wqe_cnt, in alloc_proxy_bufs()
560 for (i = 0; i < qp->rq.wqe_cnt; i++) { in alloc_proxy_bufs()
594 for (i = 0; i < qp->rq.wqe_cnt; i++) { in free_proxy_bufs()
711 spin_lock_init(&qp->rq.lock); in create_qp_common()
803 qp->rq.wrid = kmalloc(qp->rq.wqe_cnt * sizeof(u64), gfp); in create_qp_common()
804 if (!qp->rq.wrid) in create_qp_common()
805 qp->rq.wrid = __vmalloc(qp->rq.wqe_cnt * sizeof(u64), in create_qp_common()
807 if (!qp->sq.wrid || !qp->rq.wrid) { in create_qp_common()
897 kvfree(qp->rq.wrid); in create_qp_common()
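
Lines 803-807 allocate the per-WQE wr_id bookkeeping array with kmalloc and fall back to __vmalloc when the physically contiguous allocation fails; lines 897 and 1073 release it with kvfree(), which copes with either origin. A hedged kernel-style sketch of the same idea using the newer kvmalloc_array() helper, which wraps that try-kmalloc-then-vmalloc fallback (illustrative, not the driver's exact code):

#include <linux/types.h>
#include <linux/slab.h>
#include <linux/mm.h>

/* Hedged sketch: wr_id bookkeeping sized to the RQ depth.  kvmalloc_array()
 * tries a kmalloc first and falls back to vmalloc for large depths, so a
 * single kvfree() on teardown covers either allocation path. */
static u64 *alloc_rq_wrid(unsigned int rq_wqe_cnt)
{
        return kvmalloc_array(rq_wqe_cnt, sizeof(u64), GFP_KERNEL);
}
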
1067 if (qp->rq.wqe_cnt) in destroy_qp_common()
1073 kvfree(qp->rq.wrid); in destroy_qp_common()
1078 if (qp->rq.wqe_cnt) in destroy_qp_common()
1575 if (qp->rq.wqe_cnt) in __mlx4_ib_modify_qp()
1576 context->rq_size_stride = ilog2(qp->rq.wqe_cnt) << 3; in __mlx4_ib_modify_qp()
1577 context->rq_size_stride |= qp->rq.wqe_shift - 4; in __mlx4_ib_modify_qp()
1780 if (qp->rq.wqe_cnt && cur_state == IB_QPS_RESET && new_state == IB_QPS_INIT) in __mlx4_ib_modify_qp()
1918 qp->rq.head = 0; in __mlx4_ib_modify_qp()
1919 qp->rq.tail = 0; in __mlx4_ib_modify_qp()
1923 if (qp->rq.wqe_cnt) in __mlx4_ib_modify_qp()
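
Lines 1575-1577 pack the receive-queue geometry into the QP context: the upper bits of rq_size_stride carry log2 of the WQE count and the low three bits carry the stride exponent minus 4 (so a 16-byte stride encodes as 0), while lines 1918-1919 simply rewind head and tail when the QP drops back to reset. A small sketch of the packing and its inverse, assuming the field is a single byte as the shifts suggest; helper names are illustrative:

#include <stdint.h>

/* Hedged sketch: mirror context->rq_size_stride from lines 1576-1577,
 * packing log2(wqe_cnt) above bit 3 and (wqe_shift - 4) in bits 2:0. */
static uint8_t pack_rq_size_stride(unsigned int log_wqe_cnt, unsigned int wqe_shift)
{
        return (uint8_t)((log_wqe_cnt << 3) | (wqe_shift - 4));
}

static void unpack_rq_size_stride(uint8_t v, unsigned int *log_wqe_cnt,
                                  unsigned int *wqe_shift)
{
        *log_wqe_cnt = v >> 3;
        *wqe_shift   = (v & 0x7) + 4;
}
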
3080 max_gs = qp->rq.max_gs; in mlx4_ib_post_recv()
3081 spin_lock_irqsave(&qp->rq.lock, flags); in mlx4_ib_post_recv()
3090 ind = qp->rq.head & (qp->rq.wqe_cnt - 1); in mlx4_ib_post_recv()
3093 if (mlx4_wq_overflow(&qp->rq, nreq, qp->ibqp.recv_cq)) { in mlx4_ib_post_recv()
3099 if (unlikely(wr->num_sge > qp->rq.max_gs)) { in mlx4_ib_post_recv()
3131 qp->rq.wrid[ind] = wr->wr_id; in mlx4_ib_post_recv()
3133 ind = (ind + 1) & (qp->rq.wqe_cnt - 1); in mlx4_ib_post_recv()
3138 qp->rq.head += nreq; in mlx4_ib_post_recv()
3146 *qp->db.db = cpu_to_be32(qp->rq.head & 0xffff); in mlx4_ib_post_recv()
3149 spin_unlock_irqrestore(&qp->rq.lock, flags); in mlx4_ib_post_recv()
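
The post_recv path (lines 3080-3149) is a classic power-of-two producer ring: overflow is checked (mlx4_wq_overflow() against the recv CQ), the slot index is head & (wqe_cnt - 1), the wr_id is remembered per slot, and only after all descriptors are written is rq.head advanced and its low 16 bits published to the doorbell record. A condensed user-space sketch of that indexing and doorbell pattern, with a simplified full check in place of mlx4_wq_overflow() and a plain variable standing in for *qp->db.db (the real driver byte-swaps with cpu_to_be32 and issues a write barrier first):

#include <stdint.h>

#define RING_DEPTH 256u                  /* must be a power of two */

struct recv_ring {
        uint64_t wrid[RING_DEPTH];       /* per-slot wr_id, as in qp->rq.wrid */
        unsigned int head;               /* producer counter, free-running */
        unsigned int tail;               /* consumer counter, advanced on completion */
        volatile uint32_t doorbell;      /* stand-in for *qp->db.db */
};

/* Hedged sketch: post one receive "work request" identified by wr_id.
 * Returns 0 on success, -1 if the ring is full. */
static int post_recv(struct recv_ring *r, uint64_t wr_id)
{
        if (r->head - r->tail >= RING_DEPTH)
                return -1;                       /* simplified overflow check */

        unsigned int ind = r->head & (RING_DEPTH - 1);
        r->wrid[ind] = wr_id;                    /* remembered for the completion */
        /* ...the scatter list for slot 'ind' would be written here... */

        r->head++;
        /* The driver issues a write barrier before this store so the hardware
         * never sees the new head before the descriptors themselves. */
        r->doorbell = r->head & 0xffff;
        return 0;
}
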
3294 qp_attr->cap.max_recv_wr = qp->rq.wqe_cnt; in mlx4_ib_query_qp()
3295 qp_attr->cap.max_recv_sge = qp->rq.max_gs; in mlx4_ib_query_qp()