Lines matching refs:wqe_cnt (uses of the wqe_cnt ring depth in the mlx4 IB driver's QP code)
222 stamp = ind & qp->sq.wqe_cnt ? cpu_to_be32(0x7fffffff) : in stamp_send_wqe()
224 buf = get_send_wqe(qp, ind & (qp->sq.wqe_cnt - 1)); in stamp_send_wqe()
229 ctrl = buf = get_send_wqe(qp, n & (qp->sq.wqe_cnt - 1)); in stamp_send_wqe()
245 ctrl = wqe = get_send_wqe(qp, n & (qp->sq.wqe_cnt - 1)); in post_nop_wqe()
270 (n & qp->sq.wqe_cnt ? cpu_to_be32(1 << 31) : 0); in post_nop_wqe()
278 unsigned s = qp->sq.wqe_cnt - (ind & (qp->sq.wqe_cnt - 1)); in pad_wraparound()
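Every hit in stamp_send_wqe(), post_nop_wqe() and pad_wraparound() leans on sq.wqe_cnt being a power of two: ind & (wqe_cnt - 1) picks the slot inside the ring, while ind & wqe_cnt flips each time the unbounded index wraps, which is what selects the 0x7fffffff stamp and the 1 << 31 owner bit. A minimal sketch of that idiom, with made-up names rather than the driver's structures:

#include <stdint.h>

struct ring {
        void     *buf;
        uint32_t  wqe_cnt;    /* must be a power of two */
        uint32_t  wqe_shift;  /* log2 of the per-slot stride in bytes */
};

/* ind & (wqe_cnt - 1) is the slot index, i.e. ind % wqe_cnt for a
 * power-of-two ring. */
static void *ring_slot(const struct ring *r, uint32_t ind)
{
        return (char *)r->buf + ((ind & (r->wqe_cnt - 1)) << r->wqe_shift);
}

/* ind & wqe_cnt toggles on every wrap of the unbounded producer index,
 * mirroring the 0x7fffffff stamp and 1 << 31 owner-bit selection above. */
static uint32_t ring_wrap_bit(const struct ring *r, uint32_t ind)
{
        return (ind & r->wqe_cnt) ? (1u << 31) : 0;
}

The same masking recurs in the mlx4_ib_post_send() and mlx4_ib_post_recv() hits further down (lines 2795, 2796, 3017, 3090, 3133).
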
390 qp->rq.wqe_cnt = qp->rq.max_gs = 0; in set_rq_size()
396 qp->rq.wqe_cnt = roundup_pow_of_two(max(1U, cap->max_recv_wr)); in set_rq_size()
403 cap->max_recv_wr = qp->rq.max_post = qp->rq.wqe_cnt; in set_rq_size()
407 min(dev->dev->caps.max_wqes - MLX4_IB_SQ_MAX_SPARE, qp->rq.wqe_cnt); in set_rq_size()
492 qp->sq.wqe_cnt = roundup_pow_of_two(cap->max_send_wr * in set_kernel_sq_size()
496 if (qp->sq.wqe_cnt <= dev->dev->caps.max_wqes) in set_kernel_sq_size()
510 qp->buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_kernel_sq_size()
511 (qp->sq.wqe_cnt << qp->sq.wqe_shift); in set_kernel_sq_size()
514 qp->sq.offset = qp->rq.wqe_cnt << qp->rq.wqe_shift; in set_kernel_sq_size()
516 qp->rq.offset = qp->sq.wqe_cnt << qp->sq.wqe_shift; in set_kernel_sq_size()
521 (qp->sq.wqe_cnt - qp->sq_spare_wqes) / qp->sq_max_wqes_per_wr; in set_kernel_sq_size()
542 qp->sq.wqe_cnt = 1 << ucmd->log_sq_bb_count; in set_user_sq_size()
545 qp->buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_user_sq_size()
546 (qp->sq.wqe_cnt << qp->sq.wqe_shift); in set_user_sq_size()
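set_rq_size() and set_kernel_sq_size() round both queue depths up to powers of two with roundup_pow_of_two(), while the user-QP path at line 542 takes the SQ depth straight from userspace as 1 << log_sq_bb_count; either way the two rings sit back to back in one buffer, so buf_size and the second ring's offset are plain wqe_cnt << wqe_shift sums. A rough sketch of that arithmetic, assuming the wqe_shift values are already chosen and ignoring the driver's spare-WQE and ring-ordering details:

#include <stdint.h>

/* Stand-in for roundup_pow_of_two(). */
static uint32_t round_pow2(uint32_t v)
{
        uint32_t r = 1;

        while (r < v)
                r <<= 1;
        return r;
}

struct qp_layout {
        uint32_t rq_wqe_cnt, rq_wqe_shift, rq_offset;
        uint32_t sq_wqe_cnt, sq_wqe_shift, sq_offset;
        uint32_t buf_size;
};

static void size_queues(struct qp_layout *qp,
                        uint32_t max_recv_wr, uint32_t max_send_wr)
{
        qp->rq_wqe_cnt = round_pow2(max_recv_wr ? max_recv_wr : 1);
        qp->sq_wqe_cnt = round_pow2(max_send_wr ? max_send_wr : 1);

        /* Each ring occupies wqe_cnt << wqe_shift bytes; the buffer holds both. */
        qp->buf_size = (qp->rq_wqe_cnt << qp->rq_wqe_shift) +
                       (qp->sq_wqe_cnt << qp->sq_wqe_shift);

        /* RQ first, SQ behind it (the driver may pick the opposite order). */
        qp->rq_offset = 0;
        qp->sq_offset = qp->rq_wqe_cnt << qp->rq_wqe_shift;
}
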
556 kmalloc(sizeof (struct mlx4_ib_buf) * qp->rq.wqe_cnt, in alloc_proxy_bufs()
560 for (i = 0; i < qp->rq.wqe_cnt; i++) { in alloc_proxy_bufs()
594 for (i = 0; i < qp->rq.wqe_cnt; i++) { in free_proxy_bufs()
799 qp->sq.wrid = kmalloc(qp->sq.wqe_cnt * sizeof(u64), gfp); in create_qp_common()
801 qp->sq.wrid = __vmalloc(qp->sq.wqe_cnt * sizeof(u64), in create_qp_common()
803 qp->rq.wrid = kmalloc(qp->rq.wqe_cnt * sizeof(u64), gfp); in create_qp_common()
805 qp->rq.wrid = __vmalloc(qp->rq.wqe_cnt * sizeof(u64), in create_qp_common()
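Lines 799-805 size the wr_id shadow arrays by wqe_cnt, one u64 per slot, trying kmalloc() first and falling back to __vmalloc() for large rings; mlx4_ib_post_send() then stores the caller's wr_id at the masked slot index (line 2796). A simplified sketch of that allocation, using kvmalloc_array() in place of the open-coded fallback and a made-up struct rather than the driver's:

#include <linux/types.h>
#include <linux/slab.h>
#include <linux/mm.h>      /* kvmalloc_array(), kvfree() */
#include <linux/errno.h>

/* Minimal stand-in for the sq/rq bookkeeping; not the driver's struct. */
struct wq_shadow {
        u64 *wrid;
        u32  wqe_cnt;
};

/* One wr_id slot per WQE, sized by wqe_cnt. */
static int alloc_wrid(struct wq_shadow *sq, struct wq_shadow *rq, gfp_t gfp)
{
        sq->wrid = kvmalloc_array(sq->wqe_cnt, sizeof(u64), gfp);
        rq->wrid = kvmalloc_array(rq->wqe_cnt, sizeof(u64), gfp);
        if (!sq->wrid || !rq->wrid) {
                kvfree(sq->wrid);   /* kvfree() tolerates NULL */
                kvfree(rq->wrid);
                return -ENOMEM;
        }
        return 0;
}
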
1067 if (qp->rq.wqe_cnt) in destroy_qp_common()
1078 if (qp->rq.wqe_cnt) in destroy_qp_common()
1575 if (qp->rq.wqe_cnt) in __mlx4_ib_modify_qp()
1576 context->rq_size_stride = ilog2(qp->rq.wqe_cnt) << 3; in __mlx4_ib_modify_qp()
1579 if (qp->sq.wqe_cnt) in __mlx4_ib_modify_qp()
1580 context->sq_size_stride = ilog2(qp->sq.wqe_cnt) << 3; in __mlx4_ib_modify_qp()
1780 if (qp->rq.wqe_cnt && cur_state == IB_QPS_RESET && new_state == IB_QPS_INIT) in __mlx4_ib_modify_qp()
1860 for (i = 0; i < qp->sq.wqe_cnt; ++i) { in __mlx4_ib_modify_qp()
1923 if (qp->rq.wqe_cnt) in __mlx4_ib_modify_qp()
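Lines 1575-1580 program the queue sizes into the hardware context as log2 values: ilog2(wqe_cnt) lands in bits 7:3 of the size_stride byte, and a zero-depth queue leaves those bits clear; the low three bits are left for the stride encoding, which is not shown in the hits above. A hedged sketch of that packing, with invented names:

#include <stdint.h>

/* Integer log2 of a power-of-two value (stand-in for ilog2()). */
static uint8_t log2_pow2(uint32_t v)
{
        uint8_t l = 0;

        while (v > 1) {
                v >>= 1;
                l++;
        }
        return l;
}

/* Pack log2(wqe_cnt) into bits 7:3; stride_code stands in for whatever
 * the driver ORs into bits 2:0. */
static uint8_t pack_size_stride(uint32_t wqe_cnt, uint8_t stride_code)
{
        uint8_t v = wqe_cnt ? (uint8_t)(log2_pow2(wqe_cnt) << 3) : 0;

        return v | (stride_code & 0x7);
}
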
2795 ctrl = wqe = get_send_wqe(qp, ind & (qp->sq.wqe_cnt - 1)); in mlx4_ib_post_send()
2796 qp->sq.wrid[(qp->sq.head + nreq) & (qp->sq.wqe_cnt - 1)] = wr->wr_id; in mlx4_ib_post_send()
3017 (ind & qp->sq.wqe_cnt ? cpu_to_be32(1 << 31) : 0) | blh; in mlx4_ib_post_send()
3090 ind = qp->rq.head & (qp->rq.wqe_cnt - 1); in mlx4_ib_post_recv()
3133 ind = (ind + 1) & (qp->rq.wqe_cnt - 1); in mlx4_ib_post_recv()
3294 qp_attr->cap.max_recv_wr = qp->rq.wqe_cnt; in mlx4_ib_query_qp()
3298 qp_attr->cap.max_send_wr = qp->sq.wqe_cnt; in mlx4_ib_query_qp()