
Searched refs:wqe_size (Results 1 – 12 of 12) sorted by relevance

/linux-4.1.27/drivers/infiniband/hw/mlx5/
qp.c
224 int wqe_size; in set_rq_size() local
243 wqe_size = qp->wq_sig ? sizeof(struct mlx5_wqe_signature_seg) : 0; in set_rq_size()
244 wqe_size += cap->max_recv_sge * sizeof(struct mlx5_wqe_data_seg); in set_rq_size()
245 wqe_size = roundup_pow_of_two(wqe_size); in set_rq_size()
246 wq_size = roundup_pow_of_two(cap->max_recv_wr) * wqe_size; in set_rq_size()
248 qp->rq.wqe_cnt = wq_size / wqe_size; in set_rq_size()
249 if (wqe_size > gen->max_rq_desc_sz) { in set_rq_size()
251 wqe_size, in set_rq_size()
255 qp->rq.wqe_shift = ilog2(wqe_size); in set_rq_size()
334 int wqe_size; in calc_sq_size() local
[all …]
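
The set_rq_size() lines above size the receive queue: an optional signature segment plus one data segment per receive SGE, rounded up to a power of two, validated against the device's maximum RQ descriptor size, and then multiplied by the (rounded-up) work-request count. The following is a minimal standalone sketch of that arithmetic; the segment sizes and the descriptor limit are illustrative stand-ins, not the real mlx5 structure sizes.

    /* Sketch of the RQ sizing done in set_rq_size(); constants are stand-ins. */
    #include <stdio.h>

    #define SIG_SEG_SIZE    16   /* stand-in for sizeof(struct mlx5_wqe_signature_seg) */
    #define DATA_SEG_SIZE   16   /* stand-in for sizeof(struct mlx5_wqe_data_seg) */
    #define MAX_RQ_DESC_SZ 512   /* stand-in for the device's max_rq_desc_sz */

    static unsigned int roundup_pow_of_two(unsigned int x)
    {
            unsigned int r = 1;

            while (r < x)
                    r <<= 1;
            return r;
    }

    int main(void)
    {
            int wq_sig = 1, max_recv_sge = 3, max_recv_wr = 100;

            /* per-WQE size: optional signature segment + one data segment per SGE */
            unsigned int wqe_size = wq_sig ? SIG_SEG_SIZE : 0;
            wqe_size += max_recv_sge * DATA_SEG_SIZE;
            wqe_size = roundup_pow_of_two(wqe_size);

            if (wqe_size > MAX_RQ_DESC_SZ)
                    return 1;       /* would be rejected, as in set_rq_size() */

            /* total queue size, derived WQE count */
            unsigned int wq_size = roundup_pow_of_two(max_recv_wr) * wqe_size;
            unsigned int wqe_cnt = wq_size / wqe_size;

            printf("wqe_size=%u wqe_cnt=%u\n", wqe_size, wqe_cnt);
            return 0;
    }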
odp.c
501 int wqe_size = 1 << wq->wqe_shift; in mlx5_ib_mr_responder_pfault_handler() local
513 if (wqe_size > wqe_length) { in mlx5_ib_mr_responder_pfault_handler()
531 *wqe_end = *wqe + wqe_size; in mlx5_ib_mr_responder_pfault_handler()
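
In the page-fault handler above, the receive queue stores its WQE size as a power-of-two shift, so a single WQE's extent is recovered with 1 << wqe_shift, bounds-checked against the bytes actually available, and used to compute the WQE's end pointer. A small sketch of that pattern, with illustrative names:

    /* Sketch of shift-based WQE sizing; names are illustrative stand-ins. */
    #include <stddef.h>

    static int locate_wqe(char **wqe, char **wqe_end,
                          unsigned int wqe_shift, size_t wqe_length)
    {
            size_t wqe_size = (size_t)1 << wqe_shift;

            if (wqe_size > wqe_length)
                    return -1;              /* fetched buffer is too short */

            *wqe_end = *wqe + wqe_size;     /* end of this single receive WQE */
            return 0;
    }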
/linux-4.1.27/drivers/infiniband/hw/ocrdma/
ocrdma_verbs.c
373 dev->attr.wqe_size) : 0; in _ocrdma_alloc_pd()
512 resp.wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
514 resp.dpp_wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
1972 struct ib_send_wr *wr, u32 wqe_size) in ocrdma_build_inline_sges() argument
1993 wqe_size += roundup(hdr->total_len, OCRDMA_WQE_ALIGN_BYTES); in ocrdma_build_inline_sges()
1995 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
2000 wqe_size += (wr->num_sge * sizeof(struct ocrdma_sge)); in ocrdma_build_inline_sges()
2002 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
2005 hdr->cw |= ((wqe_size / OCRDMA_WQE_STRIDE) << OCRDMA_WQE_SIZE_SHIFT); in ocrdma_build_inline_sges()
2014 u32 wqe_size = sizeof(*hdr); in ocrdma_build_send() local
[all …]
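
The ocrdma_build_inline_sges() lines above accumulate the WQE size: the header contributes a fixed amount, inline payload is rounded up to the WQE alignment, otherwise each SGE adds one ocrdma_sge (with a dummy SGE when there are none), and the result is encoded into the header control word in stride units. A sketch of that accounting, with illustrative stand-ins for the OCRDMA_* constants and struct sizes:

    /* Sketch of the WQE-size accounting in ocrdma_build_inline_sges();
     * the constants below are stand-ins, not the real driver values. */
    #include <stdint.h>

    #define WQE_ALIGN_BYTES 16  /* stand-in for OCRDMA_WQE_ALIGN_BYTES */
    #define WQE_STRIDE       8  /* stand-in for OCRDMA_WQE_STRIDE */
    #define WQE_SIZE_SHIFT  16  /* stand-in for OCRDMA_WQE_SIZE_SHIFT */
    #define HDR_SIZE        16  /* stand-in for sizeof(struct ocrdma_hdr_wqe) */
    #define SGE_SIZE        16  /* stand-in for sizeof(struct ocrdma_sge) */

    static uint32_t build_wqe_cw(int inline_data, uint32_t total_len, int num_sge)
    {
            uint32_t wqe_size = HDR_SIZE;

            if (inline_data && total_len)
                    /* inline payload lives in the WQE, rounded to the alignment */
                    wqe_size += (total_len + WQE_ALIGN_BYTES - 1) & ~(WQE_ALIGN_BYTES - 1);
            else if (num_sge)
                    wqe_size += num_sge * SGE_SIZE;
            else
                    wqe_size += SGE_SIZE;   /* zero-SGE WQE still carries one SGE slot */

            /* control word carries the size in WQE_STRIDE units */
            return (wqe_size / WQE_STRIDE) << WQE_SIZE_SHIFT;
    }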
ocrdma_abi.h
37 u32 wqe_size; member
ocrdma.h
96 u32 wqe_size; member
ocrdma_hw.c
1133 attr->wqe_size = ((rsp->wqe_rqe_stride_max_dpp_cqs & in ocrdma_get_attr()
1142 attr->wqe_size - (sizeof(struct ocrdma_hdr_wqe) + in ocrdma_get_attr()
2137 dev->attr.wqe_size, &hw_pages, &hw_page_size); in ocrdma_set_create_qp_sq_cmd()
2152 qp->sq.entry_size = dev->attr.wqe_size; in ocrdma_set_create_qp_sq_cmd()
2169 cmd->wqe_rqe_size |= (dev->attr.wqe_size << in ocrdma_set_create_qp_sq_cmd()
/linux-4.1.27/include/uapi/rdma/
ib_user_verbs.h
673 __u32 wqe_size; member
692 __u32 wqe_size; member
705 __u32 wqe_size; member
/linux-4.1.27/drivers/infiniband/core/
uverbs_cmd.c
2195 if (in_len < sizeof cmd + cmd.wqe_size * cmd.wr_count + in ib_uverbs_post_send()
2199 if (cmd.wqe_size < sizeof (struct ib_uverbs_send_wr)) in ib_uverbs_post_send()
2202 user_wr = kmalloc(cmd.wqe_size, GFP_KERNEL); in ib_uverbs_post_send()
2215 buf + sizeof cmd + i * cmd.wqe_size, in ib_uverbs_post_send()
2216 cmd.wqe_size)) { in ib_uverbs_post_send()
2305 cmd.wr_count * cmd.wqe_size + in ib_uverbs_post_send()
2350 u32 wqe_size) in ib_uverbs_unmarshall_recv() argument
2358 if (in_len < wqe_size * wr_count + in ib_uverbs_unmarshall_recv()
2362 if (wqe_size < sizeof (struct ib_uverbs_recv_wr)) in ib_uverbs_unmarshall_recv()
2365 user_wr = kmalloc(wqe_size, GFP_KERNEL); in ib_uverbs_unmarshall_recv()
[all …]
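
The uverbs lines above show how variable-sized user work requests are unmarshalled: user space declares its own wqe_size per entry, the kernel verifies the input buffer really holds wr_count entries of that size, rejects any wqe_size smaller than the struct it will read, allocates one wqe_size-sized scratch buffer, and copies entry i from offset i * wqe_size. A simplified user-space sketch of that pattern (types and the length check are reduced stand-ins for the uverbs ones):

    /* Sketch of the per-entry unmarshalling in ib_uverbs_unmarshall_recv();
     * struct recv_wr stands in for struct ib_uverbs_recv_wr. */
    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    struct recv_wr {
            uint64_t wr_id;
            uint32_t num_sge;
            uint32_t reserved;
    };

    static int unmarshall_recv(const char *buf, size_t in_len,
                               uint32_t wr_count, uint32_t wqe_size)
    {
            struct recv_wr *user_wr;
            uint32_t i;

            /* the buffer must actually contain wr_count entries of wqe_size bytes */
            if (in_len < (size_t)wqe_size * wr_count)
                    return -1;
            /* each entry must be at least as large as the struct we will read */
            if (wqe_size < sizeof(struct recv_wr))
                    return -1;

            user_wr = malloc(wqe_size);
            if (!user_wr)
                    return -1;

            for (i = 0; i < wr_count; i++) {
                    /* in the kernel this is a copy_from_user() of one entry */
                    memcpy(user_wr, buf + (size_t)i * wqe_size, wqe_size);
                    /* ... translate *user_wr into a kernel work request ... */
            }

            free(user_wr);
            return 0;
    }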
/linux-4.1.27/drivers/infiniband/hw/ehca/
ehca_qp.c
286 int wqe_size) in init_qp_queue() argument
301 wqe_size, parms->act_nr_sges, 1); in init_qp_queue()
305 EHCA_PAGESIZE, wqe_size, in init_qp_queue()
376 u32 wqe_size, q_size; in ehca_determine_small_queue() local
386 wqe_size = ehca_calc_wqe_size(act_nr_sge, is_llqp); in ehca_determine_small_queue()
387 q_size = wqe_size * (queue->max_wr + 1); in ehca_determine_small_queue()
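
In ehca_determine_small_queue() above, the per-WQE size is derived from the SGE count and the total queue footprint reserves one slot beyond max_wr. A brief sketch of that footprint arithmetic; the size formula is an illustrative stand-in for ehca_calc_wqe_size():

    /* Sketch of the queue footprint computed in ehca_determine_small_queue(). */
    #include <stdio.h>

    static unsigned int calc_wqe_size(int act_nr_sge)
    {
            /* stand-in: fixed header plus 16 bytes per SGE */
            return 64 + 16 * act_nr_sge;
    }

    int main(void)
    {
            int act_nr_sge = 4, max_wr = 255;

            unsigned int wqe_size = calc_wqe_size(act_nr_sge);
            unsigned int q_size = wqe_size * (max_wr + 1);  /* one spare slot */

            printf("wqe_size=%u q_size=%u\n", wqe_size, q_size);
            return 0;
    }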
/linux-4.1.27/drivers/net/ethernet/ibm/ehea/
ehea_qmr.c
378 int nr_pages, int wqe_size, int act_nr_sges, in ehea_qp_alloc_register() argument
385 ret = hw_queue_ctor(hw_queue, nr_pages, EHEA_PAGESIZE, wqe_size); in ehea_qp_alloc_register()
/linux-4.1.27/drivers/scsi/lpfc/
lpfc_sli4.h
415 uint32_t wqe_size; member
lpfc_init.c
9394 sli4_params->wqe_size = bf_get(wqe_size, &mqe->un.sli4_params); in lpfc_pc_sli4_params_get()