cqe_size          105 drivers/infiniband/hw/mlx4/cq.c 	err = mlx4_buf_alloc(dev->dev, nent * dev->dev->caps.cqe_size,
cqe_size          111 drivers/infiniband/hw/mlx4/cq.c 	buf->entry_size = dev->dev->caps.cqe_size;
cqe_size          143 drivers/infiniband/hw/mlx4/cq.c 	int cqe_size = dev->dev->caps.cqe_size;
cqe_size          147 drivers/infiniband/hw/mlx4/cq.c 	*umem = ib_umem_get(udata, buf_addr, cqe * cqe_size,
cqe_size          359 drivers/infiniband/hw/mlx4/cq.c 	int cqe_size = cq->buf.entry_size;
cqe_size          360 drivers/infiniband/hw/mlx4/cq.c 	int cqe_inc = cqe_size == 64 ? 1 : 0;
cqe_size          369 drivers/infiniband/hw/mlx4/cq.c 		memcpy(new_cqe, get_cqe(cq, i & cq->ibcq.cqe), cqe_size);
cqe_size         1115 drivers/infiniband/hw/mlx4/main.c 		resp.cqe_size	      = dev->dev->caps.cqe_size;
cqe_size          660 drivers/infiniband/hw/mlx5/cq.c 			     int cqe_size)
cqe_size          663 drivers/infiniband/hw/mlx5/cq.c 	u8 log_wq_stride = 6 + (cqe_size == 128 ? 1 : 0);
cqe_size          664 drivers/infiniband/hw/mlx5/cq.c 	u8 log_wq_sz     = ilog2(cqe_size);
cqe_size          668 drivers/infiniband/hw/mlx5/cq.c 				       nent * cqe_size,
cqe_size          676 drivers/infiniband/hw/mlx5/cq.c 	buf->cqe_size = cqe_size;
cqe_size          706 drivers/infiniband/hw/mlx5/cq.c 			  int *cqe_size, int *index, int *inlen)
cqe_size          729 drivers/infiniband/hw/mlx5/cq.c 	if (ucmd.cqe_size != 64 && ucmd.cqe_size != 128)
cqe_size          732 drivers/infiniband/hw/mlx5/cq.c 	*cqe_size = ucmd.cqe_size;
cqe_size          735 drivers/infiniband/hw/mlx5/cq.c 		ib_umem_get(udata, ucmd.buf_addr, entries * ucmd.cqe_size,
cqe_size          749 drivers/infiniband/hw/mlx5/cq.c 		    ucmd.buf_addr, entries * ucmd.cqe_size, npages, page_shift, ncont);
cqe_size          771 drivers/infiniband/hw/mlx5/cq.c 		if (!((*cqe_size == 128 &&
cqe_size          773 drivers/infiniband/hw/mlx5/cq.c 		      (*cqe_size == 64  &&
cqe_size          777 drivers/infiniband/hw/mlx5/cq.c 				     *cqe_size);
cqe_size          796 drivers/infiniband/hw/mlx5/cq.c 		if (*cqe_size != 128 ||
cqe_size          801 drivers/infiniband/hw/mlx5/cq.c 				     *cqe_size);
cqe_size          840 drivers/infiniband/hw/mlx5/cq.c 		cqe64 = buf->cqe_size == 64 ? cqe : cqe + 64;
cqe_size          846 drivers/infiniband/hw/mlx5/cq.c 			    int entries, int cqe_size,
cqe_size          859 drivers/infiniband/hw/mlx5/cq.c 	cq->mcq.cqe_sz = cqe_size;
cqe_size          861 drivers/infiniband/hw/mlx5/cq.c 	err = alloc_cq_frag_buf(dev, &cq->buf, entries, cqe_size);
cqe_size          923 drivers/infiniband/hw/mlx5/cq.c 	int cqe_size;
cqe_size          949 drivers/infiniband/hw/mlx5/cq.c 		err = create_cq_user(dev, udata, cq, entries, &cqb, &cqe_size,
cqe_size          954 drivers/infiniband/hw/mlx5/cq.c 		cqe_size = cache_line_size() == 128 ? 128 : 64;
cqe_size          955 drivers/infiniband/hw/mlx5/cq.c 		err = create_cq_kernel(dev, cq, entries, cqe_size, &cqb,
cqe_size          967 drivers/infiniband/hw/mlx5/cq.c 	cq->cqe_size = cqe_size;
cqe_size          971 drivers/infiniband/hw/mlx5/cq.c 		 cqe_sz_to_mlx_sz(cqe_size,
cqe_size         1117 drivers/infiniband/hw/mlx5/cq.c 		       int *page_shift, int *cqe_size)
cqe_size         1132 drivers/infiniband/hw/mlx5/cq.c 	if (ucmd.cqe_size && SIZE_MAX / ucmd.cqe_size <= entries - 1)
cqe_size         1136 drivers/infiniband/hw/mlx5/cq.c 			   (size_t)ucmd.cqe_size * entries,
cqe_size         1147 drivers/infiniband/hw/mlx5/cq.c 	*cqe_size = ucmd.cqe_size;
cqe_size         1153 drivers/infiniband/hw/mlx5/cq.c 			 int entries, int cqe_size)
cqe_size         1161 drivers/infiniband/hw/mlx5/cq.c 	err = alloc_cq_frag_buf(dev, cq->resize_buf, entries, cqe_size);
cqe_size         1187 drivers/infiniband/hw/mlx5/cq.c 	ssize = cq->buf.cqe_size;
cqe_size         1188 drivers/infiniband/hw/mlx5/cq.c 	dsize = cq->resize_buf->cqe_size;
cqe_size         1240 drivers/infiniband/hw/mlx5/cq.c 	int uninitialized_var(cqe_size);
cqe_size         1266 drivers/infiniband/hw/mlx5/cq.c 				  &cqe_size);
cqe_size         1268 drivers/infiniband/hw/mlx5/cq.c 		cqe_size = 64;
cqe_size         1269 drivers/infiniband/hw/mlx5/cq.c 		err = resize_kernel(dev, cq, entries, cqe_size);
cqe_size         1308 drivers/infiniband/hw/mlx5/cq.c 		 cqe_sz_to_mlx_sz(cqe_size,
cqe_size         1372 drivers/infiniband/hw/mlx5/cq.c 	return cq->cqe_size;
cqe_size          454 drivers/infiniband/hw/mlx5/mlx5_ib.h 	int			cqe_size;
cqe_size          518 drivers/infiniband/hw/mlx5/mlx5_ib.h 	int			cqe_size;
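In the mlx5 hits above, the CQE size arrives from userspace (ucmd.cqe_size), is restricted to 64 or 128 bytes, and is guarded against overflow before being multiplied into a buffer length. A minimal userspace-style sketch of that pattern, assuming a hypothetical validate_cqe_size() helper; only the 64/128 check and the SIZE_MAX guard are taken from the cq.c lines above:

/* Sketch only: validate a user-requested CQE size before sizing a buffer.
 * Only 64- and 128-byte CQEs are accepted, and entries * cqe_size is
 * checked for overflow before it is used as a length. */
#include <stddef.h>
#include <stdint.h>
#include <errno.h>

static int validate_cqe_size(size_t cqe_size, size_t entries, size_t *buf_len)
{
	if (cqe_size != 64 && cqe_size != 128)
		return -EINVAL;
	/* Overflow guard, mirroring the SIZE_MAX check in the hits above. */
	if (entries == 0 || SIZE_MAX / cqe_size <= entries - 1)
		return -EINVAL;
	*buf_len = entries * cqe_size;
	return 0;
}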
cqe_size         1791 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	u32 hw_pages, cqe_size, page_size, cqe_count;
cqe_size         1804 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 		cqe_size = OCRDMA_DPP_CQE_SIZE;
cqe_size         1809 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 		cqe_size = sizeof(struct ocrdma_cqe);
cqe_size         1813 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	cq->len = roundup(max_hw_cqe * cqe_size, OCRDMA_MIN_Q_PAGE_SIZE);
cqe_size         1833 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	cqe_count = cq->len / cqe_size;
cqe_size         1862 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 		cmd->cmd.pdid_cqecnt = (cq->len / cqe_size);
cqe_size         1864 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 		cmd->cmd.pdid_cqecnt = (cq->len / cqe_size) - 1;
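The ocrdma lines above size the CQ in whole minimum-size pages and then derive the CQE count back from the rounded byte length. A small sketch of that arithmetic, with an illustrative page-size constant standing in for OCRDMA_MIN_Q_PAGE_SIZE:

/* Sketch only: round the CQ byte length up to a page multiple, then
 * recompute how many CQEs fit, as in ocrdma_hw.c above. */
#include <stddef.h>

#define DEMO_MIN_Q_PAGE_SIZE 4096u	/* illustrative stand-in */

static size_t demo_roundup(size_t val, size_t align)
{
	return ((val + align - 1) / align) * align;
}

static size_t demo_cqe_count(size_t max_hw_cqe, size_t cqe_size)
{
	size_t len = demo_roundup(max_hw_cqe * cqe_size, DEMO_MIN_Q_PAGE_SIZE);

	return len / cqe_size;
}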
cqe_size          315 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c 	size_t cqe_dma_size, cqe_size;
cqe_size          319 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c 	cqe_size = wq->q_depth * sizeof(*rq->cqe);
cqe_size          320 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c 	rq->cqe = vzalloc(cqe_size);
cqe_size          290 drivers/net/ethernet/mellanox/mlx4/cq.c static int mlx4_init_user_cqes(void *buf, int entries, int cqe_size)
cqe_size          292 drivers/net/ethernet/mellanox/mlx4/cq.c 	int entries_per_copy = PAGE_SIZE / cqe_size;
cqe_size          317 drivers/net/ethernet/mellanox/mlx4/cq.c 		err = copy_to_user((void __user *)buf, init_ents, entries * cqe_size) ?
cqe_size          329 drivers/net/ethernet/mellanox/mlx4/cq.c 				  int cqe_size)
cqe_size          334 drivers/net/ethernet/mellanox/mlx4/cq.c 		memset(buf->direct.buf, 0xcc, entries * cqe_size);
cqe_size          394 drivers/net/ethernet/mellanox/mlx4/cq.c 						  dev->caps.cqe_size);
cqe_size          399 drivers/net/ethernet/mellanox/mlx4/cq.c 					      dev->caps.cqe_size);
cqe_size           62 drivers/net/ethernet/mellanox/mlx4/en_cq.c 	cq->buf_size = cq->size * mdev->dev->caps.cqe_size;
cqe_size         1669 drivers/net/ethernet/mellanox/mlx4/en_netdev.c 			cqe = mlx4_en_get_cqe(cq->buf, j, priv->cqe_size) +
cqe_size         3312 drivers/net/ethernet/mellanox/mlx4/en_netdev.c 	priv->cqe_factor = (mdev->dev->caps.cqe_size == 64) ? 1 : 0;
cqe_size         3313 drivers/net/ethernet/mellanox/mlx4/en_netdev.c 	priv->cqe_size = mdev->dev->caps.cqe_size;
cqe_size          692 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	cqe = mlx4_en_get_cqe(cq->buf, index, priv->cqe_size) + factor;
cqe_size          899 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		cqe = mlx4_en_get_cqe(cq->buf, index, priv->cqe_size) + factor;
cqe_size          423 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	cqe = mlx4_en_get_cqe(buf, index, priv->cqe_size) + factor;
cqe_size          479 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		cqe = mlx4_en_get_cqe(buf, index, priv->cqe_size) + factor;
cqe_size         1941 drivers/net/ethernet/mellanox/mlx4/fw.c 		dev->caps.cqe_size   = 64;
cqe_size         1944 drivers/net/ethernet/mellanox/mlx4/fw.c 		dev->caps.cqe_size   = 32;
cqe_size         1951 drivers/net/ethernet/mellanox/mlx4/fw.c 		dev->caps.cqe_size = cache_line_size();
cqe_size         2175 drivers/net/ethernet/mellanox/mlx4/fw.c 		param->cqe_size = 1 << ((byte_field &
cqe_size          202 drivers/net/ethernet/mellanox/mlx4/fw.h 	u16 cqe_size; /* For use only when CQE stride feature enabled */
cqe_size         1049 drivers/net/ethernet/mellanox/mlx4/main.c 		dev->caps.cqe_size   = 64;
cqe_size         1052 drivers/net/ethernet/mellanox/mlx4/main.c 		dev->caps.cqe_size   = 32;
cqe_size         1061 drivers/net/ethernet/mellanox/mlx4/main.c 		dev->caps.cqe_size = hca_param->cqe_size;
cqe_size          576 drivers/net/ethernet/mellanox/mlx4/mlx4_en.h 	int cqe_size;
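The mlx4 hits above repeatedly apply a "factor" (or cqe_inc in the IB resize path) when reading CQEs: the firmware-reported cqe_size is 32 or 64 bytes, and with 64-byte entries the 32-byte CQE layout the driver parses sits in the second half of each entry. A userspace-style sketch of that addressing, with a placeholder struct standing in for struct mlx4_cqe:

/* Sketch only: index the ring by cqe_size, then add one 32-byte "factor"
 * unit when 64-byte CQEs are in use, mirroring mlx4_en_get_cqe() + factor
 * in the en_rx.c/en_tx.c lines above. */
#include <stdint.h>

struct demo_cqe {		/* placeholder for the 32-byte hardware CQE */
	uint8_t data[32];
};

static struct demo_cqe *demo_get_cqe(void *buf, int index, int cqe_size)
{
	return (struct demo_cqe *)((uint8_t *)buf + index * cqe_size);
}

static struct demo_cqe *demo_read_cqe(void *buf, int index, int cqe_size)
{
	int factor = (cqe_size == 64) ? 1 : 0;

	/* Pointer arithmetic on demo_cqe * advances in 32-byte steps. */
	return demo_get_cqe(buf, index, cqe_size) + factor;
}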
cqe_size          661 drivers/scsi/bnx2i/bnx2i.h 	u32 cqe_size;
cqe_size          171 drivers/scsi/bnx2i/bnx2i_hwi.c 		if (cq_index > ep->qp.cqe_size * 2)
cqe_size          172 drivers/scsi/bnx2i/bnx2i_hwi.c 			cq_index -= ep->qp.cqe_size * 2;
cqe_size         1122 drivers/scsi/bnx2i/bnx2i_hwi.c 	ep->qp.cqe_size = hba->max_cqes;
cqe_size         2060 drivers/scsi/bnx2i/bnx2i_hwi.c 		if (qp->cqe_exp_seq_sn == (qp->cqe_size * 2 + 1))
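In the bnx2i lines above, qp.cqe_size holds a CQE count rather than a byte size (it is set from hba->max_cqes), and completion indices wrap modulo twice that count. A small sketch of that wrap, using a hypothetical demo_wrap_cq_index() helper:

/* Sketch only: wrap a CQ index at 2 * cqe_count, as in bnx2i_hwi.c above
 * (where qp.cqe_size is an entry count, not a size in bytes). */
static unsigned int demo_wrap_cq_index(unsigned int cq_index, unsigned int cqe_count)
{
	if (cq_index > cqe_count * 2)
		cq_index -= cqe_count * 2;
	return cq_index;
}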
cqe_size         11629 drivers/scsi/lpfc/lpfc_init.c 	sli4_params->cqe_size = bf_get(cqe_size, &mqe->un.sli4_params);
cqe_size          510 drivers/scsi/lpfc/lpfc_sli4.h 	uint32_t cqe_size;
cqe_size          619 include/linux/mlx4/device.h 	u32			cqe_size;
cqe_size           71 include/uapi/rdma/mlx4-abi.h 	__u32	cqe_size;
cqe_size          274 include/uapi/rdma/mlx5-abi.h 	__u32	cqe_size;
cqe_size          287 include/uapi/rdma/mlx5-abi.h 	__u16	cqe_size;
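Finally, the uapi hits show cqe_size crossing the user/kernel ABI in both directions: mlx4 reports the device's CQE size back to userspace (resp.cqe_size in main.c above), while mlx5 lets userspace request one per CQ (ucmd.cqe_size above). A deliberately simplified, hypothetical pair of structs illustrating those two directions; the authoritative layouts are the ones in the rdma uapi headers listed above:

/* Illustrative only: the two directions cqe_size travels across the ABI. */
#include <stdint.h>

struct demo_create_cq_cmd {		/* userspace -> kernel (mlx5-style) */
	uint64_t buf_addr;
	uint32_t cqe_size;		/* requested size: 64 or 128 */
	uint32_t reserved;
};

struct demo_ucontext_resp {		/* kernel -> userspace (mlx4-style) */
	uint32_t cqe_size;		/* device CQE size: 32 or 64 */
	uint32_t reserved;
};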