cq_count 3694 drivers/infiniband/core/uverbs_cmd.c ret = rdma_set_cq_moderation(cq, cmd.attr.cq_count, cmd.attr.cq_period);
cq_count 1952 drivers/infiniband/core/verbs.c int rdma_set_cq_moderation(struct ib_cq *cq, u16 cq_count, u16 cq_period)
cq_count 1955 drivers/infiniband/core/verbs.c cq->device->ops.modify_cq(cq, cq_count,
cq_count 158 drivers/infiniband/hw/bnxt_re/bnxt_re.h atomic_t cq_count;
cq_count 128 drivers/infiniband/hw/bnxt_re/hw_counters.c stats->value[BNXT_RE_ACTIVE_CQ] = atomic_read(&rdev->cq_count);
cq_count 2528 drivers/infiniband/hw/bnxt_re/ib_verbs.c atomic_dec(&rdev->cq_count);
cq_count 2608 drivers/infiniband/hw/bnxt_re/ib_verbs.c atomic_inc(&rdev->cq_count);
cq_count 100 drivers/infiniband/hw/bnxt_re/ib_verbs.h u16 cq_count;
cq_count 139 drivers/infiniband/hw/bnxt_re/main.c rdev->qplib_ctx.cq_count = min_t(u32, BNXT_RE_MAX_CQ_COUNT,
cq_count 155 drivers/infiniband/hw/bnxt_re/main.c vf_cqs = (rdev->qplib_ctx.cq_count * vf_pct) / num_vfs;
cq_count 741 drivers/infiniband/hw/bnxt_re/main.c atomic_set(&rdev->cq_count, 0);
cq_count 377 drivers/infiniband/hw/bnxt_re/qplib_res.c ctx->cq_tbl.max_elements = ctx->cq_count;
cq_count 169 drivers/infiniband/hw/bnxt_re/qplib_res.h u32 cq_count;
cq_count 188 drivers/infiniband/hw/bnxt_re/qplib_sp.c req.number_of_cq = cpu_to_le32(ctx->cq_count);
cq_count 978 drivers/infiniband/hw/hns/hns_roce_device.h int (*modify_cq)(struct ib_cq *cq, u16 cq_count, u16 cq_period);
cq_count 2151 drivers/infiniband/hw/hns/hns_roce_hw_v1.c static int hns_roce_v1_modify_cq(struct ib_cq *cq, u16 cq_count, u16 cq_period)
cq_count 4793 drivers/infiniband/hw/hns/hns_roce_hw_v2.c static int hns_roce_v2_modify_cq(struct ib_cq *cq, u16 cq_count, u16 cq_period)
cq_count 4813 drivers/infiniband/hw/hns/hns_roce_hw_v2.c cq_count);
cq_count 93 drivers/infiniband/hw/mlx4/cq.c int mlx4_ib_modify_cq(struct ib_cq *cq, u16 cq_count, u16 cq_period)
cq_count 98 drivers/infiniband/hw/mlx4/cq.c return mlx4_cq_modify(dev->dev, &mcq->mcq, cq_count, cq_period);
cq_count 744 drivers/infiniband/hw/mlx4/mlx4_ib.h int mlx4_ib_modify_cq(struct ib_cq *cq, u16 cq_count, u16 cq_period);
cq_count 1095 drivers/infiniband/hw/mlx5/cq.c int mlx5_ib_modify_cq(struct ib_cq *cq, u16 cq_count, u16 cq_period)
cq_count 1108 drivers/infiniband/hw/mlx5/cq.c cq_period, cq_count);
cq_count 1145 drivers/infiniband/hw/mlx5/mlx5_ib.h int mlx5_ib_modify_cq(struct ib_cq *cq, u16 cq_count, u16 cq_period);
cq_count 1809 drivers/net/ethernet/cavium/thunder/nicvf_main.c u8 cq_count, txq_count;
cq_count 1821 drivers/net/ethernet/cavium/thunder/nicvf_main.c cq_count = max(nic->rx_queues, txq_count);
cq_count 1822 drivers/net/ethernet/cavium/thunder/nicvf_main.c if (cq_count > MAX_CMP_QUEUES_PER_QS) {
cq_count 1823 drivers/net/ethernet/cavium/thunder/nicvf_main.c nic->sqs_count = roundup(cq_count, MAX_CMP_QUEUES_PER_QS);
cq_count 198 drivers/net/ethernet/cisco/enic/enic.h unsigned int cq_count;
cq_count 2043 drivers/net/ethernet/cisco/enic/enic_main.c for (i = 0; i < enic->cq_count; i++)
cq_count 2405 drivers/net/ethernet/cisco/enic/enic_main.c enic->cq_count >= n + m &&
cq_count 2413 drivers/net/ethernet/cisco/enic/enic_main.c enic->cq_count = n + m;
cq_count 2426 drivers/net/ethernet/cisco/enic/enic_main.c enic->cq_count >= 1 + m &&
cq_count 2433 drivers/net/ethernet/cisco/enic/enic_main.c enic->cq_count = 1 + m;
cq_count 2451 drivers/net/ethernet/cisco/enic/enic_main.c enic->cq_count >= 2 &&
cq_count 2457 drivers/net/ethernet/cisco/enic/enic_main.c enic->cq_count = 2;
cq_count 2476 drivers/net/ethernet/cisco/enic/enic_main.c enic->cq_count >= 2 &&
cq_count 2481 drivers/net/ethernet/cisco/enic/enic_main.c enic->cq_count = 2;
cq_count 195 drivers/net/ethernet/cisco/enic/enic_res.c for (i = 0; i < enic->cq_count; i++)
cq_count 205 drivers/net/ethernet/cisco/enic/enic_res.c enic->cq_count = vnic_dev_get_res_count(enic->vdev, RES_TYPE_CQ);
cq_count 212 drivers/net/ethernet/cisco/enic/enic_res.c enic->cq_count, enic->intr_count);
cq_count 269 drivers/net/ethernet/cisco/enic/enic_res.c for (i = 0; i < enic->cq_count; i++) {
cq_count 328 drivers/net/ethernet/cisco/enic/enic_res.c enic->cq_count, enic->intr_count,
cq_count 353 drivers/net/ethernet/cisco/enic/enic_res.c for (i = 0; i < enic->cq_count; i++) {
cq_count 4330 drivers/net/ethernet/emulex/benet/be_cmds.c res->max_cq_count = le16_to_cpu(desc->cq_count);
cq_count 4570 drivers/net/ethernet/emulex/benet/be_cmds.c nic->cq_count = 0xFFFF;
cq_count 4658 drivers/net/ethernet/emulex/benet/be_cmds.c desc.nic_vft.cq_count = cpu_to_le16(vft_res->max_cq_count);
cq_count 2199 drivers/net/ethernet/emulex/benet/be_cmds.h u16 cq_count;
cq_count 724 drivers/net/ethernet/mellanox/mlxsw/pci.c u8 cq_count = mlxsw_pci_cq_count(mlxsw_pci);
cq_count 764 drivers/net/ethernet/mellanox/mlxsw/pci.c for_each_set_bit(cqn, active_cqns, cq_count) {
cq_count 88 drivers/scsi/be2iscsi/be.h u32 cq_count;
cq_count 2060 drivers/scsi/be2iscsi/be_main.c pbe_eq->cq_count += ret;
cq_count 5217 drivers/scsi/be2iscsi/be_main.c pbe_eq->cq_count < aic->eq_prev) {
cq_count 5219 drivers/scsi/be2iscsi/be_main.c aic->eq_prev = pbe_eq->cq_count;
cq_count 5223 drivers/scsi/be2iscsi/be_main.c pps = (((u32)(pbe_eq->cq_count - aic->eq_prev) * 1000) / delta);
cq_count 5232 drivers/scsi/be2iscsi/be_main.c aic->eq_prev = pbe_eq->cq_count;
cq_count 236 drivers/scsi/fnic/fnic.h unsigned int cq_count;
cq_count 253 drivers/scsi/fnic/fnic_isr.c fnic->cq_count >= n + m + o) {
cq_count 262 drivers/scsi/fnic/fnic_isr.c fnic->cq_count = n + m + o;
cq_count 281 drivers/scsi/fnic/fnic_isr.c fnic->cq_count >= 3 &&
cq_count 288 drivers/scsi/fnic/fnic_isr.c fnic->cq_count = 3;
cq_count 309 drivers/scsi/fnic/fnic_isr.c fnic->cq_count >= 3 &&
cq_count 315 drivers/scsi/fnic/fnic_isr.c fnic->cq_count = 3;
cq_count 524 drivers/scsi/fnic/fnic_main.c for (i = 0; i < fnic->cq_count; i++)
cq_count 205 drivers/scsi/fnic/fnic_res.c fnic->cq_count = vnic_dev_get_res_count(fnic->vdev, RES_TYPE_CQ);
cq_count 223 drivers/scsi/fnic/fnic_res.c for (i = 0; i < fnic->cq_count; i++)
cq_count 252 drivers/scsi/fnic/fnic_res.c fnic->rq_count, fnic->cq_count, fnic->intr_count);
cq_count 378 drivers/scsi/fnic/fnic_res.c for (i = 0; i < fnic->cq_count; i++) {
cq_count 293 drivers/scsi/snic/snic.h unsigned int cq_count;
cq_count 170 drivers/scsi/snic/snic_isr.c if (snic->wq_count < n || snic->cq_count < n + m)
cq_count 177 drivers/scsi/snic/snic_isr.c snic->cq_count = n + m;
cq_count 252 drivers/scsi/snic/snic_main.c for (i = 0; i < snic->cq_count; i++)
cq_count 124 drivers/scsi/snic/snic_res.c snic->cq_count = svnic_dev_get_res_count(snic->vdev, RES_TYPE_CQ);
cq_count 125 drivers/scsi/snic/snic_res.c SNIC_BUG_ON(snic->cq_count == 0);
cq_count 139 drivers/scsi/snic/snic_res.c for (i = 0; i < snic->cq_count; i++)
cq_count 171 drivers/scsi/snic/snic_res.c snic->cq_count,
cq_count 197 drivers/scsi/snic/snic_res.c SNIC_BUG_ON(snic->cq_count != 2 * snic->wq_count);
cq_count 199 drivers/scsi/snic/snic_res.c for (i = snic->wq_count; i < snic->cq_count; i++) {
cq_count 230 drivers/scsi/snic/snic_res.c for (i = 0; i < snic->cq_count; i++) {
cq_count 1288 drivers/scsi/snic/snic_scsi.c for (cq_idx = snic->wq_count; cq_idx < snic->cq_count; cq_idx++) {
cq_count 451 include/linux/qed/qed_rdma_if.h u64 cq_count;
cq_count 2391 include/rdma/ib_verbs.h int (*modify_cq)(struct ib_cq *cq, u16 cq_count, u16 cq_period);
cq_count 3842 include/rdma/ib_verbs.h int rdma_set_cq_moderation(struct ib_cq *cq, u16 cq_count, u16 cq_period);
cq_count 1291 include/uapi/rdma/ib_user_verbs.h __u16 cq_count;
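The rdma_set_cq_moderation() entries above (include/rdma/ib_verbs.h, drivers/infiniband/core/verbs.c, drivers/infiniband/core/uverbs_cmd.c) show the call chain for CQ event moderation: the core forwards cq_count and cq_period to the device's modify_cq op, which the mlx4, mlx5 and hns drivers implement. A minimal sketch of an in-kernel caller follows; example_tune_cq() and the chosen values are illustrative assumptions, not code from any file in this listing.

/*
 * Minimal sketch, assuming an already-created CQ: ask the device to
 * coalesce completion events instead of raising one per completion.
 * example_tune_cq() and the values below are hypothetical; the meaning
 * of cq_period is device specific (microseconds on mlx4/mlx5 hardware).
 */
#include <rdma/ib_verbs.h>

static int example_tune_cq(struct ib_cq *cq)
{
	u16 cq_count = 64;	/* raise an event after at most 64 completions */
	u16 cq_period = 16;	/* ... or after this moderation period, whichever comes first */

	/* Returns a negative errno (e.g. -EOPNOTSUPP) if the device provides no modify_cq op. */
	return rdma_set_cq_moderation(cq, cq_count, cq_period);
}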