queue_size        475 drivers/dma/fsl-qdma.c 	unsigned int queue_size[FSL_QDMA_QUEUE_MAX];
queue_size        489 drivers/dma/fsl-qdma.c 					     queue_size, queue_num);
queue_size        496 drivers/dma/fsl-qdma.c 			if (queue_size[i] > FSL_QDMA_CIRCULAR_DESC_SIZE_MAX ||
queue_size        497 drivers/dma/fsl-qdma.c 			    queue_size[i] < FSL_QDMA_CIRCULAR_DESC_SIZE_MIN) {
queue_size        507 drivers/dma/fsl-qdma.c 					   queue_size[i],
queue_size        514 drivers/dma/fsl-qdma.c 			queue_temp->n_cq = queue_size[i];
queue_size        111 drivers/firmware/tegra/bpmp-tegra186.c 	size_t message_size, queue_size;
queue_size        121 drivers/firmware/tegra/bpmp-tegra186.c 	queue_size = tegra_ivc_total_queue_size(message_size);
queue_size        122 drivers/firmware/tegra/bpmp-tegra186.c 	offset = queue_size * index;
queue_size        549 drivers/firmware/tegra/ivc.c unsigned tegra_ivc_total_queue_size(unsigned queue_size)
queue_size        551 drivers/firmware/tegra/ivc.c 	if (!IS_ALIGNED(queue_size, TEGRA_IVC_ALIGN)) {
queue_size        553 drivers/firmware/tegra/ivc.c 		       __func__, queue_size, TEGRA_IVC_ALIGN);
queue_size        557 drivers/firmware/tegra/ivc.c 	return queue_size + sizeof(struct tegra_ivc_header);
queue_size        618 drivers/firmware/tegra/ivc.c 	size_t queue_size;
queue_size        636 drivers/firmware/tegra/ivc.c 	queue_size = tegra_ivc_total_queue_size(num_frames * frame_size);
queue_size        639 drivers/firmware/tegra/ivc.c 		ivc->rx.phys = dma_map_single(peer, rx, queue_size,
queue_size        644 drivers/firmware/tegra/ivc.c 		ivc->tx.phys = dma_map_single(peer, tx, queue_size,
queue_size        647 drivers/firmware/tegra/ivc.c 			dma_unmap_single(peer, ivc->rx.phys, queue_size,
queue_size        418 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v10.c 		uint32_t queue_size =
queue_size        421 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v10.c 		uint64_t guessed_wptr = m->cp_hqd_pq_rptr & (queue_size - 1);
queue_size        423 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v10.c 		if ((m->cp_hqd_pq_wptr_lo & (queue_size - 1)) < guessed_wptr)
queue_size        424 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v10.c 			guessed_wptr += queue_size;
queue_size        425 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v10.c 		guessed_wptr += m->cp_hqd_pq_wptr_lo & ~(queue_size - 1);
queue_size        319 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v9.c 		uint32_t queue_size =
queue_size        322 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v9.c 		uint64_t guessed_wptr = m->cp_hqd_pq_rptr & (queue_size - 1);
queue_size        324 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v9.c 		if ((m->cp_hqd_pq_wptr_lo & (queue_size - 1)) < guessed_wptr)
queue_size        325 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v9.c 			guessed_wptr += queue_size;
queue_size        326 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v9.c 		guessed_wptr += m->cp_hqd_pq_wptr_lo & ~(queue_size - 1);
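
Note: the two hqd_load hits above (amdgpu_amdkfd_gfx_v9.c:319-326 and amdgpu_amdkfd_gfx_v10.c:418-425) rely on queue_size being a power of two so that (queue_size - 1) acts as a ring mask. A minimal standalone sketch of that write-pointer reconstruction, with hypothetical values and plain integers instead of the real MQD fields:

#include <stdint.h>
#include <stdio.h>

/*
 * Minimal sketch of the "guessed wptr" reconstruction quoted above:
 * anchor the 32-bit hardware write pointer to the read pointer so the
 * result increases monotonically. Assumes queue_size is a power of two;
 * rptr/wptr_lo below are hypothetical values, not the MQD registers.
 */
static uint64_t guess_wptr(uint32_t rptr, uint32_t wptr_lo, uint32_t queue_size)
{
	uint64_t guessed = rptr & (queue_size - 1);

	/* if wptr's in-ring offset trails rptr's, it has wrapped once */
	if ((wptr_lo & (queue_size - 1)) < guessed)
		guessed += queue_size;

	/* re-attach the bits of wptr above the ring size */
	guessed += wptr_lo & ~(queue_size - 1);

	return guessed;
}

int main(void)
{
	/* ring of 0x1000 units, raw wptr wrapped just past the read pointer */
	printf("guessed wptr = %llu\n",
	       (unsigned long long)guess_wptr(0x0ff0, 0x0008, 0x1000));
	return 0;
}
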
queue_size        202 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	q_properties->queue_size = args->ring_size;
queue_size        236 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 			q_properties->queue_size, args->ring_size);
queue_size        377 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	properties.queue_size = args->ring_size;
queue_size         38 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 		enum kfd_queue_type type, unsigned int queue_size)
queue_size         48 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 			queue_size);
queue_size         81 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 	retval = kfd_gtt_sa_allocate(dev, queue_size, &kq->pq);
queue_size         83 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 		pr_err("Failed to init pq queues size %d\n", queue_size);
queue_size         90 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 	retval = kq->ops_asic_specific.initialize(kq, dev, type, queue_size);
queue_size        112 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 	memset(kq->pq_kernel_addr, 0, queue_size);
queue_size        116 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 	prop.queue_size = queue_size;
queue_size        228 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 	queue_size_dwords = kq->queue->properties.queue_size / 4;
queue_size        292 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.c 			(kq->queue->properties.queue_size / 4);
queue_size         57 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue.h 			enum kfd_queue_type type, unsigned int queue_size);
queue_size         27 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_cik.c 			enum kfd_queue_type type, unsigned int queue_size);
queue_size         39 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_cik.c 			enum kfd_queue_type type, unsigned int queue_size)
queue_size         31 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v10.c 			enum kfd_queue_type type, unsigned int queue_size);
queue_size         43 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v10.c 			enum kfd_queue_type type, unsigned int queue_size)
queue_size         30 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 			enum kfd_queue_type type, unsigned int queue_size);
queue_size         42 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 			enum kfd_queue_type type, unsigned int queue_size)
queue_size         30 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 			enum kfd_queue_type type, unsigned int queue_size);
queue_size         42 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 			enum kfd_queue_type type, unsigned int queue_size)
queue_size        171 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c 	uint32_t wptr_mask = (uint32_t)((p->queue_size / 4) - 1);
queue_size        205 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c 	m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;
queue_size        241 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c 	m->sdma_rlc_rb_cntl = order_base_2(q->queue_size / 4)
queue_size        328 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c 	m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;
queue_size        184 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 			ffs(q->queue_size / sizeof(unsigned int)) - 1 - 1;
queue_size        234 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 	q->is_active = (q->queue_size > 0 &&
queue_size        355 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 	m->sdmax_rlcx_rb_cntl = (ffs(q->queue_size / sizeof(unsigned int)) - 1)
queue_size        373 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 	q->is_active = (q->queue_size > 0 &&
queue_size        202 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c 	m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;
queue_size        373 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c 	m->sdmax_rlcx_rb_cntl = order_base_2(q->queue_size / 4)
queue_size        162 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_vi.c 	uint32_t wptr_mask = (uint32_t)((p->queue_size / 4) - 1);
queue_size        180 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_vi.c 	m->cp_hqd_pq_control |=	order_base_2(q->queue_size / 4) - 1;
queue_size        354 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_vi.c 	m->sdmax_rlcx_rb_cntl = order_base_2(q->queue_size / 4)
queue_size        422 drivers/gpu/drm/amd/amdkfd/kfd_priv.h 	uint64_t  queue_size;
queue_size        451 drivers/gpu/drm/amd/amdkfd/kfd_priv.h #define QUEUE_IS_ACTIVE(q) ((q).queue_size > 0 &&	\
queue_size        423 drivers/gpu/drm/amd/amdkfd/kfd_process_queue_manager.c 	pqn->q->properties.queue_size = p->queue_size;
queue_size         34 drivers/gpu/drm/amd/amdkfd/kfd_queue.c 	pr_debug("Queue Size: %llu\n", q->queue_size);
queue_size         51 drivers/gpu/drm/amd/amdkfd/kfd_queue.c 	pr_debug("Queue Size: %llu\n", q->properties.queue_size);
queue_size        568 drivers/infiniband/ulp/srp/ib_srp.c 	recv_cq = ib_alloc_cq(dev->dev, ch, target->queue_size + 1,
queue_size        575 drivers/infiniband/ulp/srp/ib_srp.c 	send_cq = ib_alloc_cq(dev->dev, ch, m * target->queue_size,
queue_size        583 drivers/infiniband/ulp/srp/ib_srp.c 	init_attr->cap.max_send_wr     = m * target->queue_size;
queue_size        584 drivers/infiniband/ulp/srp/ib_srp.c 	init_attr->cap.max_recv_wr     = target->queue_size + 1;
queue_size        723 drivers/infiniband/ulp/srp/ib_srp.c 		for (i = 0; i < target->queue_size; ++i)
queue_size        729 drivers/infiniband/ulp/srp/ib_srp.c 		for (i = 0; i < target->queue_size; ++i)
queue_size       1428 drivers/infiniband/ulp/srp/ib_srp.c 		for (j = 0; j < target->queue_size; ++j)
queue_size       2454 drivers/infiniband/ulp/srp/ib_srp.c 	ch->rx_ring = kcalloc(target->queue_size, sizeof(*ch->rx_ring),
queue_size       2458 drivers/infiniband/ulp/srp/ib_srp.c 	ch->tx_ring = kcalloc(target->queue_size, sizeof(*ch->tx_ring),
queue_size       2463 drivers/infiniband/ulp/srp/ib_srp.c 	for (i = 0; i < target->queue_size; ++i) {
queue_size       2471 drivers/infiniband/ulp/srp/ib_srp.c 	for (i = 0; i < target->queue_size; ++i) {
queue_size       2484 drivers/infiniband/ulp/srp/ib_srp.c 	for (i = 0; i < target->queue_size; ++i) {
queue_size       2573 drivers/infiniband/ulp/srp/ib_srp.c 	for (i = 0; i < target->queue_size; i++) {
queue_size       3641 drivers/infiniband/ulp/srp/ib_srp.c 			target->queue_size = token + SRP_RSP_SQ_SIZE +
queue_size       3810 drivers/infiniband/ulp/srp/ib_srp.c 	target->queue_size	= SRP_DEFAULT_QUEUE_SIZE;
queue_size       3826 drivers/infiniband/ulp/srp/ib_srp.c 	target->req_ring_size = target->queue_size - SRP_TSK_MGMT_SQ_SIZE;
queue_size        231 drivers/infiniband/ulp/srp/ib_srp.h 	int			queue_size;
queue_size       1019 drivers/misc/genwqe/card_ddcb.c 	unsigned int queue_size;
queue_size       1025 drivers/misc/genwqe/card_ddcb.c 	queue_size = roundup(GENWQE_DDCB_MAX * sizeof(struct ddcb), PAGE_SIZE);
queue_size       1035 drivers/misc/genwqe/card_ddcb.c 	queue->ddcb_vaddr = __genwqe_alloc_consistent(cd, queue_size,
queue_size       1083 drivers/misc/genwqe/card_ddcb.c 	__genwqe_free_consistent(cd, queue_size, queue->ddcb_vaddr,
queue_size       1098 drivers/misc/genwqe/card_ddcb.c 	unsigned int queue_size;
queue_size       1100 drivers/misc/genwqe/card_ddcb.c 	queue_size = roundup(queue->ddcb_max * sizeof(struct ddcb), PAGE_SIZE);
queue_size       1106 drivers/misc/genwqe/card_ddcb.c 		__genwqe_free_consistent(cd, queue_size, queue->ddcb_vaddr,
queue_size        275 drivers/misc/vmw_vmci/vmci_queue_pair.c 	size_t queue_size = sizeof(*queue) + sizeof(*queue->kernel_if);
queue_size        282 drivers/misc/vmw_vmci/vmci_queue_pair.c 		 (SIZE_MAX - queue_size) /
queue_size        289 drivers/misc/vmw_vmci/vmci_queue_pair.c 	queue_size += pas_size + vas_size;
queue_size        291 drivers/misc/vmw_vmci/vmci_queue_pair.c 	queue = vmalloc(queue_size);
queue_size        529 drivers/misc/vmw_vmci/vmci_queue_pair.c 	const size_t queue_size = sizeof(*queue) + sizeof(*(queue->kernel_if));
queue_size        534 drivers/misc/vmw_vmci/vmci_queue_pair.c 	if (num_pages > (SIZE_MAX - queue_size) /
queue_size        540 drivers/misc/vmw_vmci/vmci_queue_pair.c 	queue = kzalloc(queue_size + queue_page_size, GFP_KERNEL);
queue_size        549 drivers/misc/vmw_vmci/vmci_queue_pair.c 		    (struct page **)((u8 *)queue + queue_size);
queue_size        561 drivers/misc/vmw_vmci/vmci_queue_pair.c static void qp_host_free_queue(struct vmci_queue *queue, u64 queue_size)
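
Note: the vmci_queue_pair.c allocators quoted above (lines 275-299 and 529-549) size one allocation as header plus page/VA arrays, but only after checking num_pages against (SIZE_MAX - queue_size) divided by the per-element size. A hedged sketch of that overflow guard, using illustrative names rather than the VMCI structures:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/*
 * Sketch of the overflow guard in the vmci_queue_pair.c allocators
 * quoted above: before computing header + num_pages * per_page, make
 * sure the multiplication cannot overflow size_t. Illustrative names,
 * not the VMCI API.
 */
static bool queue_alloc_size(size_t header_size, uint64_t num_pages,
			     size_t per_page, size_t *total)
{
	if (num_pages > (SIZE_MAX - header_size) / per_page)
		return false;	/* would overflow; reject as the driver does */

	*total = header_size + (size_t)num_pages * per_page;
	return true;
}
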
queue_size       1838 drivers/net/ethernet/amazon/ena/ena_com.c 	io_cq->q_depth = ctx->queue_size;
queue_size       1844 drivers/net/ethernet/amazon/ena/ena_com.c 	io_sq->q_depth = ctx->queue_size;
queue_size        369 drivers/net/ethernet/amazon/ena/ena_com.h 	u16 queue_size;
queue_size       1649 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.queue_size = tx_ring->ring_size;
queue_size       1716 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.queue_size = rx_ring->ring_size;
queue_size       1385 drivers/net/ethernet/emulex/benet/be_cmds.c 	req->queue_size = be_encoded_q_len(txq->len);
queue_size        586 drivers/net/ethernet/emulex/benet/be_cmds.h 	u8 queue_size;
queue_size        263 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	unsigned int queue_size = kinfo->rss_size * kinfo->num_tc;
queue_size        287 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	ret = netif_set_real_num_tx_queues(netdev, queue_size);
queue_size        294 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	ret = netif_set_real_num_rx_queues(netdev, queue_size);
queue_size        211 drivers/net/wireless/intel/iwlwifi/pcie/internal.h 	u32 queue_size;
queue_size        679 drivers/net/wireless/intel/iwlwifi/pcie/internal.h 			  int queue_size);
queue_size        179 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 	WARN_ON(rxq->queue_size & (rxq->queue_size - 1));
queue_size        187 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 	return (rxq->read - rxq->write - 1) & (rxq->queue_size - 1);
queue_size        699 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 				  free_size * rxq->queue_size,
queue_size        710 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 				   sizeof(__le32)) * rxq->queue_size,
queue_size        745 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 		rxq->queue_size = MQ_RX_TABLE_SIZE;
queue_size        747 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 		rxq->queue_size = RX_QUEUE_SIZE;
queue_size        755 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 	rxq->bd = dma_alloc_coherent(dev, free_size * rxq->queue_size,
queue_size        762 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 						  (use_rx_td ? sizeof(*rxq->cd) : sizeof(__le32)) * rxq->queue_size,
queue_size       1041 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 	int i, err, queue_size, allocator_pool_size, num_alloc;
queue_size       1091 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 	queue_size = trans->trans_cfg->mq_rx_supported ?
queue_size       1095 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 	num_alloc = queue_size + allocator_pool_size;
queue_size       1450 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 	r &= (rxq->queue_size - 1);
queue_size       1464 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 		if (unlikely(rb_pending_alloc >= rxq->queue_size / 2 &&
queue_size       1481 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 		i = (i + 1) & (rxq->queue_size - 1);
queue_size       1500 drivers/net/wireless/intel/iwlwifi/pcie/rx.c 				if (rb_pending_alloc < rxq->queue_size / 3) {
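
Note: the pcie/rx.c hits above assume a power-of-two queue_size (the WARN_ON at rx.c:179), so free-space and wrap-around calculations reduce to masking with (queue_size - 1). A minimal sketch of that ring arithmetic, with an illustrative struct standing in for struct iwl_rxq:

#include <assert.h>
#include <stdio.h>

/*
 * Minimal sketch of the ring arithmetic in pcie/rx.c above. rxq_sketch
 * is an illustrative stand-in, not struct iwl_rxq.
 */
struct rxq_sketch {
	unsigned int read;
	unsigned int write;
	unsigned int queue_size;	/* must be a power of two */
};

static unsigned int rx_queue_space(const struct rxq_sketch *q)
{
	/* mirrors the free-slot calculation at rx.c:187:
	 * keep one slot empty so full and empty are distinguishable */
	return (q->read - q->write - 1) & (q->queue_size - 1);
}

static unsigned int rx_next_index(const struct rxq_sketch *q, unsigned int i)
{
	/* mirrors the index advance at rx.c:1481 */
	return (i + 1) & (q->queue_size - 1);
}

int main(void)
{
	struct rxq_sketch q = { .read = 0, .write = 0, .queue_size = 256 };

	assert((q.queue_size & (q.queue_size - 1)) == 0);
	printf("space=%u next=%u\n", rx_queue_space(&q), rx_next_index(&q, 255));
	return 0;
}
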
queue_size        234 drivers/net/wireless/intel/iwlwifi/pcie/trans-gen2.c 	int queue_size = max_t(u32, IWL_CMD_QUEUE_SIZE,
queue_size        249 drivers/net/wireless/intel/iwlwifi/pcie/trans-gen2.c 	if (iwl_pcie_gen2_tx_init(trans, trans_pcie->cmd_queue, queue_size))
queue_size       1308 drivers/net/wireless/intel/iwlwifi/pcie/tx-gen2.c int iwl_pcie_gen2_tx_init(struct iwl_trans *trans, int txq_id, int queue_size)
queue_size       1322 drivers/net/wireless/intel/iwlwifi/pcie/tx-gen2.c 		ret = iwl_pcie_txq_alloc(trans, queue, queue_size, true);
queue_size       1331 drivers/net/wireless/intel/iwlwifi/pcie/tx-gen2.c 	ret = iwl_pcie_txq_init(trans, queue, queue_size,
queue_size        629 drivers/nvme/host/fabrics.c 	opts->queue_size = NVMF_DEF_QUEUE_SIZE;
queue_size        708 drivers/nvme/host/fabrics.c 			opts->queue_size = token;
queue_size        100 drivers/nvme/host/fabrics.h 	size_t			queue_size;
queue_size       2462 drivers/nvme/host/fc.c 	ctrl->tag_set.queue_depth = ctrl->ctrl.opts->queue_size;
queue_size       2694 drivers/nvme/host/fc.c 	if (opts->queue_size > ctrl->ctrl.maxcmd) {
queue_size       2699 drivers/nvme/host/fc.c 			opts->queue_size, ctrl->ctrl.maxcmd);
queue_size       2700 drivers/nvme/host/fc.c 		opts->queue_size = ctrl->ctrl.maxcmd;
queue_size       2703 drivers/nvme/host/fc.c 	if (opts->queue_size > ctrl->ctrl.sqsize + 1) {
queue_size       2707 drivers/nvme/host/fc.c 			opts->queue_size, ctrl->ctrl.sqsize + 1);
queue_size       2708 drivers/nvme/host/fc.c 		opts->queue_size = ctrl->ctrl.sqsize + 1;
queue_size       3107 drivers/nvme/host/fc.c 	ctrl->ctrl.sqsize = opts->queue_size - 1;
queue_size         77 drivers/nvme/host/rdma.c 	int			queue_size;
queue_size        263 drivers/nvme/host/rdma.c 	init_attr.cap.max_send_wr = factor * queue->queue_size + 1;
queue_size        265 drivers/nvme/host/rdma.c 	init_attr.cap.max_recv_wr = queue->queue_size + 1;
queue_size        421 drivers/nvme/host/rdma.c 	nvme_rdma_free_ring(ibdev, queue->rsp_ring, queue->queue_size,
queue_size        464 drivers/nvme/host/rdma.c 				cq_factor * queue->queue_size + 1,
queue_size        475 drivers/nvme/host/rdma.c 	queue->rsp_ring = nvme_rdma_alloc_ring(ibdev, queue->queue_size,
queue_size        489 drivers/nvme/host/rdma.c 			      queue->queue_size,
queue_size        495 drivers/nvme/host/rdma.c 			queue->queue_size, idx);
queue_size        504 drivers/nvme/host/rdma.c 	nvme_rdma_free_ring(ibdev, queue->rsp_ring, queue->queue_size,
queue_size        516 drivers/nvme/host/rdma.c 		int idx, size_t queue_size)
queue_size        531 drivers/nvme/host/rdma.c 	queue->queue_size = queue_size;
queue_size       1003 drivers/nvme/host/rdma.c 	if (ctrl->ctrl.opts->queue_size > ctrl->ctrl.sqsize + 1) {
queue_size       1006 drivers/nvme/host/rdma.c 			ctrl->ctrl.opts->queue_size, ctrl->ctrl.sqsize + 1);
queue_size       1521 drivers/nvme/host/rdma.c 	for (i = 0; i < queue->queue_size; i++) {
queue_size       1617 drivers/nvme/host/rdma.c 		priv.hrqsize = cpu_to_le16(queue->queue_size);
queue_size       2026 drivers/nvme/host/rdma.c 	ctrl->ctrl.sqsize = opts->queue_size - 1;
queue_size         81 drivers/nvme/host/tcp.c 	int			queue_size;
queue_size       1250 drivers/nvme/host/tcp.c 		int qid, size_t queue_size)
queue_size       1261 drivers/nvme/host/tcp.c 	queue->queue_size = queue_size;
queue_size       1843 drivers/nvme/host/tcp.c 	if (opts->queue_size > ctrl->sqsize + 1)
queue_size       1846 drivers/nvme/host/tcp.c 			opts->queue_size, ctrl->sqsize + 1);
queue_size       2298 drivers/nvme/host/tcp.c 	ctrl->ctrl.sqsize = opts->queue_size - 1;
queue_size        515 drivers/nvme/target/loop.c 	ctrl->tag_set.queue_depth = ctrl->ctrl.opts->queue_size;
queue_size        590 drivers/nvme/target/loop.c 	ctrl->ctrl.sqsize = opts->queue_size - 1;
queue_size        603 drivers/nvme/target/loop.c 	if (opts->queue_size > ctrl->ctrl.maxcmd) {
queue_size        607 drivers/nvme/target/loop.c 			opts->queue_size, ctrl->ctrl.maxcmd);
queue_size        608 drivers/nvme/target/loop.c 		opts->queue_size = ctrl->ctrl.maxcmd;
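
Note: the NVMe fabrics hosts above (fc.c, rdma.c, tcp.c, target/loop.c) check or clamp the user-supplied queue_size against the controller's MAXCMD and against sqsize + 1, and store sqsize = queue_size - 1 because NVMe sqsize is zero-based. A small sketch of that clamping, using plain integers rather than the nvme_ctrl/nvmf_ctrl_options structures:

#include <stdio.h>

/*
 * Sketch of the checks the fabrics hosts above apply to the connect
 * option: queue_size may not exceed the controller's MAXCMD, and an
 * N-entry queue is advertised as sqsize = N - 1. Plain integers here,
 * not the real nvme_ctrl/nvmf_ctrl_options fields.
 */
static unsigned int clamp_queue_size(unsigned int queue_size,
				     unsigned int maxcmd, unsigned int sqsize)
{
	if (queue_size > maxcmd) {
		fprintf(stderr, "queue_size %u > maxcmd %u, clamping\n",
			queue_size, maxcmd);
		queue_size = maxcmd;
	}
	if (queue_size > sqsize + 1) {
		fprintf(stderr, "queue_size %u > sqsize+1 %u, clamping\n",
			queue_size, sqsize + 1);
		queue_size = sqsize + 1;
	}
	return queue_size;
}

int main(void)
{
	/* e.g. 128 requested, but the controller caps commands at 64 */
	printf("effective queue_size = %u\n", clamp_queue_size(128, 64, 63));
	return 0;
}
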
queue_size        103 drivers/platform/chrome/wilco_ec/event.c static int queue_size = 64;
queue_size        104 drivers/platform/chrome/wilco_ec/event.c module_param(queue_size, int, 0644);
queue_size        470 drivers/platform/chrome/wilco_ec/event.c 	dev_data->events = event_queue_new(queue_size);
queue_size        163 drivers/usb/host/u132-hcd.c 	u16 queue_size;
queue_size        517 drivers/usb/host/u132-hcd.c 	if (ENDP_QUEUE_SIZE > --endp->queue_size) {
queue_size        556 drivers/usb/host/u132-hcd.c 	if (ENDP_QUEUE_SIZE > --endp->queue_size) {
queue_size       1915 drivers/usb/host/u132-hcd.c 	endp->queue_size = 1;
queue_size       1932 drivers/usb/host/u132-hcd.c 	if (endp->queue_size++ < ENDP_QUEUE_SIZE) {
queue_size       1938 drivers/usb/host/u132-hcd.c 			endp->queue_size -= 1;
queue_size       2014 drivers/usb/host/u132-hcd.c 	endp->queue_size = 1;
queue_size       2029 drivers/usb/host/u132-hcd.c 	if (endp->queue_size++ < ENDP_QUEUE_SIZE) {
queue_size       2035 drivers/usb/host/u132-hcd.c 			endp->queue_size -= 1;
queue_size       2103 drivers/usb/host/u132-hcd.c 		endp->queue_size = 1;
queue_size       2124 drivers/usb/host/u132-hcd.c 		endp->queue_size = 1;
queue_size       2142 drivers/usb/host/u132-hcd.c 			if (endp->queue_size++ < ENDP_QUEUE_SIZE) {
queue_size       2150 drivers/usb/host/u132-hcd.c 					endp->queue_size -= 1;
queue_size       2192 drivers/usb/host/u132-hcd.c 			if (endp->queue_size++ < ENDP_QUEUE_SIZE) {
queue_size       2200 drivers/usb/host/u132-hcd.c 					endp->queue_size -= 1;
queue_size       2216 drivers/usb/host/u132-hcd.c 		if (endp->queue_size++ < ENDP_QUEUE_SIZE) {
queue_size       2223 drivers/usb/host/u132-hcd.c 				endp->queue_size -= 1;
queue_size       2394 drivers/usb/host/u132-hcd.c 			endp->queue_size -= 1;
queue_size       2405 drivers/usb/host/u132-hcd.c 		endp->usb_endp, endp->usb_addr, endp->queue_size,
queue_size       2422 drivers/usb/host/u132-hcd.c 	if (endp->queue_size == 0) {
queue_size       2445 drivers/usb/host/u132-hcd.c 		u16 queue_size = endp->queue_size;
queue_size       2448 drivers/usb/host/u132-hcd.c 		while (++queue_list < ENDP_QUEUE_SIZE && --queue_size > 0) {
queue_size       2457 drivers/usb/host/u132-hcd.c 		while (++queue_list < ENDP_QUEUE_SIZE && --queue_size > 0) {
queue_size       2467 drivers/usb/host/u132-hcd.c 			endp->queue_size -= 1;
queue_size       2491 drivers/usb/host/u132-hcd.c 				endp->usb_addr, endp->queue_size,
queue_size        326 drivers/virtio/virtio_pci_modern.c 	num = vp_ioread16(&cfg->queue_size);
queue_size        349 drivers/virtio/virtio_pci_modern.c 	vp_iowrite16(virtqueue_get_vring_size(vq), &cfg->queue_size);
queue_size        559 drivers/virtio/virtio_pci_modern.c 		     offsetof(struct virtio_pci_common_cfg, queue_size));
queue_size        804 include/linux/vmw_vmci_defs.h 				u64 queue_size)
queue_size        806 include/linux/vmw_vmci_defs.h 	vmci_qp_add_pointer(&q_header->producer_tail, add, queue_size);
queue_size        816 include/linux/vmw_vmci_defs.h 				u64 queue_size)
queue_size        818 include/linux/vmw_vmci_defs.h 	vmci_qp_add_pointer(&q_header->consumer_head, add, queue_size);
queue_size         93 include/soc/tegra/ivc.h unsigned tegra_ivc_total_queue_size(unsigned queue_size);
queue_size        148 include/uapi/linux/virtio_pci.h 	__le16 queue_size;		/* read-write, power of 2. */
queue_size        758 include/uapi/sound/asound.h 	unsigned int queue_size;	/* total size of queue (32-1024) */
queue_size         72 kernel/bpf/queue_stack_maps.c 	u64 size, queue_size, cost;
queue_size         75 kernel/bpf/queue_stack_maps.c 	cost = queue_size = sizeof(*qs) + size * attr->value_size;
queue_size         81 kernel/bpf/queue_stack_maps.c 	qs = bpf_map_area_alloc(queue_size, numa_node);
queue_size        499 samples/bpf/xdp_redirect_cpu_user.c static int create_cpu_entry(__u32 cpu, __u32 queue_size,
queue_size        509 samples/bpf/xdp_redirect_cpu_user.c 	ret = bpf_map_update_elem(cpu_map_fd, &cpu, &queue_size, 0);
queue_size        542 samples/bpf/xdp_redirect_cpu_user.c 	       queue_size, curr_cpus_count);
queue_size         55 sound/core/timer.c 	int queue_size;
queue_size       1302 sound/core/timer.c 		prev = tu->qtail == 0 ? tu->queue_size - 1 : tu->qtail - 1;
queue_size       1309 sound/core/timer.c 	if (tu->qused >= tu->queue_size) {
queue_size       1313 sound/core/timer.c 		tu->qtail %= tu->queue_size;
queue_size       1327 sound/core/timer.c 	if (tu->qused >= tu->queue_size) {
queue_size       1331 sound/core/timer.c 		tu->qtail %= tu->queue_size;
queue_size       1406 sound/core/timer.c 		prev = tu->qtail == 0 ? tu->queue_size - 1 : tu->qtail - 1;
queue_size       1446 sound/core/timer.c 	tu->queue_size = size;
queue_size       1815 sound/core/timer.c 	if (params.queue_size > 0 &&
queue_size       1816 sound/core/timer.c 	    (params.queue_size < 32 || params.queue_size > 1024)) {
queue_size       1849 sound/core/timer.c 	if (params.queue_size > 0 &&
queue_size       1850 sound/core/timer.c 	    (unsigned int)tu->queue_size != params.queue_size) {
queue_size       1851 sound/core/timer.c 		err = realloc_user_queue(tu, params.queue_size);
queue_size       2000 sound/core/timer.c 		    realloc_user_queue(tu, tu->queue_size) < 0) {
queue_size       2100 sound/core/timer.c 		tu->qhead %= tu->queue_size;
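
Note: the sound/core/timer.c hits at 1815-1816 and 1849-1851 accept queue_size == 0 as "keep the current queue" and otherwise require the 32..1024 range documented in asound.h. A tiny validation sketch, standalone rather than the ALSA ioctl path:

#include <stdbool.h>

/*
 * Sketch of the parameter check at timer.c:1815-1816 above: a
 * queue_size of 0 keeps the current queue, anything else must fall in
 * the 32..1024 range documented in asound.h. Illustrative helper, not
 * the ALSA API.
 */
static bool timer_queue_size_valid(unsigned int queue_size)
{
	if (queue_size == 0)
		return true;	/* keep the existing queue size */
	return queue_size >= 32 && queue_size <= 1024;
}
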
queue_size        758 tools/include/uapi/sound/asound.h 	unsigned int queue_size;	/* total size of queue (32-1024) */
queue_size         98 tools/perf/builtin-report.c 	u64			queue_size;
queue_size        125 tools/perf/builtin-report.c 		return perf_config_u64(&rep->queue_size, var, value);
queue_size       1289 tools/perf/builtin-report.c 	if (report.queue_size) {
queue_size       1291 tools/perf/builtin-report.c 					       report.queue_size);
queue_size         92 tools/perf/util/data-convert-bt.c 	u64			queue_size;
queue_size       1571 tools/perf/util/data-convert-bt.c 		return perf_config_u64(&c->queue_size, var, value);
queue_size       1628 tools/perf/util/data-convert-bt.c 	if (c.queue_size) {
queue_size       1630 tools/perf/util/data-convert-bt.c 					       c.queue_size);