queue_mask        491 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c 	uint64_t queue_mask = 0;
queue_mask        504 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c 		if (WARN_ON(i >= (sizeof(queue_mask)*8))) {
queue_mask        509 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c 		queue_mask |= (1ull << i);
queue_mask        523 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c 	kiq->pmf->kiq_set_resources(kiq_ring, queue_mask);
queue_mask         68 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h 					uint64_t queue_mask);
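
The amdgpu_gfx.c hits above build a 64-bit queue_mask by walking the driver's compute-queue bitmap, rejecting any index that would not fit in 64 bits, and then handing the mask to the KIQ via kiq->pmf->kiq_set_resources(). A minimal userspace sketch of that mask-building pattern (the bitmap contents, MAX_QUEUES value, and test_bit helper are invented for illustration):

/*
 * Sketch only: mirrors the loop shape seen in amdgpu_gfx.c, not the
 * kernel code itself.
 */
#include <stdint.h>
#include <stdio.h>

#define MAX_QUEUES 128                          /* illustrative bitmap size */

static int test_bit(unsigned int i, const unsigned long *bitmap)
{
	return (bitmap[i / (8 * sizeof(long))] >> (i % (8 * sizeof(long)))) & 1;
}

int main(void)
{
	unsigned long queue_bitmap[MAX_QUEUES / (8 * sizeof(long))] = { 0xf0f };
	uint64_t queue_mask = 0;
	unsigned int i;

	for (i = 0; i < MAX_QUEUES; i++) {
		if (!test_bit(i, queue_bitmap))
			continue;
		/* the mask only has 64 bits; refuse anything beyond that */
		if (i >= sizeof(queue_mask) * 8) {
			fprintf(stderr, "queue %u does not fit in the mask\n", i);
			break;
		}
		queue_mask |= 1ull << i;
	}
	printf("queue_mask = 0x%016llx\n", (unsigned long long)queue_mask);
	return 0;
}
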
queue_mask        257 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c static void gfx10_kiq_set_resources(struct amdgpu_ring *kiq_ring, uint64_t queue_mask)
queue_mask        262 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask));	/* queue mask lo */
queue_mask        263 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask));	/* queue mask hi */
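
gfx_v10_0.c (and, further down, gfx_v8_0.c and gfx_v9_0.c) emit the mask into the KIQ ring as two 32-bit dwords. A small sketch of the lower_32_bits()/upper_32_bits() split, with the macros re-defined locally so it builds outside the kernel:

#include <stdint.h>
#include <stdio.h>

#define lower_32_bits(n) ((uint32_t)((n) & 0xffffffff))
#define upper_32_bits(n) ((uint32_t)((n) >> 32))

int main(void)
{
	uint64_t queue_mask = 0x0000000300000001ull;  /* example value */

	/* same split as the two amdgpu_ring_write() calls above */
	printf("queue mask lo = 0x%08x\n", (unsigned int)lower_32_bits(queue_mask));
	printf("queue mask hi = 0x%08x\n", (unsigned int)upper_32_bits(queue_mask));
	return 0;
}
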
queue_mask       4378 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	uint64_t queue_mask = 0;
queue_mask       4388 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		if (WARN_ON(i >= (sizeof(queue_mask)*8))) {
queue_mask       4393 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		queue_mask |= (1ull << i);
queue_mask       4404 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask));	/* queue mask lo */
queue_mask       4405 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask));	/* queue mask hi */
queue_mask       3345 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	uint64_t queue_mask = 0;
queue_mask       3355 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		if (WARN_ON(i >= (sizeof(queue_mask)*8))) {
queue_mask       3360 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		queue_mask |= (1ull << i);
queue_mask       3373 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	amdgpu_ring_write(kiq_ring, lower_32_bits(queue_mask));	/* queue mask lo */
queue_mask       3374 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	amdgpu_ring_write(kiq_ring, upper_32_bits(queue_mask));	/* queue mask hi */
queue_mask        978 drivers/gpu/drm/amd/amdkfd/kfd_device_queue_manager.c 	res.queue_mask = 0;
queue_mask        994 drivers/gpu/drm/amd/amdkfd/kfd_device_queue_manager.c 		if (WARN_ON(i >= (sizeof(res.queue_mask)*8))) {
queue_mask        999 drivers/gpu/drm/amd/amdkfd/kfd_device_queue_manager.c 		res.queue_mask |= (1ull << i);
queue_mask       1007 drivers/gpu/drm/amd/amdkfd/kfd_device_queue_manager.c 			res.vmid_mask, res.queue_mask);
queue_mask        169 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 	packet->queue_mask_lo = lower_32_bits(res->queue_mask);
queue_mask        170 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 	packet->queue_mask_hi = upper_32_bits(res->queue_mask);
queue_mask        176 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 	packet->queue_mask_lo = lower_32_bits(res->queue_mask);
queue_mask        177 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 	packet->queue_mask_hi = upper_32_bits(res->queue_mask);
queue_mask        529 drivers/gpu/drm/amd/amdkfd/kfd_priv.h 	uint64_t queue_mask;
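
The amdkfd hits repeat the same idea: the device queue manager accumulates queue_mask into the scheduling-resources structure declared in kfd_priv.h, and the kernel-queue packet writers split it into the packet's queue_mask_lo/queue_mask_hi fields. A sketch using a hypothetical, simplified packet struct (the real layout lives in the KFD PM4 headers):

#include <stdint.h>
#include <stdio.h>

struct runtime_resources_packet {	/* illustrative layout, not the real one */
	uint32_t queue_mask_lo;
	uint32_t queue_mask_hi;
};

int main(void)
{
	uint64_t queue_mask = (1ull << 0) | (1ull << 40);
	struct runtime_resources_packet pkt = {
		.queue_mask_lo = (uint32_t)(queue_mask & 0xffffffff),
		.queue_mask_hi = (uint32_t)(queue_mask >> 32),
	};
	/* reassemble to show the split is lossless */
	uint64_t back = ((uint64_t)pkt.queue_mask_hi << 32) | pkt.queue_mask_lo;

	printf("lo=0x%08x hi=0x%08x round-trip ok: %d\n",
	       (unsigned int)pkt.queue_mask_lo, (unsigned int)pkt.queue_mask_hi,
	       back == queue_mask);
	return 0;
}
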
queue_mask       1169 drivers/net/ethernet/cadence/macb.h 	unsigned int		queue_mask;
queue_mask       3353 drivers/net/ethernet/cadence/macb_main.c 			      unsigned int *queue_mask,
queue_mask       3358 drivers/net/ethernet/cadence/macb_main.c 	*queue_mask = 0x1;
queue_mask       3371 drivers/net/ethernet/cadence/macb_main.c 	*queue_mask = readl_relaxed(mem + GEM_DCFG6) & 0xff;
queue_mask       3373 drivers/net/ethernet/cadence/macb_main.c 	*queue_mask |= 0x1;
queue_mask       3376 drivers/net/ethernet/cadence/macb_main.c 		if (*queue_mask & (1 << hw_q))
queue_mask       3490 drivers/net/ethernet/cadence/macb_main.c 		if (!(bp->queue_mask & (1 << hw_q)))
queue_mask       4188 drivers/net/ethernet/cadence/macb_main.c 	unsigned int queue_mask, num_queues;
queue_mask       4225 drivers/net/ethernet/cadence/macb_main.c 	macb_probe_queues(mem, native_io, &queue_mask, &num_queues);
queue_mask       4249 drivers/net/ethernet/cadence/macb_main.c 	bp->queue_mask = queue_mask;
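
macb derives its queue_mask at probe time: non-GEM hardware gets only queue 0 (0x1), while GEM designs read the low byte of DCFG6 and then force bit 0 since queue 0 always exists; the set bits later decide which hardware queues are initialised. A sketch of that discovery step, with a made-up register value standing in for the readl_relaxed():

#include <stdio.h>

int main(void)
{
	unsigned int dcfg6 = 0x0000000e;	/* pretend DCFG6 readout */
	unsigned int queue_mask, num_queues = 0, hw_q;

	queue_mask = dcfg6 & 0xff;		/* low byte: available queues */
	queue_mask |= 0x1;			/* queue 0 is always present */

	for (hw_q = 0; hw_q < 8; hw_q++)
		if (queue_mask & (1 << hw_q))
			num_queues++;

	printf("queue_mask=0x%02x num_queues=%u\n", queue_mask, num_queues);
	return 0;
}
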
queue_mask       2253 drivers/net/ethernet/marvell/mv643xx_eth.c 		u8 queue_mask;
queue_mask       2264 drivers/net/ethernet/marvell/mv643xx_eth.c 		queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx;
queue_mask       2266 drivers/net/ethernet/marvell/mv643xx_eth.c 			queue_mask |= mp->work_rx_refill;
queue_mask       2268 drivers/net/ethernet/marvell/mv643xx_eth.c 		if (!queue_mask) {
queue_mask       2274 drivers/net/ethernet/marvell/mv643xx_eth.c 		queue = fls(queue_mask) - 1;
queue_mask       2275 drivers/net/ethernet/marvell/mv643xx_eth.c 		queue_mask = 1 << queue;
queue_mask       2281 drivers/net/ethernet/marvell/mv643xx_eth.c 		if (mp->work_tx_end & queue_mask) {
queue_mask       2283 drivers/net/ethernet/marvell/mv643xx_eth.c 		} else if (mp->work_tx & queue_mask) {
queue_mask       2286 drivers/net/ethernet/marvell/mv643xx_eth.c 		} else if (mp->work_rx & queue_mask) {
queue_mask       2288 drivers/net/ethernet/marvell/mv643xx_eth.c 		} else if (!mp->oom && (mp->work_rx_refill & queue_mask)) {
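
mv643xx_eth uses queue_mask inside its poll loop: the per-type work bitmaps are OR-ed together, fls() picks the highest-numbered queue with pending work, and the resulting single-bit mask selects which kind of work to service. A sketch with invented work values and a portable stand-in for the kernel's fls():

#include <stdio.h>

static int fls_portable(unsigned int x)	/* stand-in for the kernel's fls() */
{
	int r = 0;

	while (x) {
		r++;
		x >>= 1;
	}
	return r;
}

int main(void)
{
	unsigned int work_tx = 0x02, work_tx_end = 0x00, work_rx = 0x05;
	unsigned int queue_mask = work_tx | work_tx_end | work_rx;
	int queue;

	if (!queue_mask) {
		printf("no work pending\n");
		return 0;
	}

	queue = fls_portable(queue_mask) - 1;	/* highest queue with work */
	queue_mask = 1u << queue;		/* isolate that queue's bit */

	if (work_tx_end & queue_mask)
		printf("queue %d: tx end\n", queue);
	else if (work_tx & queue_mask)
		printf("queue %d: tx reclaim\n", queue);
	else if (work_rx & queue_mask)
		printf("queue %d: rx\n", queue);
	return 0;
}
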
queue_mask         89 drivers/soc/ti/knav_qmss.h 	u32		queue_mask;
queue_mask        283 drivers/soc/ti/knav_qmss_acc.c 		cmd->command, cmd->queue_mask, cmd->list_dma,
queue_mask        289 drivers/soc/ti/knav_qmss_acc.c 	writel_relaxed(cmd->queue_mask, &pdsp->acc_command->queue_mask);
queue_mask        308 drivers/soc/ti/knav_qmss_acc.c 	u32 queue_mask;
queue_mask        313 drivers/soc/ti/knav_qmss_acc.c 		queue_mask = BIT(range->num_queues) - 1;
queue_mask        317 drivers/soc/ti/knav_qmss_acc.c 		queue_mask = 0;
queue_mask        322 drivers/soc/ti/knav_qmss_acc.c 	cmd->queue_mask = queue_mask;
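
The knav_qmss accumulator command appears to use queue_mask as a contiguous block of bits: BIT(num_queues) - 1 for a multi-queue range, 0 otherwise, before the command is written out to the PDSP. A trivial sketch of that mask construction (the multi_queue flag stands in for the range-type check):

#include <stdio.h>

#define BIT(n) (1u << (n))

int main(void)
{
	unsigned int num_queues = 4;
	int multi_queue = 1;			/* pretend multi-queue range */
	unsigned int queue_mask;

	if (multi_queue)
		queue_mask = BIT(num_queues) - 1;	/* 0xf for 4 queues */
	else
		queue_mask = 0;			/* single queue: no mask */

	printf("queue_mask = 0x%x\n", queue_mask);
	return 0;
}
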
queue_mask       1287 include/uapi/linux/ethtool.h 	__u32	queue_mask[__KERNEL_DIV_ROUND_UP(MAX_NUM_QUEUE, 32)];
queue_mask       2346 net/core/ethtool.c 	DECLARE_BITMAP(queue_mask, MAX_NUM_QUEUE);
queue_mask       2353 net/core/ethtool.c 	bitmap_from_arr32(queue_mask, per_queue_opt->queue_mask,
queue_mask       2356 net/core/ethtool.c 	for_each_set_bit(bit, queue_mask, MAX_NUM_QUEUE) {
queue_mask       2379 net/core/ethtool.c 	DECLARE_BITMAP(queue_mask, MAX_NUM_QUEUE);
queue_mask       2387 net/core/ethtool.c 	bitmap_from_arr32(queue_mask, per_queue_opt->queue_mask, MAX_NUM_QUEUE);
queue_mask       2388 net/core/ethtool.c 	n_queue = bitmap_weight(queue_mask, MAX_NUM_QUEUE);
queue_mask       2393 net/core/ethtool.c 	for_each_set_bit(bit, queue_mask, MAX_NUM_QUEUE) {
queue_mask       2417 net/core/ethtool.c 		for_each_set_bit(i, queue_mask, bit) {
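
The ethtool per-queue interface stores the mask in uapi as an array of 32-bit words, one bit per queue, which the kernel expands with bitmap_from_arr32() and walks with for_each_set_bit()/bitmap_weight(). A userspace sketch doing the same walk directly on the word array (MAX_NUM_QUEUE here is a small stand-in for the uapi constant):

#include <stdint.h>
#include <stdio.h>

#define MAX_NUM_QUEUE 128
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	uint32_t queue_mask[DIV_ROUND_UP(MAX_NUM_QUEUE, 32)] = { 0 };
	unsigned int bit, n_queue = 0;

	queue_mask[0] = 0x00000005;		/* queues 0 and 2 selected */
	queue_mask[1] = 0x00000001;		/* queue 32 selected */

	for (bit = 0; bit < MAX_NUM_QUEUE; bit++) {
		if (queue_mask[bit / 32] & (1u << (bit % 32))) {
			printf("per-queue op on queue %u\n", bit);
			n_queue++;
		}
	}
	printf("%u queues selected\n", n_queue);
	return 0;
}
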