rx_q              202 drivers/media/radio/wl128x/fmdrv.h 	struct sk_buff_head rx_q;	/* RX queue */
rx_q              259 drivers/media/radio/wl128x/fmdrv_common.c 	while ((skb = skb_dequeue(&fmdev->rx_q))) {
rx_q             1453 drivers/media/radio/wl128x/fmdrv_common.c 	skb_queue_tail(&fmdev->rx_q, skb);
rx_q             1541 drivers/media/radio/wl128x/fmdrv_common.c 	skb_queue_head_init(&fmdev->rx_q);
rx_q             1592 drivers/media/radio/wl128x/fmdrv_common.c 	skb_queue_purge(&fmdev->rx_q);
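
The wl128x entries above (and the nfcmrvl, rsi, Bluetooth and NCI entries further down) all follow the same sk_buff_head lifecycle: initialise the queue once, queue skbs from the receive path, drain them from deferred context, purge on shutdown. A minimal sketch of that pattern, with hypothetical names (not any one of these drivers):

	#include <linux/skbuff.h>

	struct my_dev {
		struct sk_buff_head rx_q;	/* RX queue */
	};

	static void my_dev_init(struct my_dev *dev)
	{
		skb_queue_head_init(&dev->rx_q);	/* once, before first use */
	}

	/* producer side, e.g. the transport's receive callback */
	static void my_dev_rx(struct my_dev *dev, struct sk_buff *skb)
	{
		skb_queue_tail(&dev->rx_q, skb);
	}

	/* consumer side, e.g. a tasklet or work item */
	static void my_dev_drain(struct my_dev *dev)
	{
		struct sk_buff *skb;

		while ((skb = skb_dequeue(&dev->rx_q))) {
			/* process skb ... */
			kfree_skb(skb);
		}
	}

	/* teardown: drop anything still queued */
	static void my_dev_close(struct my_dev *dev)
	{
		skb_queue_purge(&dev->rx_q);
	}
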
rx_q              315 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	writel(upper_32_bits(adpt->rx_q.rfd.dma_addr),
rx_q              318 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	writel(lower_32_bits(adpt->rx_q.rfd.dma_addr),
rx_q              320 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	writel(lower_32_bits(adpt->rx_q.rrd.dma_addr),
rx_q              323 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	writel(adpt->rx_q.rfd.count & RFD_RING_SIZE_BMSK,
rx_q              325 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	writel(adpt->rx_q.rrd.count & RRD_RING_SIZE_BMSK,
rx_q              624 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	struct emac_rx_queue *rx_q = &adpt->rx_q;
rx_q              629 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	if (!rx_q->rfd.rfbuff)
rx_q              632 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	for (i = 0; i < rx_q->rfd.count; i++) {
rx_q              633 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		struct emac_buffer *rfbuf = GET_RFD_BUFFER(rx_q, i);
rx_q              646 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	size =  sizeof(struct emac_buffer) * rx_q->rfd.count;
rx_q              647 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	memset(rx_q->rfd.rfbuff, 0, size);
rx_q              650 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	memset(rx_q->rrd.v_addr, 0, rx_q->rrd.size);
rx_q              651 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.produce_idx = 0;
rx_q              652 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.consume_idx = 0;
rx_q              654 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	memset(rx_q->rfd.v_addr, 0, rx_q->rfd.size);
rx_q              655 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.produce_idx = 0;
rx_q              656 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.consume_idx = 0;
rx_q              699 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	struct emac_rx_queue *rx_q = &adpt->rx_q;
rx_q              703 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	kfree(rx_q->rfd.rfbuff);
rx_q              704 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.rfbuff   = NULL;
rx_q              706 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.v_addr   = NULL;
rx_q              707 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.dma_addr = 0;
rx_q              708 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.size     = 0;
rx_q              710 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.v_addr   = NULL;
rx_q              711 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.dma_addr = 0;
rx_q              712 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.size     = 0;
rx_q              720 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	struct emac_rx_queue *rx_q = &adpt->rx_q;
rx_q              723 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	size = sizeof(struct emac_buffer) * rx_q->rfd.count;
rx_q              724 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.rfbuff = kzalloc_node(size, GFP_KERNEL, node);
rx_q              725 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	if (!rx_q->rfd.rfbuff)
rx_q              728 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.size = rx_q->rrd.count * (adpt->rrd_size * 4);
rx_q              729 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.size = rx_q->rfd.count * (adpt->rfd_size * 4);
rx_q              731 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.dma_addr = ring_header->dma_addr + ring_header->used;
rx_q              732 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.v_addr   = ring_header->v_addr + ring_header->used;
rx_q              733 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	ring_header->used += ALIGN(rx_q->rrd.size, 8);
rx_q              735 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.dma_addr = ring_header->dma_addr + ring_header->used;
rx_q              736 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.v_addr   = ring_header->v_addr + ring_header->used;
rx_q              737 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	ring_header->used += ALIGN(rx_q->rfd.size, 8);
rx_q              739 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.produce_idx = 0;
rx_q              740 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rrd.consume_idx = 0;
rx_q              742 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.produce_idx = 0;
rx_q              743 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.consume_idx = 0;
rx_q              759 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.rrd.count = adpt->rx_desc_cnt;
rx_q              760 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.rfd.count = adpt->rx_desc_cnt;
rx_q              836 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.rrd.produce_idx = 0;
rx_q              837 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.rrd.consume_idx = 0;
rx_q              838 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.rfd.produce_idx = 0;
rx_q              839 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.rfd.consume_idx = 0;
rx_q              840 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	for (i = 0; i < adpt->rx_q.rfd.count; i++)
rx_q              841 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		adpt->rx_q.rfd.rfbuff[i].dma_addr = 0;
rx_q              846 drivers/net/ethernet/qualcomm/emac/emac-mac.c 				   struct emac_rx_queue *rx_q,
rx_q              849 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	u32 *hw_rfd = EMAC_RFD(rx_q, adpt->rfd_size, rx_q->rfd.produce_idx);
rx_q              854 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	if (++rx_q->rfd.produce_idx == rx_q->rfd.count)
rx_q              855 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		rx_q->rfd.produce_idx = 0;
rx_q              860 drivers/net/ethernet/qualcomm/emac/emac-mac.c 				    struct emac_rx_queue *rx_q)
rx_q              867 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	next_produce_idx = rx_q->rfd.produce_idx + 1;
rx_q              868 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	if (next_produce_idx == rx_q->rfd.count)
rx_q              871 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	curr_rxbuf = GET_RFD_BUFFER(rx_q, rx_q->rfd.produce_idx);
rx_q              872 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	next_rxbuf = GET_RFD_BUFFER(rx_q, next_produce_idx);
rx_q              896 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		emac_mac_rx_rfd_create(adpt, rx_q, curr_rxbuf->dma_addr);
rx_q              897 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		next_produce_idx = rx_q->rfd.produce_idx + 1;
rx_q              898 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		if (next_produce_idx == rx_q->rfd.count)
rx_q              901 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		curr_rxbuf = GET_RFD_BUFFER(rx_q, rx_q->rfd.produce_idx);
rx_q              902 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		next_rxbuf = GET_RFD_BUFFER(rx_q, next_produce_idx);
rx_q              907 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		u32 prod_idx = (rx_q->rfd.produce_idx << rx_q->produce_shift) &
rx_q              908 drivers/net/ethernet/qualcomm/emac/emac-mac.c 				rx_q->produce_mask;
rx_q              909 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		emac_reg_update32(adpt->base + rx_q->produce_reg,
rx_q              910 drivers/net/ethernet/qualcomm/emac/emac-mac.c 				  rx_q->produce_mask, prod_idx);
rx_q              938 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	emac_mac_rx_descs_refill(adpt, &adpt->rx_q);
rx_q              956 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	napi_enable(&adpt->rx_q.napi);
rx_q              968 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	napi_disable(&adpt->rx_q.napi);
rx_q              991 drivers/net/ethernet/qualcomm/emac/emac-mac.c 				struct emac_rx_queue *rx_q,
rx_q              994 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	u32 *hw_rrd = EMAC_RRD(rx_q, adpt->rrd_size, rx_q->rrd.consume_idx);
rx_q             1018 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	if (++rx_q->rrd.consume_idx == rx_q->rrd.count)
rx_q             1019 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		rx_q->rrd.consume_idx = 0;
rx_q             1055 drivers/net/ethernet/qualcomm/emac/emac-mac.c static void emac_rx_rfd_clean(struct emac_rx_queue *rx_q, struct emac_rrd *rrd)
rx_q             1057 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	struct emac_buffer *rfbuf = rx_q->rfd.rfbuff;
rx_q             1063 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		if (++consume_idx == rx_q->rfd.count)
rx_q             1067 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.consume_idx = consume_idx;
rx_q             1068 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	rx_q->rfd.process_idx = consume_idx;
rx_q             1072 drivers/net/ethernet/qualcomm/emac/emac-mac.c static void emac_receive_skb(struct emac_rx_queue *rx_q,
rx_q             1083 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	napi_gro_receive(&rx_q->napi, skb);
rx_q             1087 drivers/net/ethernet/qualcomm/emac/emac-mac.c void emac_mac_rx_process(struct emac_adapter *adpt, struct emac_rx_queue *rx_q,
rx_q             1098 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	reg = readl_relaxed(adpt->base + rx_q->consume_reg);
rx_q             1100 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	hw_consume_idx = (reg & rx_q->consume_mask) >> rx_q->consume_shift;
rx_q             1101 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	num_consume_pkts = (hw_consume_idx >= rx_q->rrd.consume_idx) ?
rx_q             1102 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		(hw_consume_idx -  rx_q->rrd.consume_idx) :
rx_q             1103 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		(hw_consume_idx + rx_q->rrd.count - rx_q->rrd.consume_idx);
rx_q             1109 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		if (!emac_rx_process_rrd(adpt, rx_q, &rrd))
rx_q             1114 drivers/net/ethernet/qualcomm/emac/emac-mac.c 			rfbuf = GET_RFD_BUFFER(rx_q, RRD_SI(&rrd));
rx_q             1125 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		emac_rx_rfd_clean(rx_q, &rrd);
rx_q             1152 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		emac_receive_skb(rx_q, skb, (u16)RRD_CVALN_TAG(&rrd),
rx_q             1159 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		proc_idx = (rx_q->rfd.process_idx << rx_q->process_shft) &
rx_q             1160 drivers/net/ethernet/qualcomm/emac/emac-mac.c 				rx_q->process_mask;
rx_q             1161 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		emac_reg_update32(adpt->base + rx_q->process_reg,
rx_q             1162 drivers/net/ethernet/qualcomm/emac/emac-mac.c 				  rx_q->process_mask, proc_idx);
rx_q             1163 drivers/net/ethernet/qualcomm/emac/emac-mac.c 		emac_mac_rx_descs_refill(adpt, rx_q);
rx_q             1218 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.netdev = adpt->netdev;
rx_q             1220 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.produce_reg  = EMAC_MAILBOX_0;
rx_q             1221 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.produce_mask = RFD0_PROD_IDX_BMSK;
rx_q             1222 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.produce_shift = RFD0_PROD_IDX_SHFT;
rx_q             1224 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.process_reg  = EMAC_MAILBOX_0;
rx_q             1225 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.process_mask = RFD0_PROC_IDX_BMSK;
rx_q             1226 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.process_shft = RFD0_PROC_IDX_SHFT;
rx_q             1228 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.consume_reg  = EMAC_MAILBOX_3;
rx_q             1229 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.consume_mask = RFD0_CONS_IDX_BMSK;
rx_q             1230 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.consume_shift = RFD0_CONS_IDX_SHFT;
rx_q             1232 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.irq          = &adpt->irq;
rx_q             1233 drivers/net/ethernet/qualcomm/emac/emac-mac.c 	adpt->rx_q.intr         = adpt->irq.mask & ISR_RX_PKT;
rx_q              228 drivers/net/ethernet/qualcomm/emac/emac-mac.h void emac_mac_rx_process(struct emac_adapter *adpt, struct emac_rx_queue *rx_q,
rx_q               99 drivers/net/ethernet/qualcomm/emac/emac.c 	struct emac_rx_queue *rx_q =
rx_q              101 drivers/net/ethernet/qualcomm/emac/emac.c 	struct emac_adapter *adpt = netdev_priv(rx_q->netdev);
rx_q              102 drivers/net/ethernet/qualcomm/emac/emac.c 	struct emac_irq *irq = rx_q->irq;
rx_q              105 drivers/net/ethernet/qualcomm/emac/emac.c 	emac_mac_rx_process(adpt, rx_q, &work_done, budget);
rx_q              110 drivers/net/ethernet/qualcomm/emac/emac.c 		irq->mask |= rx_q->intr;
rx_q              130 drivers/net/ethernet/qualcomm/emac/emac.c 	struct emac_rx_queue *rx_q = &adpt->rx_q;
rx_q              152 drivers/net/ethernet/qualcomm/emac/emac.c 	if (status & rx_q->intr) {
rx_q              153 drivers/net/ethernet/qualcomm/emac/emac.c 		if (napi_schedule_prep(&rx_q->napi)) {
rx_q              154 drivers/net/ethernet/qualcomm/emac/emac.c 			irq->mask &= ~rx_q->intr;
rx_q              155 drivers/net/ethernet/qualcomm/emac/emac.c 			__napi_schedule(&rx_q->napi);
rx_q              689 drivers/net/ethernet/qualcomm/emac/emac.c 	netif_napi_add(netdev, &adpt->rx_q.napi, emac_napi_rtx,
rx_q              713 drivers/net/ethernet/qualcomm/emac/emac.c 	netif_napi_del(&adpt->rx_q.napi);
rx_q              731 drivers/net/ethernet/qualcomm/emac/emac.c 	netif_napi_del(&adpt->rx_q.napi);
rx_q              342 drivers/net/ethernet/qualcomm/emac/emac.h 	struct emac_rx_queue		rx_q;
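
The emac entries show a producer/consumer descriptor ring: software indices that wrap at the ring count and are then packed into a mailbox register field through a mask and shift. A minimal sketch of just that index handling, with illustrative names (mask/shift values are caller-supplied, not the real register layout):

	#include <linux/types.h>

	struct my_ring_idx {
		u32 produce_idx;
		u32 count;
	};

	/* advance and wrap, as emac_mac_rx_rfd_create() does for rfd.produce_idx */
	static void my_ring_advance(struct my_ring_idx *r)
	{
		if (++r->produce_idx == r->count)
			r->produce_idx = 0;
	}

	/* pack the index into its register field before the mailbox write,
	 * mirroring the (produce_idx << produce_shift) & produce_mask step
	 * seen in emac_mac_rx_descs_refill() */
	static u32 my_ring_prod_field(const struct my_ring_idx *r, u32 shift, u32 mask)
	{
		return (r->produce_idx << shift) & mask;
	}
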
rx_q              130 drivers/net/ethernet/stmicro/stmmac/chain_mode.c 	struct stmmac_rx_queue *rx_q = (struct stmmac_rx_queue *)priv_ptr;
rx_q              131 drivers/net/ethernet/stmicro/stmmac/chain_mode.c 	struct stmmac_priv *priv = rx_q->priv_data;
rx_q              138 drivers/net/ethernet/stmicro/stmmac/chain_mode.c 		p->des3 = cpu_to_le32((unsigned int)(rx_q->dma_rx_phy +
rx_q              139 drivers/net/ethernet/stmicro/stmmac/chain_mode.c 				      (((rx_q->dirty_rx) + 1) %
rx_q              106 drivers/net/ethernet/stmicro/stmmac/ring_mode.c 	struct stmmac_rx_queue *rx_q = priv_ptr;
rx_q              107 drivers/net/ethernet/stmicro/stmmac/ring_mode.c 	struct stmmac_priv *priv = rx_q->priv_data;
rx_q              311 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q              314 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	if (rx_q->dirty_rx <= rx_q->cur_rx)
rx_q              315 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		dirty = rx_q->cur_rx - rx_q->dirty_rx;
rx_q              317 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		dirty = DMA_RX_SIZE - rx_q->dirty_rx + rx_q->cur_rx;
rx_q             1065 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             1070 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			head_rx = (void *)rx_q->dma_erx;
rx_q             1072 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			head_rx = (void *)rx_q->dma_rx;
rx_q             1136 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             1142 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			stmmac_init_rx_desc(priv, &rx_q->dma_erx[i].basic,
rx_q             1147 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			stmmac_init_rx_desc(priv, &rx_q->dma_rx[i],
rx_q             1209 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             1210 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_buffer *buf = &rx_q->buf_pool[i];
rx_q             1212 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	buf->page = page_pool_dev_alloc_pages(rx_q->page_pool);
rx_q             1217 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		buf->sec_page = page_pool_dev_alloc_pages(rx_q->page_pool);
rx_q             1243 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             1244 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_buffer *buf = &rx_q->buf_pool[i];
rx_q             1247 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		page_pool_put_page(rx_q->page_pool, buf->page, false);
rx_q             1251 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		page_pool_put_page(rx_q->page_pool, buf->sec_page, false);
rx_q             1307 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             1311 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			  (u32)rx_q->dma_rx_phy);
rx_q             1319 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 				p = &((rx_q->dma_erx + i)->basic);
rx_q             1321 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 				p = rx_q->dma_rx + i;
rx_q             1329 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->cur_rx = 0;
rx_q             1330 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->dirty_rx = (unsigned int)(i - DMA_RX_SIZE);
rx_q             1335 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 				stmmac_mode_init(priv, rx_q->dma_erx,
rx_q             1336 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 						rx_q->dma_rx_phy, DMA_RX_SIZE, 1);
rx_q             1338 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 				stmmac_mode_init(priv, rx_q->dma_rx,
rx_q             1339 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 						rx_q->dma_rx_phy, DMA_RX_SIZE, 0);
rx_q             1481 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             1490 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 					  rx_q->dma_rx, rx_q->dma_rx_phy);
rx_q             1494 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 					  rx_q->dma_erx, rx_q->dma_rx_phy);
rx_q             1496 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		kfree(rx_q->buf_pool);
rx_q             1497 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		if (rx_q->page_pool)
rx_q             1498 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			page_pool_destroy(rx_q->page_pool);
rx_q             1549 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             1553 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->queue_index = queue;
rx_q             1554 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->priv_data = priv;
rx_q             1564 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->page_pool = page_pool_create(&pp_params);
rx_q             1565 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		if (IS_ERR(rx_q->page_pool)) {
rx_q             1566 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			ret = PTR_ERR(rx_q->page_pool);
rx_q             1567 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			rx_q->page_pool = NULL;
rx_q             1571 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->buf_pool = kcalloc(DMA_RX_SIZE, sizeof(*rx_q->buf_pool),
rx_q             1573 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		if (!rx_q->buf_pool)
rx_q             1577 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			rx_q->dma_erx = dma_alloc_coherent(priv->device,
rx_q             1579 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 							   &rx_q->dma_rx_phy,
rx_q             1581 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			if (!rx_q->dma_erx)
rx_q             1585 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			rx_q->dma_rx = dma_alloc_coherent(priv->device,
rx_q             1587 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 							  &rx_q->dma_rx_phy,
rx_q             1589 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			if (!rx_q->dma_rx)
rx_q             2187 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_queue *rx_q;
rx_q             2219 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q = &priv->rx_queue[chan];
rx_q             2222 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 				    rx_q->dma_rx_phy, chan);
rx_q             2224 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->rx_tail_addr = rx_q->dma_rx_phy +
rx_q             2227 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 				       rx_q->rx_tail_addr, chan);
rx_q             3369 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c static inline int stmmac_rx_threshold_count(struct stmmac_rx_queue *rx_q)
rx_q             3371 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	if (rx_q->rx_zeroc_thresh < STMMAC_RX_THRESH)
rx_q             3386 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             3388 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	unsigned int entry = rx_q->dirty_rx;
rx_q             3393 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_rx_buffer *buf = &rx_q->buf_pool[entry];
rx_q             3398 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			p = (struct dma_desc *)(rx_q->dma_erx + entry);
rx_q             3400 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			p = rx_q->dma_rx + entry;
rx_q             3403 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			buf->page = page_pool_dev_alloc_pages(rx_q->page_pool);
rx_q             3409 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			buf->sec_page = page_pool_dev_alloc_pages(rx_q->page_pool);
rx_q             3429 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		stmmac_refill_desc3(priv, rx_q, p);
rx_q             3431 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->rx_count_frames++;
rx_q             3432 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->rx_count_frames += priv->rx_coal_frames;
rx_q             3433 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		if (rx_q->rx_count_frames > priv->rx_coal_frames)
rx_q             3434 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			rx_q->rx_count_frames = 0;
rx_q             3435 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		use_rx_wd = priv->use_riwt && rx_q->rx_count_frames;
rx_q             3442 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	rx_q->dirty_rx = entry;
rx_q             3443 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	rx_q->rx_tail_addr = rx_q->dma_rx_phy +
rx_q             3444 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			    (rx_q->dirty_rx * sizeof(struct dma_desc));
rx_q             3445 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	stmmac_set_rx_tail_ptr(priv, priv->ioaddr, rx_q->rx_tail_addr, queue);
rx_q             3458 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             3462 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	unsigned int next_entry = rx_q->cur_rx;
rx_q             3470 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			rx_head = (void *)rx_q->dma_erx;
rx_q             3472 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			rx_head = (void *)rx_q->dma_rx;
rx_q             3485 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		if (!count && rx_q->state_saved) {
rx_q             3486 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			skb = rx_q->state.skb;
rx_q             3487 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			error = rx_q->state.error;
rx_q             3488 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			len = rx_q->state.len;
rx_q             3490 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			rx_q->state_saved = false;
rx_q             3502 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		buf = &rx_q->buf_pool[entry];
rx_q             3505 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			p = (struct dma_desc *)(rx_q->dma_erx + entry);
rx_q             3507 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			p = rx_q->dma_rx + entry;
rx_q             3516 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->cur_rx = STMMAC_GET_ENTRY(rx_q->cur_rx, DMA_RX_SIZE);
rx_q             3517 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		next_entry = rx_q->cur_rx;
rx_q             3520 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			np = (struct dma_desc *)(rx_q->dma_erx + next_entry);
rx_q             3522 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			np = rx_q->dma_rx + next_entry;
rx_q             3529 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 					&priv->xstats, rx_q->dma_erx + entry);
rx_q             3531 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			page_pool_recycle_direct(rx_q->page_pool, buf->page);
rx_q             3593 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			page_pool_recycle_direct(rx_q->page_pool, buf->page);
rx_q             3608 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			page_pool_release_page(rx_q->page_pool, buf->page);
rx_q             3622 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			page_pool_release_page(rx_q->page_pool, buf->sec_page);
rx_q             3652 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->state_saved = true;
rx_q             3653 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->state.skb = skb;
rx_q             3654 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->state.error = error;
rx_q             3655 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->state.len = len;
rx_q             3881 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             3890 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 						       rx_q->rx_tail_addr,
rx_q             4075 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             4081 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			sysfs_display_ring((void *)rx_q->dma_erx,
rx_q             4085 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 			sysfs_display_ring((void *)rx_q->dma_rx,
rx_q             4820 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_rx_queue *rx_q = &priv->rx_queue[queue];
rx_q             4822 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->cur_rx = 0;
rx_q             4823 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		rx_q->dirty_rx = 0;
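
The stmmac entries revolve around cur_rx/dirty_rx ring accounting; the stmmac_rx_dirty() lines near the top of this cluster compute how many descriptors await refill. A standalone sketch of that arithmetic, assuming a fixed ring size (MY_DMA_RX_SIZE stands in for the driver's DMA_RX_SIZE):

	/* number of RX descriptors that need refilling, given wrapping indices */
	#define MY_DMA_RX_SIZE 512u

	static unsigned int my_rx_dirty(unsigned int cur_rx, unsigned int dirty_rx)
	{
		if (dirty_rx <= cur_rx)
			return cur_rx - dirty_rx;
		return MY_DMA_RX_SIZE - dirty_rx + cur_rx;
	}
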
rx_q              279 drivers/net/fddi/skfp/fplustm.c 	smc->hw.fp.rx[QUEUE_R1] = queue = &smc->hw.fp.rx_q[QUEUE_R1] ;
rx_q              286 drivers/net/fddi/skfp/fplustm.c 	smc->hw.fp.rx[QUEUE_R2] = queue = &smc->hw.fp.rx_q[QUEUE_R2] ;
rx_q              192 drivers/net/fddi/skfp/h/fplustm.h 	struct s_smt_rx_queue rx_q[USED_QUEUES] ;
rx_q              311 drivers/net/fddi/skfp/h/hwmtm.h #define	HWM_GET_RX_USED(smc)	((int)(smc)->hw.fp.rx_q[QUEUE_R1].rx_used)
rx_q              325 drivers/net/fddi/skfp/h/hwmtm.h #define	HWM_GET_RX_FREE(smc)	((int)(smc)->hw.fp.rx_q[QUEUE_R1].rx_free-1)
rx_q              341 drivers/net/fddi/skfp/h/hwmtm.h 				(smc)->hw.fp.rx_q[QUEUE_R1].rx_curr_put
rx_q              359 drivers/net/fddi/skfp/h/hwmtm.h 	if ((low_water) >= (smc)->hw.fp.rx_q[QUEUE_R1].rx_used) {\
rx_q              718 drivers/net/fddi/skfp/hwmtm.c 		if (smc->hw.fp.rx_q[QUEUE_R1].rx_used > 0) {
rx_q             1419 drivers/net/fddi/skfp/hwmtm.c 	r = smc->hw.fp.rx_q[QUEUE_R1].rx_curr_put ;
rx_q             1430 drivers/net/fddi/skfp/hwmtm.c 	smc->hw.fp.rx_q[QUEUE_R1].rx_free-- ;
rx_q             1431 drivers/net/fddi/skfp/hwmtm.c 	smc->hw.fp.rx_q[QUEUE_R1].rx_used++ ;
rx_q             1432 drivers/net/fddi/skfp/hwmtm.c 	smc->hw.fp.rx_q[QUEUE_R1].rx_curr_put = r->rxd_next ;
rx_q              164 drivers/net/wireless/mediatek/mt7601u/dma.c 	struct mt7601u_rx_queue *q = &dev->rx_q;
rx_q              185 drivers/net/wireless/mediatek/mt7601u/dma.c 	struct mt7601u_rx_queue *q = &dev->rx_q;
rx_q              383 drivers/net/wireless/mediatek/mt7601u/dma.c 	for (i = 0; i < dev->rx_q.entries; i++)
rx_q              384 drivers/net/wireless/mediatek/mt7601u/dma.c 		usb_poison_urb(dev->rx_q.e[i].urb);
rx_q              412 drivers/net/wireless/mediatek/mt7601u/dma.c 	for (i = 0; i < dev->rx_q.entries; i++) {
rx_q              413 drivers/net/wireless/mediatek/mt7601u/dma.c 		ret = mt7601u_submit_rx_buf(dev, &dev->rx_q.e[i], GFP_KERNEL);
rx_q              425 drivers/net/wireless/mediatek/mt7601u/dma.c 	for (i = 0; i < dev->rx_q.entries; i++) {
rx_q              426 drivers/net/wireless/mediatek/mt7601u/dma.c 		__free_pages(dev->rx_q.e[i].p, MT_RX_ORDER);
rx_q              427 drivers/net/wireless/mediatek/mt7601u/dma.c 		usb_free_urb(dev->rx_q.e[i].urb);
rx_q              435 drivers/net/wireless/mediatek/mt7601u/dma.c 	memset(&dev->rx_q, 0, sizeof(dev->rx_q));
rx_q              436 drivers/net/wireless/mediatek/mt7601u/dma.c 	dev->rx_q.dev = dev;
rx_q              437 drivers/net/wireless/mediatek/mt7601u/dma.c 	dev->rx_q.entries = N_RX_ENTRIES;
rx_q              440 drivers/net/wireless/mediatek/mt7601u/dma.c 		dev->rx_q.e[i].urb = usb_alloc_urb(0, GFP_KERNEL);
rx_q              441 drivers/net/wireless/mediatek/mt7601u/dma.c 		dev->rx_q.e[i].p = dev_alloc_pages(MT_RX_ORDER);
rx_q              443 drivers/net/wireless/mediatek/mt7601u/dma.c 		if (!dev->rx_q.e[i].urb || !dev->rx_q.e[i].p)
rx_q              209 drivers/net/wireless/mediatek/mt7601u/mt7601u.h 	struct mt7601u_rx_queue rx_q;
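
The mt7601u entries allocate one URB plus a page-order buffer per RX queue slot and free both on teardown. A hedged sketch of that per-entry allocate/release pairing (the names and the page order are illustrative, not the driver's own):

	#include <linux/usb.h>
	#include <linux/skbuff.h>
	#include <linux/errno.h>

	#define MY_RX_ORDER 2	/* stands in for MT_RX_ORDER */

	struct my_rx_entry {
		struct urb *urb;
		struct page *p;
	};

	static int my_rx_entry_alloc(struct my_rx_entry *e)
	{
		e->urb = usb_alloc_urb(0, GFP_KERNEL);
		e->p = dev_alloc_pages(MY_RX_ORDER);
		if (!e->urb || !e->p)
			return -ENOMEM;	/* caller cleans up via my_rx_entry_free() */
		return 0;
	}

	static void my_rx_entry_free(struct my_rx_entry *e)
	{
		if (e->p)
			__free_pages(e->p, MY_RX_ORDER);
		usb_free_urb(e->urb);	/* usb_free_urb() is NULL-safe */
	}
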
rx_q             1062 drivers/net/wireless/rsi/rsi_91x_sdio.c 	skb_queue_head_init(&sdev->rx_q.head);
rx_q             1063 drivers/net/wireless/rsi/rsi_91x_sdio.c 	sdev->rx_q.num_rx_pkts = 0;
rx_q               78 drivers/net/wireless/rsi/rsi_91x_sdio_ops.c 			skb = skb_dequeue(&sdev->rx_q.head);
rx_q               81 drivers/net/wireless/rsi/rsi_91x_sdio_ops.c 			if (sdev->rx_q.num_rx_pkts > 0)
rx_q               82 drivers/net/wireless/rsi/rsi_91x_sdio_ops.c 				sdev->rx_q.num_rx_pkts--;
rx_q               95 drivers/net/wireless/rsi/rsi_91x_sdio_ops.c 	skb_queue_purge(&sdev->rx_q.head);
rx_q              118 drivers/net/wireless/rsi/rsi_91x_sdio_ops.c 	if (dev->rx_q.num_rx_pkts >= RSI_MAX_RX_PKTS)
rx_q              159 drivers/net/wireless/rsi/rsi_91x_sdio_ops.c 	skb_queue_tail(&dev->rx_q.head, skb);
rx_q              160 drivers/net/wireless/rsi/rsi_91x_sdio_ops.c 	dev->rx_q.num_rx_pkts++;
rx_q              278 drivers/net/wireless/rsi/rsi_91x_usb.c 	if (skb_queue_len(&dev->rx_q) >= RSI_MAX_RX_PKTS) {
rx_q              283 drivers/net/wireless/rsi/rsi_91x_usb.c 	skb_queue_tail(&dev->rx_q, rx_cb->rx_skb);
rx_q              580 drivers/net/wireless/rsi/rsi_91x_usb.c 	skb_queue_head_init(&dev->rx_q);
rx_q               43 drivers/net/wireless/rsi/rsi_91x_usb_ops.c 			skb = skb_dequeue(&dev->rx_q);
rx_q               58 drivers/net/wireless/rsi/rsi_91x_usb_ops.c 	skb_queue_purge(&dev->rx_q);
rx_q              131 drivers/net/wireless/rsi/rsi_sdio.h 	struct rsi_sdio_rx_q rx_q;
rx_q               68 drivers/net/wireless/rsi/rsi_usb.h 	struct sk_buff_head rx_q;
rx_q              412 drivers/nfc/nfcmrvl/fw_dnld.c 	while ((skb = skb_dequeue(&fw_dnld->rx_q))) {
rx_q              464 drivers/nfc/nfcmrvl/fw_dnld.c 	skb_queue_head_init(&priv->fw_dnld.rx_q);
rx_q              484 drivers/nfc/nfcmrvl/fw_dnld.c 	skb_queue_tail(&priv->fw_dnld.rx_q, skb);
rx_q               86 drivers/nfc/nfcmrvl/fw_dnld.h 	struct sk_buff_head rx_q;
rx_q              369 include/net/bluetooth/hci_core.h 	struct sk_buff_head	rx_q;
rx_q              225 include/net/nfc/nci_core.h 	struct sk_buff_head	rx_q;
rx_q             1566 net/bluetooth/hci_core.c 		skb_queue_purge(&hdev->rx_q);
rx_q             1753 net/bluetooth/hci_core.c 	skb_queue_purge(&hdev->rx_q);
rx_q             1821 net/bluetooth/hci_core.c 	skb_queue_purge(&hdev->rx_q);
rx_q             3256 net/bluetooth/hci_core.c 	skb_queue_head_init(&hdev->rx_q);
rx_q             3510 net/bluetooth/hci_core.c 	skb_queue_tail(&hdev->rx_q, skb);
rx_q             3526 net/bluetooth/hci_core.c 	skb_queue_tail(&hdev->rx_q, skb);
rx_q             4456 net/bluetooth/hci_core.c 	while ((skb = skb_dequeue(&hdev->rx_q))) {
rx_q              521 net/nfc/nci/core.c 		skb_queue_purge(&ndev->rx_q);
rx_q              546 net/nfc/nci/core.c 	skb_queue_purge(&ndev->rx_q);
rx_q             1220 net/nfc/nci/core.c 	skb_queue_head_init(&ndev->rx_q);
rx_q             1287 net/nfc/nci/core.c 	skb_queue_tail(&ndev->rx_q, skb);
rx_q             1464 net/nfc/nci/core.c 	while ((skb = skb_dequeue(&ndev->rx_q))) {