rx_desc           713 drivers/atm/horizon.c   rx_ch_desc * rx_desc = &memmap->rx_descs[channel];
rx_desc           718 drivers/atm/horizon.c   channel_type = rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK;
rx_desc           746 drivers/atm/horizon.c   wr_mem (dev, &rx_desc->wr_buf_type,
rx_desc           749 drivers/atm/horizon.c     wr_mem (dev, &rx_desc->rd_buf_type, buf_ptr);
rx_desc           787 drivers/atm/horizon.c   rx_ch_desc * rx_desc = &memmap->rx_descs[vc];
rx_desc           792 drivers/atm/horizon.c   value = rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK;
rx_desc           806 drivers/atm/horizon.c     wr_mem (dev, &rx_desc->wr_buf_type, RX_CHANNEL_DISABLED);
rx_desc           808 drivers/atm/horizon.c     if ((rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK) == RX_CHANNEL_DISABLED)
rx_desc           852 drivers/atm/horizon.c     r1 = rd_mem (dev, &rx_desc->rd_buf_type);
rx_desc           870 drivers/atm/horizon.c     r2 = rd_mem (dev, &rx_desc->rd_buf_type);
rx_desc          1821 drivers/atm/horizon.c   cell_buf * rx_desc;
rx_desc          1891 drivers/atm/horizon.c     rx_ch_desc * rx_desc = &memmap->rx_descs[chan];
rx_desc          1893 drivers/atm/horizon.c     wr_mem (dev, &rx_desc->wr_buf_type, CHANNEL_TYPE_AAL5 | RX_CHANNEL_DISABLED);
rx_desc          1900 drivers/atm/horizon.c   rx_desc = memmap->bufn4;
rx_desc          1902 drivers/atm/horizon.c   wr_mem (dev, &memmap->rxfreebufstart.next, BUF_PTR(rx_desc) | BUFF_STATUS_EMPTY);
rx_desc          1905 drivers/atm/horizon.c     wr_mem (dev, &rx_desc->next, BUF_PTR(rx_desc+1) | BUFF_STATUS_EMPTY);
rx_desc          1907 drivers/atm/horizon.c     rx_desc++;
rx_desc          1910 drivers/atm/horizon.c   wr_mem (dev, &rx_desc->next, BUF_PTR(&memmap->rxfreebufend) | BUFF_STATUS_EMPTY);
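
The drivers/atm/horizon.c hits above build the card's receive free-buffer chain: each cell buffer's next word is written as a buffer pointer ORed with an empty-status flag, and the last buffer points at the end marker. A minimal host-side sketch of that chaining pattern, using invented types and flag values rather than the card's real memory map, could look like this:

#include <stdint.h>
#include <stdio.h>

#define NBUFS             8
#define BUFF_STATUS_EMPTY ((uintptr_t)0x1)               /* hypothetical status bit */
#define BUF_PTR(p)        ((uintptr_t)(p) & ~(uintptr_t)0x3)

struct cell_buf {
    uintptr_t next;          /* pointer-plus-status word */
    uint32_t payload[12];
};

static struct cell_buf pool[NBUFS];
static struct cell_buf freebufstart, freebufend;

int main(void)
{
    struct cell_buf *rx_desc = &pool[0];
    int i;

    /* head of the free list points at the first pool buffer */
    freebufstart.next = BUF_PTR(rx_desc) | BUFF_STATUS_EMPTY;

    /* each buffer points at its successor and is marked empty */
    for (i = 0; i < NBUFS - 1; i++, rx_desc++)
        rx_desc->next = BUF_PTR(rx_desc + 1) | BUFF_STATUS_EMPTY;

    /* the last buffer closes the chain on the end marker */
    rx_desc->next = BUF_PTR(&freebufend) | BUFF_STATUS_EMPTY;

    printf("chained %d empty rx cell buffers\n", NBUFS);
    return 0;
}
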
rx_desc           244 drivers/infiniband/ulp/iser/iser_initiator.c 	struct iser_rx_desc *rx_desc;
rx_desc           267 drivers/infiniband/ulp/iser/iser_initiator.c 	rx_desc = iser_conn->rx_descs;
rx_desc           269 drivers/infiniband/ulp/iser/iser_initiator.c 	for (i = 0; i < iser_conn->qp_max_recv_dtos; i++, rx_desc++)  {
rx_desc           270 drivers/infiniband/ulp/iser/iser_initiator.c 		dma_addr = ib_dma_map_single(device->ib_device, (void *)rx_desc,
rx_desc           275 drivers/infiniband/ulp/iser/iser_initiator.c 		rx_desc->dma_addr = dma_addr;
rx_desc           276 drivers/infiniband/ulp/iser/iser_initiator.c 		rx_desc->cqe.done = iser_task_rsp;
rx_desc           277 drivers/infiniband/ulp/iser/iser_initiator.c 		rx_sg = &rx_desc->rx_sg;
rx_desc           278 drivers/infiniband/ulp/iser/iser_initiator.c 		rx_sg->addr = rx_desc->dma_addr;
rx_desc           287 drivers/infiniband/ulp/iser/iser_initiator.c 	rx_desc = iser_conn->rx_descs;
rx_desc           288 drivers/infiniband/ulp/iser/iser_initiator.c 	for (j = 0; j < i; j++, rx_desc++)
rx_desc           289 drivers/infiniband/ulp/iser/iser_initiator.c 		ib_dma_unmap_single(device->ib_device, rx_desc->dma_addr,
rx_desc           305 drivers/infiniband/ulp/iser/iser_initiator.c 	struct iser_rx_desc *rx_desc;
rx_desc           312 drivers/infiniband/ulp/iser/iser_initiator.c 	rx_desc = iser_conn->rx_descs;
rx_desc           313 drivers/infiniband/ulp/iser/iser_initiator.c 	for (i = 0; i < iser_conn->qp_max_recv_dtos; i++, rx_desc++)
rx_desc           314 drivers/infiniband/ulp/iser/iser_initiator.c 		ib_dma_unmap_single(device->ib_device, rx_desc->dma_addr,
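
The iser_initiator.c hits above follow a map-all-or-unwind idiom: every rx descriptor is DMA-mapped in turn, and if mapping fails at index i, only the first i descriptors are unmapped again. A self-contained sketch of that idiom, with dma_map_one()/dma_unmap_one() as hypothetical stand-ins for the ib_dma_* calls:

#include <stddef.h>
#include <stdint.h>

struct rx_desc_sketch {
    char     buf[128];
    uint64_t dma_addr;
};

/* stand-in mapping helpers; 0 means the mapping failed */
static uint64_t dma_map_one(void *cpu_addr, size_t len)
{
    (void)len;
    return (uint64_t)(uintptr_t)cpu_addr;   /* pretend mapping succeeded */
}

static void dma_unmap_one(uint64_t dma_addr, size_t len)
{
    (void)dma_addr;
    (void)len;
}

static int map_rx_descs(struct rx_desc_sketch *descs, unsigned int n)
{
    struct rx_desc_sketch *rx_desc = descs;
    unsigned int i, j;

    for (i = 0; i < n; i++, rx_desc++) {
        uint64_t dma_addr = dma_map_one(rx_desc, sizeof(*rx_desc));

        if (!dma_addr)
            goto unmap;
        rx_desc->dma_addr = dma_addr;
    }
    return 0;

unmap:
    /* unwind only the i descriptors that were mapped successfully */
    rx_desc = descs;
    for (j = 0; j < i; j++, rx_desc++)
        dma_unmap_one(rx_desc->dma_addr, sizeof(*rx_desc));
    return -1;
}

int main(void)
{
    static struct rx_desc_sketch descs[16];

    return map_rx_descs(descs, 16);
}
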
rx_desc          1003 drivers/infiniband/ulp/iser/iser_verbs.c 	struct iser_rx_desc *rx_desc;
rx_desc          1008 drivers/infiniband/ulp/iser/iser_verbs.c 		rx_desc = &iser_conn->rx_descs[my_rx_head];
rx_desc          1009 drivers/infiniband/ulp/iser/iser_verbs.c 		rx_desc->cqe.done = iser_task_rsp;
rx_desc          1010 drivers/infiniband/ulp/iser/iser_verbs.c 		wr->wr_cqe = &rx_desc->cqe;
rx_desc          1011 drivers/infiniband/ulp/iser/iser_verbs.c 		wr->sg_list = &rx_desc->rx_sg;
rx_desc           171 drivers/infiniband/ulp/isert/ib_isert.c 	struct iser_rx_desc *rx_desc;
rx_desc           182 drivers/infiniband/ulp/isert/ib_isert.c 	rx_desc = isert_conn->rx_descs;
rx_desc           184 drivers/infiniband/ulp/isert/ib_isert.c 	for (i = 0; i < ISERT_QP_MAX_RECV_DTOS; i++, rx_desc++)  {
rx_desc           185 drivers/infiniband/ulp/isert/ib_isert.c 		dma_addr = ib_dma_map_single(ib_dev, (void *)rx_desc,
rx_desc           190 drivers/infiniband/ulp/isert/ib_isert.c 		rx_desc->dma_addr = dma_addr;
rx_desc           192 drivers/infiniband/ulp/isert/ib_isert.c 		rx_sg = &rx_desc->rx_sg;
rx_desc           193 drivers/infiniband/ulp/isert/ib_isert.c 		rx_sg->addr = rx_desc->dma_addr;
rx_desc           196 drivers/infiniband/ulp/isert/ib_isert.c 		rx_desc->rx_cqe.done = isert_recv_done;
rx_desc           202 drivers/infiniband/ulp/isert/ib_isert.c 	rx_desc = isert_conn->rx_descs;
rx_desc           203 drivers/infiniband/ulp/isert/ib_isert.c 	for (j = 0; j < i; j++, rx_desc++) {
rx_desc           204 drivers/infiniband/ulp/isert/ib_isert.c 		ib_dma_unmap_single(ib_dev, rx_desc->dma_addr,
rx_desc           217 drivers/infiniband/ulp/isert/ib_isert.c 	struct iser_rx_desc *rx_desc;
rx_desc           223 drivers/infiniband/ulp/isert/ib_isert.c 	rx_desc = isert_conn->rx_descs;
rx_desc           224 drivers/infiniband/ulp/isert/ib_isert.c 	for (i = 0; i < ISERT_QP_MAX_RECV_DTOS; i++, rx_desc++)  {
rx_desc           225 drivers/infiniband/ulp/isert/ib_isert.c 		ib_dma_unmap_single(ib_dev, rx_desc->dma_addr,
rx_desc           807 drivers/infiniband/ulp/isert/ib_isert.c 	struct iser_rx_desc *rx_desc;
rx_desc           810 drivers/infiniband/ulp/isert/ib_isert.c 		rx_desc = &isert_conn->rx_descs[i];
rx_desc           812 drivers/infiniband/ulp/isert/ib_isert.c 		rx_wr->wr_cqe = &rx_desc->rx_cqe;
rx_desc           813 drivers/infiniband/ulp/isert/ib_isert.c 		rx_wr->sg_list = &rx_desc->rx_sg;
rx_desc           816 drivers/infiniband/ulp/isert/ib_isert.c 		rx_desc->in_use = false;
rx_desc           829 drivers/infiniband/ulp/isert/ib_isert.c isert_post_recv(struct isert_conn *isert_conn, struct iser_rx_desc *rx_desc)
rx_desc           834 drivers/infiniband/ulp/isert/ib_isert.c 	if (!rx_desc->in_use) {
rx_desc           842 drivers/infiniband/ulp/isert/ib_isert.c 	rx_desc->in_use = false;
rx_desc           843 drivers/infiniband/ulp/isert/ib_isert.c 	rx_wr.wr_cqe = &rx_desc->rx_cqe;
rx_desc           844 drivers/infiniband/ulp/isert/ib_isert.c 	rx_wr.sg_list = &rx_desc->rx_sg;
rx_desc          1054 drivers/infiniband/ulp/isert/ib_isert.c 	struct iser_rx_desc *rx_desc = isert_conn->login_req_buf;
rx_desc          1066 drivers/infiniband/ulp/isert/ib_isert.c 			(struct iscsi_login_req *)&rx_desc->iscsi_header;
rx_desc          1085 drivers/infiniband/ulp/isert/ib_isert.c 	memcpy(&login->req[0], (void *)&rx_desc->iscsi_header, ISCSI_HDR_LEN);
rx_desc          1091 drivers/infiniband/ulp/isert/ib_isert.c 	memcpy(login->req_buf, &rx_desc->data[0], size);
rx_desc          1101 drivers/infiniband/ulp/isert/ib_isert.c *isert_allocate_cmd(struct iscsi_conn *conn, struct iser_rx_desc *rx_desc)
rx_desc          1115 drivers/infiniband/ulp/isert/ib_isert.c 	isert_cmd->rx_desc = rx_desc;
rx_desc          1123 drivers/infiniband/ulp/isert/ib_isert.c 		      struct iser_rx_desc *rx_desc, unsigned char *buf)
rx_desc          1156 drivers/infiniband/ulp/isert/ib_isert.c 				    &rx_desc->data[0], imm_data_len);
rx_desc          1163 drivers/infiniband/ulp/isert/ib_isert.c 		sg_set_buf(&isert_cmd->sg, &rx_desc->data[0], imm_data_len);
rx_desc          1190 drivers/infiniband/ulp/isert/ib_isert.c 			   struct iser_rx_desc *rx_desc, unsigned char *buf)
rx_desc          1232 drivers/infiniband/ulp/isert/ib_isert.c 		  sg_nents, &rx_desc->data[0], unsol_data_len);
rx_desc          1234 drivers/infiniband/ulp/isert/ib_isert.c 	sg_copy_from_buffer(sg_start, sg_nents, &rx_desc->data[0],
rx_desc          1245 drivers/infiniband/ulp/isert/ib_isert.c 	rc = isert_post_recv(isert_conn, rx_desc);
rx_desc          1255 drivers/infiniband/ulp/isert/ib_isert.c 		     struct iscsi_cmd *cmd, struct iser_rx_desc *rx_desc,
rx_desc          1274 drivers/infiniband/ulp/isert/ib_isert.c 		      struct iscsi_cmd *cmd, struct iser_rx_desc *rx_desc,
rx_desc          1293 drivers/infiniband/ulp/isert/ib_isert.c 	memcpy(cmd->text_in_ptr, &rx_desc->data[0], payload_length);
rx_desc          1299 drivers/infiniband/ulp/isert/ib_isert.c isert_rx_opcode(struct isert_conn *isert_conn, struct iser_rx_desc *rx_desc,
rx_desc          1303 drivers/infiniband/ulp/isert/ib_isert.c 	struct iscsi_hdr *hdr = &rx_desc->iscsi_header;
rx_desc          1319 drivers/infiniband/ulp/isert/ib_isert.c 		cmd = isert_allocate_cmd(conn, rx_desc);
rx_desc          1331 drivers/infiniband/ulp/isert/ib_isert.c 					rx_desc, (unsigned char *)hdr);
rx_desc          1334 drivers/infiniband/ulp/isert/ib_isert.c 		cmd = isert_allocate_cmd(conn, rx_desc);
rx_desc          1340 drivers/infiniband/ulp/isert/ib_isert.c 					   rx_desc, (unsigned char *)hdr);
rx_desc          1343 drivers/infiniband/ulp/isert/ib_isert.c 		ret = isert_handle_iscsi_dataout(isert_conn, rx_desc,
rx_desc          1347 drivers/infiniband/ulp/isert/ib_isert.c 		cmd = isert_allocate_cmd(conn, rx_desc);
rx_desc          1355 drivers/infiniband/ulp/isert/ib_isert.c 		cmd = isert_allocate_cmd(conn, rx_desc);
rx_desc          1365 drivers/infiniband/ulp/isert/ib_isert.c 			cmd = isert_allocate_cmd(conn, rx_desc);
rx_desc          1372 drivers/infiniband/ulp/isert/ib_isert.c 					    rx_desc, (struct iscsi_text *)hdr);
rx_desc          1400 drivers/infiniband/ulp/isert/ib_isert.c 	struct iser_rx_desc *rx_desc = cqe_to_rx_desc(wc->wr_cqe);
rx_desc          1401 drivers/infiniband/ulp/isert/ib_isert.c 	struct iscsi_hdr *hdr = &rx_desc->iscsi_header;
rx_desc          1402 drivers/infiniband/ulp/isert/ib_isert.c 	struct iser_ctrl *iser_ctrl = &rx_desc->iser_header;
rx_desc          1413 drivers/infiniband/ulp/isert/ib_isert.c 	rx_desc->in_use = true;
rx_desc          1415 drivers/infiniband/ulp/isert/ib_isert.c 	ib_dma_sync_single_for_cpu(ib_dev, rx_desc->dma_addr,
rx_desc          1419 drivers/infiniband/ulp/isert/ib_isert.c 		 rx_desc->dma_addr, hdr->opcode, hdr->itt, hdr->flags,
rx_desc          1447 drivers/infiniband/ulp/isert/ib_isert.c 	isert_rx_opcode(isert_conn, rx_desc,
rx_desc          1450 drivers/infiniband/ulp/isert/ib_isert.c 	ib_dma_sync_single_for_device(ib_dev, rx_desc->dma_addr,
rx_desc          1826 drivers/infiniband/ulp/isert/ib_isert.c 	ret = isert_post_recv(isert_conn, isert_cmd->rx_desc);
rx_desc          2201 drivers/infiniband/ulp/isert/ib_isert.c 		rc = isert_post_recv(isert_conn, isert_cmd->rx_desc);
rx_desc           125 drivers/infiniband/ulp/isert/ib_isert.h 	struct iser_rx_desc	*rx_desc;
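
The ib_isert.c hits above guard reposting with an in_use flag: the completion handler marks a descriptor consumed, and isert_post_recv() declines to repost one that was never consumed, so the same buffer is not posted twice. A rough sketch of that guard, with invented types and a stand-in post_recv() rather than the verbs API:

#include <stdbool.h>
#include <stdio.h>

struct rx_desc_sketch {
    bool in_use;    /* set by the completion path, cleared on repost */
    char data[64];
};

static int post_recv(struct rx_desc_sketch *rx_desc)
{
    if (!rx_desc->in_use) {
        /* never consumed since the last post: nothing to repost */
        return 0;
    }

    rx_desc->in_use = false;    /* hand the buffer back to the hardware */
    printf("reposted rx buffer %p\n", (void *)rx_desc);
    return 0;
}

static void recv_done(struct rx_desc_sketch *rx_desc)
{
    rx_desc->in_use = true;     /* completion consumed this buffer */
}

int main(void)
{
    struct rx_desc_sketch d = { .in_use = false };

    post_recv(&d);      /* skipped: not consumed yet */
    recv_done(&d);
    post_recv(&d);      /* actually reposted */
    return 0;
}
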
rx_desc           246 drivers/net/ethernet/3com/typhoon.c 	struct rx_desc			rxLo[RX_ENTRIES]	__3xp_aligned;
rx_desc           247 drivers/net/ethernet/3com/typhoon.c 	struct rx_desc			rxHi[RX_ENTRIES]	__3xp_aligned;
rx_desc           374 drivers/net/ethernet/3com/typhoon.c 	*index += count * sizeof(struct rx_desc);
rx_desc           375 drivers/net/ethernet/3com/typhoon.c 	*index %= RX_ENTRIES * sizeof(struct rx_desc);
rx_desc          1210 drivers/net/ethernet/3com/typhoon.c 	iface->rxLoSize = cpu_to_le32(RX_ENTRIES * sizeof(struct rx_desc));
rx_desc          1214 drivers/net/ethernet/3com/typhoon.c 	iface->rxHiSize = cpu_to_le32(RX_ENTRIES * sizeof(struct rx_desc));
rx_desc          1636 drivers/net/ethernet/3com/typhoon.c 	struct rx_desc *rx;
rx_desc          1651 drivers/net/ethernet/3com/typhoon.c 		rx = (struct rx_desc *) (rxRing->ringBase + rxaddr);
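
The typhoon.c hits above keep the rx ring index as a byte offset: it advances by a count of descriptors' worth of bytes, wraps by modulo of the ring's byte length, and the descriptor is recovered by adding the offset to the ring base. A small sketch of that arithmetic with an illustrative descriptor layout:

#include <stdint.h>
#include <stdio.h>

#define RX_ENTRIES 32

struct rx_desc_sk {
    uint64_t addr;
    uint32_t len;
    uint32_t flags;
};

/* advance a byte-offset ring index by 'count' descriptors, with wrap */
static void typhoon_style_inc(uint32_t *index, unsigned int count)
{
    *index += count * sizeof(struct rx_desc_sk);
    *index %= RX_ENTRIES * sizeof(struct rx_desc_sk);
}

int main(void)
{
    struct rx_desc_sk ring[RX_ENTRIES];
    uint32_t idx = 0;

    typhoon_style_inc(&idx, RX_ENTRIES + 3);    /* wraps past the end */

    /* recover the descriptor from base pointer plus byte offset */
    struct rx_desc_sk *rx = (struct rx_desc_sk *)((uint8_t *)ring + idx);

    printf("byte offset %u -> entry %ld\n", idx, (long)(rx - ring));
    return 0;
}
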
rx_desc           711 drivers/net/ethernet/alteon/acenic.c 		size = (sizeof(struct rx_desc) *
rx_desc           760 drivers/net/ethernet/alteon/acenic.c 	size = (sizeof(struct rx_desc) *
rx_desc          1220 drivers/net/ethernet/alteon/acenic.c 	       RX_STD_RING_ENTRIES * sizeof(struct rx_desc));
rx_desc          1230 drivers/net/ethernet/alteon/acenic.c 		     (sizeof(struct rx_desc) * RX_STD_RING_ENTRIES)));
rx_desc          1236 drivers/net/ethernet/alteon/acenic.c 	       RX_JUMBO_RING_ENTRIES * sizeof(struct rx_desc));
rx_desc          1245 drivers/net/ethernet/alteon/acenic.c 	       RX_MINI_RING_ENTRIES * sizeof(struct rx_desc));
rx_desc          1250 drivers/net/ethernet/alteon/acenic.c 			     (sizeof(struct rx_desc) *
rx_desc          1271 drivers/net/ethernet/alteon/acenic.c 		     (sizeof(struct rx_desc) *
rx_desc          1279 drivers/net/ethernet/alteon/acenic.c 	       RX_RETURN_RING_ENTRIES * sizeof(struct rx_desc));
rx_desc          1642 drivers/net/ethernet/alteon/acenic.c 		struct rx_desc *rd;
rx_desc          1703 drivers/net/ethernet/alteon/acenic.c 		struct rx_desc *rd;
rx_desc          1759 drivers/net/ethernet/alteon/acenic.c 		struct rx_desc *rd;
rx_desc          1933 drivers/net/ethernet/alteon/acenic.c 		struct rx_desc *retdesc;
rx_desc           477 drivers/net/ethernet/alteon/acenic.h #define RX_STD_RING_SIZE	(RX_STD_RING_ENTRIES * sizeof(struct rx_desc))
rx_desc           480 drivers/net/ethernet/alteon/acenic.h #define RX_JUMBO_RING_SIZE	(RX_JUMBO_RING_ENTRIES *sizeof(struct rx_desc))
rx_desc           483 drivers/net/ethernet/alteon/acenic.h #define RX_MINI_RING_SIZE	(RX_MINI_RING_ENTRIES *sizeof(struct rx_desc))
rx_desc           487 drivers/net/ethernet/alteon/acenic.h 				 sizeof(struct rx_desc))
rx_desc           664 drivers/net/ethernet/alteon/acenic.h 	struct rx_desc		*rx_std_ring;
rx_desc           665 drivers/net/ethernet/alteon/acenic.h 	struct rx_desc		*rx_jumbo_ring;
rx_desc           666 drivers/net/ethernet/alteon/acenic.h 	struct rx_desc		*rx_mini_ring;
rx_desc           667 drivers/net/ethernet/alteon/acenic.h 	struct rx_desc		*rx_return_ring;
rx_desc           318 drivers/net/ethernet/atheros/atlx/atl2.c 	adapter->rxd_ring = (struct rx_desc *) (((u8 *)adapter->txs_ring) +
rx_desc           409 drivers/net/ethernet/atheros/atlx/atl2.c 	struct rx_desc *rxd;
rx_desc           464 drivers/net/ethernet/atheros/atlx/atl2.h 	struct rx_desc	*rxd_ring;
rx_desc            99 drivers/net/ethernet/chelsio/cxgb3/adapter.h struct rx_desc;
rx_desc           114 drivers/net/ethernet/chelsio/cxgb3/adapter.h 	struct rx_desc *desc;       /* address of HW Rx descriptor ring */
rx_desc           412 drivers/net/ethernet/chelsio/cxgb3/sge.c 				 struct rx_desc *d, struct rx_sw_desc *sd,
rx_desc           431 drivers/net/ethernet/chelsio/cxgb3/sge.c static inline int add_one_rx_chunk(dma_addr_t mapping, struct rx_desc *d,
rx_desc           508 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct rx_desc *d = &q->desc[q->pidx];
rx_desc           581 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct rx_desc *from = &q->desc[idx];
rx_desc           582 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct rx_desc *to = &q->desc[q->pidx];
rx_desc           690 drivers/net/ethernet/chelsio/cxgb3/sge.c 					  sizeof(struct rx_desc), q->fl[i].desc,
rx_desc          3048 drivers/net/ethernet/chelsio/cxgb3/sge.c 				   sizeof(struct rx_desc),
rx_desc          3055 drivers/net/ethernet/chelsio/cxgb3/sge.c 				   sizeof(struct rx_desc),
rx_desc           197 drivers/net/ethernet/dec/tulip/dmfe.c 	struct rx_desc *next_rx_desc;
rx_desc           228 drivers/net/ethernet/dec/tulip/dmfe.c 	struct rx_desc *first_rx_desc;
rx_desc           229 drivers/net/ethernet/dec/tulip/dmfe.c 	struct rx_desc *rx_insert_ptr;
rx_desc           230 drivers/net/ethernet/dec/tulip/dmfe.c 	struct rx_desc *rx_ready_ptr;	/* packet come pointer */
rx_desc           952 drivers/net/ethernet/dec/tulip/dmfe.c 	struct rx_desc *rxptr;
rx_desc          1338 drivers/net/ethernet/dec/tulip/dmfe.c 	struct rx_desc *rxptr = db->rx_insert_ptr;
rx_desc          1363 drivers/net/ethernet/dec/tulip/dmfe.c 	struct rx_desc *tmp_rx;
rx_desc          1409 drivers/net/ethernet/dec/tulip/dmfe.c 		tmp_rx_dma += sizeof(struct rx_desc);
rx_desc          1548 drivers/net/ethernet/dec/tulip/dmfe.c 	struct rx_desc *rxptr;
rx_desc           117 drivers/net/ethernet/dec/tulip/uli526x.c 	struct rx_desc *next_rx_desc;
rx_desc           150 drivers/net/ethernet/dec/tulip/uli526x.c 	struct rx_desc *first_rx_desc;
rx_desc           151 drivers/net/ethernet/dec/tulip/uli526x.c 	struct rx_desc *rx_insert_ptr;
rx_desc           152 drivers/net/ethernet/dec/tulip/uli526x.c 	struct rx_desc *rx_ready_ptr;	/* packet come pointer */
rx_desc           805 drivers/net/ethernet/dec/tulip/uli526x.c 	struct rx_desc *rxptr;
rx_desc          1275 drivers/net/ethernet/dec/tulip/uli526x.c 	struct rx_desc *rxptr = db->rx_insert_ptr;
rx_desc          1301 drivers/net/ethernet/dec/tulip/uli526x.c 	struct rx_desc *tmp_rx;
rx_desc          1344 drivers/net/ethernet/dec/tulip/uli526x.c 		tmp_rx_dma += sizeof(struct rx_desc);
rx_desc          1444 drivers/net/ethernet/dec/tulip/uli526x.c 	struct rx_desc *rxptr;
rx_desc           139 drivers/net/ethernet/ec_bhf.c 	struct rx_desc *rx_descs;
rx_desc           194 drivers/net/ethernet/ec_bhf.c static int ec_bhf_pkt_received(struct rx_desc *desc)
rx_desc           199 drivers/net/ethernet/ec_bhf.c static void ec_bhf_add_rx_desc(struct ec_bhf_priv *priv, struct rx_desc *desc)
rx_desc           207 drivers/net/ethernet/ec_bhf.c 	struct rx_desc *desc = &priv->rx_descs[priv->rx_dnext];
rx_desc           373 drivers/net/ethernet/ec_bhf.c 	priv->rx_dcount = priv->rx_buf.len / sizeof(struct rx_desc);
rx_desc           374 drivers/net/ethernet/ec_bhf.c 	priv->rx_descs = (struct rx_desc *)priv->rx_buf.buf;
rx_desc           378 drivers/net/ethernet/ec_bhf.c 		struct rx_desc *desc = &priv->rx_descs[i];
rx_desc           401 drivers/net/ethernet/ec_bhf.c 				   FIFO_SIZE * sizeof(struct rx_desc));
rx_desc           275 drivers/net/ethernet/google/gve/gve_rx.c static bool gve_rx(struct gve_rx_ring *rx, struct gve_rx_desc *rx_desc,
rx_desc           287 drivers/net/ethernet/google/gve/gve_rx.c 	if (unlikely(rx_desc->flags_seq & GVE_RXF_ERR))
rx_desc           290 drivers/net/ethernet/google/gve/gve_rx.c 	len = be16_to_cpu(rx_desc->len) - GVE_RX_PAD;
rx_desc           345 drivers/net/ethernet/google/gve/gve_rx.c 		if (rx_desc->csum)
rx_desc           349 drivers/net/ethernet/google/gve/gve_rx.c 		skb->csum = csum_unfold(rx_desc->csum);
rx_desc           354 drivers/net/ethernet/google/gve/gve_rx.c 	    gve_needs_rss(rx_desc->flags_seq))
rx_desc           355 drivers/net/ethernet/google/gve/gve_rx.c 		skb_set_hash(skb, be32_to_cpu(rx_desc->rss_hash),
rx_desc           356 drivers/net/ethernet/google/gve/gve_rx.c 			     gve_rss_type(rx_desc->flags_seq));
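
The gve_rx.c hits above parse a completion descriptor whose fields are stored big-endian: the length is converted and the leading pad subtracted, an error flag short-circuits processing, and the RSS hash and checksum are handed to the stack. A hedged userspace sketch of that parsing, with ntohs()/ntohl() standing in for be16_to_cpu()/be32_to_cpu() and an invented layout:

#include <arpa/inet.h>      /* ntohs/ntohl as userspace be*_to_cpu stand-ins */
#include <stdint.h>
#include <stdio.h>

#define RXF_ERR  0x0008     /* hypothetical error flag */
#define RX_PAD   2          /* hypothetical leading pad bytes */

struct rx_cmpl_sketch {
    uint32_t rss_hash;      /* big-endian on the wire */
    uint16_t len;           /* big-endian, includes the pad */
    uint16_t flags_seq;     /* host-endian flag/sequence word here */
    uint16_t csum;          /* raw checksum, folded form */
};

static int handle_rx_desc(const struct rx_cmpl_sketch *rx_desc)
{
    if (rx_desc->flags_seq & RXF_ERR)
        return -1;                                  /* drop bad frames */

    uint16_t len  = ntohs(rx_desc->len) - RX_PAD;   /* payload length */
    uint32_t hash = ntohl(rx_desc->rss_hash);       /* RSS hash for the stack */

    printf("frame: %u bytes, rss 0x%08x, csum 0x%04x\n",
           (unsigned)len, hash, (unsigned)rx_desc->csum);
    return 0;
}

int main(void)
{
    struct rx_cmpl_sketch d = {
        .rss_hash  = htonl(0xdeadbeef),
        .len       = htons(66),
        .flags_seq = 0,
        .csum      = 0x1234,
    };
    return handle_rx_desc(&d);
}
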
rx_desc           579 drivers/net/ethernet/hisilicon/hip04_eth.c 	struct rx_desc *desc;
rx_desc           604 drivers/net/ethernet/hisilicon/hip04_eth.c 		desc = (struct rx_desc *)skb->data;
rx_desc           164 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	struct hns3_desc *rx_desc, *tx_desc;
rx_desc           220 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	rx_desc	 = &ring->desc[rx_index];
rx_desc           222 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	addr = le64_to_cpu(rx_desc->addr);
rx_desc           225 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)l234_info: %u\n", rx_desc->rx.l234_info);
rx_desc           226 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)pkt_len: %u\n", rx_desc->rx.pkt_len);
rx_desc           227 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)size: %u\n", rx_desc->rx.size);
rx_desc           228 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)rss_hash: %u\n", rx_desc->rx.rss_hash);
rx_desc           229 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)fd_id: %u\n", rx_desc->rx.fd_id);
rx_desc           230 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)vlan_tag: %u\n", rx_desc->rx.vlan_tag);
rx_desc           231 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)o_dm_vlan_id_fb: %u\n", rx_desc->rx.o_dm_vlan_id_fb);
rx_desc           232 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)ot_vlan_tag: %u\n", rx_desc->rx.ot_vlan_tag);
rx_desc           233 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c 	dev_info(dev, "(RX)bd_base_info: %u\n", rx_desc->rx.bd_base_info);
rx_desc          1055 drivers/net/ethernet/ibm/emac/core.c 		if (dev->rx_desc[i].ctrl & MAL_RX_CTRL_FIRST)
rx_desc          1058 drivers/net/ethernet/ibm/emac/core.c 		dev->rx_desc[i].data_len = 0;
rx_desc          1059 drivers/net/ethernet/ibm/emac/core.c 		dev->rx_desc[i].ctrl = MAL_RX_CTRL_EMPTY |
rx_desc          1080 drivers/net/ethernet/ibm/emac/core.c 		dev->rx_desc[i].data_ptr =
rx_desc          1162 drivers/net/ethernet/ibm/emac/core.c 			dev->rx_desc[i].ctrl = 0;
rx_desc          1165 drivers/net/ethernet/ibm/emac/core.c 			dev->rx_desc[i].data_ptr = 0;
rx_desc          1181 drivers/net/ethernet/ibm/emac/core.c 	dev->rx_desc[slot].data_len = 0;
rx_desc          1183 drivers/net/ethernet/ibm/emac/core.c 	dev->rx_desc[slot].data_ptr =
rx_desc          1187 drivers/net/ethernet/ibm/emac/core.c 	dev->rx_desc[slot].ctrl = MAL_RX_CTRL_EMPTY |
rx_desc          1683 drivers/net/ethernet/ibm/emac/core.c 	dev->rx_desc[slot].data_len = 0;
rx_desc          1685 drivers/net/ethernet/ibm/emac/core.c 	dev->rx_desc[slot].ctrl = MAL_RX_CTRL_EMPTY |
rx_desc          1730 drivers/net/ethernet/ibm/emac/core.c 		int len = dev->rx_desc[slot].data_len;
rx_desc          1761 drivers/net/ethernet/ibm/emac/core.c 		u16 ctrl = dev->rx_desc[slot].ctrl;
rx_desc          1768 drivers/net/ethernet/ibm/emac/core.c 		len = dev->rx_desc[slot].data_len;
rx_desc          1860 drivers/net/ethernet/ibm/emac/core.c 		if (!(dev->rx_desc[slot].ctrl & MAL_RX_CTRL_EMPTY)) {
rx_desc          1886 drivers/net/ethernet/ibm/emac/core.c 	return !(dev->rx_desc[dev->rx_slot].ctrl & MAL_RX_CTRL_EMPTY);
rx_desc          1896 drivers/net/ethernet/ibm/emac/core.c 		u16 ctrl = dev->rx_desc[slot].ctrl;
rx_desc          3124 drivers/net/ethernet/ibm/emac/core.c 	dev->rx_desc =
rx_desc          3128 drivers/net/ethernet/ibm/emac/core.c 	DBG(dev, "rx_desc %p" NL, dev->rx_desc);
rx_desc          3132 drivers/net/ethernet/ibm/emac/core.c 	memset(dev->rx_desc, 0, NUM_RX_BUFF * sizeof(struct mal_descriptor));
rx_desc           246 drivers/net/ethernet/ibm/emac/core.h 	struct mal_descriptor		*rx_desc;
rx_desc          1073 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 		struct e1000_rx_desc *rx_desc = E1000_RX_DESC(*rxdr, i);
rx_desc          1092 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 		rx_desc->buffer_addr = cpu_to_le64(rxdr->buffer_info[i].dma);
rx_desc          3444 drivers/net/ethernet/intel/e1000/e1000_main.c 		struct e1000_rx_desc *rx_desc = E1000_RX_DESC(*rx_ring, i);
rx_desc          3447 drivers/net/ethernet/intel/e1000/e1000_main.c 		struct my_u *u = (struct my_u *)rx_desc;
rx_desc          4124 drivers/net/ethernet/intel/e1000/e1000_main.c 	struct e1000_rx_desc *rx_desc, *next_rxd;
rx_desc          4133 drivers/net/ethernet/intel/e1000/e1000_main.c 	rx_desc = E1000_RX_DESC(*rx_ring, i);
rx_desc          4136 drivers/net/ethernet/intel/e1000/e1000_main.c 	while (rx_desc->status & E1000_RXD_STAT_DD) {
rx_desc          4145 drivers/net/ethernet/intel/e1000/e1000_main.c 		status = rx_desc->status;
rx_desc          4161 drivers/net/ethernet/intel/e1000/e1000_main.c 		length = le16_to_cpu(rx_desc->length);
rx_desc          4165 drivers/net/ethernet/intel/e1000/e1000_main.c 		    (rx_desc->errors & E1000_RXD_ERR_FRAME_ERR_MASK))) {
rx_desc          4169 drivers/net/ethernet/intel/e1000/e1000_main.c 						    rx_desc->errors,
rx_desc          4239 drivers/net/ethernet/intel/e1000/e1000_main.c 							  status | rx_desc->errors << 24,
rx_desc          4240 drivers/net/ethernet/intel/e1000/e1000_main.c 							  le16_to_cpu(rx_desc->csum), skb);
rx_desc          4246 drivers/net/ethernet/intel/e1000/e1000_main.c 							  rx_desc->special, skb);
rx_desc          4265 drivers/net/ethernet/intel/e1000/e1000_main.c 				  ((u32)(rx_desc->errors) << 24),
rx_desc          4266 drivers/net/ethernet/intel/e1000/e1000_main.c 				  le16_to_cpu(rx_desc->csum), skb);
rx_desc          4274 drivers/net/ethernet/intel/e1000/e1000_main.c 			__le16 vlan = rx_desc->special;
rx_desc          4283 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc->status = 0;
rx_desc          4292 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc = next_rxd;
rx_desc          4345 drivers/net/ethernet/intel/e1000/e1000_main.c 	struct e1000_rx_desc *rx_desc, *next_rxd;
rx_desc          4354 drivers/net/ethernet/intel/e1000/e1000_main.c 	rx_desc = E1000_RX_DESC(*rx_ring, i);
rx_desc          4357 drivers/net/ethernet/intel/e1000/e1000_main.c 	while (rx_desc->status & E1000_RXD_STAT_DD) {
rx_desc          4367 drivers/net/ethernet/intel/e1000/e1000_main.c 		status = rx_desc->status;
rx_desc          4368 drivers/net/ethernet/intel/e1000/e1000_main.c 		length = le16_to_cpu(rx_desc->length);
rx_desc          4419 drivers/net/ethernet/intel/e1000/e1000_main.c 		if (unlikely(rx_desc->errors & E1000_RXD_ERR_FRAME_ERR_MASK)) {
rx_desc          4421 drivers/net/ethernet/intel/e1000/e1000_main.c 						    rx_desc->errors,
rx_desc          4450 drivers/net/ethernet/intel/e1000/e1000_main.c 				  ((u32)(rx_desc->errors) << 24),
rx_desc          4451 drivers/net/ethernet/intel/e1000/e1000_main.c 				  le16_to_cpu(rx_desc->csum), skb);
rx_desc          4453 drivers/net/ethernet/intel/e1000/e1000_main.c 		e1000_receive_skb(adapter, status, rx_desc->special, skb);
rx_desc          4456 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc->status = 0;
rx_desc          4465 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc = next_rxd;
rx_desc          4492 drivers/net/ethernet/intel/e1000/e1000_main.c 	struct e1000_rx_desc *rx_desc;
rx_desc          4523 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc = E1000_RX_DESC(*rx_ring, i);
rx_desc          4524 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc->buffer_addr = cpu_to_le64(buffer_info->dma);
rx_desc          4556 drivers/net/ethernet/intel/e1000/e1000_main.c 	struct e1000_rx_desc *rx_desc;
rx_desc          4638 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc = E1000_RX_DESC(*rx_ring, i);
rx_desc          4639 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc->buffer_addr = cpu_to_le64(buffer_info->dma);
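
The e1000_main.c hits above show the legacy descriptor consumption loop: software spins while the Descriptor Done status bit is set, reads the length and error fields, clears the status byte to return the slot, and advances with wrap-around. A compact sketch of that loop, with hypothetical bit values rather than the real E1000_RXD_* constants:

#include <stdint.h>
#include <stdio.h>

#define RING_SIZE     8
#define RXD_STAT_DD   0x01  /* descriptor done (hypothetical value) */
#define RXD_ERR_MASK  0x7e  /* frame error bits (hypothetical) */

struct legacy_rx_desc {
    uint64_t buffer_addr;
    uint16_t length;        /* little-endian in hardware; host-endian here */
    uint16_t csum;
    uint8_t  status;
    uint8_t  errors;
    uint16_t special;
};

static void clean_rx_ring(struct legacy_rx_desc *ring, unsigned int *next_to_clean)
{
    unsigned int i = *next_to_clean;
    struct legacy_rx_desc *rx_desc = &ring[i];

    while (rx_desc->status & RXD_STAT_DD) {
        uint16_t length = rx_desc->length;

        if (rx_desc->errors & RXD_ERR_MASK)
            printf("slot %u: dropping bad frame\n", i);
        else
            printf("slot %u: frame of %u bytes\n", i, (unsigned)length);

        rx_desc->status = 0;        /* hand the slot back */
        if (++i == RING_SIZE)
            i = 0;
        rx_desc = &ring[i];         /* next descriptor, with wrap */
    }
    *next_to_clean = i;
}

int main(void)
{
    struct legacy_rx_desc ring[RING_SIZE] = {
        [0] = { .status = RXD_STAT_DD, .length = 60 },
        [1] = { .status = RXD_STAT_DD, .length = 1514, .errors = 0x40 },
    };
    unsigned int ntc = 0;

    clean_rx_ring(ring, &ntc);
    return 0;
}
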
rx_desc          1271 drivers/net/ethernet/intel/e1000e/ethtool.c 		union e1000_rx_desc_extended *rx_desc;
rx_desc          1289 drivers/net/ethernet/intel/e1000e/ethtool.c 		rx_desc = E1000_RX_DESC_EXT(*rx_ring, i);
rx_desc          1290 drivers/net/ethernet/intel/e1000e/ethtool.c 		rx_desc->read.buffer_addr =
rx_desc           210 drivers/net/ethernet/intel/e1000e/netdev.c 	union e1000_rx_desc_extended *rx_desc;
rx_desc           430 drivers/net/ethernet/intel/e1000e/netdev.c 			rx_desc = E1000_RX_DESC_EXT(*rx_ring, i);
rx_desc           431 drivers/net/ethernet/intel/e1000e/netdev.c 			u1 = (struct my_u1 *)rx_desc;
rx_desc           432 drivers/net/ethernet/intel/e1000e/netdev.c 			staterr = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc           650 drivers/net/ethernet/intel/e1000e/netdev.c 	union e1000_rx_desc_extended *rx_desc;
rx_desc           684 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc = E1000_RX_DESC_EXT(*rx_ring, i);
rx_desc           685 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->read.buffer_addr = cpu_to_le64(buffer_info->dma);
rx_desc           718 drivers/net/ethernet/intel/e1000e/netdev.c 	union e1000_rx_desc_packet_split *rx_desc;
rx_desc           728 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc = E1000_RX_DESC_PS(*rx_ring, i);
rx_desc           734 drivers/net/ethernet/intel/e1000e/netdev.c 				rx_desc->read.buffer_addr[j + 1] =
rx_desc           760 drivers/net/ethernet/intel/e1000e/netdev.c 			rx_desc->read.buffer_addr[j + 1] =
rx_desc           785 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->read.buffer_addr[0] = cpu_to_le64(buffer_info->dma);
rx_desc           822 drivers/net/ethernet/intel/e1000e/netdev.c 	union e1000_rx_desc_extended *rx_desc;
rx_desc           867 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc = E1000_RX_DESC_EXT(*rx_ring, i);
rx_desc           868 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->read.buffer_addr = cpu_to_le64(buffer_info->dma);
rx_desc           914 drivers/net/ethernet/intel/e1000e/netdev.c 	union e1000_rx_desc_extended *rx_desc, *next_rxd;
rx_desc           923 drivers/net/ethernet/intel/e1000e/netdev.c 	rx_desc = E1000_RX_DESC_EXT(*rx_ring, i);
rx_desc           924 drivers/net/ethernet/intel/e1000e/netdev.c 	staterr = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc           954 drivers/net/ethernet/intel/e1000e/netdev.c 		length = le16_to_cpu(rx_desc->wb.upper.length);
rx_desc          1023 drivers/net/ethernet/intel/e1000e/netdev.c 		e1000_rx_hash(netdev, rx_desc->wb.lower.hi_dword.rss, skb);
rx_desc          1026 drivers/net/ethernet/intel/e1000e/netdev.c 				  rx_desc->wb.upper.vlan);
rx_desc          1029 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->wb.upper.status_error &= cpu_to_le32(~0xFF);
rx_desc          1039 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc = next_rxd;
rx_desc          1042 drivers/net/ethernet/intel/e1000e/netdev.c 		staterr = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc          1306 drivers/net/ethernet/intel/e1000e/netdev.c 	union e1000_rx_desc_packet_split *rx_desc, *next_rxd;
rx_desc          1319 drivers/net/ethernet/intel/e1000e/netdev.c 	rx_desc = E1000_RX_DESC_PS(*rx_ring, i);
rx_desc          1320 drivers/net/ethernet/intel/e1000e/netdev.c 	staterr = le32_to_cpu(rx_desc->wb.middle.status_error);
rx_desc          1365 drivers/net/ethernet/intel/e1000e/netdev.c 		length = le16_to_cpu(rx_desc->wb.middle.length0);
rx_desc          1380 drivers/net/ethernet/intel/e1000e/netdev.c 			int l1 = le16_to_cpu(rx_desc->wb.upper.length[0]);
rx_desc          1421 drivers/net/ethernet/intel/e1000e/netdev.c 			length = le16_to_cpu(rx_desc->wb.upper.length[j]);
rx_desc          1450 drivers/net/ethernet/intel/e1000e/netdev.c 		e1000_rx_hash(netdev, rx_desc->wb.lower.hi_dword.rss, skb);
rx_desc          1452 drivers/net/ethernet/intel/e1000e/netdev.c 		if (rx_desc->wb.upper.header_status &
rx_desc          1457 drivers/net/ethernet/intel/e1000e/netdev.c 				  rx_desc->wb.middle.vlan);
rx_desc          1460 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->wb.middle.status_error &= cpu_to_le32(~0xFF);
rx_desc          1471 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc = next_rxd;
rx_desc          1474 drivers/net/ethernet/intel/e1000e/netdev.c 		staterr = le32_to_cpu(rx_desc->wb.middle.status_error);
rx_desc          1512 drivers/net/ethernet/intel/e1000e/netdev.c 	union e1000_rx_desc_extended *rx_desc, *next_rxd;
rx_desc          1522 drivers/net/ethernet/intel/e1000e/netdev.c 	rx_desc = E1000_RX_DESC_EXT(*rx_ring, i);
rx_desc          1523 drivers/net/ethernet/intel/e1000e/netdev.c 	staterr = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc          1551 drivers/net/ethernet/intel/e1000e/netdev.c 		length = le16_to_cpu(rx_desc->wb.upper.length);
rx_desc          1626 drivers/net/ethernet/intel/e1000e/netdev.c 		e1000_rx_hash(netdev, rx_desc->wb.lower.hi_dword.rss, skb);
rx_desc          1640 drivers/net/ethernet/intel/e1000e/netdev.c 				  rx_desc->wb.upper.vlan);
rx_desc          1643 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->wb.upper.status_error &= cpu_to_le32(~0xFF);
rx_desc          1653 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc = next_rxd;
rx_desc          1656 drivers/net/ethernet/intel/e1000e/netdev.c 		staterr = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc           415 drivers/net/ethernet/intel/fm10k/fm10k.h static inline __le32 fm10k_test_staterr(union fm10k_rx_desc *rx_desc,
rx_desc           418 drivers/net/ethernet/intel/fm10k/fm10k.h 	return rx_desc->d.staterr & cpu_to_le32(stat_err_bits);
rx_desc           114 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	union fm10k_rx_desc *rx_desc;
rx_desc           122 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	rx_desc = FM10K_RX_DESC(rx_ring, i);
rx_desc           133 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		rx_desc->q.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
rx_desc           135 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		rx_desc++;
rx_desc           139 drivers/net/ethernet/intel/fm10k/fm10k_main.c 			rx_desc = FM10K_RX_DESC(rx_ring, 0);
rx_desc           145 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		rx_desc->d.staterr = 0;
rx_desc           253 drivers/net/ethernet/intel/fm10k/fm10k_main.c 			      union fm10k_rx_desc *rx_desc,
rx_desc           300 drivers/net/ethernet/intel/fm10k/fm10k_main.c 					     union fm10k_rx_desc *rx_desc,
rx_desc           303 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	unsigned int size = le16_to_cpu(rx_desc->w.length);
rx_desc           344 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	if (fm10k_add_rx_frag(rx_buffer, size, rx_desc, skb)) {
rx_desc           360 drivers/net/ethernet/intel/fm10k/fm10k_main.c 				     union fm10k_rx_desc *rx_desc,
rx_desc           370 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	if (fm10k_test_staterr(rx_desc,
rx_desc           380 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	if (fm10k_test_staterr(rx_desc, FM10K_RXD_STATUS_L4CS2))
rx_desc           382 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	else if (!fm10k_test_staterr(rx_desc, FM10K_RXD_STATUS_L4CS))
rx_desc           397 drivers/net/ethernet/intel/fm10k/fm10k_main.c 				 union fm10k_rx_desc *rx_desc,
rx_desc           405 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	rss_type = le16_to_cpu(rx_desc->w.pkt_info) & FM10K_RXD_RSSTYPE_MASK;
rx_desc           409 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	skb_set_hash(skb, le32_to_cpu(rx_desc->d.rss),
rx_desc           415 drivers/net/ethernet/intel/fm10k/fm10k_main.c 			     union fm10k_rx_desc __maybe_unused *rx_desc,
rx_desc           453 drivers/net/ethernet/intel/fm10k/fm10k_main.c 					     union fm10k_rx_desc *rx_desc,
rx_desc           458 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	fm10k_rx_hash(rx_ring, rx_desc, skb);
rx_desc           460 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	fm10k_rx_checksum(rx_ring, rx_desc, skb);
rx_desc           462 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	FM10K_CB(skb)->tstamp = rx_desc->q.timestamp;
rx_desc           464 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	FM10K_CB(skb)->fi.w.vlan = rx_desc->w.vlan;
rx_desc           466 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	FM10K_CB(skb)->fi.d.glort = rx_desc->d.glort;
rx_desc           468 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	if (rx_desc->w.vlan) {
rx_desc           469 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		u16 vid = le16_to_cpu(rx_desc->w.vlan);
rx_desc           478 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	fm10k_type_trans(rx_ring, rx_desc, skb);
rx_desc           494 drivers/net/ethernet/intel/fm10k/fm10k_main.c 			     union fm10k_rx_desc *rx_desc)
rx_desc           504 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	if (likely(fm10k_test_staterr(rx_desc, FM10K_RXD_STATUS_EOP)))
rx_desc           525 drivers/net/ethernet/intel/fm10k/fm10k_main.c 				  union fm10k_rx_desc *rx_desc,
rx_desc           528 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	if (unlikely((fm10k_test_staterr(rx_desc,
rx_desc           532 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (FM10K_TEST_RXD_BIT(rx_desc, FM10K_RXD_ERR_SWITCH_ERROR))
rx_desc           534 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (FM10K_TEST_RXD_BIT(rx_desc, FM10K_RXD_ERR_NO_DESCRIPTOR))
rx_desc           536 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (FM10K_TEST_RXD_BIT(rx_desc, FM10K_RXD_ERR_PP_ERROR))
rx_desc           538 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (FM10K_TEST_RXD_BIT(rx_desc, FM10K_RXD_ERR_SWITCH_READY))
rx_desc           540 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (FM10K_TEST_RXD_BIT(rx_desc, FM10K_RXD_ERR_TOO_BIG))
rx_desc           574 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		union fm10k_rx_desc *rx_desc;
rx_desc           582 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		rx_desc = FM10K_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc           584 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (!rx_desc->d.staterr)
rx_desc           594 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		skb = fm10k_fetch_rx_buffer(rx_ring, rx_desc, skb);
rx_desc           603 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (fm10k_is_non_eop(rx_ring, rx_desc))
rx_desc           607 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (fm10k_cleanup_headers(rx_ring, rx_desc, skb)) {
rx_desc           613 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		total_bytes += fm10k_process_skb_fields(rx_ring, rx_desc, skb);
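
The fm10k_main.c hits above handle frames that span several descriptors: the clean loop stops when a descriptor has no write-back status yet, accumulates fragments while the End Of Packet bit is absent, and only completes the frame on the EOP descriptor. A sketch of that control flow with made-up flag values:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define RXD_STATUS_DD   0x0001  /* descriptor written back (hypothetical) */
#define RXD_STATUS_EOP  0x0002  /* last descriptor of the frame (hypothetical) */

struct wb_rx_desc {
    uint16_t length;
    uint16_t staterr;
};

/* true while more descriptors of the same frame are still expected */
static bool is_non_eop(const struct wb_rx_desc *rx_desc)
{
    return !(rx_desc->staterr & RXD_STATUS_EOP);
}

int main(void)
{
    struct wb_rx_desc ring[] = {
        { 2048, RXD_STATUS_DD },                        /* first fragment */
        { 1000, RXD_STATUS_DD | RXD_STATUS_EOP },       /* last fragment  */
        { 0, 0 },                                       /* not written back yet */
    };
    unsigned int frame_len = 0;

    for (unsigned int i = 0; i < sizeof(ring) / sizeof(ring[0]); i++) {
        struct wb_rx_desc *rx_desc = &ring[i];

        if (!(rx_desc->staterr & RXD_STATUS_DD))
            break;                      /* nothing more to clean */

        frame_len += rx_desc->length;   /* accumulate this fragment */

        if (is_non_eop(rx_desc))
            continue;                   /* wait for the EOP descriptor */

        printf("completed frame of %u bytes\n", frame_len);
        frame_len = 0;
    }
    return 0;
}
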
rx_desc           531 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			   union i40e_rx_desc *rx_desc, u8 prog_id)
rx_desc           539 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	qw = le64_to_cpu(rx_desc->wb.qword1.status_error_len);
rx_desc           544 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		pf->fd_inv = le32_to_cpu(rx_desc->wb.qword0.hi_dword.fd_id);
rx_desc           545 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		if ((rx_desc->wb.qword0.hi_dword.fd_id != 0) ||
rx_desc           563 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		if ((rx_desc->wb.qword0.hi_dword.fd_id == 0) &&
rx_desc           592 drivers/net/ethernet/intel/i40e/i40e_txrx.c 				 rx_desc->wb.qword0.hi_dword.fd_id);
rx_desc          1262 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	union i40e_rx_desc *rx_desc,
rx_desc          1285 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		i40e_fd_handle_status(rx_ring, rx_desc, id);
rx_desc          1571 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	union i40e_rx_desc *rx_desc;
rx_desc          1578 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	rx_desc = I40E_RX_DESC(rx_ring, ntu);
rx_desc          1594 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
rx_desc          1596 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		rx_desc++;
rx_desc          1600 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			rx_desc = I40E_RX_DESC(rx_ring, 0);
rx_desc          1606 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		rx_desc->wb.qword1.status_error_len = 0;
rx_desc          1634 drivers/net/ethernet/intel/i40e/i40e_txrx.c 				    union i40e_rx_desc *rx_desc)
rx_desc          1642 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);
rx_desc          1748 drivers/net/ethernet/intel/i40e/i40e_txrx.c 				union i40e_rx_desc *rx_desc,
rx_desc          1760 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if ((rx_desc->wb.qword1.status_error_len & rss_mask) == rss_mask) {
rx_desc          1761 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		hash = le32_to_cpu(rx_desc->wb.qword0.hi_dword.rss);
rx_desc          1778 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			     union i40e_rx_desc *rx_desc, struct sk_buff *skb)
rx_desc          1780 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	u64 qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);
rx_desc          1792 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	i40e_rx_hash(rx_ring, rx_desc, skb, rx_ptype);
rx_desc          1794 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	i40e_rx_checksum(rx_ring->vsi, skb, rx_desc);
rx_desc          1799 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		u16 vlan_tag = rx_desc->wb.qword0.lo_dword.l2tag1;
rx_desc          1824 drivers/net/ethernet/intel/i40e/i40e_txrx.c 				 union i40e_rx_desc *rx_desc)
rx_desc          1836 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (unlikely(i40e_test_staterr(rx_desc,
rx_desc          2157 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			    union i40e_rx_desc *rx_desc,
rx_desc          2170 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (likely(i40e_test_staterr(rx_desc, I40E_RXD_EOF)))
rx_desc          2342 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		union i40e_rx_desc *rx_desc;
rx_desc          2353 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		rx_desc = I40E_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc          2360 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);
rx_desc          2368 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		rx_buffer = i40e_clean_programming_status(rx_ring, rx_desc,
rx_desc          2381 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		i40e_trace(clean_rx_irq, rx_ring, rx_desc, skb);
rx_desc          2425 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		if (i40e_is_non_eop(rx_ring, rx_desc, skb))
rx_desc          2428 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		if (i40e_cleanup_headers(rx_ring, skb, rx_desc)) {
rx_desc          2437 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		i40e_process_skb_fields(rx_ring, rx_desc, skb);
rx_desc          2439 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		i40e_trace(clean_rx_irq_rx, rx_ring, rx_desc, skb);
rx_desc           187 drivers/net/ethernet/intel/i40e/i40e_txrx.h static inline bool i40e_test_staterr(union i40e_rx_desc *rx_desc,
rx_desc           190 drivers/net/ethernet/intel/i40e/i40e_txrx.h 	return !!(rx_desc->wb.qword1.status_error_len &
rx_desc             8 drivers/net/ethernet/intel/i40e/i40e_txrx_common.h 			   union i40e_rx_desc *rx_desc, u8 prog_id);
rx_desc            12 drivers/net/ethernet/intel/i40e/i40e_txrx_common.h 	union i40e_rx_desc *rx_desc,
rx_desc            15 drivers/net/ethernet/intel/i40e/i40e_txrx_common.h 			     union i40e_rx_desc *rx_desc, struct sk_buff *skb);
rx_desc           319 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	union i40e_rx_desc *rx_desc;
rx_desc           323 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	rx_desc = I40E_RX_DESC(rx_ring, ntu);
rx_desc           335 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma);
rx_desc           337 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		rx_desc++;
rx_desc           342 drivers/net/ethernet/intel/i40e/i40e_xsk.c 			rx_desc = I40E_RX_DESC(rx_ring, 0);
rx_desc           347 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		rx_desc->wb.qword1.status_error_len = 0;
rx_desc           543 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		union i40e_rx_desc *rx_desc;
rx_desc           554 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		rx_desc = I40E_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc           555 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);
rx_desc           563 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		bi = i40e_clean_programming_status(rx_ring, rx_desc,
rx_desc           622 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		i40e_process_skb_fields(rx_ring, rx_desc, skb);
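
The i40e refill hits above (i40e_alloc_rx_buffers and its AF_XDP counterpart in i40e_xsk.c) share one pattern: walk the ring from next_to_use, program each descriptor's packet address, wrap the descriptor and buffer pointers at the end of the ring, and clear the write-back status of the descriptor now at next_to_use so a stale completion is never re-read. A sketch of that refill loop with illustrative types:

#include <stdint.h>
#include <stdio.h>

#define RING_COUNT 16

union refill_rx_desc {
    struct { uint64_t pkt_addr; uint64_t hdr_addr; } read;   /* programmed by sw */
    struct { uint64_t qword0;   uint64_t status_len; } wb;   /* written back by hw */
};

struct rx_buffer { uint64_t dma; unsigned int page_offset; };

static void alloc_rx_buffers(union refill_rx_desc *ring, struct rx_buffer *bufs,
                             uint16_t *next_to_use, uint16_t cleaned_count)
{
    uint16_t ntu = *next_to_use;
    union refill_rx_desc *rx_desc = &ring[ntu];
    struct rx_buffer *bi = &bufs[ntu];

    while (cleaned_count--) {
        rx_desc->read.pkt_addr = bi->dma + bi->page_offset;

        rx_desc++;
        bi++;
        ntu++;
        if (ntu == RING_COUNT) {        /* wrap back to slot 0 */
            rx_desc = &ring[0];
            bi = &bufs[0];
            ntu = 0;
        }

        /* clear the status of the descriptor now at next_to_use */
        rx_desc->wb.status_len = 0;
    }
    *next_to_use = ntu;
}

int main(void)
{
    union refill_rx_desc ring[RING_COUNT] = { 0 };
    struct rx_buffer bufs[RING_COUNT] = { { .dma = 0x1000 } };
    uint16_t ntu = RING_COUNT - 2;

    alloc_rx_buffers(ring, bufs, &ntu, 4);      /* crosses the wrap point */
    printf("next_to_use is now %u\n", (unsigned)ntu);
    return 0;
}
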
rx_desc           882 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	union iavf_rx_desc *rx_desc;
rx_desc           889 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	rx_desc = IAVF_RX_DESC(rx_ring, ntu);
rx_desc           905 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
rx_desc           907 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		rx_desc++;
rx_desc           911 drivers/net/ethernet/intel/iavf/iavf_txrx.c 			rx_desc = IAVF_RX_DESC(rx_ring, 0);
rx_desc           917 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		rx_desc->wb.qword1.status_error_len = 0;
rx_desc           945 drivers/net/ethernet/intel/iavf/iavf_txrx.c 				    union iavf_rx_desc *rx_desc)
rx_desc           953 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);
rx_desc          1052 drivers/net/ethernet/intel/iavf/iavf_txrx.c 				union iavf_rx_desc *rx_desc,
rx_desc          1064 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	if ((rx_desc->wb.qword1.status_error_len & rss_mask) == rss_mask) {
rx_desc          1065 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		hash = le32_to_cpu(rx_desc->wb.qword0.hi_dword.rss);
rx_desc          1083 drivers/net/ethernet/intel/iavf/iavf_txrx.c 			     union iavf_rx_desc *rx_desc, struct sk_buff *skb,
rx_desc          1086 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	iavf_rx_hash(rx_ring, rx_desc, skb, rx_ptype);
rx_desc          1088 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	iavf_rx_checksum(rx_ring->vsi, skb, rx_desc);
rx_desc          1445 drivers/net/ethernet/intel/iavf/iavf_txrx.c 			    union iavf_rx_desc *rx_desc,
rx_desc          1458 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	if (likely(iavf_test_staterr(rx_desc, IAVF_RXD_EOF)))
rx_desc          1487 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		union iavf_rx_desc *rx_desc;
rx_desc          1500 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		rx_desc = IAVF_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc          1507 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);
rx_desc          1515 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		if (!iavf_test_staterr(rx_desc, IAVF_RXD_DD))
rx_desc          1521 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		iavf_trace(clean_rx_irq, rx_ring, rx_desc, skb);
rx_desc          1543 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		if (iavf_is_non_eop(rx_ring, rx_desc, skb))
rx_desc          1551 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		if (unlikely(iavf_test_staterr(rx_desc, BIT(IAVF_RXD_QW1_ERROR_SHIFT)))) {
rx_desc          1565 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);
rx_desc          1570 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		iavf_process_skb_fields(rx_ring, rx_desc, skb, rx_ptype);
rx_desc          1574 drivers/net/ethernet/intel/iavf/iavf_txrx.c 			   le16_to_cpu(rx_desc->wb.qword0.lo_dword.l2tag1) : 0;
rx_desc          1576 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		iavf_trace(clean_rx_irq_rx, rx_ring, rx_desc, skb);
rx_desc           167 drivers/net/ethernet/intel/iavf/iavf_txrx.h static inline bool iavf_test_staterr(union iavf_rx_desc *rx_desc,
rx_desc           170 drivers/net/ethernet/intel/iavf/iavf_txrx.h 	return !!(rx_desc->wb.qword1.status_error_len &
rx_desc           617 drivers/net/ethernet/intel/ice/ice_ethtool.c 		union ice_32b_rx_flex_desc *rx_desc;
rx_desc           619 drivers/net/ethernet/intel/ice/ice_ethtool.c 		rx_desc = ICE_RX_DESC(rx_ring, i);
rx_desc           621 drivers/net/ethernet/intel/ice/ice_ethtool.c 		if (!(rx_desc->wb.status_error0 &
rx_desc           469 drivers/net/ethernet/intel/ice/ice_txrx.c 	union ice_32b_rx_flex_desc *rx_desc;
rx_desc           478 drivers/net/ethernet/intel/ice/ice_txrx.c 	rx_desc = ICE_RX_DESC(rx_ring, ntu);
rx_desc           495 drivers/net/ethernet/intel/ice/ice_txrx.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
rx_desc           497 drivers/net/ethernet/intel/ice/ice_txrx.c 		rx_desc++;
rx_desc           501 drivers/net/ethernet/intel/ice/ice_txrx.c 			rx_desc = ICE_RX_DESC(rx_ring, 0);
rx_desc           507 drivers/net/ethernet/intel/ice/ice_txrx.c 		rx_desc->wb.status_error0 = 0;
rx_desc           804 drivers/net/ethernet/intel/ice/ice_txrx.c ice_test_staterr(union ice_32b_rx_flex_desc *rx_desc, const u16 stat_err_bits)
rx_desc           806 drivers/net/ethernet/intel/ice/ice_txrx.c 	return !!(rx_desc->wb.status_error0 &
rx_desc           822 drivers/net/ethernet/intel/ice/ice_txrx.c ice_is_non_eop(struct ice_ring *rx_ring, union ice_32b_rx_flex_desc *rx_desc,
rx_desc           835 drivers/net/ethernet/intel/ice/ice_txrx.c 	if (likely(ice_test_staterr(rx_desc, ICE_RXD_EOF)))
rx_desc           864 drivers/net/ethernet/intel/ice/ice_txrx.c ice_rx_hash(struct ice_ring *rx_ring, union ice_32b_rx_flex_desc *rx_desc,
rx_desc           873 drivers/net/ethernet/intel/ice/ice_txrx.c 	if (rx_desc->wb.rxdid != ICE_RXDID_FLEX_NIC)
rx_desc           876 drivers/net/ethernet/intel/ice/ice_txrx.c 	nic_mdid = (struct ice_32b_rx_flex_desc_nic *)rx_desc;
rx_desc           892 drivers/net/ethernet/intel/ice/ice_txrx.c 	    union ice_32b_rx_flex_desc *rx_desc, u8 ptype)
rx_desc           898 drivers/net/ethernet/intel/ice/ice_txrx.c 	rx_status = le16_to_cpu(rx_desc->wb.status_error0);
rx_desc           964 drivers/net/ethernet/intel/ice/ice_txrx.c 		       union ice_32b_rx_flex_desc *rx_desc,
rx_desc           967 drivers/net/ethernet/intel/ice/ice_txrx.c 	ice_rx_hash(rx_ring, rx_desc, skb, ptype);
rx_desc           972 drivers/net/ethernet/intel/ice/ice_txrx.c 	ice_rx_csum(rx_ring, skb, rx_desc, ptype);
rx_desc          1013 drivers/net/ethernet/intel/ice/ice_txrx.c 		union ice_32b_rx_flex_desc *rx_desc;
rx_desc          1022 drivers/net/ethernet/intel/ice/ice_txrx.c 		rx_desc = ICE_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc          1030 drivers/net/ethernet/intel/ice/ice_txrx.c 		if (!ice_test_staterr(rx_desc, stat_err_bits))
rx_desc          1039 drivers/net/ethernet/intel/ice/ice_txrx.c 		size = le16_to_cpu(rx_desc->wb.pkt_len) &
rx_desc          1062 drivers/net/ethernet/intel/ice/ice_txrx.c 		if (ice_is_non_eop(rx_ring, rx_desc, skb))
rx_desc          1066 drivers/net/ethernet/intel/ice/ice_txrx.c 		if (unlikely(ice_test_staterr(rx_desc, stat_err_bits))) {
rx_desc          1072 drivers/net/ethernet/intel/ice/ice_txrx.c 		if (ice_test_staterr(rx_desc, stat_err_bits))
rx_desc          1073 drivers/net/ethernet/intel/ice/ice_txrx.c 			vlan_tag = le16_to_cpu(rx_desc->wb.l2tag1);
rx_desc          1087 drivers/net/ethernet/intel/ice/ice_txrx.c 		rx_ptype = le16_to_cpu(rx_desc->wb.ptype_flex_flags0) &
rx_desc          1090 drivers/net/ethernet/intel/ice/ice_txrx.c 		ice_process_skb_fields(rx_ring, rx_desc, skb, rx_ptype);
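
The ice_txrx.c hits above read fields that share a 16-bit word with flag bits, so the driver masks the packet length and packet type out after the endian conversion. A tiny sketch of that masking with invented mask values:

#include <stdint.h>
#include <stdio.h>

#define PKT_LEN_MASK  0x3fff    /* low bits carry the length (hypothetical) */
#define PTYPE_MASK    0x03ff    /* low bits carry the packet type (hypothetical) */

struct flex_rx_wb {
    uint16_t pkt_len;               /* length plus flag bits */
    uint16_t ptype_flex_flags0;     /* packet type plus flex flags */
    uint16_t status_error0;
    uint16_t l2tag1;
};

int main(void)
{
    struct flex_rx_wb wb = { .pkt_len = 0x405c, .ptype_flex_flags0 = 0x8423 };

    unsigned int size  = wb.pkt_len & PKT_LEN_MASK;
    unsigned int ptype = wb.ptype_flex_flags0 & PTYPE_MASK;

    printf("size %u bytes, ptype %u\n", size, ptype);
    return 0;
}
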
rx_desc           368 drivers/net/ethernet/intel/igb/igb.h static inline __le32 igb_test_staterr(union e1000_adv_rx_desc *rx_desc,
rx_desc           371 drivers/net/ethernet/intel/igb/igb.h 	return rx_desc->wb.upper.status_error & cpu_to_le32(stat_err_bits);
rx_desc          1805 drivers/net/ethernet/intel/igb/igb_ethtool.c 	union e1000_adv_rx_desc *rx_desc;
rx_desc          1813 drivers/net/ethernet/intel/igb/igb_ethtool.c 	rx_desc = IGB_RX_DESC(rx_ring, rx_ntc);
rx_desc          1815 drivers/net/ethernet/intel/igb/igb_ethtool.c 	while (rx_desc->wb.upper.length) {
rx_desc          1857 drivers/net/ethernet/intel/igb/igb_ethtool.c 		rx_desc = IGB_RX_DESC(rx_ring, rx_ntc);
rx_desc           367 drivers/net/ethernet/intel/igb/igb_main.c 	union e1000_adv_rx_desc *rx_desc;
rx_desc           515 drivers/net/ethernet/intel/igb/igb_main.c 			rx_desc = IGB_RX_DESC(rx_ring, i);
rx_desc           516 drivers/net/ethernet/intel/igb/igb_main.c 			u0 = (struct my_u0 *)rx_desc;
rx_desc           517 drivers/net/ethernet/intel/igb/igb_main.c 			staterr = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc          4479 drivers/net/ethernet/intel/igb/igb_main.c 	union e1000_adv_rx_desc *rx_desc;
rx_desc          4526 drivers/net/ethernet/intel/igb/igb_main.c 	rx_desc = IGB_RX_DESC(ring, 0);
rx_desc          4527 drivers/net/ethernet/intel/igb/igb_main.c 	rx_desc->wb.upper.length = 0;
rx_desc          8008 drivers/net/ethernet/intel/igb/igb_main.c 					 union e1000_adv_rx_desc *rx_desc,
rx_desc          8031 drivers/net/ethernet/intel/igb/igb_main.c 	if (unlikely(igb_test_staterr(rx_desc, E1000_RXDADV_STAT_TSIP))) {
rx_desc          8065 drivers/net/ethernet/intel/igb/igb_main.c 				     union e1000_adv_rx_desc *rx_desc,
rx_desc          8093 drivers/net/ethernet/intel/igb/igb_main.c 	if (igb_test_staterr(rx_desc, E1000_RXDADV_STAT_TSIP)) {
rx_desc          8109 drivers/net/ethernet/intel/igb/igb_main.c 				   union e1000_adv_rx_desc *rx_desc,
rx_desc          8115 drivers/net/ethernet/intel/igb/igb_main.c 	if (igb_test_staterr(rx_desc, E1000_RXD_STAT_IXSM))
rx_desc          8123 drivers/net/ethernet/intel/igb/igb_main.c 	if (igb_test_staterr(rx_desc,
rx_desc          8140 drivers/net/ethernet/intel/igb/igb_main.c 	if (igb_test_staterr(rx_desc, E1000_RXD_STAT_TCPCS |
rx_desc          8145 drivers/net/ethernet/intel/igb/igb_main.c 		le32_to_cpu(rx_desc->wb.upper.status_error));
rx_desc          8149 drivers/net/ethernet/intel/igb/igb_main.c 			       union e1000_adv_rx_desc *rx_desc,
rx_desc          8154 drivers/net/ethernet/intel/igb/igb_main.c 			     le32_to_cpu(rx_desc->wb.lower.hi_dword.rss),
rx_desc          8170 drivers/net/ethernet/intel/igb/igb_main.c 			   union e1000_adv_rx_desc *rx_desc)
rx_desc          8180 drivers/net/ethernet/intel/igb/igb_main.c 	if (likely(igb_test_staterr(rx_desc, E1000_RXD_STAT_EOP)))
rx_desc          8201 drivers/net/ethernet/intel/igb/igb_main.c 				union e1000_adv_rx_desc *rx_desc,
rx_desc          8204 drivers/net/ethernet/intel/igb/igb_main.c 	if (unlikely((igb_test_staterr(rx_desc,
rx_desc          8231 drivers/net/ethernet/intel/igb/igb_main.c 				   union e1000_adv_rx_desc *rx_desc,
rx_desc          8236 drivers/net/ethernet/intel/igb/igb_main.c 	igb_rx_hash(rx_ring, rx_desc, skb);
rx_desc          8238 drivers/net/ethernet/intel/igb/igb_main.c 	igb_rx_checksum(rx_ring, rx_desc, skb);
rx_desc          8240 drivers/net/ethernet/intel/igb/igb_main.c 	if (igb_test_staterr(rx_desc, E1000_RXDADV_STAT_TS) &&
rx_desc          8241 drivers/net/ethernet/intel/igb/igb_main.c 	    !igb_test_staterr(rx_desc, E1000_RXDADV_STAT_TSIP))
rx_desc          8245 drivers/net/ethernet/intel/igb/igb_main.c 	    igb_test_staterr(rx_desc, E1000_RXD_STAT_VP)) {
rx_desc          8248 drivers/net/ethernet/intel/igb/igb_main.c 		if (igb_test_staterr(rx_desc, E1000_RXDEXT_STATERR_LB) &&
rx_desc          8250 drivers/net/ethernet/intel/igb/igb_main.c 			vid = be16_to_cpu(rx_desc->wb.upper.vlan);
rx_desc          8252 drivers/net/ethernet/intel/igb/igb_main.c 			vid = le16_to_cpu(rx_desc->wb.upper.vlan);
rx_desc          8311 drivers/net/ethernet/intel/igb/igb_main.c 		union e1000_adv_rx_desc *rx_desc;
rx_desc          8321 drivers/net/ethernet/intel/igb/igb_main.c 		rx_desc = IGB_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc          8322 drivers/net/ethernet/intel/igb/igb_main.c 		size = le16_to_cpu(rx_desc->wb.upper.length);
rx_desc          8338 drivers/net/ethernet/intel/igb/igb_main.c 			skb = igb_build_skb(rx_ring, rx_buffer, rx_desc, size);
rx_desc          8341 drivers/net/ethernet/intel/igb/igb_main.c 						rx_desc, size);
rx_desc          8354 drivers/net/ethernet/intel/igb/igb_main.c 		if (igb_is_non_eop(rx_ring, rx_desc))
rx_desc          8358 drivers/net/ethernet/intel/igb/igb_main.c 		if (igb_cleanup_headers(rx_ring, rx_desc, skb)) {
rx_desc          8367 drivers/net/ethernet/intel/igb/igb_main.c 		igb_process_skb_fields(rx_ring, rx_desc, skb);
rx_desc          8446 drivers/net/ethernet/intel/igb/igb_main.c 	union e1000_adv_rx_desc *rx_desc;
rx_desc          8455 drivers/net/ethernet/intel/igb/igb_main.c 	rx_desc = IGB_RX_DESC(rx_ring, i);
rx_desc          8473 drivers/net/ethernet/intel/igb/igb_main.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
rx_desc          8475 drivers/net/ethernet/intel/igb/igb_main.c 		rx_desc++;
rx_desc          8479 drivers/net/ethernet/intel/igb/igb_main.c 			rx_desc = IGB_RX_DESC(rx_ring, 0);
rx_desc          8485 drivers/net/ethernet/intel/igb/igb_main.c 		rx_desc->wb.upper.length = 0;
rx_desc           120 drivers/net/ethernet/intel/igbvf/igbvf.h 	union e1000_adv_rx_desc rx_desc;
rx_desc           271 drivers/net/ethernet/intel/igbvf/igbvf.h 	(&((((R).desc))[i].rx_desc))
rx_desc           139 drivers/net/ethernet/intel/igbvf/netdev.c 	union e1000_adv_rx_desc *rx_desc;
rx_desc           154 drivers/net/ethernet/intel/igbvf/netdev.c 		rx_desc = IGBVF_RX_DESC_ADV(*rx_ring, i);
rx_desc           203 drivers/net/ethernet/intel/igbvf/netdev.c 			rx_desc->read.pkt_addr =
rx_desc           205 drivers/net/ethernet/intel/igbvf/netdev.c 			rx_desc->read.hdr_addr = cpu_to_le64(buffer_info->dma);
rx_desc           207 drivers/net/ethernet/intel/igbvf/netdev.c 			rx_desc->read.pkt_addr = cpu_to_le64(buffer_info->dma);
rx_desc           208 drivers/net/ethernet/intel/igbvf/netdev.c 			rx_desc->read.hdr_addr = 0;
rx_desc           248 drivers/net/ethernet/intel/igbvf/netdev.c 	union e1000_adv_rx_desc *rx_desc, *next_rxd;
rx_desc           258 drivers/net/ethernet/intel/igbvf/netdev.c 	rx_desc = IGBVF_RX_DESC_ADV(*rx_ring, i);
rx_desc           259 drivers/net/ethernet/intel/igbvf/netdev.c 	staterr = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc           274 drivers/net/ethernet/intel/igbvf/netdev.c 		hlen = (le16_to_cpu(rx_desc->wb.lower.lo_dword.hs_rss.hdr_info)
rx_desc           280 drivers/net/ethernet/intel/igbvf/netdev.c 		length = le16_to_cpu(rx_desc->wb.upper.length);
rx_desc           354 drivers/net/ethernet/intel/igbvf/netdev.c 				  rx_desc->wb.upper.vlan);
rx_desc           357 drivers/net/ethernet/intel/igbvf/netdev.c 		rx_desc->wb.upper.status_error = 0;
rx_desc           366 drivers/net/ethernet/intel/igbvf/netdev.c 		rx_desc = next_rxd;
rx_desc           369 drivers/net/ethernet/intel/igbvf/netdev.c 		staterr = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc           142 drivers/net/ethernet/intel/igc/igc.h static inline __le32 igc_test_staterr(union igc_adv_rx_desc *rx_desc,
rx_desc           145 drivers/net/ethernet/intel/igc/igc.h 	return rx_desc->wb.upper.status_error & cpu_to_le32(stat_err_bits);
rx_desc           524 drivers/net/ethernet/intel/igc/igc_main.c 	union igc_adv_rx_desc *rx_desc;
rx_desc           567 drivers/net/ethernet/intel/igc/igc_main.c 	rx_desc = IGC_RX_DESC(ring, 0);
rx_desc           568 drivers/net/ethernet/intel/igc/igc_main.c 	rx_desc->wb.upper.length = 0;
rx_desc          1167 drivers/net/ethernet/intel/igc/igc_main.c 			       union igc_adv_rx_desc *rx_desc,
rx_desc          1172 drivers/net/ethernet/intel/igc/igc_main.c 			     le32_to_cpu(rx_desc->wb.lower.hi_dword.rss),
rx_desc          1187 drivers/net/ethernet/intel/igc/igc_main.c 				   union igc_adv_rx_desc *rx_desc,
rx_desc          1190 drivers/net/ethernet/intel/igc/igc_main.c 	igc_rx_hash(rx_ring, rx_desc, skb);
rx_desc          1249 drivers/net/ethernet/intel/igc/igc_main.c 				     union igc_adv_rx_desc *rx_desc,
rx_desc          1288 drivers/net/ethernet/intel/igc/igc_main.c 					 union igc_adv_rx_desc *rx_desc,
rx_desc          1416 drivers/net/ethernet/intel/igc/igc_main.c 			   union igc_adv_rx_desc *rx_desc)
rx_desc          1426 drivers/net/ethernet/intel/igc/igc_main.c 	if (likely(igc_test_staterr(rx_desc, IGC_RXD_STAT_EOP)))
rx_desc          1447 drivers/net/ethernet/intel/igc/igc_main.c 				union igc_adv_rx_desc *rx_desc,
rx_desc          1450 drivers/net/ethernet/intel/igc/igc_main.c 	if (unlikely((igc_test_staterr(rx_desc,
rx_desc          1494 drivers/net/ethernet/intel/igc/igc_main.c 	union igc_adv_rx_desc *rx_desc;
rx_desc          1503 drivers/net/ethernet/intel/igc/igc_main.c 	rx_desc = IGC_RX_DESC(rx_ring, i);
rx_desc          1521 drivers/net/ethernet/intel/igc/igc_main.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
rx_desc          1523 drivers/net/ethernet/intel/igc/igc_main.c 		rx_desc++;
rx_desc          1527 drivers/net/ethernet/intel/igc/igc_main.c 			rx_desc = IGC_RX_DESC(rx_ring, 0);
rx_desc          1533 drivers/net/ethernet/intel/igc/igc_main.c 		rx_desc->wb.upper.length = 0;
rx_desc          1565 drivers/net/ethernet/intel/igc/igc_main.c 		union igc_adv_rx_desc *rx_desc;
rx_desc          1575 drivers/net/ethernet/intel/igc/igc_main.c 		rx_desc = IGC_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc          1576 drivers/net/ethernet/intel/igc/igc_main.c 		size = le16_to_cpu(rx_desc->wb.upper.length);
rx_desc          1592 drivers/net/ethernet/intel/igc/igc_main.c 			skb = igc_build_skb(rx_ring, rx_buffer, rx_desc, size);
rx_desc          1595 drivers/net/ethernet/intel/igc/igc_main.c 						rx_desc, size);
rx_desc          1608 drivers/net/ethernet/intel/igc/igc_main.c 		if (igc_is_non_eop(rx_ring, rx_desc))
rx_desc          1612 drivers/net/ethernet/intel/igc/igc_main.c 		if (igc_cleanup_headers(rx_ring, rx_desc, skb)) {
rx_desc          1621 drivers/net/ethernet/intel/igc/igc_main.c 		igc_process_skb_fields(rx_ring, rx_desc, skb);
rx_desc          1877 drivers/net/ethernet/intel/ixgb/ixgb_main.c                  struct ixgb_rx_desc *rx_desc,
rx_desc          1883 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	if ((rx_desc->status & IXGB_RX_DESC_STATUS_IXSM) ||
rx_desc          1884 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	   (!(rx_desc->status & IXGB_RX_DESC_STATUS_TCPCS))) {
rx_desc          1891 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	if (rx_desc->errors & IXGB_RX_DESC_ERRORS_TCPE) {
rx_desc          1938 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	struct ixgb_rx_desc *rx_desc, *next_rxd;
rx_desc          1946 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	rx_desc = IXGB_RX_DESC(*rx_ring, i);
rx_desc          1949 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	while (rx_desc->status & IXGB_RX_DESC_STATUS_DD) {
rx_desc          1958 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		status = rx_desc->status;
rx_desc          1986 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		length = le16_to_cpu(rx_desc->length);
rx_desc          1987 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		rx_desc->length = 0;
rx_desc          2000 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		if (unlikely(rx_desc->errors &
rx_desc          2013 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		ixgb_rx_checksum(adapter, rx_desc, skb);
rx_desc          2018 drivers/net/ethernet/intel/ixgb/ixgb_main.c 				       le16_to_cpu(rx_desc->special));
rx_desc          2024 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		rx_desc->status = 0;
rx_desc          2033 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		rx_desc = next_rxd;
rx_desc          2057 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	struct ixgb_rx_desc *rx_desc;
rx_desc          2096 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		rx_desc = IXGB_RX_DESC(*rx_ring, i);
rx_desc          2097 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		rx_desc->buff_addr = cpu_to_le64(buffer_info->dma);
rx_desc          2101 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		rx_desc->status = 0;
rx_desc           501 drivers/net/ethernet/intel/ixgbe/ixgbe.h static inline __le32 ixgbe_test_staterr(union ixgbe_adv_rx_desc *rx_desc,
rx_desc           504 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	return rx_desc->wb.upper.status_error & cpu_to_le32(stat_err_bits);
rx_desc           927 drivers/net/ethernet/intel/ixgbe/ixgbe.h 		   union ixgbe_adv_rx_desc *rx_desc, struct sk_buff *skb);
rx_desc           971 drivers/net/ethernet/intel/ixgbe/ixgbe.h 					 union ixgbe_adv_rx_desc *rx_desc,
rx_desc           974 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	if (unlikely(ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_TSIP))) {
rx_desc           979 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	if (unlikely(!ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_STAT_TS)))
rx_desc          1012 drivers/net/ethernet/intel/ixgbe/ixgbe.h 		    union ixgbe_adv_rx_desc *rx_desc,
rx_desc          1024 drivers/net/ethernet/intel/ixgbe/ixgbe.h 				  union ixgbe_adv_rx_desc *rx_desc,
rx_desc          1890 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 	union ixgbe_adv_rx_desc *rx_desc;
rx_desc          1896 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 	rx_desc = IXGBE_RX_DESC(rx_ring, rx_ntc);
rx_desc          1927 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 	while (rx_desc->wb.upper.length) {
rx_desc          1957 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 		rx_desc = IXGBE_RX_DESC(rx_ring, rx_ntc);
rx_desc           383 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		   union ixgbe_adv_rx_desc *rx_desc,
rx_desc           391 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	__le32 fcerr = ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_ERR_FCERR);
rx_desc           427 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	ddp_err = ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_ERR_FCEOFE |
rx_desc           432 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	switch (ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_STAT_FCSTAT)) {
rx_desc           436 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		ddp->len = le32_to_cpu(rx_desc->wb.lower.hi_dword.rss);
rx_desc           450 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		ddp->len = le32_to_cpu(rx_desc->wb.lower.hi_dword.rss);
rx_desc          1152 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c 		    union ixgbe_adv_rx_desc *rx_desc,
rx_desc          1156 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c 	__le16 pkt_info = rx_desc->wb.lower.lo_dword.hs_rss.pkt_info;
rx_desc           577 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	union ixgbe_adv_rx_desc *rx_desc;
rx_desc           796 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			rx_desc = IXGBE_RX_DESC(rx_ring, i);
rx_desc           797 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			u0 = (struct my_u0 *)rx_desc;
rx_desc           798 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			if (rx_desc->wb.upper.length) {
rx_desc          1423 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				 union ixgbe_adv_rx_desc *rx_desc,
rx_desc          1431 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	rss_type = le16_to_cpu(rx_desc->wb.lower.lo_dword.hs_rss.pkt_info) &
rx_desc          1437 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	skb_set_hash(skb, le32_to_cpu(rx_desc->wb.lower.hi_dword.rss),
rx_desc          1451 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				    union ixgbe_adv_rx_desc *rx_desc)
rx_desc          1453 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	__le16 pkt_info = rx_desc->wb.lower.lo_dword.hs_rss.pkt_info;
rx_desc          1469 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				     union ixgbe_adv_rx_desc *rx_desc,
rx_desc          1472 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	__le16 pkt_info = rx_desc->wb.lower.lo_dword.hs_rss.pkt_info;
rx_desc          1488 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_IPCS) &&
rx_desc          1489 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	    ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_ERR_IPE)) {
rx_desc          1494 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (!ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_L4CS))
rx_desc          1497 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_ERR_TCPE)) {
rx_desc          1513 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (!ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_OUTERIPCS))
rx_desc          1516 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_ERR_OUTERIPER)) {
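
The four ixgbe_main.c checksum entries just above (IXGBE_RXD_STAT_IPCS with IXGBE_RXDADV_ERR_IPE, then STAT_L4CS, then ERR_TCPE) combine with the ixgbe_test_staterr() helper listed from ixgbe.h into a small decision tree. Below is a hedged, userspace-only rendering of that logic, collapsed to a single boolean and using made-up bit values rather than the real IXGBE_* constants.

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

/* Illustrative bit values, not the real IXGBE_* definitions. */
#define RXD_STAT_IPCS	0x01u
#define RXD_STAT_L4CS	0x02u
#define RXD_ERR_IPE	0x04u
#define RXD_ERR_TCPE	0x08u

struct rx_wb { uint32_t status_error; };

/* Same shape as ixgbe_test_staterr(): mask the write-back status word. */
static inline uint32_t test_staterr(const struct rx_wb *wb, uint32_t bits)
{
	return wb->status_error & bits;
}

/* True if the hardware-computed checksum can be trusted for this frame. */
static bool rx_checksum_ok(const struct rx_wb *wb)
{
	if (test_staterr(wb, RXD_STAT_IPCS) && test_staterr(wb, RXD_ERR_IPE))
		return false;		/* IP header checksum reported bad */
	if (!test_staterr(wb, RXD_STAT_L4CS))
		return false;		/* no L4 checksum was offloaded */
	if (test_staterr(wb, RXD_ERR_TCPE))
		return false;		/* TCP/UDP checksum reported bad */
	return true;
}

int main(void)
{
	struct rx_wb wb = { .status_error = RXD_STAT_IPCS | RXD_STAT_L4CS };

	printf("checksum usable: %s\n", rx_checksum_ok(&wb) ? "yes" : "no");
	return 0;
}

The real function additionally bumps error counters and distinguishes "no checksum offloaded" from "checksum bad"; both outcomes are folded into a plain false here.
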
rx_desc          1581 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	union ixgbe_adv_rx_desc *rx_desc;
rx_desc          1590 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	rx_desc = IXGBE_RX_DESC(rx_ring, i);
rx_desc          1609 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
rx_desc          1611 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		rx_desc++;
rx_desc          1615 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			rx_desc = IXGBE_RX_DESC(rx_ring, 0);
rx_desc          1621 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		rx_desc->wb.upper.length = 0;
rx_desc          1682 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			      union ixgbe_adv_rx_desc *rx_desc,
rx_desc          1690 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	ixgbe_rx_hash(rx_ring, rx_desc, skb);
rx_desc          1692 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	ixgbe_rx_checksum(rx_ring, rx_desc, skb);
rx_desc          1695 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_ptp_rx_hwtstamp(rx_ring, rx_desc, skb);
rx_desc          1698 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	    ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_VP)) {
rx_desc          1699 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		u16 vid = le16_to_cpu(rx_desc->wb.upper.vlan);
rx_desc          1703 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_STAT_SECP))
rx_desc          1704 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_ipsec_rx(rx_ring, rx_desc, skb);
rx_desc          1734 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			     union ixgbe_adv_rx_desc *rx_desc,
rx_desc          1747 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		__le32 rsc_enabled = rx_desc->wb.lower.lo_dword.data &
rx_desc          1757 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			ntc = le32_to_cpu(rx_desc->wb.upper.status_error);
rx_desc          1764 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (likely(ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_EOP)))
rx_desc          1879 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			   union ixgbe_adv_rx_desc *rx_desc,
rx_desc          1892 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	    (unlikely(ixgbe_test_staterr(rx_desc,
rx_desc          1905 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (ixgbe_rx_is_fcoe(rx_ring, rx_desc))
rx_desc          2024 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 						   union ixgbe_adv_rx_desc *rx_desc,
rx_desc          2038 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (!ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_EOP)) {
rx_desc          2088 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					   union ixgbe_adv_rx_desc *rx_desc)
rx_desc          2126 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (!ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_EOP))
rx_desc          2149 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				       union ixgbe_adv_rx_desc *rx_desc)
rx_desc          2183 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (!ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_EOP))
rx_desc          2294 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		union ixgbe_adv_rx_desc *rx_desc;
rx_desc          2305 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		rx_desc = IXGBE_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc          2306 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		size = le16_to_cpu(rx_desc->wb.upper.length);
rx_desc          2316 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		rx_buffer = ixgbe_get_rx_buffer(rx_ring, rx_desc, &skb, size);
rx_desc          2345 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					      &xdp, rx_desc);
rx_desc          2348 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 						  &xdp, rx_desc);
rx_desc          2362 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (ixgbe_is_non_eop(rx_ring, rx_desc, skb))
rx_desc          2366 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (ixgbe_cleanup_headers(rx_ring, rx_desc, skb))
rx_desc          2373 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_process_skb_fields(rx_ring, rx_desc, skb);
rx_desc          2377 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (ixgbe_rx_is_fcoe(rx_ring, rx_desc)) {
rx_desc          2378 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			ddp_bytes = ixgbe_fcoe_ddp(adapter, rx_desc, skb);
rx_desc          4069 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	union ixgbe_adv_rx_desc *rx_desc;
rx_desc          4152 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	rx_desc = IXGBE_RX_DESC(ring, 0);
rx_desc          4153 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	rx_desc->wb.upper.length = 0;
rx_desc            18 drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h 			   union ixgbe_adv_rx_desc *rx_desc,
rx_desc            21 drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h 			      union ixgbe_adv_rx_desc *rx_desc,
rx_desc           316 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	union ixgbe_adv_rx_desc *rx_desc;
rx_desc           325 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	rx_desc = IXGBE_RX_DESC(rx_ring, i);
rx_desc           344 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma);
rx_desc           346 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		rx_desc++;
rx_desc           350 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 			rx_desc = IXGBE_RX_DESC(rx_ring, 0);
rx_desc           356 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		rx_desc->wb.upper.length = 0;
rx_desc           442 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		union ixgbe_adv_rx_desc *rx_desc;
rx_desc           454 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		rx_desc = IXGBE_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc           455 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		size = le16_to_cpu(rx_desc->wb.upper.length);
rx_desc           467 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		if (unlikely(!ixgbe_test_staterr(rx_desc,
rx_desc           525 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		ixgbe_process_skb_fields(rx_ring, rx_desc, skb);
rx_desc           539 drivers/net/ethernet/intel/ixgbevf/ipsec.c 		      union ixgbe_adv_rx_desc *rx_desc,
rx_desc           543 drivers/net/ethernet/intel/ixgbevf/ipsec.c 	__le16 pkt_info = rx_desc->wb.lower.lo_dword.hs_rss.pkt_info;
rx_desc           280 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h static inline __le32 ixgbevf_test_staterr(union ixgbe_adv_rx_desc *rx_desc,
rx_desc           283 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h 	return rx_desc->wb.upper.status_error & cpu_to_le32(stat_err_bits);
rx_desc           467 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h 		      union ixgbe_adv_rx_desc *rx_desc,
rx_desc           479 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h 				    union ixgbe_adv_rx_desc *rx_desc,
rx_desc           444 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				   union ixgbe_adv_rx_desc *rx_desc,
rx_desc           452 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	rss_type = le16_to_cpu(rx_desc->wb.lower.lo_dword.hs_rss.pkt_info) &
rx_desc           458 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	skb_set_hash(skb, le32_to_cpu(rx_desc->wb.lower.hi_dword.rss),
rx_desc           470 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				       union ixgbe_adv_rx_desc *rx_desc,
rx_desc           480 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_IPCS) &&
rx_desc           481 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	    ixgbevf_test_staterr(rx_desc, IXGBE_RXDADV_ERR_IPE)) {
rx_desc           486 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (!ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_L4CS))
rx_desc           489 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (ixgbevf_test_staterr(rx_desc, IXGBE_RXDADV_ERR_TCPE)) {
rx_desc           509 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				       union ixgbe_adv_rx_desc *rx_desc,
rx_desc           512 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	ixgbevf_rx_hash(rx_ring, rx_desc, skb);
rx_desc           513 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	ixgbevf_rx_checksum(rx_ring, rx_desc, skb);
rx_desc           515 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_VP)) {
rx_desc           516 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		u16 vid = le16_to_cpu(rx_desc->wb.upper.vlan);
rx_desc           523 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (ixgbevf_test_staterr(rx_desc, IXGBE_RXDADV_STAT_SECP))
rx_desc           524 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		ixgbevf_ipsec_rx(rx_ring, rx_desc, skb);
rx_desc           585 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			       union ixgbe_adv_rx_desc *rx_desc)
rx_desc           595 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (likely(ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_EOP)))
rx_desc           655 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	union ixgbe_adv_rx_desc *rx_desc;
rx_desc           663 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	rx_desc = IXGBEVF_RX_DESC(rx_ring, i);
rx_desc           680 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
rx_desc           682 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		rx_desc++;
rx_desc           686 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			rx_desc = IXGBEVF_RX_DESC(rx_ring, 0);
rx_desc           692 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		rx_desc->wb.upper.length = 0;
rx_desc           735 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				    union ixgbe_adv_rx_desc *rx_desc,
rx_desc           743 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (unlikely(ixgbevf_test_staterr(rx_desc,
rx_desc           859 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				      union ixgbe_adv_rx_desc *rx_desc)
rx_desc           936 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 					 union ixgbe_adv_rx_desc *rx_desc)
rx_desc          1130 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		union ixgbe_adv_rx_desc *rx_desc;
rx_desc          1139 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		rx_desc = IXGBEVF_RX_DESC(rx_ring, rx_ring->next_to_clean);
rx_desc          1140 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		size = le16_to_cpu(rx_desc->wb.upper.length);
rx_desc          1178 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 						&xdp, rx_desc);
rx_desc          1181 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 						    &xdp, rx_desc);
rx_desc          1195 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		if (ixgbevf_is_non_eop(rx_ring, rx_desc))
rx_desc          1199 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		if (ixgbevf_cleanup_headers(rx_ring, rx_desc, skb)) {
rx_desc          1219 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		ixgbevf_process_skb_fields(rx_ring, rx_desc, skb);
rx_desc          1901 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	union ixgbe_adv_rx_desc *rx_desc;
rx_desc          1935 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	rx_desc = IXGBEVF_RX_DESC(ring, 0);
rx_desc          1936 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	rx_desc->wb.upper.length = 0;
rx_desc           333 drivers/net/ethernet/marvell/mv643xx_eth.c 	struct rx_desc *rx_desc_area;
rx_desc           513 drivers/net/ethernet/marvell/mv643xx_eth.c 		struct rx_desc *rx_desc;
rx_desc           518 drivers/net/ethernet/marvell/mv643xx_eth.c 		rx_desc = &rxq->rx_desc_area[rxq->rx_curr_desc];
rx_desc           520 drivers/net/ethernet/marvell/mv643xx_eth.c 		cmd_sts = rx_desc->cmd_sts;
rx_desc           532 drivers/net/ethernet/marvell/mv643xx_eth.c 		dma_unmap_single(mp->dev->dev.parent, rx_desc->buf_ptr,
rx_desc           533 drivers/net/ethernet/marvell/mv643xx_eth.c 				 rx_desc->buf_size, DMA_FROM_DEVICE);
rx_desc           539 drivers/net/ethernet/marvell/mv643xx_eth.c 		byte_cnt = rx_desc->byte_cnt;
rx_desc           606 drivers/net/ethernet/marvell/mv643xx_eth.c 		struct rx_desc *rx_desc;
rx_desc           626 drivers/net/ethernet/marvell/mv643xx_eth.c 		rx_desc = rxq->rx_desc_area + rx;
rx_desc           629 drivers/net/ethernet/marvell/mv643xx_eth.c 		rx_desc->buf_ptr = dma_map_single(mp->dev->dev.parent,
rx_desc           632 drivers/net/ethernet/marvell/mv643xx_eth.c 		rx_desc->buf_size = size;
rx_desc           635 drivers/net/ethernet/marvell/mv643xx_eth.c 		rx_desc->cmd_sts = BUFFER_OWNED_BY_DMA | RX_ENABLE_INTERRUPT;
rx_desc          1935 drivers/net/ethernet/marvell/mv643xx_eth.c 	struct rx_desc *rx_desc;
rx_desc          1947 drivers/net/ethernet/marvell/mv643xx_eth.c 	size = rxq->rx_ring_size * sizeof(struct rx_desc);
rx_desc          1972 drivers/net/ethernet/marvell/mv643xx_eth.c 	rx_desc = rxq->rx_desc_area;
rx_desc          1980 drivers/net/ethernet/marvell/mv643xx_eth.c 		rx_desc[i].next_desc_ptr = rxq->rx_desc_dma +
rx_desc          1981 drivers/net/ethernet/marvell/mv643xx_eth.c 					nexti * sizeof(struct rx_desc);
rx_desc          2381 drivers/net/ethernet/marvell/mv643xx_eth.c 		addr += rxq->rx_curr_desc * sizeof(struct rx_desc);
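
The mv643xx_eth.c entries above size the ring as rx_ring_size * sizeof(struct rx_desc) and chain each descriptor to the next by DMA address, wrapping at the end. A small standalone model of that chaining step follows; the struct layout and the fake DMA base address are assumptions for illustration only.

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

/* Illustrative layout; the real struct rx_desc is private to mv643xx_eth.c. */
struct rx_desc {
	uint32_t cmd_sts;
	uint16_t buf_size;
	uint16_t byte_cnt;
	uint32_t buf_ptr;	/* DMA address of the data buffer */
	uint32_t next_desc_ptr;	/* DMA address of the next descriptor */
};

int main(void)
{
	const unsigned int ring_size = 16;
	struct rx_desc *ring = calloc(ring_size, sizeof(*ring));
	uint32_t ring_dma = 0x10000000u;	/* pretend DMA base of the ring */
	unsigned int i;

	if (!ring)
		return 1;

	/* Chain every descriptor to the next one by DMA address, wrapping
	 * the last entry back to the first, as the init loop above does. */
	for (i = 0; i < ring_size; i++) {
		unsigned int nexti = (i + 1) % ring_size;

		ring[i].next_desc_ptr = ring_dma + nexti * sizeof(struct rx_desc);
	}

	printf("descriptor %u links back to 0x%08x (ring base 0x%08x)\n",
	       ring_size - 1, (unsigned int)ring[ring_size - 1].next_desc_ptr,
	       (unsigned int)ring_dma);
	free(ring);
	return 0;
}
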
rx_desc           820 drivers/net/ethernet/marvell/mvneta.c 	int rx_desc = rxq->next_desc_to_proc;
rx_desc           822 drivers/net/ethernet/marvell/mvneta.c 	rxq->next_desc_to_proc = MVNETA_QUEUE_NEXT_DESC(rxq, rx_desc);
rx_desc           824 drivers/net/ethernet/marvell/mvneta.c 	return rxq->descs + rx_desc;
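
The three mvneta.c lines above are essentially the whole of the next-descriptor helper: take the current software index, advance it modulo the queue size, and return a pointer into the descriptor array. Modeled in isolation below; the field names and the wrap done by MVNETA_QUEUE_NEXT_DESC are paraphrased, not copied.

#include <stdio.h>

/* Minimal stand-in for the rx queue bookkeeping used by the helper above. */
struct rx_queue {
	int size;		/* number of descriptors in descs[] */
	int next_desc_to_proc;	/* software index of the next descriptor */
	int descs[128];		/* stands in for the real descriptor array */
};

/* Return the current descriptor and advance the index, wrapping at size. */
static int *next_desc_get(struct rx_queue *rxq)
{
	int rx_desc = rxq->next_desc_to_proc;

	rxq->next_desc_to_proc = (rx_desc + 1) % rxq->size;	/* wrap */
	return rxq->descs + rx_desc;
}

int main(void)
{
	struct rx_queue rxq = { .size = 4 };
	int i;

	for (i = 0; i < 6; i++)
		printf("processing descriptor index %ld\n",
		       (long)(next_desc_get(&rxq) - rxq.descs));
	return 0;
}
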
rx_desc          1617 drivers/net/ethernet/marvell/mvneta.c static void mvneta_rx_desc_fill(struct mvneta_rx_desc *rx_desc,
rx_desc          1623 drivers/net/ethernet/marvell/mvneta.c 	rx_desc->buf_phys_addr = phys_addr;
rx_desc          1624 drivers/net/ethernet/marvell/mvneta.c 	i = rx_desc - rxq->descs;
rx_desc          1709 drivers/net/ethernet/marvell/mvneta.c 			    struct mvneta_rx_desc *rx_desc)
rx_desc          1712 drivers/net/ethernet/marvell/mvneta.c 	u32 status = rx_desc->status;
rx_desc          1722 drivers/net/ethernet/marvell/mvneta.c 			   status, rx_desc->data_size);
rx_desc          1726 drivers/net/ethernet/marvell/mvneta.c 			   status, rx_desc->data_size);
rx_desc          1730 drivers/net/ethernet/marvell/mvneta.c 			   status, rx_desc->data_size);
rx_desc          1734 drivers/net/ethernet/marvell/mvneta.c 			   status, rx_desc->data_size);
rx_desc          1822 drivers/net/ethernet/marvell/mvneta.c 			    struct mvneta_rx_desc *rx_desc,
rx_desc          1842 drivers/net/ethernet/marvell/mvneta.c 	mvneta_rx_desc_fill(rx_desc, phys_addr, page, rxq);
rx_desc          1889 drivers/net/ethernet/marvell/mvneta.c 			struct mvneta_rx_desc *rx_desc =
rx_desc          1891 drivers/net/ethernet/marvell/mvneta.c 			u8 pool_id = MVNETA_RX_GET_BM_POOL_ID(rx_desc);
rx_desc          1897 drivers/net/ethernet/marvell/mvneta.c 					      rx_desc->buf_phys_addr);
rx_desc          1903 drivers/net/ethernet/marvell/mvneta.c 		struct mvneta_rx_desc *rx_desc = rxq->descs + i;
rx_desc          1905 drivers/net/ethernet/marvell/mvneta.c 		if (!data || !(rx_desc->buf_phys_addr))
rx_desc          1908 drivers/net/ethernet/marvell/mvneta.c 		dma_unmap_page(pp->dev->dev.parent, rx_desc->buf_phys_addr,
rx_desc          1917 drivers/net/ethernet/marvell/mvneta.c 	struct mvneta_rx_desc *rx_desc;
rx_desc          1922 drivers/net/ethernet/marvell/mvneta.c 		rx_desc = rxq->descs + curr_desc;
rx_desc          1923 drivers/net/ethernet/marvell/mvneta.c 		if (!(rx_desc->buf_phys_addr)) {
rx_desc          1924 drivers/net/ethernet/marvell/mvneta.c 			if (mvneta_rx_refill(pp, rx_desc, rxq, GFP_ATOMIC)) {
rx_desc          1956 drivers/net/ethernet/marvell/mvneta.c 		struct mvneta_rx_desc *rx_desc = mvneta_rxq_next_desc_get(rxq);
rx_desc          1964 drivers/net/ethernet/marvell/mvneta.c 		index = rx_desc - rxq->descs;
rx_desc          1970 drivers/net/ethernet/marvell/mvneta.c 		phys_addr = rx_desc->buf_phys_addr;
rx_desc          1971 drivers/net/ethernet/marvell/mvneta.c 		rx_status = rx_desc->status;
rx_desc          1978 drivers/net/ethernet/marvell/mvneta.c 				mvneta_rx_error(pp, rx_desc);
rx_desc          1982 drivers/net/ethernet/marvell/mvneta.c 			rx_bytes = rx_desc->data_size -
rx_desc          2022 drivers/net/ethernet/marvell/mvneta.c 				rx_desc->buf_phys_addr = 0;
rx_desc          2052 drivers/net/ethernet/marvell/mvneta.c 				rx_desc->buf_phys_addr = 0;
rx_desc          2132 drivers/net/ethernet/marvell/mvneta.c 		struct mvneta_rx_desc *rx_desc = mvneta_rxq_next_desc_get(rxq);
rx_desc          2142 drivers/net/ethernet/marvell/mvneta.c 		rx_status = rx_desc->status;
rx_desc          2143 drivers/net/ethernet/marvell/mvneta.c 		rx_bytes = rx_desc->data_size - (ETH_FCS_LEN + MVNETA_MH_SIZE);
rx_desc          2144 drivers/net/ethernet/marvell/mvneta.c 		data = (u8 *)(uintptr_t)rx_desc->buf_cookie;
rx_desc          2145 drivers/net/ethernet/marvell/mvneta.c 		phys_addr = rx_desc->buf_phys_addr;
rx_desc          2146 drivers/net/ethernet/marvell/mvneta.c 		pool_id = MVNETA_RX_GET_BM_POOL_ID(rx_desc);
rx_desc          2154 drivers/net/ethernet/marvell/mvneta.c 					      rx_desc->buf_phys_addr);
rx_desc          2156 drivers/net/ethernet/marvell/mvneta.c 			mvneta_rx_error(pp, rx_desc);
rx_desc          2168 drivers/net/ethernet/marvell/mvneta.c 			                              rx_desc->buf_phys_addr,
rx_desc          2184 drivers/net/ethernet/marvell/mvneta.c 					      rx_desc->buf_phys_addr);
rx_desc           235 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 					    struct mvpp2_rx_desc *rx_desc)
rx_desc           238 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		return le32_to_cpu(rx_desc->pp21.buf_dma_addr);
rx_desc           240 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		return le64_to_cpu(rx_desc->pp22.buf_dma_addr_key_hash) &
rx_desc           245 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 					     struct mvpp2_rx_desc *rx_desc)
rx_desc           248 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		return le32_to_cpu(rx_desc->pp21.buf_cookie);
rx_desc           250 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		return le64_to_cpu(rx_desc->pp22.buf_cookie_misc) &
rx_desc           255 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 				    struct mvpp2_rx_desc *rx_desc)
rx_desc           258 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		return le16_to_cpu(rx_desc->pp21.data_size);
rx_desc           260 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		return le16_to_cpu(rx_desc->pp22.data_size);
rx_desc           264 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 				   struct mvpp2_rx_desc *rx_desc)
rx_desc           267 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		return le32_to_cpu(rx_desc->pp21.status);
rx_desc           269 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		return le32_to_cpu(rx_desc->pp22.status);
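
The accessor entries above exist because the PPv2.1 and PPv2.2 descriptor layouts differ: every field read goes through a helper that branches on the hardware version and masks the wider PPv2.2 fields. A rough standalone equivalent follows; the field widths, the 40-bit mask and the enum are illustrative, and the real helpers also convert from little-endian.

#include <stdint.h>
#include <stdio.h>

/* Two layouts modelled after the pp21/pp22 split shown above. */
struct rx_desc {
	union {
		struct { uint32_t buf_dma_addr; uint32_t buf_cookie; } pp21;
		struct { uint64_t buf_dma_addr_key_hash; uint64_t buf_cookie_misc; } pp22;
	};
};

enum hw_version { HW_PP21, HW_PP22 };

#define PP22_ADDR_MASK 0xffffffffffULL	/* strip the key/hash bits, keep the address */

static uint64_t rxdesc_dma_addr_get(enum hw_version hw, const struct rx_desc *d)
{
	if (hw == HW_PP21)
		return d->pp21.buf_dma_addr;
	return d->pp22.buf_dma_addr_key_hash & PP22_ADDR_MASK;
}

int main(void)
{
	struct rx_desc d = { 0 };

	d.pp22.buf_dma_addr_key_hash = (0x5aULL << 40) | 0x12345678ULL;
	printf("pp22 dma addr: 0x%llx\n",
	       (unsigned long long)rxdesc_dma_addr_get(HW_PP22, &d));
	return 0;
}
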
rx_desc          1867 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 	int rx_desc = rxq->next_desc_to_proc;
rx_desc          1869 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 	rxq->next_desc_to_proc = MVPP2_QUEUE_NEXT_DESC(rxq, rx_desc);
rx_desc          1871 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 	return rxq->descs + rx_desc;
rx_desc          2408 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		struct mvpp2_rx_desc *rx_desc = mvpp2_rxq_next_desc_get(rxq);
rx_desc          2409 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		u32 status = mvpp2_rxdesc_status_get(port, rx_desc);
rx_desc          2416 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 				  mvpp2_rxdesc_dma_addr_get(port, rx_desc),
rx_desc          2417 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 				  mvpp2_rxdesc_cookie_get(port, rx_desc));
rx_desc          2825 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 			   struct mvpp2_rx_desc *rx_desc)
rx_desc          2827 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 	u32 status = mvpp2_rxdesc_status_get(port, rx_desc);
rx_desc          2828 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 	size_t sz = mvpp2_rxdesc_size_get(port, rx_desc);
rx_desc          2933 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		struct mvpp2_rx_desc *rx_desc = mvpp2_rxq_next_desc_get(rxq);
rx_desc          2944 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		rx_status = mvpp2_rxdesc_status_get(port, rx_desc);
rx_desc          2945 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		rx_bytes = mvpp2_rxdesc_size_get(port, rx_desc);
rx_desc          2947 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		dma_addr = mvpp2_rxdesc_dma_addr_get(port, rx_desc);
rx_desc          2948 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		phys_addr = mvpp2_rxdesc_cookie_get(port, rx_desc);
rx_desc          2963 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 			mvpp2_rx_error(port, rx_desc);
rx_desc           207 drivers/net/ethernet/marvell/pxa168_eth.c 	struct rx_desc *p_rx_desc_area;
rx_desc           307 drivers/net/ethernet/marvell/pxa168_eth.c 	struct rx_desc *p_used_rx_desc;
rx_desc           643 drivers/net/ethernet/marvell/pxa168_eth.c 	    (u32) (pep->rx_desc_dma + rx_curr_desc * sizeof(struct rx_desc)));
rx_desc           646 drivers/net/ethernet/marvell/pxa168_eth.c 	    (u32) (pep->rx_desc_dma + rx_curr_desc * sizeof(struct rx_desc)));
rx_desc           773 drivers/net/ethernet/marvell/pxa168_eth.c 		struct rx_desc *rx_desc;
rx_desc           781 drivers/net/ethernet/marvell/pxa168_eth.c 		rx_desc = &pep->p_rx_desc_area[rx_curr_desc];
rx_desc           782 drivers/net/ethernet/marvell/pxa168_eth.c 		cmd_sts = rx_desc->cmd_sts;
rx_desc           797 drivers/net/ethernet/marvell/pxa168_eth.c 		dma_unmap_single(&pep->pdev->dev, rx_desc->buf_ptr,
rx_desc           798 drivers/net/ethernet/marvell/pxa168_eth.c 				 rx_desc->buf_size,
rx_desc           806 drivers/net/ethernet/marvell/pxa168_eth.c 		stats->rx_bytes += rx_desc->byte_cnt;
rx_desc           830 drivers/net/ethernet/marvell/pxa168_eth.c 			skb_put(skb, rx_desc->byte_cnt - 4);
rx_desc          1023 drivers/net/ethernet/marvell/pxa168_eth.c 	struct rx_desc *p_rx_desc;
rx_desc          1034 drivers/net/ethernet/marvell/pxa168_eth.c 	size = pep->rx_ring_size * sizeof(struct rx_desc);
rx_desc          1046 drivers/net/ethernet/marvell/pxa168_eth.c 		    ((i + 1) % rx_desc_num) * sizeof(struct rx_desc);
rx_desc          1051 drivers/net/ethernet/marvell/pxa168_eth.c 	pep->rx_desc_area_size = rx_desc_num * sizeof(struct rx_desc);
rx_desc            76 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			       struct mlx4_en_rx_desc *rx_desc,
rx_desc            88 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[i].addr = cpu_to_be64(frags->dma +
rx_desc           111 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	struct mlx4_en_rx_desc *rx_desc = ring->buf + ring->stride * index;
rx_desc           117 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[i].byte_count =
rx_desc           119 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[i].lkey = cpu_to_be32(priv->mdev->mr.key);
rx_desc           127 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[i].byte_count = 0;
rx_desc           128 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[i].lkey = cpu_to_be32(MLX4_EN_MEMTYPE_PAD);
rx_desc           129 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[i].addr = 0;
rx_desc           137 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	struct mlx4_en_rx_desc *rx_desc = ring->buf +
rx_desc           149 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[0].addr = cpu_to_be64(frags->dma +
rx_desc           154 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	return mlx4_en_alloc_frags(priv, ring, rx_desc, frags, gfp);
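
In the mlx4_en entries above a descriptor is not indexed through a typed array but located as ring->buf + ring->stride * index, because the per-descriptor stride depends on how many data segments each descriptor carries. A minimal model of that byte-stride addressing follows; the struct names and the 64-byte stride are invented for the example.

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

struct rx_ring {
	void	*buf;		/* descriptor memory */
	size_t	 stride;	/* bytes per descriptor slot */
};

/* One data segment, loosely mirroring the byte_count/lkey/addr fields above. */
struct rx_data_seg { uint32_t byte_count; uint32_t lkey; uint64_t addr; };

static struct rx_data_seg *ring_desc(const struct rx_ring *ring, unsigned int index)
{
	return (struct rx_data_seg *)((char *)ring->buf + ring->stride * index);
}

int main(void)
{
	struct rx_ring ring = { .stride = 64 };

	ring.buf = calloc(8, ring.stride);
	if (!ring.buf)
		return 1;

	ring_desc(&ring, 3)->byte_count = 1500;
	printf("desc 3 byte_count = %u\n", ring_desc(&ring, 3)->byte_count);
	free(ring.buf);
	return 0;
}
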
rx_desc          1359 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	struct pch_gbe_rx_desc *rx_desc;
rx_desc          1392 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		rx_desc = PCH_GBE_RX_DESC(*rx_ring, i);
rx_desc          1393 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		rx_desc->buffer_addr = (buffer_info->dma);
rx_desc          1394 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		rx_desc->gbec_status = DSC_INIT16;
rx_desc          1623 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	struct pch_gbe_rx_desc *rx_desc;
rx_desc          1637 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		rx_desc = PCH_GBE_RX_DESC(*rx_ring, i);
rx_desc          1638 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		if (rx_desc->gbec_status == DSC_INIT16)
rx_desc          1643 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		dma_status = rx_desc->dma_status;
rx_desc          1644 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		gbec_status = rx_desc->gbec_status;
rx_desc          1645 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		tcp_ip_status = rx_desc->tcp_ip_status;
rx_desc          1646 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		rx_desc->gbec_status = DSC_INIT16;
rx_desc          1675 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 			length = (rx_desc->rx_words_eob) - 3 - ETH_FCS_LEN;
rx_desc          1676 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 			if (rx_desc->rx_words_eob & 0x02)
rx_desc          1777 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	struct pch_gbe_rx_desc *rx_desc;
rx_desc          1796 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		rx_desc = PCH_GBE_RX_DESC(*rx_ring, desNo);
rx_desc          1797 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		rx_desc->gbec_status = DSC_INIT16;
rx_desc           277 drivers/net/ethernet/renesas/ravb_main.c 	struct ravb_ex_rx_desc *rx_desc;
rx_desc           280 drivers/net/ethernet/renesas/ravb_main.c 	int rx_ring_size = sizeof(*rx_desc) * priv->num_rx_ring[q];
rx_desc           295 drivers/net/ethernet/renesas/ravb_main.c 		rx_desc = &priv->rx_ring[q][i];
rx_desc           296 drivers/net/ethernet/renesas/ravb_main.c 		rx_desc->ds_cc = cpu_to_le16(RX_BUF_SZ);
rx_desc           304 drivers/net/ethernet/renesas/ravb_main.c 			rx_desc->ds_cc = cpu_to_le16(0);
rx_desc           305 drivers/net/ethernet/renesas/ravb_main.c 		rx_desc->dptr = cpu_to_le32(dma_addr);
rx_desc           306 drivers/net/ethernet/renesas/ravb_main.c 		rx_desc->die_dt = DT_FEMPTY;
rx_desc           308 drivers/net/ethernet/renesas/ravb_main.c 	rx_desc = &priv->rx_ring[q][i];
rx_desc           309 drivers/net/ethernet/renesas/ravb_main.c 	rx_desc->dptr = cpu_to_le32((u32)priv->rx_desc_dma[q]);
rx_desc           310 drivers/net/ethernet/renesas/ravb_main.c 	rx_desc->die_dt = DT_LINKFIX; /* type */
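
The ravb_main.c init loop above fills each data descriptor with a buffer size, a DMA pointer and DT_FEMPTY, then writes one extra terminating descriptor whose pointer is the ring's own base and whose type is DT_LINKFIX, turning the ring into a hardware-followed loop. Sketched standalone below; the enum values, sizes and addresses are placeholders.

#include <stdint.h>
#include <stdio.h>

enum die_dt { DT_FEMPTY = 1, DT_LINKFIX = 2 };	/* illustrative values */

struct ex_rx_desc {
	uint16_t ds_cc;		/* buffer size */
	uint8_t  die_dt;	/* descriptor type */
	uint32_t dptr;		/* buffer (or next-descriptor) DMA address */
};

#define NUM_RX		4
#define RX_BUF_SZ	2048

int main(void)
{
	struct ex_rx_desc ring[NUM_RX + 1];	/* +1 for the link descriptor */
	uint32_t ring_dma = 0x20000000u;	/* pretend ring base DMA address */
	uint32_t buf_dma  = 0x30000000u;	/* pretend buffer DMA addresses */
	int i;

	for (i = 0; i < NUM_RX; i++) {
		ring[i].ds_cc  = RX_BUF_SZ;
		ring[i].dptr   = buf_dma + i * RX_BUF_SZ;
		ring[i].die_dt = DT_FEMPTY;	/* empty, owned by hardware */
	}
	/* terminating descriptor: a link back to the first descriptor */
	ring[i].dptr   = ring_dma;
	ring[i].die_dt = DT_LINKFIX;

	printf("last descriptor links back to 0x%08x\n",
	       (unsigned int)ring[NUM_RX].dptr);
	return 0;
}
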
rx_desc            59 drivers/net/ethernet/seeq/sgiseeq.c 					       (unsigned long)((sp)->rx_desc)))
rx_desc            95 drivers/net/ethernet/seeq/sgiseeq.c 	struct sgiseeq_rx_desc *rx_desc;
rx_desc           198 drivers/net/ethernet/seeq/sgiseeq.c 		if (!sp->rx_desc[i].skb) {
rx_desc           208 drivers/net/ethernet/seeq/sgiseeq.c 			sp->rx_desc[i].skb = skb;
rx_desc           209 drivers/net/ethernet/seeq/sgiseeq.c 			sp->rx_desc[i].rdma.pbuf = dma_addr;
rx_desc           211 drivers/net/ethernet/seeq/sgiseeq.c 		sp->rx_desc[i].rdma.cntinfo = RCNTINFO_INIT;
rx_desc           212 drivers/net/ethernet/seeq/sgiseeq.c 		dma_sync_desc_dev(dev, &sp->rx_desc[i]);
rx_desc           214 drivers/net/ethernet/seeq/sgiseeq.c 	sp->rx_desc[i - 1].rdma.cntinfo |= HPCDMA_EOR;
rx_desc           215 drivers/net/ethernet/seeq/sgiseeq.c 	dma_sync_desc_dev(dev, &sp->rx_desc[i - 1]);
rx_desc           234 drivers/net/ethernet/seeq/sgiseeq.c 		if (sp->rx_desc[i].skb) {
rx_desc           235 drivers/net/ethernet/seeq/sgiseeq.c 			dev_kfree_skb(sp->rx_desc[i].skb);
rx_desc           236 drivers/net/ethernet/seeq/sgiseeq.c 			sp->rx_desc[i].skb = NULL;
rx_desc           248 drivers/net/ethernet/seeq/sgiseeq.c 	struct sgiseeq_rx_desc *r = gpriv->rx_desc;
rx_desc           307 drivers/net/ethernet/seeq/sgiseeq.c 	hregs->rx_ndptr = VIRT_TO_DMA(sp, sp->rx_desc);
rx_desc           332 drivers/net/ethernet/seeq/sgiseeq.c 		hregs->rx_ndptr = VIRT_TO_DMA(sp, sp->rx_desc + sp->rx_new);
rx_desc           349 drivers/net/ethernet/seeq/sgiseeq.c 	rd = &sp->rx_desc[sp->rx_new];
rx_desc           403 drivers/net/ethernet/seeq/sgiseeq.c 		rd = &sp->rx_desc[sp->rx_new];
rx_desc           406 drivers/net/ethernet/seeq/sgiseeq.c 	dma_sync_desc_cpu(dev, &sp->rx_desc[orig_end]);
rx_desc           407 drivers/net/ethernet/seeq/sgiseeq.c 	sp->rx_desc[orig_end].rdma.cntinfo &= ~(HPCDMA_EOR);
rx_desc           408 drivers/net/ethernet/seeq/sgiseeq.c 	dma_sync_desc_dev(dev, &sp->rx_desc[orig_end]);
rx_desc           409 drivers/net/ethernet/seeq/sgiseeq.c 	dma_sync_desc_cpu(dev, &sp->rx_desc[PREV_RX(sp->rx_new)]);
rx_desc           410 drivers/net/ethernet/seeq/sgiseeq.c 	sp->rx_desc[PREV_RX(sp->rx_new)].rdma.cntinfo |= HPCDMA_EOR;
rx_desc           411 drivers/net/ethernet/seeq/sgiseeq.c 	dma_sync_desc_dev(dev, &sp->rx_desc[PREV_RX(sp->rx_new)]);
rx_desc           751 drivers/net/ethernet/seeq/sgiseeq.c 	sp->rx_desc = sp->srings->rxvector;
rx_desc           756 drivers/net/ethernet/seeq/sgiseeq.c 	setup_rx_ring(dev, sp->rx_desc, SEEQ_RX_BUFFERS);
rx_desc            61 drivers/net/ethernet/stmicro/stmmac/common.h 	unsigned long rx_desc;
rx_desc           201 drivers/net/ethernet/stmicro/stmmac/enh_desc.c 			x->rx_desc++;
rx_desc            90 drivers/net/ethernet/stmicro/stmmac/norm_desc.c 			x->rx_desc++;
rx_desc            53 drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c 	STMMAC_STAT(rx_desc),
rx_desc           107 drivers/net/ethernet/tundra/tsi108_eth.c 	rx_desc *rxring;
rx_desc          1288 drivers/net/ethernet/tundra/tsi108_eth.c 	unsigned int rxring_size = TSI108_RXRING_LEN * sizeof(rx_desc);
rx_desc          1317 drivers/net/ethernet/tundra/tsi108_eth.c 		data->rxring[i].next0 = data->rxdma + (i + 1) * sizeof(rx_desc);
rx_desc          1424 drivers/net/ethernet/tundra/tsi108_eth.c 			    TSI108_RXRING_LEN * sizeof(rx_desc),
rx_desc           442 drivers/net/ethernet/via/via-rhine.c 	struct rx_desc *rx_ring;
rx_desc          1160 drivers/net/ethernet/via/via-rhine.c 				  RX_RING_SIZE * sizeof(struct rx_desc) +
rx_desc          1175 drivers/net/ethernet/via/via-rhine.c 					  RX_RING_SIZE * sizeof(struct rx_desc) +
rx_desc          1183 drivers/net/ethernet/via/via-rhine.c 	rp->tx_ring = ring + RX_RING_SIZE * sizeof(struct rx_desc);
rx_desc          1185 drivers/net/ethernet/via/via-rhine.c 	rp->tx_ring_dma = ring_dma + RX_RING_SIZE * sizeof(struct rx_desc);
rx_desc          1196 drivers/net/ethernet/via/via-rhine.c 			  RX_RING_SIZE * sizeof(struct rx_desc) +
rx_desc          1270 drivers/net/ethernet/via/via-rhine.c 		next += sizeof(struct rx_desc);
rx_desc          2028 drivers/net/ethernet/via/via-rhine.c static inline void rhine_rx_vlan_tag(struct sk_buff *skb, struct rx_desc *desc,
rx_desc          2053 drivers/net/ethernet/via/via-rhine.c 		struct rx_desc *desc = rp->rx_ring + entry;
rx_desc          1463 drivers/net/ethernet/via/via-velocity.c 	const unsigned int rx_ring_size = opt->numrx * sizeof(struct rx_desc);
rx_desc          1516 drivers/net/ethernet/via/via-velocity.c 	struct rx_desc *rd = &(vptr->rx.ring[idx]);
rx_desc          1549 drivers/net/ethernet/via/via-velocity.c 		struct rx_desc *rd = vptr->rx.ring + dirty;
rx_desc          1587 drivers/net/ethernet/via/via-velocity.c 		struct rx_desc *rd = vptr->rx.ring + i;
rx_desc          1672 drivers/net/ethernet/via/via-velocity.c 	const int size = vptr->options.numrx * sizeof(struct rx_desc) +
rx_desc          1956 drivers/net/ethernet/via/via-velocity.c static inline void velocity_rx_csum(struct rx_desc *rd, struct sk_buff *skb)
rx_desc          2033 drivers/net/ethernet/via/via-velocity.c 	struct rx_desc *rd = &(vptr->rx.ring[idx]);
rx_desc          2095 drivers/net/ethernet/via/via-velocity.c 		struct rx_desc *rd = vptr->rx.ring + rd_curr;
rx_desc          1459 drivers/net/ethernet/via/via-velocity.h 		struct rx_desc *ring;
rx_desc           664 drivers/net/hippi/rrunner.c 	rrpriv->rx_ctrl[4].entry_size = sizeof(struct rx_desc);
rx_desc           930 drivers/net/hippi/rrunner.c 		struct rx_desc *desc;
rx_desc          1133 drivers/net/hippi/rrunner.c 			struct rx_desc *desc = &(rrpriv->rx_ring[i]);
rx_desc           578 drivers/net/hippi/rrunner.h #define RX_TOTAL_SIZE	(RX_RING_ENTRIES * sizeof(struct rx_desc))
rx_desc           802 drivers/net/hippi/rrunner.h 	struct rx_desc		*rx_ring;
rx_desc           626 drivers/net/usb/r8152.c 				 sizeof(struct rx_desc) + RX_ALIGN)
rx_desc          1780 drivers/net/usb/r8152.c static inline void rtl_rx_vlan_tag(struct rx_desc *desc, struct sk_buff *skb)
rx_desc          1972 drivers/net/usb/r8152.c static u8 r8152_rx_csum(struct r8152 *tp, struct rx_desc *rx_desc)
rx_desc          1980 drivers/net/usb/r8152.c 	opts2 = le32_to_cpu(rx_desc->opts2);
rx_desc          1981 drivers/net/usb/r8152.c 	opts3 = le32_to_cpu(rx_desc->opts3);
rx_desc          2075 drivers/net/usb/r8152.c 		struct rx_desc *rx_desc;
rx_desc          2090 drivers/net/usb/r8152.c 		rx_desc = agg->buffer;
rx_desc          2092 drivers/net/usb/r8152.c 		len_used += sizeof(struct rx_desc);
rx_desc          2104 drivers/net/usb/r8152.c 			pkt_len = le32_to_cpu(rx_desc->opts1) & RX_LEN_MASK;
rx_desc          2113 drivers/net/usb/r8152.c 			rx_data += sizeof(struct rx_desc);
rx_desc          2126 drivers/net/usb/r8152.c 			skb->ip_summed = r8152_rx_csum(tp, rx_desc);
rx_desc          2140 drivers/net/usb/r8152.c 			rtl_rx_vlan_tag(rx_desc, skb);
rx_desc          2152 drivers/net/usb/r8152.c 			rx_desc = (struct rx_desc *)rx_data;
rx_desc          2154 drivers/net/usb/r8152.c 			len_used += sizeof(struct rx_desc);
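
The r8152.c entries above walk a single USB aggregation buffer containing several (descriptor, packet) pairs: opts1 carries the packet length, and both a data pointer and a len_used counter advance by sizeof(struct rx_desc) plus the payload. A simplified, userspace-only version of that walk follows; the 24-byte descriptor layout and the RX_LEN_MASK value are assumptions, and alignment padding, checksum and VLAN handling are omitted.

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Illustrative 24-byte descriptor; only opts1 (length) is used here. */
struct rx_desc { uint32_t opts1; uint32_t opts2; uint32_t opts3; uint32_t rsvd[3]; };

#define RX_LEN_MASK 0x7fffu

static void parse_agg_buffer(const uint8_t *buf, size_t len)
{
	size_t off = 0;

	while (off + sizeof(struct rx_desc) <= len) {
		struct rx_desc desc;
		uint32_t pkt_len;

		memcpy(&desc, buf + off, sizeof(desc));	/* descriptor header */
		pkt_len = desc.opts1 & RX_LEN_MASK;

		off += sizeof(struct rx_desc);
		if (off + pkt_len > len)
			break;				/* truncated: stop */

		printf("packet of %u bytes at offset %zu\n",
		       (unsigned int)pkt_len, off);
		off += pkt_len;				/* skip the payload */
	}
}

int main(void)
{
	uint8_t buf[128] = { 0 };
	struct rx_desc d = { .opts1 = 60 };		/* one 60-byte packet */

	memcpy(buf, &d, sizeof(d));
	parse_agg_buffer(buf, sizeof(d) + 60);
	return 0;
}
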
rx_desc           131 drivers/net/wireless/ath/ath10k/htt_rx.c 	struct htt_rx_desc *rx_desc;
rx_desc           158 drivers/net/wireless/ath/ath10k/htt_rx.c 		rx_desc = (struct htt_rx_desc *)skb->data;
rx_desc           159 drivers/net/wireless/ath/ath10k/htt_rx.c 		rx_desc->attention.flags = __cpu_to_le32(0);
rx_desc           340 drivers/net/wireless/ath/ath10k/htt_rx.c 	struct htt_rx_desc *rx_desc;
rx_desc           355 drivers/net/wireless/ath/ath10k/htt_rx.c 		rx_desc = (struct htt_rx_desc *)msdu->data;
rx_desc           371 drivers/net/wireless/ath/ath10k/htt_rx.c 		if (!(__le32_to_cpu(rx_desc->attention.flags)
rx_desc           377 drivers/net/wireless/ath/ath10k/htt_rx.c 		msdu_len_invalid = !!(__le32_to_cpu(rx_desc->attention.flags)
rx_desc           380 drivers/net/wireless/ath/ath10k/htt_rx.c 		msdu_len = MS(__le32_to_cpu(rx_desc->msdu_start.common.info0),
rx_desc           382 drivers/net/wireless/ath/ath10k/htt_rx.c 		msdu_chained = rx_desc->frag_info.ring2_more_count;
rx_desc           406 drivers/net/wireless/ath/ath10k/htt_rx.c 		last_msdu = __le32_to_cpu(rx_desc->msdu_end.common.info0) &
rx_desc           409 drivers/net/wireless/ath/ath10k/htt_rx.c 		trace_ath10k_htt_rx_desc(ar, &rx_desc->attention,
rx_desc           410 drivers/net/wireless/ath/ath10k/htt_rx.c 					 sizeof(*rx_desc) - sizeof(u32));
rx_desc          2064 drivers/net/wireless/ath/ath10k/htt_rx.c static void ath10k_htt_rx_mpdu_desc_pn_hl(struct htt_hl_rx_desc *rx_desc,
rx_desc          2070 drivers/net/wireless/ath/ath10k/htt_rx.c 		pn->pn48 = __le32_to_cpu(rx_desc->pn_31_0) +
rx_desc          2071 drivers/net/wireless/ath/ath10k/htt_rx.c 			   ((u64)(__le32_to_cpu(rx_desc->u0.pn_63_32) & 0xFFFF) << 32);
rx_desc          2074 drivers/net/wireless/ath/ath10k/htt_rx.c 		pn->pn24 = __le32_to_cpu(rx_desc->pn_31_0);
rx_desc          2094 drivers/net/wireless/ath/ath10k/htt_rx.c 	struct htt_hl_rx_desc *rx_desc;
rx_desc          2110 drivers/net/wireless/ath/ath10k/htt_rx.c 	rx_desc = (struct htt_hl_rx_desc *)&rx->mpdu_ranges[num_mpdu_ranges];
rx_desc          2111 drivers/net/wireless/ath/ath10k/htt_rx.c 	rx_desc_info = __le32_to_cpu(rx_desc->info);
rx_desc          2126 drivers/net/wireless/ath/ath10k/htt_rx.c 	ath10k_htt_rx_mpdu_desc_pn_hl(rx_desc, &new_pn, peer->rx_pn[sec_index].pn_len);
rx_desc          2157 drivers/net/wireless/ath/ath10k/htt_rx.c 	struct htt_hl_rx_desc *rx_desc;
rx_desc          2204 drivers/net/wireless/ath/ath10k/htt_rx.c 	rx_desc = (struct htt_hl_rx_desc *)&rx->mpdu_ranges[num_mpdu_ranges];
rx_desc          2205 drivers/net/wireless/ath/ath10k/htt_rx.c 	rx_desc_info = __le32_to_cpu(rx_desc->info);
rx_desc          2215 drivers/net/wireless/ath/ath10k/htt_rx.c 	ath10k_htt_rx_mpdu_desc_pn_hl(rx_desc, &new_pn, peer->rx_pn[sec_index].pn_len);
rx_desc          2449 drivers/net/wireless/ath/ath10k/htt_rx.c 	struct htt_hl_rx_desc *rx_desc;
rx_desc          2483 drivers/net/wireless/ath/ath10k/htt_rx.c 	rx_desc = (struct htt_hl_rx_desc *)(skb->data + tot_hdr_len);
rx_desc          2484 drivers/net/wireless/ath/ath10k/htt_rx.c 	rx_desc_info = __le32_to_cpu(rx_desc->info);
rx_desc          2493 drivers/net/wireless/ath/ath10k/htt_rx.c 	hdr = (struct ieee80211_hdr *)((u8 *)rx_desc + rx_hl->fw_desc.len);
rx_desc          2506 drivers/net/wireless/ath/ath10k/htt_rx.c 	ath10k_htt_rx_mpdu_desc_pn_hl(rx_desc, &new_pn, peer->rx_pn[sec_index].pn_len);
rx_desc           509 drivers/net/wireless/ath/wil6210/txrx.h 	struct vring_rx_desc rx_desc;
rx_desc           628 drivers/net/wireless/atmel/atmel.c 	return priv->host_info.rx_desc_pos + (sizeof(struct rx_desc) * desc) + offset;
rx_desc          1017 drivers/net/wireless/cisco/airo.c 	RxFid         rx_desc;		     /* card receive descriptor */
rx_desc          2483 drivers/net/wireless/cisco/airo.c 			&ai->rxfids[i].rx_desc, sizeof(RxFid));
rx_desc          2593 drivers/net/wireless/cisco/airo.c 		ai->rxfids[i].rx_desc.host_addr = busaddroff;
rx_desc          2594 drivers/net/wireless/cisco/airo.c 		ai->rxfids[i].rx_desc.valid = 1;
rx_desc          2595 drivers/net/wireless/cisco/airo.c 		ai->rxfids[i].rx_desc.len = PKTSIZE;
rx_desc          2596 drivers/net/wireless/cisco/airo.c 		ai->rxfids[i].rx_desc.rdy = 0;
rx_desc          5203 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	struct rtl8xxxu_rxdesc16 *rx_desc;
rx_desc          5215 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		rx_desc = (struct rtl8xxxu_rxdesc16 *)skb->data;
rx_desc          5228 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 			pkt_cnt = rx_desc->pkt_cnt;
rx_desc          5229 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		pkt_len = rx_desc->pktlen;
rx_desc          5231 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		drvinfo_sz = rx_desc->drvinfo_sz * 8;
rx_desc          5232 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		desc_shift = rx_desc->shift;
rx_desc          5255 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		if (rx_desc->phy_stats)
rx_desc          5257 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 						   rx_desc->rxmcs);
rx_desc          5259 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		rx_status->mactime = rx_desc->tsfl;
rx_desc          5262 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		if (!rx_desc->swdec)
rx_desc          5264 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		if (rx_desc->crc32)
rx_desc          5266 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		if (rx_desc->bw)
rx_desc          5269 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		if (rx_desc->rxht) {
rx_desc          5271 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 			rx_status->rate_idx = rx_desc->rxmcs - DESC_RATE_MCS0;
rx_desc          5273 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 			rx_status->rate_idx = rx_desc->rxmcs;
rx_desc          5296 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	struct rtl8xxxu_rxdesc24 *rx_desc =
rx_desc          5313 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	drvinfo_sz = rx_desc->drvinfo_sz * 8;
rx_desc          5314 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	desc_shift = rx_desc->shift;
rx_desc          5317 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	if (rx_desc->rpt_sel) {
rx_desc          5325 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	if (rx_desc->phy_stats)
rx_desc          5327 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 					   rx_desc->rxmcs);
rx_desc          5329 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	rx_status->mactime = rx_desc->tsfl;
rx_desc          5332 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	if (!rx_desc->swdec)
rx_desc          5334 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	if (rx_desc->crc32)
rx_desc          5336 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	if (rx_desc->bw)
rx_desc          5339 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 	if (rx_desc->rxmcs >= DESC_RATE_MCS0) {
rx_desc          5341 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		rx_status->rate_idx = rx_desc->rxmcs - DESC_RATE_MCS0;
rx_desc          5343 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c 		rx_status->rate_idx = rx_desc->rxmcs;
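
Both rtl8xxxu parse paths above end with the same rate fixup: descriptor rate codes at or above DESC_RATE_MCS0 are HT rates and are rebased so rate_idx becomes the MCS number, while everything below maps straight to the legacy rate table. A hedged standalone rendering follows; the DESC_RATE_MCS0 value and the flag handling are illustrative.

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define DESC_RATE_MCS0	12	/* assumed first HT rate in the descriptor encoding */

struct rx_rate { bool ht; uint8_t rate_idx; };

static struct rx_rate map_rx_rate(uint8_t rxmcs)
{
	struct rx_rate r;

	if (rxmcs >= DESC_RATE_MCS0) {
		r.ht = true;
		r.rate_idx = rxmcs - DESC_RATE_MCS0;	/* MCS number */
	} else {
		r.ht = false;
		r.rate_idx = rxmcs;			/* legacy rate table index */
	}
	return r;
}

int main(void)
{
	struct rx_rate r = map_rx_rate(19);

	printf("ht=%d rate_idx=%u\n", r.ht, r.rate_idx);
	return 0;
}
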
rx_desc           629 drivers/net/wireless/realtek/rtw88/main.h 	void (*query_rx_desc)(struct rtw_dev *rtwdev, u8 *rx_desc,
rx_desc           824 drivers/net/wireless/realtek/rtw88/pci.c 	u8 *rx_desc;
rx_desc           844 drivers/net/wireless/realtek/rtw88/pci.c 		rx_desc = skb->data;
rx_desc           845 drivers/net/wireless/realtek/rtw88/pci.c 		chip->ops->query_rx_desc(rtwdev, rx_desc, &pkt_stat, &rx_status);
rx_desc           826 drivers/net/wireless/realtek/rtw88/rtw8822b.c static void rtw8822b_query_rx_desc(struct rtw_dev *rtwdev, u8 *rx_desc,
rx_desc           836 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->phy_status = GET_RX_DESC_PHYST(rx_desc);
rx_desc           837 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->icv_err = GET_RX_DESC_ICV_ERR(rx_desc);
rx_desc           838 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->crc_err = GET_RX_DESC_CRC32(rx_desc);
rx_desc           839 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->decrypted = !GET_RX_DESC_SWDEC(rx_desc);
rx_desc           840 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->is_c2h = GET_RX_DESC_C2H(rx_desc);
rx_desc           841 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->pkt_len = GET_RX_DESC_PKT_LEN(rx_desc);
rx_desc           842 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->drv_info_sz = GET_RX_DESC_DRV_INFO_SIZE(rx_desc);
rx_desc           843 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->shift = GET_RX_DESC_SHIFT(rx_desc);
rx_desc           844 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->rate = GET_RX_DESC_RX_RATE(rx_desc);
rx_desc           845 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->cam_id = GET_RX_DESC_MACID(rx_desc);
rx_desc           846 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->ppdu_cnt = GET_RX_DESC_PPDU_CNT(rx_desc);
rx_desc           847 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	pkt_stat->tsf_low = GET_RX_DESC_TSFL(rx_desc);
rx_desc           856 drivers/net/wireless/realtek/rtw88/rtw8822b.c 	hdr = (struct ieee80211_hdr *)(rx_desc + desc_sz + pkt_stat->shift +
rx_desc           859 drivers/net/wireless/realtek/rtw88/rtw8822b.c 		phy_status = rx_desc + desc_sz + pkt_stat->shift;
rx_desc          1694 drivers/net/wireless/realtek/rtw88/rtw8822c.c static void rtw8822c_query_rx_desc(struct rtw_dev *rtwdev, u8 *rx_desc,
rx_desc          1704 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->phy_status = GET_RX_DESC_PHYST(rx_desc);
rx_desc          1705 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->icv_err = GET_RX_DESC_ICV_ERR(rx_desc);
rx_desc          1706 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->crc_err = GET_RX_DESC_CRC32(rx_desc);
rx_desc          1707 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->decrypted = !GET_RX_DESC_SWDEC(rx_desc);
rx_desc          1708 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->is_c2h = GET_RX_DESC_C2H(rx_desc);
rx_desc          1709 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->pkt_len = GET_RX_DESC_PKT_LEN(rx_desc);
rx_desc          1710 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->drv_info_sz = GET_RX_DESC_DRV_INFO_SIZE(rx_desc);
rx_desc          1711 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->shift = GET_RX_DESC_SHIFT(rx_desc);
rx_desc          1712 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->rate = GET_RX_DESC_RX_RATE(rx_desc);
rx_desc          1713 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->cam_id = GET_RX_DESC_MACID(rx_desc);
rx_desc          1714 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->ppdu_cnt = GET_RX_DESC_PPDU_CNT(rx_desc);
rx_desc          1715 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	pkt_stat->tsf_low = GET_RX_DESC_TSFL(rx_desc);
rx_desc          1724 drivers/net/wireless/realtek/rtw88/rtw8822c.c 	hdr = (struct ieee80211_hdr *)(rx_desc + desc_sz + pkt_stat->shift +
rx_desc          1727 drivers/net/wireless/realtek/rtw88/rtw8822c.c 		phy_status = rx_desc + desc_sz + pkt_stat->shift;
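
rtw8822b and rtw8822c fill their packet-status structure entirely through GET_RX_DESC_*() accessors, each of which isolates a bit-range inside one 32-bit word of the raw descriptor, and then use pkt_len, shift and drv_info_sz to locate the 802.11 header and PHY status behind the descriptor. The miniature below only imitates that style: the word offsets, masks and shifts are made up, and the little-endian conversion done by the real macros is skipped.

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define GET_FIELD(dw, mask, shift)	(((dw) >> (shift)) & (mask))

struct pkt_stat { unsigned int pkt_len, shift, drv_info_sz, rate; };

static void query_rx_desc(const uint8_t *rx_desc, struct pkt_stat *s)
{
	uint32_t dw0, dw3;

	memcpy(&dw0, rx_desc + 0, 4);	/* descriptor word 0 */
	memcpy(&dw3, rx_desc + 12, 4);	/* descriptor word 3 */

	s->pkt_len     = GET_FIELD(dw0, 0x3fff, 0);	/* invented bit layout */
	s->drv_info_sz = GET_FIELD(dw0, 0x0f, 16) * 8;	/* in 8-byte units */
	s->shift       = GET_FIELD(dw0, 0x03, 24);
	s->rate        = GET_FIELD(dw3, 0x7f, 0);
}

int main(void)
{
	uint8_t desc[24] = { 0 };
	uint32_t dw0 = 1500 | (2u << 16) | (1u << 24);
	struct pkt_stat s;

	memcpy(desc, &dw0, 4);
	query_rx_desc(desc, &s);
	printf("len=%u drv_info=%u shift=%u\n", s.pkt_len, s.drv_info_sz, s.shift);
	return 0;
}
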
rx_desc           206 drivers/net/wireless/ti/wl1251/rx.c 	struct wl1251_rx_descriptor *rx_desc;
rx_desc           211 drivers/net/wireless/ti/wl1251/rx.c 	rx_desc = wl->rx_descriptor;
rx_desc           214 drivers/net/wireless/ti/wl1251/rx.c 	wl1251_rx_header(wl, rx_desc);
rx_desc           217 drivers/net/wireless/ti/wl1251/rx.c 	wl1251_rx_body(wl, rx_desc);
rx_desc           597 drivers/net/wireless/ti/wl12xx/main.c static int wl127x_prepare_read(struct wl1271 *wl, u32 rx_desc, u32 len)
rx_desc           610 drivers/net/wireless/ti/wl12xx/main.c 		u32 mem_block = rx_desc & RX_MEM_BLOCK_MASK;
rx_desc          1345 drivers/net/wireless/ti/wl12xx/main.c wl12xx_get_rx_buf_align(struct wl1271 *wl, u32 rx_desc)
rx_desc          1347 drivers/net/wireless/ti/wl12xx/main.c 	if (rx_desc & RX_BUF_UNALIGNED_PAYLOAD)
rx_desc          1099 drivers/net/wireless/ti/wl18xx/main.c wl18xx_get_rx_buf_align(struct wl1271 *wl, u32 rx_desc)
rx_desc          1101 drivers/net/wireless/ti/wl18xx/main.c 	if (rx_desc & RX_BUF_PADDED_PAYLOAD)
rx_desc            45 drivers/net/wireless/ti/wlcore/hw_ops.h wlcore_hw_get_rx_buf_align(struct wl1271 *wl, u32 rx_desc)
rx_desc            51 drivers/net/wireless/ti/wlcore/hw_ops.h 	return wl->ops->get_rx_buf_align(wl, rx_desc);
rx_desc            55 drivers/net/wireless/ti/wlcore/hw_ops.h wlcore_hw_prepare_read(struct wl1271 *wl, u32 rx_desc, u32 len)
rx_desc            58 drivers/net/wireless/ti/wlcore/hw_ops.h 		return wl->ops->prepare_read(wl, rx_desc, len);
rx_desc            57 drivers/net/wireless/ti/wlcore/wlcore.h 						 u32 rx_desc);
rx_desc            58 drivers/net/wireless/ti/wlcore/wlcore.h 	int (*prepare_read)(struct wl1271 *wl, u32 rx_desc, u32 len);
rx_desc           869 drivers/scsi/mvsas/mv_sas.c static void mvs_slot_free(struct mvs_info *mvi, u32 rx_desc)
rx_desc           871 drivers/scsi/mvsas/mv_sas.c 	u32 slot_idx = rx_desc & RXQ_SLOT_MASK;
rx_desc          1700 drivers/scsi/mvsas/mv_sas.c int mvs_slot_complete(struct mvs_info *mvi, u32 rx_desc, u32 flags)
rx_desc          1702 drivers/scsi/mvsas/mv_sas.c 	u32 slot_idx = rx_desc & RXQ_SLOT_MASK;
rx_desc          1754 drivers/scsi/mvsas/mv_sas.c 	if (unlikely((rx_desc & RXQ_ERR)
rx_desc          1759 drivers/scsi/mvsas/mv_sas.c 			 rx_desc, get_unaligned_le64(slot->response));
rx_desc          1768 drivers/scsi/mvsas/mv_sas.c 		if (rx_desc & RXQ_GOOD) {
rx_desc          1773 drivers/scsi/mvsas/mv_sas.c 		else if (rx_desc & RXQ_RSP) {
rx_desc          2049 drivers/scsi/mvsas/mv_sas.c 	u32 rx_prod_idx, rx_desc;
rx_desc          2075 drivers/scsi/mvsas/mv_sas.c 		rx_desc = le32_to_cpu(mvi->rx[rx_prod_idx + 1]);
rx_desc          2077 drivers/scsi/mvsas/mv_sas.c 		if (likely(rx_desc & RXQ_DONE))
rx_desc          2078 drivers/scsi/mvsas/mv_sas.c 			mvs_slot_complete(mvi, rx_desc, 0);
rx_desc          2079 drivers/scsi/mvsas/mv_sas.c 		if (rx_desc & RXQ_ATTN) {
rx_desc          2081 drivers/scsi/mvsas/mv_sas.c 		} else if (rx_desc & RXQ_ERR) {
rx_desc          2082 drivers/scsi/mvsas/mv_sas.c 			if (!(rx_desc & RXQ_DONE))
rx_desc          2083 drivers/scsi/mvsas/mv_sas.c 				mvs_slot_complete(mvi, rx_desc, 0);
rx_desc          2084 drivers/scsi/mvsas/mv_sas.c 		} else if (rx_desc & RXQ_SLOT_RESET) {
rx_desc          2085 drivers/scsi/mvsas/mv_sas.c 			mvs_slot_free(mvi, rx_desc);
rx_desc           451 drivers/scsi/mvsas/mv_sas.h int mvs_slot_complete(struct mvs_info *mvi, u32 rx_desc, u32 flags);
rx_desc           154 drivers/slimbus/qcom-ngd-ctrl.c 	struct qcom_slim_ngd_dma_desc rx_desc[QCOM_SLIM_NGD_DESC_NUM];
rx_desc           642 drivers/slimbus/qcom-ngd-ctrl.c 		desc = &ctrl->rx_desc[i];
rx_desc           170 drivers/spi/spi-fsl-dspi.c 	struct dma_async_tx_descriptor		*rx_desc;
rx_desc           296 drivers/spi/spi-fsl-dspi.c 	dma->rx_desc = dmaengine_prep_slave_single(dma->chan_rx,
rx_desc           302 drivers/spi/spi-fsl-dspi.c 	if (!dma->rx_desc) {
rx_desc           307 drivers/spi/spi-fsl-dspi.c 	dma->rx_desc->callback = dspi_rx_dma_callback;
rx_desc           308 drivers/spi/spi-fsl-dspi.c 	dma->rx_desc->callback_param = dspi;
rx_desc           309 drivers/spi/spi-fsl-dspi.c 	if (dma_submit_error(dmaengine_submit(dma->rx_desc))) {
rx_desc           143 drivers/spi/spi-pxa2xx-dma.c 	struct dma_async_tx_descriptor *tx_desc, *rx_desc;
rx_desc           154 drivers/spi/spi-pxa2xx-dma.c 	rx_desc = pxa2xx_spi_dma_prepare_one(drv_data, DMA_DEV_TO_MEM, xfer);
rx_desc           155 drivers/spi/spi-pxa2xx-dma.c 	if (!rx_desc) {
rx_desc           163 drivers/spi/spi-pxa2xx-dma.c 	rx_desc->callback = pxa2xx_spi_dma_callback;
rx_desc           164 drivers/spi/spi-pxa2xx-dma.c 	rx_desc->callback_param = drv_data;
rx_desc           166 drivers/spi/spi-pxa2xx-dma.c 	dmaengine_submit(rx_desc);
rx_desc           492 drivers/spi/spi-sirf.c 	struct dma_async_tx_descriptor *rx_desc, *tx_desc;
rx_desc           550 drivers/spi/spi-sirf.c 	rx_desc = dmaengine_prep_slave_single(sspi->rx_chan,
rx_desc           553 drivers/spi/spi-sirf.c 	rx_desc->callback = spi_sirfsoc_dma_fini_callback;
rx_desc           554 drivers/spi/spi-sirf.c 	rx_desc->callback_param = &sspi->rx_done;
rx_desc           566 drivers/spi/spi-sirf.c 	dmaengine_submit(rx_desc);
rx_desc           236 drivers/staging/mt7621-dma/mtk-hsdma.c 	struct hsdma_desc *rx_desc;
rx_desc           244 drivers/staging/mt7621-dma/mtk-hsdma.c 		rx_desc = &chan->rx_ring[i];
rx_desc           249 drivers/staging/mt7621-dma/mtk-hsdma.c 				tx_desc->addr1, rx_desc->addr0, rx_desc->flags);
rx_desc           320 drivers/staging/mt7621-dma/mtk-hsdma.c 	struct hsdma_desc *tx_desc, *rx_desc;
rx_desc           363 drivers/staging/mt7621-dma/mtk-hsdma.c 		rx_desc = &chan->rx_ring[rx_idx];
rx_desc           369 drivers/staging/mt7621-dma/mtk-hsdma.c 		rx_desc->addr0 = dst;
rx_desc           370 drivers/staging/mt7621-dma/mtk-hsdma.c 		rx_desc->flags = HSDMA_DESC_PLEN0(tlen);
rx_desc          1466 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.c 	struct rx_desc  *pdesc,
rx_desc          1849 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.c 					      struct rx_desc *pdesc,
rx_desc          2005 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.c 			 struct rx_desc *pdesc, struct sk_buff *skb)
rx_desc            35 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.h 			 struct rx_desc *pdesc, struct sk_buff *skb);
rx_desc          1782 drivers/staging/rtl8192e/rtl8192e/rtl_core.c 	struct rx_desc *entry = NULL;
rx_desc          1889 drivers/staging/rtl8192e/rtl8192e/rtl_core.c 			struct rx_desc *entry = NULL;
rx_desc          2021 drivers/staging/rtl8192e/rtl8192e/rtl_core.c 		struct rx_desc *pdesc = &priv->rx_ring[rx_queue_idx]
rx_desc           288 drivers/staging/rtl8192e/rtl8192e/rtl_core.h 					   struct rx_desc *pdesc,
rx_desc           292 drivers/staging/rtl8192e/rtl8192e/rtl_core.h 					  struct rx_desc *pdesc);
rx_desc           374 drivers/staging/rtl8192e/rtl8192e/rtl_core.h 	struct rx_desc *rx_ring[MAX_RX_QUEUE];
rx_desc           459 drivers/tty/serial/samsung.c 	async_tx_ack(dma->rx_desc);
rx_desc           483 drivers/tty/serial/samsung.c 	dma->rx_desc = dmaengine_prep_slave_single(dma->rx_chan,
rx_desc           486 drivers/tty/serial/samsung.c 	if (!dma->rx_desc) {
rx_desc           491 drivers/tty/serial/samsung.c 	dma->rx_desc->callback = s3c24xx_serial_rx_dma_complete;
rx_desc           492 drivers/tty/serial/samsung.c 	dma->rx_desc->callback_param = ourport;
rx_desc           495 drivers/tty/serial/samsung.c 	dma->rx_cookie = dmaengine_submit(dma->rx_desc);
rx_desc            67 drivers/tty/serial/samsung.h 	struct dma_async_tx_descriptor	*rx_desc;