/linux-4.4.14/drivers/net/ethernet/qlogic/netxen/

netxen_nic_init.c
    49  struct nx_host_rds_ring *rds_ring);
   110  struct nx_host_rds_ring *rds_ring;   [netxen_release_rx_buffers() local]
   116  rds_ring = &recv_ctx->rds_rings[ring];   [netxen_release_rx_buffers()]
   117  for (i = 0; i < rds_ring->num_desc; ++i) {   [netxen_release_rx_buffers()]
   118  rx_buf = &(rds_ring->rx_buf_arr[i]);   [netxen_release_rx_buffers()]
   123  rds_ring->dma_size,   [netxen_release_rx_buffers()]
   168  struct nx_host_rds_ring *rds_ring;   [netxen_free_sw_resources() local]
   178  rds_ring = &recv_ctx->rds_rings[ring];   [netxen_free_sw_resources()]
   179  vfree(rds_ring->rx_buf_arr);   [netxen_free_sw_resources()]
   180  rds_ring->rx_buf_arr = NULL;   [netxen_free_sw_resources()]
   197  struct nx_host_rds_ring *rds_ring;   [netxen_alloc_sw_resources() local]
   223  rds_ring = kcalloc(adapter->max_rds_rings,   [netxen_alloc_sw_resources()]
   225  if (rds_ring == NULL)   [netxen_alloc_sw_resources()]
   228  recv_ctx->rds_rings = rds_ring;   [netxen_alloc_sw_resources()]
   231  rds_ring = &recv_ctx->rds_rings[ring];   [netxen_alloc_sw_resources()]
   234  rds_ring->num_desc = adapter->num_rxd;   [netxen_alloc_sw_resources()]
   236  rds_ring->dma_size =   [netxen_alloc_sw_resources()]
   238  rds_ring->skb_size =   [netxen_alloc_sw_resources()]
   242  rds_ring->dma_size =   [netxen_alloc_sw_resources()]
   245  rds_ring->dma_size =   [netxen_alloc_sw_resources()]
   247  rds_ring->skb_size =   [netxen_alloc_sw_resources()]
   248  rds_ring->dma_size + NET_IP_ALIGN;   [netxen_alloc_sw_resources()]
   253  rds_ring->num_desc = adapter->num_jumbo_rxd;   [netxen_alloc_sw_resources()]
   255  rds_ring->dma_size =   [netxen_alloc_sw_resources()]
   258  rds_ring->dma_size =   [netxen_alloc_sw_resources()]
   262  rds_ring->dma_size += NX_LRO_BUFFER_EXTRA;   [netxen_alloc_sw_resources()]
   264  rds_ring->skb_size =   [netxen_alloc_sw_resources()]
   265  rds_ring->dma_size + NET_IP_ALIGN;   [netxen_alloc_sw_resources()]
   269  rds_ring->num_desc = adapter->num_lro_rxd;   [netxen_alloc_sw_resources()]
   270  rds_ring->dma_size = NX_RX_LRO_BUFFER_LENGTH;   [netxen_alloc_sw_resources()]
   271  rds_ring->skb_size = rds_ring->dma_size + NET_IP_ALIGN;   [netxen_alloc_sw_resources()]
   275  rds_ring->rx_buf_arr = vzalloc(RCV_BUFF_RINGSIZE(rds_ring));   [netxen_alloc_sw_resources()]
   276  if (rds_ring->rx_buf_arr == NULL)   [netxen_alloc_sw_resources()]
   280  INIT_LIST_HEAD(&rds_ring->free_list);   [netxen_alloc_sw_resources()]
   285  rx_buf = rds_ring->rx_buf_arr;   [netxen_alloc_sw_resources()]
   286  for (i = 0; i < rds_ring->num_desc; i++) {   [netxen_alloc_sw_resources()]
   288  &rds_ring->free_list);   [netxen_alloc_sw_resources()]
   293  spin_lock_init(&rds_ring->lock);   [netxen_alloc_sw_resources()]
  1474  struct nx_host_rds_ring *rds_ring,   [netxen_alloc_rx_skb()]
  1481  buffer->skb = netdev_alloc_skb(adapter->netdev, rds_ring->skb_size);   [netxen_alloc_rx_skb()]
  1491  rds_ring->dma_size, PCI_DMA_FROMDEVICE);   [netxen_alloc_rx_skb()]
  1507  struct nx_host_rds_ring *rds_ring, u16 index, u16 cksum)   [netxen_process_rxbuf()]
  1512  buffer = &rds_ring->rx_buf_arr[index];   [netxen_process_rxbuf()]
  1514  pci_unmap_single(adapter->pdev, buffer->dma, rds_ring->dma_size,   [netxen_process_rxbuf()]
  1543  struct nx_host_rds_ring *rds_ring;   [netxen_process_rcv() local]
  1549  rds_ring = &recv_ctx->rds_rings[ring];   [netxen_process_rcv()]
  1552  if (unlikely(index >= rds_ring->num_desc))   [netxen_process_rcv()]
  1555  buffer = &rds_ring->rx_buf_arr[index];   [netxen_process_rcv()]
  1561  skb = netxen_process_rxbuf(adapter, rds_ring, index, cksum);   [netxen_process_rcv()]
  1565  if (length > rds_ring->skb_size)   [netxen_process_rcv()]
  1566  skb_put(skb, rds_ring->skb_size);   [netxen_process_rcv()]
  1597  struct nx_host_rds_ring *rds_ring;   [netxen_process_lro() local]
  1610  rds_ring = &recv_ctx->rds_rings[ring];   [netxen_process_lro()]
  1613  if (unlikely(index >= rds_ring->num_desc))   [netxen_process_lro()]
  1616  buffer = &rds_ring->rx_buf_arr[index];   [netxen_process_lro()]
  1625  skb = netxen_process_rxbuf(adapter, rds_ring, index, STATUS_CKSUM_OK);   [netxen_process_lro()]
  1729  struct nx_host_rds_ring *rds_ring =   [netxen_process_rcv_ring() local]
  1736  netxen_alloc_rx_skb(adapter, rds_ring, rxbuf);   [netxen_process_rcv_ring()]
  1738  spin_lock(&rds_ring->lock);   [netxen_process_rcv_ring()]
  1740  &rds_ring->free_list);   [netxen_process_rcv_ring()]
  1741  spin_unlock(&rds_ring->lock);   [netxen_process_rcv_ring()]
  1744  netxen_post_rx_buffers_nodb(adapter, rds_ring);   [netxen_process_rcv_ring()]
  1829  struct nx_host_rds_ring *rds_ring)   [netxen_post_rx_buffers()]
  1837  producer = rds_ring->producer;   [netxen_post_rx_buffers()]
  1839  head = &rds_ring->free_list;   [netxen_post_rx_buffers()]
  1845  if (netxen_alloc_rx_skb(adapter, rds_ring, buffer))   [netxen_post_rx_buffers()]
  1853  pdesc = &rds_ring->desc_head[producer];   [netxen_post_rx_buffers()]
  1856  pdesc->buffer_length = cpu_to_le32(rds_ring->dma_size);   [netxen_post_rx_buffers()]
  1858  producer = get_next_index(producer, rds_ring->num_desc);   [netxen_post_rx_buffers()]
  1862  rds_ring->producer = producer;   [netxen_post_rx_buffers()]
  1863  NXWRIO(adapter, rds_ring->crb_rcv_producer,   [netxen_post_rx_buffers()]
  1864  (producer-1) & (rds_ring->num_desc-1));   [netxen_post_rx_buffers()]
  1876  (rds_ring->num_desc - 1)));   [netxen_post_rx_buffers()]
  1887  struct nx_host_rds_ring *rds_ring)   [netxen_post_rx_buffers_nodb()]
  1894  if (!spin_trylock(&rds_ring->lock))   [netxen_post_rx_buffers_nodb()]
  1897  producer = rds_ring->producer;   [netxen_post_rx_buffers_nodb()]
  1899  head = &rds_ring->free_list;   [netxen_post_rx_buffers_nodb()]
  1905  if (netxen_alloc_rx_skb(adapter, rds_ring, buffer))   [netxen_post_rx_buffers_nodb()]
  1913  pdesc = &rds_ring->desc_head[producer];   [netxen_post_rx_buffers_nodb()]
  1915  pdesc->buffer_length = cpu_to_le32(rds_ring->dma_size);   [netxen_post_rx_buffers_nodb()]
  1918  producer = get_next_index(producer, rds_ring->num_desc);   [netxen_post_rx_buffers_nodb()]
  1922  rds_ring->producer = producer;   [netxen_post_rx_buffers_nodb()]
  1923  NXWRIO(adapter, rds_ring->crb_rcv_producer,   [netxen_post_rx_buffers_nodb()]
  1924  (producer - 1) & (rds_ring->num_desc - 1));   [netxen_post_rx_buffers_nodb()]
  1926  spin_unlock(&rds_ring->lock);   [netxen_post_rx_buffers_nodb()]
  1473  netxen_alloc_rx_skb(struct netxen_adapter *adapter, struct nx_host_rds_ring *rds_ring, struct netxen_rx_buffer *buffer)   [netxen_alloc_rx_skb() argument]
  1506  netxen_process_rxbuf(struct netxen_adapter *adapter, struct nx_host_rds_ring *rds_ring, u16 index, u16 cksum)   [netxen_process_rxbuf() argument]
  1828  netxen_post_rx_buffers(struct netxen_adapter *adapter, u32 ringid, struct nx_host_rds_ring *rds_ring)   [netxen_post_rx_buffers() argument]
  1886  netxen_post_rx_buffers_nodb(struct netxen_adapter *adapter, struct nx_host_rds_ring *rds_ring)   [netxen_post_rx_buffers_nodb() argument]

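The hits in netxen_post_rx_buffers()/netxen_post_rx_buffers_nodb() above all follow one pattern: take buffers from the ring's free list, fill the next descriptor, advance the producer with get_next_index(), and write (producer - 1) & (num_desc - 1) to the producer register. The standalone sketch below only models that index arithmetic with plain C; the type and field names (demo_ring, demo_desc) are illustrative stand-ins, not the driver's structures.

#include <stdio.h>
#include <stdint.h>

/* Illustrative stand-ins for the driver's descriptor and ring types. */
struct demo_desc {
    uint64_t addr_buffer;
    uint32_t reference_handle;
    uint32_t buffer_length;
};

struct demo_ring {
    uint32_t producer;           /* next descriptor slot to fill       */
    uint32_t num_desc;           /* power of two, as the masks require */
    struct demo_desc desc[256];
};

/* Model of the refill loop: fill 'count' descriptors, then return the
 * value that would be written to the producer register, i.e. the index
 * of the last posted descriptor: (producer - 1) & (num_desc - 1). */
static uint32_t post_buffers(struct demo_ring *ring, uint32_t count)
{
    while (count--) {
        struct demo_desc *d = &ring->desc[ring->producer];

        d->addr_buffer = 0;                 /* DMA address would go here   */
        d->reference_handle = ring->producer;
        d->buffer_length = 1514;            /* rds_ring->dma_size analogue */

        /* get_next_index() analogue for a power-of-two ring. */
        ring->producer = (ring->producer + 1) & (ring->num_desc - 1);
    }
    return (ring->producer - 1) & (ring->num_desc - 1);
}

int main(void)
{
    struct demo_ring ring = { .producer = 254, .num_desc = 256 };

    /* Posting 4 buffers from slot 254 wraps around to slot 1; the
     * doorbell value is the last filled slot, 1. */
    printf("doorbell value: %u, next producer: %u\n",
           post_buffers(&ring, 4), ring.producer);
    return 0;
}
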
netxen_nic_ctx.c
   282  struct nx_host_rds_ring *rds_ring;   [nx_fw_cmd_create_rx_ctx() local]
   346  rds_ring = &recv_ctx->rds_rings[i];   [nx_fw_cmd_create_rx_ctx()]
   348  prq_rds[i].host_phys_addr = cpu_to_le64(rds_ring->phys_addr);   [nx_fw_cmd_create_rx_ctx()]
   349  prq_rds[i].ring_size = cpu_to_le32(rds_ring->num_desc);   [nx_fw_cmd_create_rx_ctx()]
   351  prq_rds[i].buff_size = cpu_to_le64(rds_ring->dma_size);   [nx_fw_cmd_create_rx_ctx()]
   384  rds_ring = &recv_ctx->rds_rings[i];   [nx_fw_cmd_create_rx_ctx()]
   387  rds_ring->crb_rcv_producer = netxen_get_ioaddr(adapter,   [nx_fw_cmd_create_rx_ctx()]
   704  struct nx_host_rds_ring *rds_ring;   [netxen_init_old_ctx() local]
   721  rds_ring = &recv_ctx->rds_rings[ring];   [netxen_init_old_ctx()]
   724  cpu_to_le64(rds_ring->phys_addr);   [netxen_init_old_ctx()]
   726  cpu_to_le32(rds_ring->num_desc);   [netxen_init_old_ctx()]
   760  struct nx_host_rds_ring *rds_ring;   [netxen_alloc_hw_resources() local]
   802  rds_ring = &recv_ctx->rds_rings[ring];   [netxen_alloc_hw_resources()]
   804  RCV_DESC_RINGSIZE(rds_ring),   [netxen_alloc_hw_resources()]
   805  &rds_ring->phys_addr);   [netxen_alloc_hw_resources()]
   813  rds_ring->desc_head = addr;   [netxen_alloc_hw_resources()]
   816  rds_ring->crb_rcv_producer =   [netxen_alloc_hw_resources()]
   874  struct nx_host_rds_ring *rds_ring;   [netxen_free_hw_resources() local]
   918  rds_ring = &recv_ctx->rds_rings[ring];   [netxen_free_hw_resources()]
   920  if (rds_ring->desc_head != NULL) {   [netxen_free_hw_resources()]
   922  RCV_DESC_RINGSIZE(rds_ring),   [netxen_free_hw_resources()]
   923  rds_ring->desc_head,   [netxen_free_hw_resources()]
   924  rds_ring->phys_addr);   [netxen_free_hw_resources()]
   925  rds_ring->desc_head = NULL;   [netxen_free_hw_resources()]

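In nx_fw_cmd_create_rx_ctx() each RDS ring is described to firmware by its DMA address, descriptor count, and buffer size, all converted to little-endian with cpu_to_le64()/cpu_to_le32(). A minimal user-space model of that request-filling step follows, using the <endian.h> conversions in place of the kernel helpers; the struct and field names (fw_rds_req) are hypothetical, not the driver's prq/cardrsp layout.

#define _DEFAULT_SOURCE
#include <endian.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for one per-ring entry of the firmware request. */
struct fw_rds_req {
    uint64_t host_phys_addr;   /* little-endian DMA address of the desc ring */
    uint32_t ring_size;        /* little-endian number of descriptors        */
    uint64_t buff_size;        /* little-endian RX buffer size               */
};

int main(void)
{
    /* Values a ring might carry: a DMA address, 1024 descriptors,
     * and a 1514-byte buffer size. */
    uint64_t phys_addr = 0x12345678abcdULL;
    uint32_t num_desc  = 1024;
    uint64_t dma_size  = 1514;

    struct fw_rds_req req = {
        .host_phys_addr = htole64(phys_addr),   /* cpu_to_le64() analogue */
        .ring_size      = htole32(num_desc),    /* cpu_to_le32() analogue */
        .buff_size      = htole64(dma_size),
    };

    printf("phys=%#llx size=%u buff=%llu\n",
           (unsigned long long)le64toh(req.host_phys_addr),
           (unsigned)le32toh(req.ring_size),
           (unsigned long long)le64toh(req.buff_size));
    return 0;
}
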
netxen_nic.h
    75  #define RCV_DESC_RINGSIZE(rds_ring) \
    76  (sizeof(struct rcv_desc) * (rds_ring)->num_desc)
    77  #define RCV_BUFF_RINGSIZE(rds_ring) \
    78  (sizeof(struct netxen_rx_buffer) * rds_ring->num_desc)
  1783  struct nx_host_rds_ring *rds_ring);

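Both ring-size macros simply multiply a per-element size by the ring's descriptor count: RCV_DESC_RINGSIZE sizes the DMA descriptor ring and RCV_BUFF_RINGSIZE the host-side buffer-tracking array. A small sketch of the same sizing arithmetic; the element types here are placeholders, since the real sizes come from the driver's struct rcv_desc and struct netxen_rx_buffer layouts.

#include <stdio.h>
#include <stddef.h>

/* Placeholder element types standing in for the driver's structs. */
struct demo_rcv_desc  { unsigned long long addr; unsigned int handle, len; };
struct demo_rx_buffer { void *skb; unsigned long long dma; unsigned short state; };

struct demo_rds_ring { unsigned int num_desc; };

#define DEMO_DESC_RINGSIZE(r)  (sizeof(struct demo_rcv_desc) * (r)->num_desc)
#define DEMO_BUFF_RINGSIZE(r)  (sizeof(struct demo_rx_buffer) * (r)->num_desc)

int main(void)
{
    struct demo_rds_ring ring = { .num_desc = 1024 };

    /* One contiguous DMA area for descriptors, one vmalloc'ed array for
     * buffer bookkeeping, both sized from num_desc. */
    printf("desc ring bytes: %zu\n", DEMO_DESC_RINGSIZE(&ring));
    printf("buff ring bytes: %zu\n", DEMO_BUFF_RINGSIZE(&ring));
    return 0;
}
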
netxen_nic_main.c
  1224  struct nx_host_rds_ring *rds_ring;   [netxen_nic_attach() local]
  1275  rds_ring = &adapter->recv_ctx.rds_rings[ring];   [netxen_nic_attach()]
  1276  netxen_post_rx_buffers(adapter, ring, rds_ring);   [netxen_nic_attach()]

/linux-4.4.14/drivers/net/ethernet/qlogic/qlcnic/

qlcnic_io.c
   812  struct qlcnic_host_rds_ring *rds_ring,   [qlcnic_alloc_rx_skb()]
   819  skb = netdev_alloc_skb(adapter->netdev, rds_ring->skb_size);   [qlcnic_alloc_rx_skb()]
   827  rds_ring->dma_size, PCI_DMA_FROMDEVICE);   [qlcnic_alloc_rx_skb()]
   842  struct qlcnic_host_rds_ring *rds_ring,   [qlcnic_post_rx_buffers_nodb()]
   851  if (!spin_trylock(&rds_ring->lock))   [qlcnic_post_rx_buffers_nodb()]
   854  producer = rds_ring->producer;   [qlcnic_post_rx_buffers_nodb()]
   855  head = &rds_ring->free_list;   [qlcnic_post_rx_buffers_nodb()]
   860  if (qlcnic_alloc_rx_skb(adapter, rds_ring, buffer))   [qlcnic_post_rx_buffers_nodb()]
   867  pdesc = &rds_ring->desc_head[producer];   [qlcnic_post_rx_buffers_nodb()]
   871  pdesc->buffer_length = cpu_to_le32(rds_ring->dma_size);   [qlcnic_post_rx_buffers_nodb()]
   873  producer = get_next_index(producer, rds_ring->num_desc);   [qlcnic_post_rx_buffers_nodb()]
   876  rds_ring->producer = producer;   [qlcnic_post_rx_buffers_nodb()]
   877  writel((producer - 1) & (rds_ring->num_desc - 1),   [qlcnic_post_rx_buffers_nodb()]
   878  rds_ring->crb_rcv_producer);   [qlcnic_post_rx_buffers_nodb()]
   880  spin_unlock(&rds_ring->lock);   [qlcnic_post_rx_buffers_nodb()]
  1199  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_process_rcv() local]
  1206  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_process_rcv()]
  1209  if (unlikely(index >= rds_ring->num_desc))   [qlcnic_process_rcv()]
  1212  buffer = &rds_ring->rx_buf_arr[index];   [qlcnic_process_rcv()]
  1217  skb = qlcnic_process_rxbuf(adapter, rds_ring, index, cksum);   [qlcnic_process_rcv()]
  1227  if (length > rds_ring->skb_size)   [qlcnic_process_rcv()]
  1228  skb_put(skb, rds_ring->skb_size);   [qlcnic_process_rcv()]
  1266  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_process_lro() local]
  1278  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_process_lro()]
  1281  if (unlikely(index >= rds_ring->num_desc))   [qlcnic_process_lro()]
  1284  buffer = &rds_ring->rx_buf_arr[index];   [qlcnic_process_lro()]
  1293  skb = qlcnic_process_rxbuf(adapter, rds_ring, index, STATUS_CKSUM_OK);   [qlcnic_process_lro()]
  1356  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_process_rcv_ring() local]
  1410  rds_ring = &adapter->recv_ctx->rds_rings[ring];   [qlcnic_process_rcv_ring()]
  1415  qlcnic_alloc_rx_skb(adapter, rds_ring, rxbuf);   [qlcnic_process_rcv_ring()]
  1417  spin_lock(&rds_ring->lock);   [qlcnic_process_rcv_ring()]
  1419  &rds_ring->free_list);   [qlcnic_process_rcv_ring()]
  1420  spin_unlock(&rds_ring->lock);   [qlcnic_process_rcv_ring()]
  1423  qlcnic_post_rx_buffers_nodb(adapter, rds_ring, ring);   [qlcnic_process_rcv_ring()]
  1435  struct qlcnic_host_rds_ring *rds_ring, u8 ring_id)   [qlcnic_post_rx_buffers()]
  1443  producer = rds_ring->producer;   [qlcnic_post_rx_buffers()]
  1444  head = &rds_ring->free_list;   [qlcnic_post_rx_buffers()]
  1451  if (qlcnic_alloc_rx_skb(adapter, rds_ring, buffer))   [qlcnic_post_rx_buffers()]
  1459  pdesc = &rds_ring->desc_head[producer];   [qlcnic_post_rx_buffers()]
  1464  pdesc->buffer_length = cpu_to_le32(rds_ring->dma_size);   [qlcnic_post_rx_buffers()]
  1465  producer = get_next_index(producer, rds_ring->num_desc);   [qlcnic_post_rx_buffers()]
  1469  rds_ring->producer = producer;   [qlcnic_post_rx_buffers()]
  1470  writel((producer-1) & (rds_ring->num_desc-1),   [qlcnic_post_rx_buffers()]
  1471  rds_ring->crb_rcv_producer);   [qlcnic_post_rx_buffers()]
  1493  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_process_rcv_diag() local]
  1499  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_process_rcv_diag()]
  1503  if (unlikely(index >= rds_ring->num_desc))   [qlcnic_process_rcv_diag()]
  1509  skb = qlcnic_process_rxbuf(adapter, rds_ring, index, cksum);   [qlcnic_process_rcv_diag()]
  1513  if (length > rds_ring->skb_size)   [qlcnic_process_rcv_diag()]
  1514  skb_put(skb, rds_ring->skb_size);   [qlcnic_process_rcv_diag()]
  1722  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_83xx_process_rcv() local]
  1730  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_83xx_process_rcv()]
  1733  if (unlikely(index >= rds_ring->num_desc))   [qlcnic_83xx_process_rcv()]
  1736  buffer = &rds_ring->rx_buf_arr[index];   [qlcnic_83xx_process_rcv()]
  1739  skb = qlcnic_process_rxbuf(adapter, rds_ring, index, cksum);   [qlcnic_83xx_process_rcv()]
  1743  if (length > rds_ring->skb_size)   [qlcnic_83xx_process_rcv()]
  1744  skb_put(skb, rds_ring->skb_size);   [qlcnic_83xx_process_rcv()]
  1788  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_83xx_process_lro() local]
  1802  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_83xx_process_lro()]
  1805  if (unlikely(index >= rds_ring->num_desc))   [qlcnic_83xx_process_lro()]
  1808  buffer = &rds_ring->rx_buf_arr[index];   [qlcnic_83xx_process_lro()]
  1815  skb = qlcnic_process_rxbuf(adapter, rds_ring, index, STATUS_CKSUM_OK);   [qlcnic_83xx_process_lro()]
  1880  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_83xx_process_rcv_ring() local]
  1926  rds_ring = &adapter->recv_ctx->rds_rings[ring];   [qlcnic_83xx_process_rcv_ring()]
  1931  qlcnic_alloc_rx_skb(adapter, rds_ring, rxbuf);   [qlcnic_83xx_process_rcv_ring()]
  1933  spin_lock(&rds_ring->lock);   [qlcnic_83xx_process_rcv_ring()]
  1935  &rds_ring->free_list);   [qlcnic_83xx_process_rcv_ring()]
  1936  spin_unlock(&rds_ring->lock);   [qlcnic_83xx_process_rcv_ring()]
  1938  qlcnic_post_rx_buffers_nodb(adapter, rds_ring, ring);   [qlcnic_83xx_process_rcv_ring()]
  2177  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_83xx_process_rcv_diag() local]
  2183  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_83xx_process_rcv_diag()]
  2185  if (unlikely(index >= rds_ring->num_desc))   [qlcnic_83xx_process_rcv_diag()]
  2190  skb = qlcnic_process_rxbuf(adapter, rds_ring, index, STATUS_CKSUM_OK);   [qlcnic_83xx_process_rcv_diag()]
  2194  if (length > rds_ring->skb_size)   [qlcnic_83xx_process_rcv_diag()]
  2195  skb_put(skb, rds_ring->skb_size);   [qlcnic_83xx_process_rcv_diag()]
   811  qlcnic_alloc_rx_skb(struct qlcnic_adapter *adapter, struct qlcnic_host_rds_ring *rds_ring, struct qlcnic_rx_buffer *buffer)   [qlcnic_alloc_rx_skb() argument]
   841  qlcnic_post_rx_buffers_nodb(struct qlcnic_adapter *adapter, struct qlcnic_host_rds_ring *rds_ring, u8 ring_id)   [qlcnic_post_rx_buffers_nodb() argument]
  1434  qlcnic_post_rx_buffers(struct qlcnic_adapter *adapter, struct qlcnic_host_rds_ring *rds_ring, u8 ring_id)   [qlcnic_post_rx_buffers() argument]

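Every qlcnic receive path above repeats the same two guards: the descriptor index reported by hardware is checked against num_desc before the buffer array is dereferenced, and the frame length handed to skb_put() is clamped to the ring's skb_size. A standalone model of those two checks; the buffer array, constants, and helper name are illustrative, not the driver's API.

#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define NUM_DESC  1024           /* rds_ring->num_desc analogue */
#define SKB_SIZE  1522           /* rds_ring->skb_size analogue */

struct demo_rx_buffer { uint8_t data[SKB_SIZE]; };

static struct demo_rx_buffer rx_buf_arr[NUM_DESC];

/* Returns the number of bytes accepted, or -1 if the hardware-supplied
 * index falls outside the ring (the unlikely(index >= num_desc) check). */
static int process_rcv(uint16_t index, uint32_t length)
{
    if (index >= NUM_DESC)
        return -1;                           /* drop: bogus descriptor index */

    /* Clamp the reported frame length to what the buffer can hold,
     * mirroring the skb_put(skb, min(length, skb_size)) pattern. */
    uint32_t pull = length > SKB_SIZE ? SKB_SIZE : length;

    memset(rx_buf_arr[index].data, 0, pull); /* stand-in for consuming data */
    return (int)pull;
}

int main(void)
{
    printf("normal frame:  %d bytes\n", process_rcv(7, 1514));
    printf("oversized len: %d bytes\n", process_rcv(8, 4000));
    printf("bad index:     %d\n", process_rcv(5000, 64));
    return 0;
}
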
qlcnic_init.c
    86  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_release_rx_buffers() local]
    92  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_release_rx_buffers()]
    93  for (i = 0; i < rds_ring->num_desc; ++i) {   [qlcnic_release_rx_buffers()]
    94  rx_buf = &(rds_ring->rx_buf_arr[i]);   [qlcnic_release_rx_buffers()]
   100  rds_ring->dma_size,   [qlcnic_release_rx_buffers()]
   111  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_reset_rx_buffers_list() local]
   117  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_reset_rx_buffers_list()]
   119  INIT_LIST_HEAD(&rds_ring->free_list);   [qlcnic_reset_rx_buffers_list()]
   121  rx_buf = rds_ring->rx_buf_arr;   [qlcnic_reset_rx_buffers_list()]
   122  for (i = 0; i < rds_ring->num_desc; i++) {   [qlcnic_reset_rx_buffers_list()]
   124  &rds_ring->free_list);   [qlcnic_reset_rx_buffers_list()]
   169  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_free_sw_resources() local]
   178  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_free_sw_resources()]
   179  vfree(rds_ring->rx_buf_arr);   [qlcnic_free_sw_resources()]
   180  rds_ring->rx_buf_arr = NULL;   [qlcnic_free_sw_resources()]
   188  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_alloc_sw_resources() local]
   195  rds_ring = kcalloc(adapter->max_rds_rings,   [qlcnic_alloc_sw_resources()]
   197  if (rds_ring == NULL)   [qlcnic_alloc_sw_resources()]
   200  recv_ctx->rds_rings = rds_ring;   [qlcnic_alloc_sw_resources()]
   203  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_alloc_sw_resources()]
   206  rds_ring->num_desc = adapter->num_rxd;   [qlcnic_alloc_sw_resources()]
   207  rds_ring->dma_size = QLCNIC_P3P_RX_BUF_MAX_LEN;   [qlcnic_alloc_sw_resources()]
   208  rds_ring->skb_size = rds_ring->dma_size + NET_IP_ALIGN;   [qlcnic_alloc_sw_resources()]
   212  rds_ring->num_desc = adapter->num_jumbo_rxd;   [qlcnic_alloc_sw_resources()]
   213  rds_ring->dma_size =   [qlcnic_alloc_sw_resources()]
   218  rds_ring->dma_size += QLCNIC_LRO_BUFFER_EXTRA;   [qlcnic_alloc_sw_resources()]
   220  rds_ring->skb_size =   [qlcnic_alloc_sw_resources()]
   221  rds_ring->dma_size + NET_IP_ALIGN;   [qlcnic_alloc_sw_resources()]
   224  rds_ring->rx_buf_arr = vzalloc(RCV_BUFF_RINGSIZE(rds_ring));   [qlcnic_alloc_sw_resources()]
   225  if (rds_ring->rx_buf_arr == NULL)   [qlcnic_alloc_sw_resources()]
   228  INIT_LIST_HEAD(&rds_ring->free_list);   [qlcnic_alloc_sw_resources()]
   233  rx_buf = rds_ring->rx_buf_arr;   [qlcnic_alloc_sw_resources()]
   234  for (i = 0; i < rds_ring->num_desc; i++) {   [qlcnic_alloc_sw_resources()]
   236  &rds_ring->free_list);   [qlcnic_alloc_sw_resources()]
   240  spin_lock_init(&rds_ring->lock);   [qlcnic_alloc_sw_resources()]

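qlcnic_alloc_sw_resources() follows the same software-side setup as its netxen counterpart: size each ring from the adapter's descriptor counts, allocate the rx_buf_arr bookkeeping array, and thread every buffer onto the ring's free list. Below is a compressed user-space model of that setup, with standard C allocation in place of kcalloc()/vzalloc() and an index-chained free list in place of the kernel's list_head; all names are illustrative.

#include <stdlib.h>
#include <stdio.h>

#define NET_IP_ALIGN 2                /* typically 2 in the kernel */

struct demo_rx_buffer { int next_free; void *skb; };

struct demo_rds_ring {
    unsigned int num_desc, dma_size, skb_size;
    struct demo_rx_buffer *rx_buf_arr;
    int free_head;                    /* head of the index-chained free list */
};

/* Set up one ring: size it, allocate its buffer array, and put every
 * buffer on the free list - the shape of qlcnic_alloc_sw_resources(). */
static int alloc_sw_ring(struct demo_rds_ring *ring,
                         unsigned int num_desc, unsigned int buf_len)
{
    ring->num_desc = num_desc;
    ring->dma_size = buf_len;
    ring->skb_size = ring->dma_size + NET_IP_ALIGN;

    ring->rx_buf_arr = calloc(num_desc, sizeof(*ring->rx_buf_arr));
    if (!ring->rx_buf_arr)
        return -1;

    ring->free_head = 0;
    for (unsigned int i = 0; i < num_desc; i++)
        ring->rx_buf_arr[i].next_free =
            (i + 1 < num_desc) ? (int)(i + 1) : -1;
    return 0;
}

int main(void)
{
    struct demo_rds_ring ring;

    if (alloc_sw_ring(&ring, 1024, 1514) == 0)
        printf("ring: %u desc, skb_size %u, free list starts at %d\n",
               ring.num_desc, ring.skb_size, ring.free_head);
    free(ring.rx_buf_arr);
    return 0;
}
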
qlcnic_ctx.c
   245  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_82xx_fw_cmd_create_rx_ctx() local]
   313  rds_ring = &recv_ctx->rds_rings[i];   [qlcnic_82xx_fw_cmd_create_rx_ctx()]
   314  rds_ring->producer = 0;   [qlcnic_82xx_fw_cmd_create_rx_ctx()]
   315  prq_rds[i].host_phys_addr = cpu_to_le64(rds_ring->phys_addr);   [qlcnic_82xx_fw_cmd_create_rx_ctx()]
   316  prq_rds[i].ring_size = cpu_to_le32(rds_ring->num_desc);   [qlcnic_82xx_fw_cmd_create_rx_ctx()]
   318  prq_rds[i].buff_size = cpu_to_le64(rds_ring->dma_size);   [qlcnic_82xx_fw_cmd_create_rx_ctx()]
   356  rds_ring = &recv_ctx->rds_rings[i];   [qlcnic_82xx_fw_cmd_create_rx_ctx()]
   358  rds_ring->crb_rcv_producer = ahw->pci_base0 + reg;   [qlcnic_82xx_fw_cmd_create_rx_ctx()]
   562  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_alloc_hw_resources() local]
   593  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_alloc_hw_resources()]
   595  RCV_DESC_RINGSIZE(rds_ring),   [qlcnic_alloc_hw_resources()]
   596  &rds_ring->phys_addr, GFP_KERNEL);   [qlcnic_alloc_hw_resources()]
   601  rds_ring->desc_head = addr;   [qlcnic_alloc_hw_resources()]
   715  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_free_hw_resources() local]
   742  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_free_hw_resources()]
   744  if (rds_ring->desc_head != NULL) {   [qlcnic_free_hw_resources()]
   746  RCV_DESC_RINGSIZE(rds_ring),   [qlcnic_free_hw_resources()]
   747  rds_ring->desc_head,   [qlcnic_free_hw_resources()]
   748  rds_ring->phys_addr);   [qlcnic_free_hw_resources()]
   749  rds_ring->desc_head = NULL;   [qlcnic_free_hw_resources()]

qlcnic_main.c
  1882  struct qlcnic_host_rds_ring *rds_ring;   [__qlcnic_up() local]
  1899  rds_ring = &adapter->recv_ctx->rds_rings[ring];   [__qlcnic_up()]
  1900  qlcnic_post_rx_buffers(adapter, rds_ring, ring);   [__qlcnic_up()]
  2139  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_diag_alloc_res() local]
  2168  rds_ring = &adapter->recv_ctx->rds_rings[ring];   [qlcnic_diag_alloc_res()]
  2169  qlcnic_post_rx_buffers(adapter, rds_ring, ring);   [qlcnic_diag_alloc_res()]
  3009  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_dump_rings() local]
  3018  rds_ring = &recv_ctx->rds_rings[ring];   [qlcnic_dump_rings()]
  3019  if (!rds_ring)   [qlcnic_dump_rings()]
  3022  "rds_ring=%d crb_rcv_producer=%d producer=%u num_desc=%u\n",   [qlcnic_dump_rings()]
  3023  ring, readl(rds_ring->crb_rcv_producer),   [qlcnic_dump_rings()]
  3024  rds_ring->producer, rds_ring->num_desc);   [qlcnic_dump_rings()]

qlcnic.h
    65  #define RCV_DESC_RINGSIZE(rds_ring) \
    66  (sizeof(struct rcv_desc) * (rds_ring)->num_desc)
    67  #define RCV_BUFF_RINGSIZE(rds_ring) \
    68  (sizeof(struct qlcnic_rx_buffer) * rds_ring->num_desc)
  1621  struct qlcnic_host_rds_ring *rds_ring, u8 ring_id);

qlcnic_83xx_hw.c
  1395  struct qlcnic_host_rds_ring *rds_ring;   [qlcnic_83xx_diag_alloc_res() local]
  1429  rds_ring = &adapter->recv_ctx->rds_rings[ring];   [qlcnic_83xx_diag_alloc_res()]
  1430  qlcnic_post_rx_buffers(adapter, rds_ring, ring);   [qlcnic_83xx_diag_alloc_res()]