rx_pool 97 drivers/atm/idt77252.c static void flush_rx_pool(struct idt77252_dev *, struct rx_pool *);
rx_pool 99 drivers/atm/idt77252.c struct rx_pool *);
rx_pool 1014 drivers/atm/idt77252.c struct rx_pool *rpp;
rx_pool 1114 drivers/atm/idt77252.c rpp = &vc->rcv.rx_pool;
rx_pool 1907 drivers/atm/idt77252.c flush_rx_pool(struct idt77252_dev *card, struct rx_pool *rpp)
rx_pool 1914 drivers/atm/idt77252.c recycle_rx_pool_skb(struct idt77252_dev *card, struct rx_pool *rpp)
rx_pool 2337 drivers/atm/idt77252.c flush_rx_pool(card, &vc->rcv.rx_pool);
rx_pool 2520 drivers/atm/idt77252.c if (skb_queue_len(&vc->rcv.rx_pool.queue) != 0) {
rx_pool 2524 drivers/atm/idt77252.c recycle_rx_pool_skb(card, &vc->rcv.rx_pool);
rx_pool 2867 drivers/atm/idt77252.c flush_rx_pool(card, &vc->rcv.rx_pool);
rx_pool 2910 drivers/atm/idt77252.c if (skb_queue_len(&vc->rcv.rx_pool.queue) != 0) {
rx_pool 2915 drivers/atm/idt77252.c recycle_rx_pool_skb(card, &vc->rcv.rx_pool);
rx_pool 221 drivers/atm/idt77252.h struct rx_pool rx_pool;
rx_pool 239 drivers/net/ethernet/ibm/ibmvnic.c adapter->rx_pool[i].active = 0;
rx_pool 349 drivers/net/ethernet/ibm/ibmvnic.c if (adapter->rx_pool[i].active)
rx_pool 350 drivers/net/ethernet/ibm/ibmvnic.c replenish_rx_pool(adapter, &adapter->rx_pool[i]);
rx_pool 414 drivers/net/ethernet/ibm/ibmvnic.c struct ibmvnic_rx_pool *rx_pool;
rx_pool 424 drivers/net/ethernet/ibm/ibmvnic.c rx_pool = &adapter->rx_pool[i];
rx_pool 428 drivers/net/ethernet/ibm/ibmvnic.c if (rx_pool->buff_size != be64_to_cpu(size_array[i])) {
rx_pool 429 drivers/net/ethernet/ibm/ibmvnic.c free_long_term_buff(adapter, &rx_pool->long_term_buff);
rx_pool 430 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->buff_size = be64_to_cpu(size_array[i]);
rx_pool 432 drivers/net/ethernet/ibm/ibmvnic.c &rx_pool->long_term_buff,
rx_pool 433 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->size *
rx_pool 434 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->buff_size);
rx_pool 437 drivers/net/ethernet/ibm/ibmvnic.c &rx_pool->long_term_buff);
rx_pool 443 drivers/net/ethernet/ibm/ibmvnic.c for (j = 0; j < rx_pool->size; j++)
rx_pool 444 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->free_map[j] = j;
rx_pool 446 drivers/net/ethernet/ibm/ibmvnic.c memset(rx_pool->rx_buff, 0,
rx_pool 447 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->size * sizeof(struct ibmvnic_rx_buff));
rx_pool 449 drivers/net/ethernet/ibm/ibmvnic.c atomic_set(&rx_pool->available, 0);
rx_pool 450 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->next_alloc = 0;
rx_pool 451 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->next_free = 0;
rx_pool 452 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->active = 1;
rx_pool 460 drivers/net/ethernet/ibm/ibmvnic.c struct ibmvnic_rx_pool *rx_pool;
rx_pool 463 drivers/net/ethernet/ibm/ibmvnic.c if (!adapter->rx_pool)
rx_pool 467 drivers/net/ethernet/ibm/ibmvnic.c rx_pool = &adapter->rx_pool[i];
rx_pool 471 drivers/net/ethernet/ibm/ibmvnic.c kfree(rx_pool->free_map);
rx_pool 472 drivers/net/ethernet/ibm/ibmvnic.c free_long_term_buff(adapter, &rx_pool->long_term_buff);
rx_pool 474 drivers/net/ethernet/ibm/ibmvnic.c if (!rx_pool->rx_buff)
rx_pool 477 drivers/net/ethernet/ibm/ibmvnic.c for (j = 0; j < rx_pool->size; j++) {
rx_pool 478 drivers/net/ethernet/ibm/ibmvnic.c if (rx_pool->rx_buff[j].skb) {
rx_pool 479 drivers/net/ethernet/ibm/ibmvnic.c dev_kfree_skb_any(rx_pool->rx_buff[j].skb);
rx_pool 480 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->rx_buff[j].skb = NULL;
rx_pool 484 drivers/net/ethernet/ibm/ibmvnic.c kfree(rx_pool->rx_buff);
rx_pool 487 drivers/net/ethernet/ibm/ibmvnic.c kfree(adapter->rx_pool);
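The ibmvnic entries above reset a per-queue pool by refilling free_map with slot indices and clearing next_alloc, next_free and available. Below is a minimal userspace C sketch of that index-ring bookkeeping, assuming balanced take/return calls; the field names echo the driver, but pool_init(), pool_take(), pool_put() and the demo main() are hypothetical helpers, not ibmvnic functions.

/*
 * Minimal sketch of a free-map index ring: free_map holds the indices of
 * unused buffer slots, next_alloc is where the next index is taken from,
 * next_free is where a released index is written back.
 */
#include <stdio.h>
#include <stdlib.h>

struct rx_pool_sketch {
	int size;        /* number of buffer slots */
	int *free_map;   /* ring of free slot indices */
	int next_alloc;  /* read cursor into free_map */
	int next_free;   /* write cursor into free_map */
	int available;   /* slots currently handed out (the driver keeps this in an atomic_t) */
};

static int pool_init(struct rx_pool_sketch *p, int size)
{
	p->free_map = calloc(size, sizeof(int));
	if (!p->free_map)
		return -1;
	for (int j = 0; j < size; j++)   /* mirrors: rx_pool->free_map[j] = j */
		p->free_map[j] = j;
	p->size = size;
	p->next_alloc = 0;
	p->next_free = 0;
	p->available = 0;
	return 0;
}

/* Take the index of a free slot, e.g. before posting a receive buffer. */
static int pool_take(struct rx_pool_sketch *p)
{
	int index;

	if (p->available >= p->size)
		return -1;                       /* every slot is already out */
	index = p->free_map[p->next_alloc];
	p->next_alloc = (p->next_alloc + 1) % p->size;
	p->available++;
	return index;
}

/* Return a slot index once its buffer has been consumed. */
static void pool_put(struct rx_pool_sketch *p, int index)
{
	p->free_map[p->next_free] = index;
	p->next_free = (p->next_free + 1) % p->size;
	p->available--;
}

int main(void)
{
	struct rx_pool_sketch pool;

	if (pool_init(&pool, 4))
		return 1;
	int a = pool_take(&pool);
	int b = pool_take(&pool);
	printf("took slots %d and %d, available=%d\n", a, b, pool.available);
	pool_put(&pool, a);
	printf("slot %d recycled, available=%d\n", a, pool.available);
	free(pool.free_map);
	return 0;
}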
rx_pool 488 drivers/net/ethernet/ibm/ibmvnic.c adapter->rx_pool = NULL;
rx_pool 496 drivers/net/ethernet/ibm/ibmvnic.c struct ibmvnic_rx_pool *rx_pool;
rx_pool 506 drivers/net/ethernet/ibm/ibmvnic.c adapter->rx_pool = kcalloc(rxadd_subcrqs,
rx_pool 509 drivers/net/ethernet/ibm/ibmvnic.c if (!adapter->rx_pool) {
rx_pool 517 drivers/net/ethernet/ibm/ibmvnic.c rx_pool = &adapter->rx_pool[i];
rx_pool 524 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->size = adapter->req_rx_add_entries_per_subcrq;
rx_pool 525 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->index = i;
rx_pool 526 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->buff_size = be64_to_cpu(size_array[i]);
rx_pool 527 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->active = 1;
rx_pool 529 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->free_map = kcalloc(rx_pool->size, sizeof(int),
rx_pool 531 drivers/net/ethernet/ibm/ibmvnic.c if (!rx_pool->free_map) {
rx_pool 536 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->rx_buff = kcalloc(rx_pool->size,
rx_pool 539 drivers/net/ethernet/ibm/ibmvnic.c if (!rx_pool->rx_buff) {
rx_pool 545 drivers/net/ethernet/ibm/ibmvnic.c if (alloc_long_term_buff(adapter, &rx_pool->long_term_buff,
rx_pool 546 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->size * rx_pool->buff_size)) {
rx_pool 551 drivers/net/ethernet/ibm/ibmvnic.c for (j = 0; j < rx_pool->size; ++j)
rx_pool 552 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->free_map[j] = j;
rx_pool 554 drivers/net/ethernet/ibm/ibmvnic.c atomic_set(&rx_pool->available, 0);
rx_pool 555 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->next_alloc = 0;
rx_pool 556 drivers/net/ethernet/ibm/ibmvnic.c rx_pool->next_free = 0;
rx_pool 1110 drivers/net/ethernet/ibm/ibmvnic.c struct ibmvnic_rx_pool *rx_pool;
rx_pool 1116 drivers/net/ethernet/ibm/ibmvnic.c if (!adapter->rx_pool)
rx_pool 1124 drivers/net/ethernet/ibm/ibmvnic.c rx_pool = &adapter->rx_pool[i];
rx_pool 1125 drivers/net/ethernet/ibm/ibmvnic.c if (!rx_pool || !rx_pool->rx_buff)
rx_pool 1130 drivers/net/ethernet/ibm/ibmvnic.c rx_buff = &rx_pool->rx_buff[j];
rx_pool 2203 drivers/net/ethernet/ibm/ibmvnic.c struct ibmvnic_rx_pool *pool = &adapter->rx_pool[rx_buff->pool_index];
rx_pool 2296 drivers/net/ethernet/ibm/ibmvnic.c replenish_rx_pool(adapter, &adapter->rx_pool[scrq_num]);
rx_pool 5113 drivers/net/ethernet/ibm/ibmvnic.c ret += adapter->rx_pool[i].size *
rx_pool 5114 drivers/net/ethernet/ibm/ibmvnic.c IOMMU_PAGE_ALIGN(adapter->rx_pool[i].buff_size, tbl);
rx_pool 1020 drivers/net/ethernet/ibm/ibmvnic.h struct ibmvnic_rx_pool *rx_pool;
rx_pool 93 drivers/net/ethernet/ti/netcp.h void *rx_pool;
rx_pool 590 drivers/net/ethernet/ti/netcp_core.c ndesc = knav_pool_desc_unmap(netcp->rx_pool, dma_desc, dma_sz);
rx_pool 603 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, desc);
rx_pool 613 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, desc);
rx_pool 628 drivers/net/ethernet/ti/netcp_core.c desc = knav_pool_desc_unmap(netcp->rx_pool, dma, dma_sz);
rx_pool 657 drivers/net/ethernet/ti/netcp_core.c desc = knav_pool_desc_unmap(netcp->rx_pool, dma_desc, dma_sz);
rx_pool 694 drivers/net/ethernet/ti/netcp_core.c ndesc = knav_pool_desc_unmap(netcp->rx_pool, dma_desc, dma_sz);
rx_pool 720 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, ndesc);
rx_pool 754 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, desc);
rx_pool 760 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, desc);
rx_pool 798 drivers/net/ethernet/ti/netcp_core.c desc = knav_pool_desc_unmap(netcp->rx_pool, dma, dma_sz);
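The ibmvnic pool setup shown above allocates the pool array with kcalloc() and then, per pool, a free_map, an rx_buff array and a long-term buffer, with failure branches after each step. A small userspace sketch of that allocate-or-unwind shape follows, assuming N identical pools; setup_pools() and teardown_pools() are invented stand-ins, not driver functions.

#include <stdio.h>
#include <stdlib.h>

struct pool_sketch {
	int size;
	int *free_map;       /* slot index ring */
	void **buffers;      /* stand-in for the rx_buff array */
};

/* Free whatever the first 'count' pools managed to allocate, then the array. */
static void teardown_pools(struct pool_sketch *pools, int count)
{
	if (!pools)
		return;
	for (int i = 0; i < count; i++) {
		free(pools[i].free_map);
		free(pools[i].buffers);
	}
	free(pools);
}

/* Allocate 'npools' pools of 'size' slots each; returns NULL on any failure. */
static struct pool_sketch *setup_pools(int npools, int size)
{
	struct pool_sketch *pools = calloc(npools, sizeof(*pools));

	if (!pools)
		return NULL;
	for (int i = 0; i < npools; i++) {
		pools[i].size = size;
		pools[i].free_map = calloc(size, sizeof(int));
		pools[i].buffers = calloc(size, sizeof(void *));
		if (!pools[i].free_map || !pools[i].buffers) {
			/* unwind everything built so far, including pool i */
			teardown_pools(pools, i + 1);
			return NULL;
		}
		for (int j = 0; j < size; j++)
			pools[i].free_map[j] = j;
	}
	return pools;
}

int main(void)
{
	struct pool_sketch *pools = setup_pools(4, 64);

	if (!pools)
		return 1;
	printf("4 pools of %d slots ready\n", pools[0].size);
	teardown_pools(pools, 4);
	return 0;
}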
rx_pool 812 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, desc);
rx_pool 818 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, desc);
rx_pool 832 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, desc);
rx_pool 844 drivers/net/ethernet/ti/netcp_core.c if (knav_pool_count(netcp->rx_pool) != netcp->rx_pool_size)
rx_pool 846 drivers/net/ethernet/ti/netcp_core.c netcp->rx_pool_size - knav_pool_count(netcp->rx_pool));
rx_pool 848 drivers/net/ethernet/ti/netcp_core.c knav_pool_destroy(netcp->rx_pool);
rx_pool 849 drivers/net/ethernet/ti/netcp_core.c netcp->rx_pool = NULL;
rx_pool 863 drivers/net/ethernet/ti/netcp_core.c hwdesc = knav_pool_desc_get(netcp->rx_pool);
rx_pool 921 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_map(netcp->rx_pool, hwdesc, sizeof(*hwdesc), &dma,
rx_pool 927 drivers/net/ethernet/ti/netcp_core.c knav_pool_desc_put(netcp->rx_pool, hwdesc);
rx_pool 1558 drivers/net/ethernet/ti/netcp_core.c if (!IS_ERR_OR_NULL(netcp->rx_pool))
rx_pool 1595 drivers/net/ethernet/ti/netcp_core.c netcp->rx_pool = knav_pool_create(name, netcp->rx_pool_size,
rx_pool 1597 drivers/net/ethernet/ti/netcp_core.c if (IS_ERR_OR_NULL(netcp->rx_pool)) {
rx_pool 1599 drivers/net/ethernet/ti/netcp_core.c ret = PTR_ERR(netcp->rx_pool);
rx_pool 247 drivers/net/wireless/ath/carl9170/carl9170.h struct usb_anchor rx_pool;
rx_pool 324 drivers/net/wireless/ath/carl9170/usb.c urb = usb_get_from_anchor(&ar->rx_pool);
rx_pool 330 drivers/net/wireless/ath/carl9170/usb.c usb_anchor_urb(urb, &ar->rx_pool);
rx_pool 358 drivers/net/wireless/ath/carl9170/usb.c usb_anchor_urb(urb, &ar->rx_pool);
rx_pool 424 drivers/net/wireless/ath/carl9170/usb.c usb_anchor_urb(urb, &ar->rx_pool);
rx_pool 535 drivers/net/wireless/ath/carl9170/usb.c usb_anchor_urb(urb, &ar->rx_pool);
rx_pool 550 drivers/net/wireless/ath/carl9170/usb.c usb_scuttle_anchored_urbs(&ar->rx_pool);
rx_pool 600 drivers/net/wireless/ath/carl9170/usb.c usb_scuttle_anchored_urbs(&ar->rx_pool);
rx_pool 1076 drivers/net/wireless/ath/carl9170/usb.c init_usb_anchor(&ar->rx_pool);
rx_pool 516 drivers/net/wireless/intel/iwlwifi/pcie/internal.h struct iwl_rx_mem_buffer rx_pool[RX_POOL_SIZE];
rx_pool 518 drivers/net/wireless/intel/iwlwifi/pcie/rx.c if (!trans_pcie->rx_pool[i].page)
rx_pool 520 drivers/net/wireless/intel/iwlwifi/pcie/rx.c dma_unmap_page(trans->dev, trans_pcie->rx_pool[i].page_dma,
rx_pool 523 drivers/net/wireless/intel/iwlwifi/pcie/rx.c __free_pages(trans_pcie->rx_pool[i].page,
rx_pool 525 drivers/net/wireless/intel/iwlwifi/pcie/rx.c trans_pcie->rx_pool[i].page = NULL;
rx_pool 1097 drivers/net/wireless/intel/iwlwifi/pcie/rx.c ARRAY_SIZE(trans_pcie->rx_pool));
rx_pool 1099 drivers/net/wireless/intel/iwlwifi/pcie/rx.c struct iwl_rx_mem_buffer *rxb = &trans_pcie->rx_pool[i];
rx_pool 1036 drivers/net/wireless/ti/wl1251/acx.h u32 rx_pool;
rx_pool 173 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(event, rx_pool, 20, "%u");
rx_pool 313 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(event, rx_pool);
rx_pool 412 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(event, rx_pool);
rx_pool 217 drivers/net/wireless/ti/wl12xx/acx.h __le32 rx_pool;
rx_pool 91 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(event, rx_pool, "%u");
rx_pool 191 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(event, rx_pool);
rx_pool 248 drivers/tty/ipwireless/hardware.c struct list_head rx_pool;
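The netcp entries create rx_pool with knav_pool_create(), take and return hardware descriptors with knav_pool_desc_get()/knav_pool_desc_put(), and compare knav_pool_count() against rx_pool_size at teardown to report descriptors that never came back. The sketch below models only that accounting in plain userspace C; desc_pool_create/get/put/destroy are made-up names, not the knav_qmss API.

#include <stdio.h>

struct desc_pool {
	int total;       /* descriptors the pool was created with */
	int free_count;  /* descriptors currently back in the pool */
};

static void desc_pool_create(struct desc_pool *p, int total)
{
	p->total = total;
	p->free_count = total;
}

/* Hand one descriptor to the RX path; returns 0 on success, -1 if empty. */
static int desc_pool_get(struct desc_pool *p)
{
	if (!p->free_count)
		return -1;
	p->free_count--;
	return 0;
}

/* Return a descriptor once its buffer has been processed or dropped. */
static void desc_pool_put(struct desc_pool *p)
{
	p->free_count++;
}

/* Mirrors the close-time check: complain if descriptors never came back. */
static void desc_pool_destroy(struct desc_pool *p)
{
	if (p->free_count != p->total)
		fprintf(stderr, "lost %d rx descriptors\n",
			p->total - p->free_count);
}

int main(void)
{
	struct desc_pool pool;

	desc_pool_create(&pool, 128);
	desc_pool_get(&pool);          /* descriptor posted for receive */
	desc_pool_get(&pool);          /* a second one, never returned */
	desc_pool_put(&pool);          /* first one recycled after the packet */
	desc_pool_destroy(&pool);      /* prints: lost 1 rx descriptors */
	return 0;
}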
rx_pool 564 drivers/tty/ipwireless/hardware.c if (!list_empty(&hw->rx_pool)) {
rx_pool 565 drivers/tty/ipwireless/hardware.c packet = list_first_entry(&hw->rx_pool,
rx_pool 615 drivers/tty/ipwireless/hardware.c list_add(&packet->queue, &hw->rx_pool);
rx_pool 1636 drivers/tty/ipwireless/hardware.c INIT_LIST_HEAD(&hw->rx_pool);
rx_pool 1755 drivers/tty/ipwireless/hardware.c list_for_each_entry_safe(rp, rq, &hw->rx_pool, queue) {
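The ipwireless entries keep spare receive packets on a list_head named rx_pool: a packet is pulled with list_first_entry() when the pool is non-empty, packets go back with list_add(), and teardown walks the pool with list_for_each_entry_safe(). Here is a small userspace approximation using a singly linked list; packet_get(), packet_recycle() and pool_drain() are hypothetical names, not the driver's.

#include <stdio.h>
#include <stdlib.h>

struct packet {
	struct packet *next;   /* stand-in for the driver's list_head queue */
	char data[256];
};

static struct packet *rx_pool;     /* head of the free-packet list */

/* Reuse a pooled packet if one exists, otherwise allocate a fresh one. */
static struct packet *packet_get(void)
{
	if (rx_pool) {
		struct packet *p = rx_pool;

		rx_pool = p->next;
		return p;
	}
	return calloc(1, sizeof(struct packet));
}

/* Put a finished packet back at the head of the pool for the next receive. */
static void packet_recycle(struct packet *p)
{
	p->next = rx_pool;
	rx_pool = p;
}

/* Teardown: free everything still parked in the pool. */
static void pool_drain(void)
{
	while (rx_pool) {
		struct packet *p = rx_pool;

		rx_pool = p->next;
		free(p);
	}
}

int main(void)
{
	struct packet *p = packet_get();   /* pool empty: allocates */

	packet_recycle(p);                 /* parked for reuse */
	struct packet *q = packet_get();   /* same buffer comes back */
	printf("recycled the same packet: %s\n", p == q ? "yes" : "no");
	packet_recycle(q);
	pool_drain();
	return 0;
}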