Lines matching refs: tnapi (tg3 driver)
205 #define TG3_TX_WAKEUP_THRESH(tnapi) ((tnapi)->tx_pending / 4) argument
1010 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_enable_ints() local
1012 tw32_mailbox_f(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_enable_ints()
1014 tw32_mailbox_f(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_enable_ints()
1016 tp->coal_now |= tnapi->coal_now; in tg3_enable_ints()
1029 static inline unsigned int tg3_has_work(struct tg3_napi *tnapi) in tg3_has_work() argument
1031 struct tg3 *tp = tnapi->tp; in tg3_has_work()
1032 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_has_work()
1042 if (sblk->idx[0].tx_consumer != tnapi->tx_cons) in tg3_has_work()
1046 if (tnapi->rx_rcb_prod_idx && in tg3_has_work()
1047 *(tnapi->rx_rcb_prod_idx) != tnapi->rx_rcb_ptr) in tg3_has_work()
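The tg3_has_work() hits above show the work-detection idiom: the vector has something to do when the status block's TX consumer has moved past the driver's cached tx_cons, or the RX return-ring producer has moved past the cached rx_rcb_ptr. A minimal compile-and-run sketch of that comparison, using simplified stand-in types rather than the driver's structures:

#include <stdbool.h>
#include <stdint.h>

/* Simplified stand-ins for the hardware status block and per-vector state. */
struct fake_status {
    uint16_t tx_consumer;    /* hardware's TX consumer index */
    uint16_t rx_producer;    /* hardware's RX return-ring producer index */
};

struct fake_vec {
    uint16_t tx_cons;        /* last TX index the driver has reclaimed */
    uint16_t rx_rcb_ptr;     /* last RX return-ring index the driver has read */
};

/* Mirrors the shape of tg3_has_work(): any index the hardware has advanced
 * beyond the driver's cached copy means there is work for NAPI to do. */
static bool fake_has_work(const struct fake_status *sblk,
                          const struct fake_vec *v)
{
    if (sblk->tx_consumer != v->tx_cons)
        return true;                 /* completed TX descriptors to reclaim */
    if (sblk->rx_producer != v->rx_rcb_ptr)
        return true;                 /* received frames to process */
    return false;
}

int main(void)
{
    struct fake_status s = { .tx_consumer = 3, .rx_producer = 9 };
    struct fake_vec    v = { .tx_cons = 3, .rx_rcb_ptr = 8 };
    return fake_has_work(&s, &v) ? 0 : 1;   /* RX index moved: work pending */
}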
1058 static void tg3_int_reenable(struct tg3_napi *tnapi) in tg3_int_reenable() argument
1060 struct tg3 *tp = tnapi->tp; in tg3_int_reenable()
1062 tw32_mailbox(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_int_reenable()
1069 if (!tg3_flag(tp, TAGGED_STATUS) && tg3_has_work(tnapi)) in tg3_int_reenable()
1071 HOSTCC_MODE_ENABLE | tnapi->coal_now); in tg3_int_reenable()
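Both tg3_enable_ints() and tg3_int_reenable() re-arm a vector by writing the last processed status tag into bits 31:24 of that vector's interrupt mailbox; when the chip is not in TAGGED_STATUS mode, tg3_int_reenable() also forces a coalescing-now cycle if tg3_has_work() still reports pending work. A sketch of just the tag encoding, with a printf stub standing in for the tw32_mailbox*() register accessors and an illustrative mailbox offset:

#include <stdint.h>
#include <stdio.h>

/* Hedged sketch: the register write is a printf stub; only the
 * last_tag << 24 encoding from the listing is shown. */
static void stub_mailbox_write(uint32_t off, uint32_t val)
{
    printf("mbox[0x%04x] <= 0x%08x\n", off, val);
}

static void reenable_vector(uint32_t int_mbox, uint8_t last_tag)
{
    /* As in tw32_mailbox(tnapi->int_mbox, tnapi->last_tag << 24):
     * the tag occupies the top byte of the 32-bit mailbox word. */
    stub_mailbox_write(int_mbox, (uint32_t)last_tag << 24);
}

int main(void)
{
    reenable_vector(0x0200 /* illustrative offset */, 0x5a);
    return 0;
}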
6459 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_dump_state() local
6465 tnapi->hw_status->status, in tg3_dump_state()
6466 tnapi->hw_status->status_tag, in tg3_dump_state()
6467 tnapi->hw_status->rx_jumbo_consumer, in tg3_dump_state()
6468 tnapi->hw_status->rx_consumer, in tg3_dump_state()
6469 tnapi->hw_status->rx_mini_consumer, in tg3_dump_state()
6470 tnapi->hw_status->idx[0].rx_producer, in tg3_dump_state()
6471 tnapi->hw_status->idx[0].tx_consumer); in tg3_dump_state()
6476 tnapi->last_tag, tnapi->last_irq_tag, in tg3_dump_state()
6477 tnapi->tx_prod, tnapi->tx_cons, tnapi->tx_pending, in tg3_dump_state()
6478 tnapi->rx_rcb_ptr, in tg3_dump_state()
6479 tnapi->prodring.rx_std_prod_idx, in tg3_dump_state()
6480 tnapi->prodring.rx_std_cons_idx, in tg3_dump_state()
6481 tnapi->prodring.rx_jmb_prod_idx, in tg3_dump_state()
6482 tnapi->prodring.rx_jmb_cons_idx); in tg3_dump_state()
6506 static inline u32 tg3_tx_avail(struct tg3_napi *tnapi) in tg3_tx_avail() argument
6510 return tnapi->tx_pending - in tg3_tx_avail()
6511 ((tnapi->tx_prod - tnapi->tx_cons) & (TG3_TX_RING_SIZE - 1)); in tg3_tx_avail()
6518 static void tg3_tx(struct tg3_napi *tnapi) in tg3_tx() argument
6520 struct tg3 *tp = tnapi->tp; in tg3_tx()
6521 u32 hw_idx = tnapi->hw_status->idx[0].tx_consumer; in tg3_tx()
6522 u32 sw_idx = tnapi->tx_cons; in tg3_tx()
6524 int index = tnapi - tp->napi; in tg3_tx()
6533 struct tg3_tx_ring_info *ri = &tnapi->tx_buffers[sw_idx]; in tg3_tx()
6542 if (tnapi->tx_ring[sw_idx].len_flags & TXD_FLAG_HWTSTAMP) { in tg3_tx()
6562 ri = &tnapi->tx_buffers[sw_idx]; in tg3_tx()
6568 ri = &tnapi->tx_buffers[sw_idx]; in tg3_tx()
6580 ri = &tnapi->tx_buffers[sw_idx]; in tg3_tx()
6599 tnapi->tx_cons = sw_idx; in tg3_tx()
6609 (tg3_tx_avail(tnapi) > TG3_TX_WAKEUP_THRESH(tnapi)))) { in tg3_tx()
6612 (tg3_tx_avail(tnapi) > TG3_TX_WAKEUP_THRESH(tnapi))) in tg3_tx()
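In tg3_tx() the driver walks the completed descriptors from its cached tx_cons up to the hardware's tx_consumer, frees the associated buffers, publishes the new tx_cons, and wakes the netdev queue only if it was stopped and the free count has climbed above TG3_TX_WAKEUP_THRESH (the repeated check at source lines 6609 and 6612 guards against racing the transmit path). A simplified, lock-free sketch of that reclaim loop with stand-in types:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define FAKE_RING 512u                  /* stand-in ring size (power of two) */

struct fake_txq {
    void    *buf[FAKE_RING];            /* stand-in for tx_buffers[].skb */
    uint32_t prod, cons, pending;       /* producer, consumer, configured depth */
    bool     stopped;                   /* netif queue state, simplified */
};

static uint32_t avail(const struct fake_txq *q)
{
    return q->pending - ((q->prod - q->cons) & (FAKE_RING - 1));
}

/* Hedged sketch of the tg3_tx() reclaim loop: advance the software consumer
 * to the hardware consumer index, releasing one buffer per slot, then apply
 * the wake-up hysteresis (free space > pending / 4). */
static void fake_tx_reclaim(struct fake_txq *q, uint32_t hw_cons)
{
    uint32_t sw = q->cons;

    while (sw != hw_cons) {
        q->buf[sw] = NULL;              /* unmap DMA and free the skb in the driver */
        sw = (sw + 1) & (FAKE_RING - 1);
    }
    q->cons = sw;

    if (q->stopped && avail(q) > q->pending / 4) {
        q->stopped = false;             /* netif_tx_wake_queue() in the driver */
        printf("queue woken, %u slots free\n", avail(q));
    }
}

int main(void)
{
    struct fake_txq q = { .prod = 10, .cons = 0, .pending = 511, .stopped = true };
    fake_tx_reclaim(&q, 8);             /* hardware reports 8 slots completed */
    return 0;
}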
6721 static void tg3_recycle_rx(struct tg3_napi *tnapi, in tg3_recycle_rx() argument
6726 struct tg3 *tp = tnapi->tp; in tg3_recycle_rx()
6791 static int tg3_rx(struct tg3_napi *tnapi, int budget) in tg3_rx() argument
6793 struct tg3 *tp = tnapi->tp; in tg3_rx()
6796 u32 sw_idx = tnapi->rx_rcb_ptr; in tg3_rx()
6799 struct tg3_rx_prodring_set *tpr = &tnapi->prodring; in tg3_rx()
6801 hw_idx = *(tnapi->rx_rcb_prod_idx); in tg3_rx()
6813 struct tg3_rx_buffer_desc *desc = &tnapi->rx_rcb[sw_idx]; in tg3_rx()
6841 tg3_recycle_rx(tnapi, tpr, opaque_key, in tg3_rx()
6887 tg3_recycle_rx(tnapi, tpr, opaque_key, in tg3_rx()
6930 napi_gro_receive(&tnapi->napi, skb); in tg3_rx()
6952 hw_idx = *(tnapi->rx_rcb_prod_idx); in tg3_rx()
6958 tnapi->rx_rcb_ptr = sw_idx; in tg3_rx()
6959 tw32_rx_mbox(tnapi->consmbox, sw_idx); in tg3_rx()
6988 if (tnapi != &tp->napi[1]) { in tg3_rx()
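tg3_rx() consumes return-ring entries between the cached rx_rcb_ptr and the producer index published in the status block, hands each frame to napi_gro_receive(), and then writes the new consumer index back to the chip through consmbox (the tnapi != &tp->napi[1] test at source line 6988 belongs to the RSS refill path). A minimal consumer-loop sketch with stand-in types; the budget handling only loosely mirrors the NAPI contract:

#include <stdint.h>
#include <stdio.h>

#define FAKE_RCB_RING 1024u             /* stand-in return-ring size */

/* Hedged sketch of the tg3_rx() consumer loop: process at most `budget`
 * entries between the driver's cached pointer and the hardware producer
 * index, then publish the new consumer index (tw32_rx_mbox() in the driver
 * is replaced by a printf stub). */
static int fake_rx_poll(uint32_t *rx_rcb_ptr, uint32_t hw_prod, int budget)
{
    uint32_t sw = *rx_rcb_ptr;
    int done = 0;

    while (sw != hw_prod && done < budget) {
        /* ... look up the buffer behind rx_rcb[sw], build an skb and
         *     napi_gro_receive() it in the real driver ... */
        sw = (sw + 1) & (FAKE_RCB_RING - 1);
        done++;
    }

    *rx_rcb_ptr = sw;
    printf("consmbox <= %u (processed %d)\n", sw, done);  /* tw32_rx_mbox() */
    return done;
}

int main(void)
{
    uint32_t ptr = 1020;
    fake_rx_poll(&ptr, 4, 64);          /* wraps from 1020 around to 4 */
    return 0;
}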
7147 static int tg3_poll_work(struct tg3_napi *tnapi, int work_done, int budget) in tg3_poll_work() argument
7149 struct tg3 *tp = tnapi->tp; in tg3_poll_work()
7152 if (tnapi->hw_status->idx[0].tx_consumer != tnapi->tx_cons) { in tg3_poll_work()
7153 tg3_tx(tnapi); in tg3_poll_work()
7158 if (!tnapi->rx_rcb_prod_idx) in tg3_poll_work()
7165 if (*(tnapi->rx_rcb_prod_idx) != tnapi->rx_rcb_ptr) in tg3_poll_work()
7166 work_done += tg3_rx(tnapi, budget - work_done); in tg3_poll_work()
7168 if (tg3_flag(tp, ENABLE_RSS) && tnapi == &tp->napi[1]) { in tg3_poll_work()
7213 struct tg3_napi *tnapi = container_of(napi, struct tg3_napi, napi); in tg3_poll_msix() local
7214 struct tg3 *tp = tnapi->tp; in tg3_poll_msix()
7216 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_poll_msix()
7219 work_done = tg3_poll_work(tnapi, work_done, budget); in tg3_poll_msix()
7231 tnapi->last_tag = sblk->status_tag; in tg3_poll_msix()
7232 tnapi->last_irq_tag = tnapi->last_tag; in tg3_poll_msix()
7236 if (likely(sblk->idx[0].tx_consumer == tnapi->tx_cons && in tg3_poll_msix()
7237 *(tnapi->rx_rcb_prod_idx) == tnapi->rx_rcb_ptr)) { in tg3_poll_msix()
7242 if (tnapi == &tp->napi[1] && tp->rx_refill) in tg3_poll_msix()
7247 tw32_mailbox(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_poll_msix()
7252 if (unlikely(tnapi == &tp->napi[1] && tp->rx_refill)) { in tg3_poll_msix()
7255 tnapi->coal_now); in tg3_poll_msix()
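In tg3_poll_msix() a vector only completes NAPI after snapshotting the status tag (last_tag, copied to last_irq_tag) and re-checking that both the TX consumer and RX producer indices are still idle; only then is the tag written back to the interrupt mailbox to re-arm the vector, with a forced coalescing cycle if the RX-refill vector (napi[1]) still owes buffers. A condensed control-flow sketch with stand-in state and a no-op register stub:

#include <stdbool.h>
#include <stdint.h>

/* Simplified per-vector and status-block state for the sketch. */
struct fake_sblk { uint8_t status_tag; uint16_t tx_consumer, rx_producer; };
struct fake_vec  { uint8_t last_tag, last_irq_tag;
                   uint16_t tx_cons, rx_rcb_ptr; uint32_t int_mbox; };

static void stub_mbox(uint32_t off, uint32_t val) { (void)off; (void)val; }

/* Returns true when the vector may leave polling mode, mirroring the tail
 * of tg3_poll_msix(): take the tag, re-check for late work, then
 * acknowledge by writing the tag to the mailbox. */
static bool fake_poll_done(struct fake_vec *v, const struct fake_sblk *s)
{
    v->last_tag = s->status_tag;        /* tnapi->last_tag = sblk->status_tag */
    v->last_irq_tag = v->last_tag;

    if (s->tx_consumer != v->tx_cons || s->rx_producer != v->rx_rcb_ptr)
        return false;                   /* new work slipped in: keep polling */

    stub_mbox(v->int_mbox, (uint32_t)v->last_tag << 24);  /* re-arm vector */
    return true;
}

int main(void)
{
    struct fake_sblk s = { 1, 0, 0 };
    struct fake_vec  v = { 0 };
    return fake_poll_done(&v, &s) ? 0 : 1;   /* idle indices: done */
}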
7307 struct tg3_napi *tnapi = container_of(napi, struct tg3_napi, napi); in tg3_poll() local
7308 struct tg3 *tp = tnapi->tp; in tg3_poll()
7310 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_poll()
7318 work_done = tg3_poll_work(tnapi, work_done, budget); in tg3_poll()
7331 tnapi->last_tag = sblk->status_tag; in tg3_poll()
7332 tnapi->last_irq_tag = tnapi->last_tag; in tg3_poll()
7337 if (likely(!tg3_has_work(tnapi))) { in tg3_poll()
7339 tg3_int_reenable(tnapi); in tg3_poll()
7454 struct tg3_napi *tnapi = dev_id; in tg3_msi_1shot() local
7455 struct tg3 *tp = tnapi->tp; in tg3_msi_1shot()
7457 prefetch(tnapi->hw_status); in tg3_msi_1shot()
7458 if (tnapi->rx_rcb) in tg3_msi_1shot()
7459 prefetch(&tnapi->rx_rcb[tnapi->rx_rcb_ptr]); in tg3_msi_1shot()
7462 napi_schedule(&tnapi->napi); in tg3_msi_1shot()
7473 struct tg3_napi *tnapi = dev_id; in tg3_msi() local
7474 struct tg3 *tp = tnapi->tp; in tg3_msi()
7476 prefetch(tnapi->hw_status); in tg3_msi()
7477 if (tnapi->rx_rcb) in tg3_msi()
7478 prefetch(&tnapi->rx_rcb[tnapi->rx_rcb_ptr]); in tg3_msi()
7486 tw32_mailbox(tnapi->int_mbox, 0x00000001); in tg3_msi()
7488 napi_schedule(&tnapi->napi); in tg3_msi()
7495 struct tg3_napi *tnapi = dev_id; in tg3_interrupt() local
7496 struct tg3 *tp = tnapi->tp; in tg3_interrupt()
7497 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_interrupt()
7528 if (likely(tg3_has_work(tnapi))) { in tg3_interrupt()
7529 prefetch(&tnapi->rx_rcb[tnapi->rx_rcb_ptr]); in tg3_interrupt()
7530 napi_schedule(&tnapi->napi); in tg3_interrupt()
7544 struct tg3_napi *tnapi = dev_id; in tg3_interrupt_tagged() local
7545 struct tg3 *tp = tnapi->tp; in tg3_interrupt_tagged()
7546 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_interrupt_tagged()
7554 if (unlikely(sblk->status_tag == tnapi->last_irq_tag)) { in tg3_interrupt_tagged()
7581 tnapi->last_irq_tag = sblk->status_tag; in tg3_interrupt_tagged()
7586 prefetch(&tnapi->rx_rcb[tnapi->rx_rcb_ptr]); in tg3_interrupt_tagged()
7588 napi_schedule(&tnapi->napi); in tg3_interrupt_tagged()
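tg3_interrupt_tagged() uses the status tag to tell a genuinely new status-block update apart from a stale or shared-line interrupt: if status_tag equals last_irq_tag the interrupt is not ours (or was already handled); otherwise the tag is recorded and NAPI is scheduled. A sketch of that filter with the NAPI scheduling reduced to a flag:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct fake_vec {
    uint8_t last_irq_tag;               /* tag of the last IRQ we accepted */
    bool    napi_scheduled;             /* stand-in for napi_schedule() */
};

/* Mirrors the tag check in tg3_interrupt_tagged(): only a status tag that
 * differs from last_irq_tag indicates a new status-block update. */
static bool fake_irq_tagged(struct fake_vec *v, uint8_t status_tag)
{
    if (status_tag == v->last_irq_tag)
        return false;                   /* stale / not ours: IRQ_NONE path */

    v->last_irq_tag = status_tag;       /* remember what we have seen */
    v->napi_scheduled = true;           /* napi_schedule(&tnapi->napi) */
    return true;
}

int main(void)
{
    struct fake_vec v = { .last_irq_tag = 7 };
    printf("%d %d\n", fake_irq_tagged(&v, 7), fake_irq_tagged(&v, 8)); /* 0 1 */
    return 0;
}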
7597 struct tg3_napi *tnapi = dev_id; in tg3_test_isr() local
7598 struct tg3 *tp = tnapi->tp; in tg3_test_isr()
7599 struct tg3_hw_status *sblk = tnapi->hw_status; in tg3_test_isr()
7680 static bool tg3_tx_frag_set(struct tg3_napi *tnapi, u32 *entry, u32 *budget, in tg3_tx_frag_set() argument
7684 struct tg3 *tp = tnapi->tp; in tg3_tx_frag_set()
7712 tnapi->tx_buffers[*entry].fragmented = true; in tg3_tx_frag_set()
7714 tg3_tx_set_bd(&tnapi->tx_ring[*entry], map, in tg3_tx_frag_set()
7725 tg3_tx_set_bd(&tnapi->tx_ring[*entry], map, in tg3_tx_frag_set()
7731 tnapi->tx_buffers[prvidx].fragmented = false; in tg3_tx_frag_set()
7735 tg3_tx_set_bd(&tnapi->tx_ring[*entry], map, in tg3_tx_frag_set()
7743 static void tg3_tx_skb_unmap(struct tg3_napi *tnapi, u32 entry, int last) in tg3_tx_skb_unmap() argument
7747 struct tg3_tx_ring_info *txb = &tnapi->tx_buffers[entry]; in tg3_tx_skb_unmap()
7752 pci_unmap_single(tnapi->tp->pdev, in tg3_tx_skb_unmap()
7760 txb = &tnapi->tx_buffers[entry]; in tg3_tx_skb_unmap()
7767 txb = &tnapi->tx_buffers[entry]; in tg3_tx_skb_unmap()
7769 pci_unmap_page(tnapi->tp->pdev, in tg3_tx_skb_unmap()
7776 txb = &tnapi->tx_buffers[entry]; in tg3_tx_skb_unmap()
7782 static int tigon3_dma_hwbug_workaround(struct tg3_napi *tnapi, in tigon3_dma_hwbug_workaround() argument
7787 struct tg3 *tp = tnapi->tp; in tigon3_dma_hwbug_workaround()
7817 tnapi->tx_buffers[*entry].skb = new_skb; in tigon3_dma_hwbug_workaround()
7818 dma_unmap_addr_set(&tnapi->tx_buffers[*entry], in tigon3_dma_hwbug_workaround()
7821 if (tg3_tx_frag_set(tnapi, entry, budget, new_addr, in tigon3_dma_hwbug_workaround()
7824 tg3_tx_skb_unmap(tnapi, save_entry, -1); in tigon3_dma_hwbug_workaround()
7836 static bool tg3_tso_bug_gso_check(struct tg3_napi *tnapi, struct sk_buff *skb) in tg3_tso_bug_gso_check() argument
7841 return skb_shinfo(skb)->gso_segs < tnapi->tx_pending / 3; in tg3_tso_bug_gso_check()
7849 static int tg3_tso_bug(struct tg3 *tp, struct tg3_napi *tnapi, in tg3_tso_bug() argument
7856 if (unlikely(tg3_tx_avail(tnapi) <= frag_cnt_est)) { in tg3_tso_bug()
7865 if (tg3_tx_avail(tnapi) <= frag_cnt_est) in tg3_tso_bug()
7897 struct tg3_napi *tnapi; in tg3_start_xmit() local
7906 tnapi = &tp->napi[skb_get_queue_mapping(skb)]; in tg3_start_xmit()
7908 tnapi++; in tg3_start_xmit()
7910 budget = tg3_tx_avail(tnapi); in tg3_start_xmit()
7928 entry = tnapi->tx_prod; in tg3_start_xmit()
7948 if (tg3_tso_bug_gso_check(tnapi, skb)) in tg3_start_xmit()
7949 return tg3_tso_bug(tp, tnapi, txq, skb); in tg3_start_xmit()
7956 if (tg3_tso_bug_gso_check(tnapi, skb)) in tg3_start_xmit()
7957 return tg3_tso_bug(tp, tnapi, txq, skb); in tg3_start_xmit()
8040 tnapi->tx_buffers[entry].skb = skb; in tg3_start_xmit()
8041 dma_unmap_addr_set(&tnapi->tx_buffers[entry], mapping, mapping); in tg3_start_xmit()
8048 if (tg3_tx_frag_set(tnapi, &entry, &budget, mapping, len, base_flags | in tg3_start_xmit()
8071 tnapi->tx_buffers[entry].skb = NULL; in tg3_start_xmit()
8072 dma_unmap_addr_set(&tnapi->tx_buffers[entry], mapping, in tg3_start_xmit()
8078 tg3_tx_frag_set(tnapi, &entry, &budget, mapping, in tg3_start_xmit()
8089 tg3_tx_skb_unmap(tnapi, tnapi->tx_prod, i); in tg3_start_xmit()
8091 if (mss && tg3_tso_bug_gso_check(tnapi, skb)) { in tg3_start_xmit()
8100 return tg3_tso_bug(tp, tnapi, txq, skb); in tg3_start_xmit()
8106 entry = tnapi->tx_prod; in tg3_start_xmit()
8107 budget = tg3_tx_avail(tnapi); in tg3_start_xmit()
8108 if (tigon3_dma_hwbug_workaround(tnapi, &skb, &entry, &budget, in tg3_start_xmit()
8119 tnapi->tx_prod = entry; in tg3_start_xmit()
8120 if (unlikely(tg3_tx_avail(tnapi) <= (MAX_SKB_FRAGS + 1))) { in tg3_start_xmit()
8129 if (tg3_tx_avail(tnapi) > TG3_TX_WAKEUP_THRESH(tnapi)) in tg3_start_xmit()
8135 tw32_tx_mbox(tnapi->prodmbox, entry); in tg3_start_xmit()
8142 tg3_tx_skb_unmap(tnapi, tnapi->tx_prod, --i); in tg3_start_xmit()
8143 tnapi->tx_buffers[tnapi->tx_prod].skb = NULL; in tg3_start_xmit()
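tg3_start_xmit() picks the per-queue tnapi from the skb's queue mapping, maps the head and every fragment via tg3_tx_frag_set(), advances tx_prod, and then stops the queue when fewer than MAX_SKB_FRAGS + 1 descriptors remain so the next worst-case packet always fits; it immediately wakes the queue again if the free count is already above TG3_TX_WAKEUP_THRESH (source lines 8120 and 8129). A sketch of just that stop/wake hysteresis; the constant value is a stand-in, not the kernel's:

#include <stdbool.h>
#include <stdint.h>

#define FAKE_MAX_SKB_FRAGS 17u          /* stand-in for MAX_SKB_FRAGS */

struct fake_q { uint32_t avail, pending; bool stopped; };

/* Hedged sketch of the post-transmit check in tg3_start_xmit(): stop the
 * queue when a maximally fragmented packet could no longer fit, but wake
 * it right away if enough space is already back, so a stopped queue is
 * never stranded by a completion that raced the stop. */
static void fake_post_xmit_check(struct fake_q *q)
{
    if (q->avail <= FAKE_MAX_SKB_FRAGS + 1) {
        q->stopped = true;              /* netif_tx_stop_queue() */
        if (q->avail > q->pending / 4)  /* TG3_TX_WAKEUP_THRESH */
            q->stopped = false;         /* netif_tx_wake_queue() */
    }
}

int main(void)
{
    struct fake_q q = { .avail = 10, .pending = 511 };
    fake_post_xmit_check(&q);           /* 10 <= 18 and 10 <= 127: stays stopped */
    return !q.stopped;
}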
8529 struct tg3_napi *tnapi = &tp->napi[j]; in tg3_free_rings() local
8531 tg3_rx_prodring_free(tp, &tnapi->prodring); in tg3_free_rings()
8533 if (!tnapi->tx_buffers) in tg3_free_rings()
8537 struct sk_buff *skb = tnapi->tx_buffers[i].skb; in tg3_free_rings()
8542 tg3_tx_skb_unmap(tnapi, i, in tg3_free_rings()
8566 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_init_rings() local
8568 tnapi->last_tag = 0; in tg3_init_rings()
8569 tnapi->last_irq_tag = 0; in tg3_init_rings()
8570 tnapi->hw_status->status = 0; in tg3_init_rings()
8571 tnapi->hw_status->status_tag = 0; in tg3_init_rings()
8572 memset(tnapi->hw_status, 0, TG3_HW_STATUS_SIZE); in tg3_init_rings()
8574 tnapi->tx_prod = 0; in tg3_init_rings()
8575 tnapi->tx_cons = 0; in tg3_init_rings()
8576 if (tnapi->tx_ring) in tg3_init_rings()
8577 memset(tnapi->tx_ring, 0, TG3_TX_RING_BYTES); in tg3_init_rings()
8579 tnapi->rx_rcb_ptr = 0; in tg3_init_rings()
8580 if (tnapi->rx_rcb) in tg3_init_rings()
8581 memset(tnapi->rx_rcb, 0, TG3_RX_RCB_RING_BYTES(tp)); in tg3_init_rings()
8583 if (tnapi->prodring.rx_std && in tg3_init_rings()
8584 tg3_rx_prodring_alloc(tp, &tnapi->prodring)) { in tg3_init_rings()
8598 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_mem_tx_release() local
8600 if (tnapi->tx_ring) { in tg3_mem_tx_release()
8602 tnapi->tx_ring, tnapi->tx_desc_mapping); in tg3_mem_tx_release()
8603 tnapi->tx_ring = NULL; in tg3_mem_tx_release()
8606 kfree(tnapi->tx_buffers); in tg3_mem_tx_release()
8607 tnapi->tx_buffers = NULL; in tg3_mem_tx_release()
8614 struct tg3_napi *tnapi = &tp->napi[0]; in tg3_mem_tx_acquire() local
8620 tnapi++; in tg3_mem_tx_acquire()
8622 for (i = 0; i < tp->txq_cnt; i++, tnapi++) { in tg3_mem_tx_acquire()
8623 tnapi->tx_buffers = kzalloc(sizeof(struct tg3_tx_ring_info) * in tg3_mem_tx_acquire()
8625 if (!tnapi->tx_buffers) in tg3_mem_tx_acquire()
8628 tnapi->tx_ring = dma_alloc_coherent(&tp->pdev->dev, in tg3_mem_tx_acquire()
8630 &tnapi->tx_desc_mapping, in tg3_mem_tx_acquire()
8632 if (!tnapi->tx_ring) in tg3_mem_tx_acquire()
8648 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_mem_rx_release() local
8650 tg3_rx_prodring_fini(tp, &tnapi->prodring); in tg3_mem_rx_release()
8652 if (!tnapi->rx_rcb) in tg3_mem_rx_release()
8657 tnapi->rx_rcb, in tg3_mem_rx_release()
8658 tnapi->rx_rcb_mapping); in tg3_mem_rx_release()
8659 tnapi->rx_rcb = NULL; in tg3_mem_rx_release()
8676 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_mem_rx_acquire() local
8678 if (tg3_rx_prodring_init(tp, &tnapi->prodring)) in tg3_mem_rx_acquire()
8688 tnapi->rx_rcb = dma_zalloc_coherent(&tp->pdev->dev, in tg3_mem_rx_acquire()
8690 &tnapi->rx_rcb_mapping, in tg3_mem_rx_acquire()
8692 if (!tnapi->rx_rcb) in tg3_mem_rx_acquire()
8712 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_free_consistent() local
8714 if (tnapi->hw_status) { in tg3_free_consistent()
8716 tnapi->hw_status, in tg3_free_consistent()
8717 tnapi->status_mapping); in tg3_free_consistent()
8718 tnapi->hw_status = NULL; in tg3_free_consistent()
8747 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_alloc_consistent() local
8750 tnapi->hw_status = dma_zalloc_coherent(&tp->pdev->dev, in tg3_alloc_consistent()
8752 &tnapi->status_mapping, in tg3_alloc_consistent()
8754 if (!tnapi->hw_status) in tg3_alloc_consistent()
8757 sblk = tnapi->hw_status; in tg3_alloc_consistent()
8782 tnapi->rx_rcb_prod_idx = prodptr; in tg3_alloc_consistent()
8784 tnapi->rx_rcb_prod_idx = &sblk->idx[0].rx_producer; in tg3_alloc_consistent()
8918 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_abort_hw() local
8919 if (tnapi->hw_status) in tg3_abort_hw()
8920 memset(tnapi->hw_status, 0, TG3_HW_STATUS_SIZE); in tg3_abort_hw()
9085 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_chip_reset() local
9086 if (tnapi->hw_status) { in tg3_chip_reset()
9087 tnapi->hw_status->status = 0; in tg3_chip_reset()
9088 tnapi->hw_status->status_tag = 0; in tg3_chip_reset()
9090 tnapi->last_tag = 0; in tg3_chip_reset()
9091 tnapi->last_irq_tag = 0; in tg3_chip_reset()
9512 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_tx_rcbs_init() local
9514 if (!tnapi->tx_ring) in tg3_tx_rcbs_init()
9517 tg3_set_bdinfo(tp, txrcb, tnapi->tx_desc_mapping, in tg3_tx_rcbs_init()
9556 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_rx_ret_rcbs_init() local
9558 if (!tnapi->rx_rcb) in tg3_rx_ret_rcbs_init()
9561 tg3_set_bdinfo(tp, rxrcb, tnapi->rx_rcb_mapping, in tg3_rx_ret_rcbs_init()
9572 struct tg3_napi *tnapi = &tp->napi[0]; in tg3_rings_reset() local
9614 memset(tnapi->hw_status, 0, TG3_HW_STATUS_SIZE); in tg3_rings_reset()
9618 ((u64) tnapi->status_mapping >> 32)); in tg3_rings_reset()
9620 ((u64) tnapi->status_mapping & 0xffffffff)); in tg3_rings_reset()
9624 for (i = 1, tnapi++; i < tp->irq_cnt; i++, tnapi++) { in tg3_rings_reset()
9625 u64 mapping = (u64)tnapi->status_mapping; in tg3_rings_reset()
9631 memset(tnapi->hw_status, 0, TG3_HW_STATUS_SIZE); in tg3_rings_reset()
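tg3_rings_reset() clears each vector's status block and programs its DMA address into the chip as two 32-bit halves of tnapi->status_mapping, one register pair per vector. A small sketch of that 64-bit split; the register names in the stub are illustrative labels, not the chip's:

#include <stdint.h>
#include <stdio.h>

/* Hedged sketch: register writes are printf stubs and the names are made
 * up; only the high/low split of the status-block DMA address is shown. */
static void stub_reg_write(const char *name, uint32_t val)
{
    printf("%s <= 0x%08x\n", name, val);
}

static void fake_program_status_block(uint64_t dma_addr)
{
    stub_reg_write("status_addr_hi", (uint32_t)(dma_addr >> 32));
    stub_reg_write("status_addr_lo", (uint32_t)(dma_addr & 0xffffffff));
}

int main(void)
{
    fake_program_status_block(0x0000000123456000ULL);
    return 0;
}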
10907 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_chk_missed_msi() local
10909 if (tg3_has_work(tnapi)) { in tg3_chk_missed_msi()
10910 if (tnapi->last_rx_cons == tnapi->rx_rcb_ptr && in tg3_chk_missed_msi()
10911 tnapi->last_tx_cons == tnapi->tx_cons) { in tg3_chk_missed_msi()
10912 if (tnapi->chk_msi_cnt < 1) { in tg3_chk_missed_msi()
10913 tnapi->chk_msi_cnt++; in tg3_chk_missed_msi()
10916 tg3_msi(0, tnapi); in tg3_chk_missed_msi()
10919 tnapi->chk_msi_cnt = 0; in tg3_chk_missed_msi()
10920 tnapi->last_rx_cons = tnapi->rx_rcb_ptr; in tg3_chk_missed_msi()
10921 tnapi->last_tx_cons = tnapi->tx_cons; in tg3_chk_missed_msi()
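tg3_chk_missed_msi() is a watchdog heuristic: if a vector reports pending work (tg3_has_work()) while its RX and TX consumer snapshots have not moved since the previous check, a counter is bumped, and on the second consecutive hit the driver invokes the MSI handler by hand (tg3_msi(0, tnapi)); otherwise the counter resets and fresh snapshots are taken. A sketch of that state machine, with has_work passed in as a flag and the handler call reduced to a printf:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct fake_vec {
    uint32_t rx_rcb_ptr, tx_cons;        /* live consumer indices */
    uint32_t last_rx_cons, last_tx_cons; /* snapshots from the previous check */
    uint32_t chk_msi_cnt;                /* consecutive "stuck" observations */
};

/* Hedged sketch of the tg3_chk_missed_msi() heuristic: `has_work` stands in
 * for tg3_has_work(), and firing the handler is reduced to a printf. */
static void fake_chk_missed_msi(struct fake_vec *v, bool has_work)
{
    if (has_work &&
        v->last_rx_cons == v->rx_rcb_ptr &&
        v->last_tx_cons == v->tx_cons) {
        if (v->chk_msi_cnt < 1) {
            v->chk_msi_cnt++;            /* first strike: give the IRQ a chance */
            return;
        }
        printf("missed MSI suspected: invoking handler by hand\n");
    }
    v->chk_msi_cnt = 0;                  /* progress was made, or we recovered */
    v->last_rx_cons = v->rx_rcb_ptr;
    v->last_tx_cons = v->tx_cons;
}

int main(void)
{
    struct fake_vec v = { .rx_rcb_ptr = 5, .tx_cons = 3 };
    v.last_rx_cons = 5;
    v.last_tx_cons = 3;
    fake_chk_missed_msi(&v, true);       /* first strike: counter only */
    fake_chk_missed_msi(&v, true);       /* second strike: handler fired */
    return 0;
}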
11176 struct tg3_napi *tnapi = &tp->napi[irq_num]; in tg3_request_irq() local
11181 name = &tnapi->irq_lbl[0]; in tg3_request_irq()
11182 if (tnapi->tx_buffers && tnapi->rx_rcb) in tg3_request_irq()
11185 else if (tnapi->tx_buffers) in tg3_request_irq()
11188 else if (tnapi->rx_rcb) in tg3_request_irq()
11209 return request_irq(tnapi->irq_vec, fn, flags, name, tnapi); in tg3_request_irq()
11214 struct tg3_napi *tnapi = &tp->napi[0]; in tg3_test_interrupt() local
11224 free_irq(tnapi->irq_vec, tnapi); in tg3_test_interrupt()
11235 err = request_irq(tnapi->irq_vec, tg3_test_isr, in tg3_test_interrupt()
11236 IRQF_SHARED, dev->name, tnapi); in tg3_test_interrupt()
11240 tnapi->hw_status->status &= ~SD_STATUS_UPDATED; in tg3_test_interrupt()
11244 tnapi->coal_now); in tg3_test_interrupt()
11249 int_mbox = tr32_mailbox(tnapi->int_mbox); in tg3_test_interrupt()
11259 tnapi->hw_status->status_tag != tnapi->last_tag) in tg3_test_interrupt()
11260 tw32_mailbox_f(tnapi->int_mbox, tnapi->last_tag << 24); in tg3_test_interrupt()
11267 free_irq(tnapi->irq_vec, tnapi); in tg3_test_interrupt()
11530 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_start() local
11534 tnapi = &tp->napi[i]; in tg3_start()
11535 free_irq(tnapi->irq_vec, tnapi); in tg3_start()
11604 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_start() local
11605 free_irq(tnapi->irq_vec, tnapi); in tg3_start()
11643 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_stop() local
11644 free_irq(tnapi->irq_vec, tnapi); in tg3_stop()
13374 struct tg3_napi *tnapi, *rnapi; in tg3_run_loopback() local
13377 tnapi = &tp->napi[0]; in tg3_run_loopback()
13383 tnapi = &tp->napi[1]; in tg3_run_loopback()
13385 coal_now = tnapi->coal_now | rnapi->coal_now; in tg3_run_loopback()
13462 val = tnapi->tx_prod; in tg3_run_loopback()
13463 tnapi->tx_buffers[val].skb = skb; in tg3_run_loopback()
13464 dma_unmap_addr_set(&tnapi->tx_buffers[val], mapping, map); in tg3_run_loopback()
13473 budget = tg3_tx_avail(tnapi); in tg3_run_loopback()
13474 if (tg3_tx_frag_set(tnapi, &val, &budget, map, tx_len, in tg3_run_loopback()
13476 tnapi->tx_buffers[val].skb = NULL; in tg3_run_loopback()
13481 tnapi->tx_prod++; in tg3_run_loopback()
13486 tw32_tx_mbox(tnapi->prodmbox, tnapi->tx_prod); in tg3_run_loopback()
13487 tr32_mailbox(tnapi->prodmbox); in tg3_run_loopback()
13498 tx_idx = tnapi->hw_status->idx[0].tx_consumer; in tg3_run_loopback()
13500 if ((tx_idx == tnapi->tx_prod) && in tg3_run_loopback()
13505 tg3_tx_skb_unmap(tnapi, tnapi->tx_prod - 1, -1); in tg3_run_loopback()
13508 if (tx_idx != tnapi->tx_prod) in tg3_run_loopback()
17822 struct tg3_napi *tnapi = &tp->napi[i]; in tg3_init_one() local
17824 tnapi->tp = tp; in tg3_init_one()
17825 tnapi->tx_pending = TG3_DEF_TX_RING_PENDING; in tg3_init_one()
17827 tnapi->int_mbox = intmbx; in tg3_init_one()
17833 tnapi->consmbox = rcvmbx; in tg3_init_one()
17834 tnapi->prodmbox = sndmbx; in tg3_init_one()
17837 tnapi->coal_now = HOSTCC_MODE_COAL_VEC1_NOW << (i - 1); in tg3_init_one()
17839 tnapi->coal_now = HOSTCC_MODE_NOW; in tg3_init_one()
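During probe (tg3_init_one()) each vector gets its back-pointer to the tg3 instance, the default TX ring depth, and its interrupt, RX consumer, and TX producer mailbox offsets, plus a coal_now bit: HOSTCC_MODE_NOW for vector 0 and HOSTCC_MODE_COAL_VEC1_NOW shifted by (i - 1) for the additional MSI-X vectors. A sketch of just the coal_now selection; the constant values below are placeholders, not the chip's real HOSTCC_MODE_* bits:

#include <stdint.h>
#include <stdio.h>

/* Illustrative placeholder values; the real HOSTCC_MODE_* constants live
 * in tg3.h and are not reproduced here. */
#define FAKE_HOSTCC_MODE_NOW            0x00000008u
#define FAKE_HOSTCC_MODE_COAL_VEC1_NOW  0x00000400u

/* Mirrors the per-vector assignment in tg3_init_one(): vector 0 uses the
 * global "coalesce now" bit, each further MSI-X vector its own bit. */
static uint32_t fake_coal_now(int vec)
{
    if (vec == 0)
        return FAKE_HOSTCC_MODE_NOW;
    return FAKE_HOSTCC_MODE_COAL_VEC1_NOW << (vec - 1);
}

int main(void)
{
    for (int i = 0; i < 4; i++)
        printf("vector %d: coal_now = 0x%08x\n", i, fake_coal_now(i));
    return 0;
}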