Lines Matching refs:rp (references to the RX/TX ring-info pointer rp in the niu Sun Neptune Ethernet driver)

3287 static unsigned int niu_hash_rxaddr(struct rx_ring_info *rp, u64 a)  in niu_hash_rxaddr()  argument
3295 static struct page *niu_find_rxpage(struct rx_ring_info *rp, u64 addr, in niu_find_rxpage() argument
3298 unsigned int h = niu_hash_rxaddr(rp, addr); in niu_find_rxpage()
3302 pp = &rp->rxhash[h]; in niu_find_rxpage()
3315 static void niu_hash_page(struct rx_ring_info *rp, struct page *page, u64 base) in niu_hash_page() argument
3317 unsigned int h = niu_hash_rxaddr(rp, base); in niu_hash_page()
3320 page->mapping = (struct address_space *) rp->rxhash[h]; in niu_hash_page()
3321 rp->rxhash[h] = page; in niu_hash_page()
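
The niu_hash_rxaddr()/niu_find_rxpage()/niu_hash_page() fragments above track RX pages in a small hash table keyed on DMA address, with collision chains threaded through the otherwise unused page->mapping pointer. A minimal userspace sketch of the same pattern follows, using a simplified struct with an explicit next pointer; the table size and hash function are illustrative, not the driver's:

#include <stdint.h>
#include <stddef.h>

#define RXHASH_SIZE 16                  /* illustrative; not the driver's table size */

struct rx_page {                        /* simplified stand-in for the per-page bookkeeping */
        uint64_t base;                  /* DMA address of the start of the page */
        uint64_t size;                  /* bytes covered by the page */
        struct rx_page *next;           /* chain pointer (the driver reuses page->mapping) */
};

struct rx_ring {
        struct rx_page *rxhash[RXHASH_SIZE];
};

/* Hash a buffer address down to a table slot, page-granular like niu_hash_rxaddr(). */
static unsigned int hash_rxaddr(uint64_t addr)
{
        return (unsigned int)((addr >> 12) & (RXHASH_SIZE - 1));
}

/* Insert at the head of the hash chain, as niu_hash_page() does. */
static void hash_page(struct rx_ring *rp, struct rx_page *p, uint64_t base, uint64_t size)
{
        unsigned int h = hash_rxaddr(base);

        p->base = base;
        p->size = size;
        p->next = rp->rxhash[h];
        rp->rxhash[h] = p;
}

/* Walk the chain looking for the page containing addr, as niu_find_rxpage() does. */
static struct rx_page *find_rxpage(struct rx_ring *rp, uint64_t addr)
{
        struct rx_page *p;

        for (p = rp->rxhash[hash_rxaddr(addr)]; p; p = p->next)
                if (addr >= p->base && addr - p->base < p->size)
                        return p;
        return NULL;
}
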
3324 static int niu_rbr_add_page(struct niu *np, struct rx_ring_info *rp, in niu_rbr_add_page() argument
3342 niu_hash_page(rp, page, addr); in niu_rbr_add_page()
3343 if (rp->rbr_blocks_per_page > 1) in niu_rbr_add_page()
3344 atomic_add(rp->rbr_blocks_per_page - 1, &page->_count); in niu_rbr_add_page()
3346 for (i = 0; i < rp->rbr_blocks_per_page; i++) { in niu_rbr_add_page()
3347 __le32 *rbr = &rp->rbr[start_index + i]; in niu_rbr_add_page()
3350 addr += rp->rbr_block_size; in niu_rbr_add_page()
3356 static void niu_rbr_refill(struct niu *np, struct rx_ring_info *rp, gfp_t mask) in niu_rbr_refill() argument
3358 int index = rp->rbr_index; in niu_rbr_refill()
3360 rp->rbr_pending++; in niu_rbr_refill()
3361 if ((rp->rbr_pending % rp->rbr_blocks_per_page) == 0) { in niu_rbr_refill()
3362 int err = niu_rbr_add_page(np, rp, mask, index); in niu_rbr_refill()
3365 rp->rbr_pending--; in niu_rbr_refill()
3369 rp->rbr_index += rp->rbr_blocks_per_page; in niu_rbr_refill()
3370 BUG_ON(rp->rbr_index > rp->rbr_table_size); in niu_rbr_refill()
3371 if (rp->rbr_index == rp->rbr_table_size) in niu_rbr_refill()
3372 rp->rbr_index = 0; in niu_rbr_refill()
3374 if (rp->rbr_pending >= rp->rbr_kick_thresh) { in niu_rbr_refill()
3375 nw64(RBR_KICK(rp->rx_channel), rp->rbr_pending); in niu_rbr_refill()
3376 rp->rbr_pending = 0; in niu_rbr_refill()
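
niu_rbr_refill() above batches buffer replenishment: the ring index only advances in whole-page steps, wraps at rbr_table_size, and the RBR_KICK doorbell is written only once rbr_pending reaches rbr_kick_thresh. A compilable sketch of that batching, with the page allocation and the register write replaced by stubs and types simplified:

#include <stdio.h>

struct rbr {                            /* simplified stand-in for the RBR fields of rx_ring_info */
        int index;                      /* next free slot in the buffer ring */
        int table_size;                 /* total ring entries */
        int blocks_per_page;            /* ring entries one page provides */
        int pending;                    /* entries queued but not yet announced to hardware */
        int kick_thresh;                /* announce once this many entries are pending */
};

static int add_page(struct rbr *r)      /* stands in for niu_rbr_add_page(); may fail */
{
        (void)r;
        return 0;
}

static void rbr_kick(int count)         /* stands in for nw64(RBR_KICK(channel), count) */
{
        printf("kick hardware with %d new buffers\n", count);
}

static void rbr_refill(struct rbr *r)
{
        r->pending++;
        if ((r->pending % r->blocks_per_page) == 0) {
                if (add_page(r)) {
                        r->pending--;           /* allocation failed, try again later */
                        return;
                }

                r->index += r->blocks_per_page;
                if (r->index == r->table_size)
                        r->index = 0;           /* wrap, as niu_rbr_refill() does */

                if (r->pending >= r->kick_thresh) {
                        rbr_kick(r->pending);   /* batch the doorbell write */
                        r->pending = 0;
                }
        }
}
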
3381 static int niu_rx_pkt_ignore(struct niu *np, struct rx_ring_info *rp) in niu_rx_pkt_ignore() argument
3383 unsigned int index = rp->rcr_index; in niu_rx_pkt_ignore()
3386 rp->rx_dropped++; in niu_rx_pkt_ignore()
3394 val = le64_to_cpup(&rp->rcr[index]); in niu_rx_pkt_ignore()
3397 page = niu_find_rxpage(rp, addr, &link); in niu_rx_pkt_ignore()
3399 rcr_size = rp->rbr_sizes[(val & RCR_ENTRY_PKTBUFSZ) >> in niu_rx_pkt_ignore()
3408 rp->rbr_refill_pending++; in niu_rx_pkt_ignore()
3411 index = NEXT_RCR(rp, index); in niu_rx_pkt_ignore()
3416 rp->rcr_index = index; in niu_rx_pkt_ignore()
3422 struct rx_ring_info *rp) in niu_process_rx_pkt() argument
3424 unsigned int index = rp->rcr_index; in niu_process_rx_pkt()
3431 return niu_rx_pkt_ignore(np, rp); in niu_process_rx_pkt()
3441 val = le64_to_cpup(&rp->rcr[index]); in niu_process_rx_pkt()
3449 page = niu_find_rxpage(rp, addr, &link); in niu_process_rx_pkt()
3451 rcr_size = rp->rbr_sizes[(val & RCR_ENTRY_PKTBUFSZ) >> in niu_process_rx_pkt()
3471 if ((page->index + rp->rbr_block_size) - rcr_size == addr) { in niu_process_rx_pkt()
3477 rp->rbr_refill_pending++; in niu_process_rx_pkt()
3481 index = NEXT_RCR(rp, index); in niu_process_rx_pkt()
3486 rp->rcr_index = index; in niu_process_rx_pkt()
3502 rp->rx_packets++; in niu_process_rx_pkt()
3503 rp->rx_bytes += skb->len; in niu_process_rx_pkt()
3506 skb_record_rx_queue(skb, rp->rx_channel); in niu_process_rx_pkt()
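
niu_process_rx_pkt() above decodes each 64-bit completion word from rp->rcr[] with mask-and-shift, using the RCR_ENTRY_PKTBUFSZ field to select one of the four rbr_sizes[]. A sketch of that style of decode follows; the field positions below are made up for illustration and are not the real RCR layout:

#include <stdint.h>

/* Illustrative field layout only; the real RCR_ENTRY_* masks live in the driver's header. */
#define DEMO_PKTBUFSZ_SHIFT 60
#define DEMO_PKTBUFSZ_MASK  0x3ULL              /* 2-bit index into rbr_sizes[0..3] */
#define DEMO_ADDR_MASK      0x3fffffffffULL     /* packet buffer DMA address bits */

struct rcr_fields {
        unsigned int bufsz_idx;                 /* which of the four buffer sizes was used */
        uint64_t addr;                          /* DMA address of the packet buffer */
};

/* Decode one 64-bit completion word (already converted from little endian),
 * in the mask-and-shift style niu_process_rx_pkt() applies to rp->rcr[index]. */
static struct rcr_fields decode_rcr_entry(uint64_t val)
{
        struct rcr_fields f;

        f.bufsz_idx = (unsigned int)((val >> DEMO_PKTBUFSZ_SHIFT) & DEMO_PKTBUFSZ_MASK);
        f.addr = val & DEMO_ADDR_MASK;
        return f;
}
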
3512 static int niu_rbr_fill(struct niu *np, struct rx_ring_info *rp, gfp_t mask) in niu_rbr_fill() argument
3514 int blocks_per_page = rp->rbr_blocks_per_page; in niu_rbr_fill()
3515 int err, index = rp->rbr_index; in niu_rbr_fill()
3518 while (index < (rp->rbr_table_size - blocks_per_page)) { in niu_rbr_fill()
3519 err = niu_rbr_add_page(np, rp, mask, index); in niu_rbr_fill()
3526 rp->rbr_index = index; in niu_rbr_fill()
3530 static void niu_rbr_free(struct niu *np, struct rx_ring_info *rp) in niu_rbr_free() argument
3537 page = rp->rxhash[i]; in niu_rbr_free()
3553 for (i = 0; i < rp->rbr_table_size; i++) in niu_rbr_free()
3554 rp->rbr[i] = cpu_to_le32(0); in niu_rbr_free()
3555 rp->rbr_index = 0; in niu_rbr_free()
3558 static int release_tx_packet(struct niu *np, struct tx_ring_info *rp, int idx) in release_tx_packet() argument
3560 struct tx_buff_info *tb = &rp->tx_buffs[idx]; in release_tx_packet()
3569 rp->tx_packets++; in release_tx_packet()
3570 rp->tx_bytes += (((tx_flags & TXHDR_LEN) >> TXHDR_LEN_SHIFT) - in release_tx_packet()
3577 if (le64_to_cpu(rp->descr[idx]) & TX_DESC_MARK) in release_tx_packet()
3578 rp->mark_pending--; in release_tx_packet()
3582 idx = NEXT_TX(rp, idx); in release_tx_packet()
3587 tb = &rp->tx_buffs[idx]; in release_tx_packet()
3592 idx = NEXT_TX(rp, idx); in release_tx_packet()
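
release_tx_packet() above walks every descriptor an skb owns, bumping tx_packets/tx_bytes and advancing the consumer index with NEXT_TX(). A simplified sketch of that walk follows; passing the descriptor count explicitly is a shortcut, since the driver derives it from the skb's fragments:

#include <stddef.h>

#define DEMO_TX_RING_SIZE 256           /* illustrative; must be a power of two for the masking */
#define DEMO_NEXT_TX(idx) (((idx) + 1) & (DEMO_TX_RING_SIZE - 1))

struct demo_tx_buff {
        void *skb;                      /* the packet that owns this descriptor, if any */
};

struct demo_tx_ring {
        struct demo_tx_buff buffs[DEMO_TX_RING_SIZE];
        unsigned long tx_packets;
        unsigned long tx_bytes;
};

/* Release one transmitted packet starting at idx and return the new consumer index,
 * mirroring how release_tx_packet() walks every descriptor belonging to one skb. */
static int demo_release_tx_packet(struct demo_tx_ring *r, int idx,
                                  unsigned int len, int ndescs)
{
        r->tx_packets++;
        r->tx_bytes += len;

        while (ndescs--) {
                r->buffs[idx].skb = NULL;       /* unmap and drop the reference in the driver */
                idx = DEMO_NEXT_TX(idx);
        }
        return idx;
}
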
3600 #define NIU_TX_WAKEUP_THRESH(rp) ((rp)->pending / 4) argument
3602 static void niu_tx_work(struct niu *np, struct tx_ring_info *rp) in niu_tx_work() argument
3609 index = (rp - np->tx_rings); in niu_tx_work()
3612 cs = rp->tx_cs; in niu_tx_work()
3617 pkt_cnt = (pkt_cnt - rp->last_pkt_cnt) & in niu_tx_work()
3620 rp->last_pkt_cnt = tmp; in niu_tx_work()
3622 cons = rp->cons; in niu_tx_work()
3628 cons = release_tx_packet(np, rp, cons); in niu_tx_work()
3630 rp->cons = cons; in niu_tx_work()
3635 (niu_tx_avail(rp) > NIU_TX_WAKEUP_THRESH(rp)))) { in niu_tx_work()
3638 (niu_tx_avail(rp) > NIU_TX_WAKEUP_THRESH(rp))) in niu_tx_work()
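
niu_tx_work() above reads the hardware packet counter out of tx_cs, releases the completed packets, and only re-wakes a stopped queue once more than NIU_TX_WAKEUP_THRESH (a quarter of the ring) is free again. A sketch of that hysteresis with simplified types, assuming one descriptor per packet and a power-of-two ring size:

#include <stdbool.h>

struct demo_tx_state {
        unsigned int pending;           /* ring size in descriptors (power of two) */
        unsigned int prod, cons;        /* producer / consumer indices */
        unsigned int last_pkt_cnt;      /* hardware packet counter at the last service */
        bool queue_stopped;
};

/* Descriptors still free for the producer, in the spirit of niu_tx_avail(). */
static unsigned int demo_tx_avail(const struct demo_tx_state *t)
{
        return t->pending - ((t->prod - t->cons) & (t->pending - 1));
}

/* Reclaim completed packets and apply the wake-up hysteresis: only restart the
 * queue once more than a quarter of the ring is free, matching the listed
 * NIU_TX_WAKEUP_THRESH(rp) of (rp)->pending / 4. */
static void demo_tx_work(struct demo_tx_state *t, unsigned int hw_pkt_cnt)
{
        unsigned int done = hw_pkt_cnt - t->last_pkt_cnt;       /* unsigned wrap is fine */

        t->last_pkt_cnt = hw_pkt_cnt;
        t->cons += done;                /* simplification: one descriptor per packet */

        if (t->queue_stopped && demo_tx_avail(t) > t->pending / 4)
                t->queue_stopped = false;       /* netif_tx_wake_queue() in the driver */
}
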
3645 struct rx_ring_info *rp, in niu_sync_rx_discard_stats() argument
3659 int rx_channel = rp->rx_channel; in niu_sync_rx_discard_stats()
3670 rp->rx_errors += misc & RXMISC_COUNT; in niu_sync_rx_discard_stats()
3685 rp->rx_dropped += wred & RED_DIS_CNT_COUNT; in niu_sync_rx_discard_stats()
3697 struct rx_ring_info *rp, int budget) in niu_rx_work() argument
3700 struct rxdma_mailbox *mbox = rp->mbox; in niu_rx_work()
3704 stat = nr64(RX_DMA_CTL_STAT(rp->rx_channel)); in niu_rx_work()
3705 qlen = nr64(RCRSTAT_A(rp->rx_channel)) & RCRSTAT_A_QLEN; in niu_rx_work()
3715 __func__, rp->rx_channel, (unsigned long long)stat, qlen); in niu_rx_work()
3720 rcr_done += niu_process_rx_pkt(napi, np, rp); in niu_rx_work()
3724 if (rp->rbr_refill_pending >= rp->rbr_kick_thresh) { in niu_rx_work()
3727 for (i = 0; i < rp->rbr_refill_pending; i++) in niu_rx_work()
3728 niu_rbr_refill(np, rp, GFP_ATOMIC); in niu_rx_work()
3729 rp->rbr_refill_pending = 0; in niu_rx_work()
3736 nw64(RX_DMA_CTL_STAT(rp->rx_channel), stat); in niu_rx_work()
3740 niu_sync_rx_discard_stats(np, rp, 0x7FFF); in niu_rx_work()
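
niu_rx_work() above is a budget-limited service loop: handle at most min(budget, qlen) completions, then perform all deferred buffer refills in one batch once rbr_refill_pending crosses rbr_kick_thresh, and finally ack RX_DMA_CTL_STAT. A sketch of that shape with every driver call replaced by a stub:

struct demo_rx_ring {
        int hw_qlen;                    /* completions the hardware reports as ready */
        int refill_pending;             /* buffers consumed but not yet given back */
        int kick_thresh;                /* refill in batches of at least this many */
};

static int demo_read_qlen(struct demo_rx_ring *r)               /* RCRSTAT_A qlen */
{
        return r->hw_qlen;
}

static void demo_process_one_packet(struct demo_rx_ring *r)     /* niu_process_rx_pkt() */
{
        r->hw_qlen--;
        r->refill_pending++;
}

static void demo_refill_one_buffer(struct demo_rx_ring *r)      /* niu_rbr_refill(..., GFP_ATOMIC) */
{
        (void)r;
}

static void demo_ack_status(struct demo_rx_ring *r)             /* write-back to RX_DMA_CTL_STAT */
{
        (void)r;
}

/* Budget-limited service loop in the shape of niu_rx_work(). */
static int demo_rx_work(struct demo_rx_ring *r, int budget)
{
        int qlen = demo_read_qlen(r);
        int work = 0;

        if (qlen > budget)
                qlen = budget;

        while (work < qlen) {
                demo_process_one_packet(r);
                work++;
        }

        if (r->refill_pending >= r->kick_thresh) {
                int i;

                for (i = 0; i < r->refill_pending; i++)
                        demo_refill_one_buffer(r);
                r->refill_pending = 0;
        }

        demo_ack_status(r);
        return work;                    /* packets consumed against the NAPI budget */
}
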
3756 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_poll_core() local
3757 if (tx_vec & (1 << rp->tx_channel)) in niu_poll_core()
3758 niu_tx_work(np, rp); in niu_poll_core()
3759 nw64(LD_IM0(LDN_TXDMA(rp->tx_channel)), 0); in niu_poll_core()
3763 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_poll_core() local
3765 if (rx_vec & (1 << rp->rx_channel)) { in niu_poll_core()
3768 this_work_done = niu_rx_work(&lp->napi, np, rp, in niu_poll_core()
3774 nw64(LD_IM0(LDN_RXDMA(rp->rx_channel)), 0); in niu_poll_core()
3795 static void niu_log_rxchan_errors(struct niu *np, struct rx_ring_info *rp, in niu_log_rxchan_errors() argument
3798 netdev_err(np->dev, "RX channel %u errors ( ", rp->rx_channel); in niu_log_rxchan_errors()
3832 static int niu_rx_error(struct niu *np, struct rx_ring_info *rp) in niu_rx_error() argument
3834 u64 stat = nr64(RX_DMA_CTL_STAT(rp->rx_channel)); in niu_rx_error()
3844 rp->rx_channel, in niu_rx_error()
3847 niu_log_rxchan_errors(np, rp, stat); in niu_rx_error()
3850 nw64(RX_DMA_CTL_STAT(rp->rx_channel), in niu_rx_error()
3856 static void niu_log_txchan_errors(struct niu *np, struct tx_ring_info *rp, in niu_log_txchan_errors() argument
3859 netdev_err(np->dev, "TX channel %u errors ( ", rp->tx_channel); in niu_log_txchan_errors()
3881 static int niu_tx_error(struct niu *np, struct tx_ring_info *rp) in niu_tx_error() argument
3885 cs = nr64(TX_CS(rp->tx_channel)); in niu_tx_error()
3886 logh = nr64(TX_RNG_ERR_LOGH(rp->tx_channel)); in niu_tx_error()
3887 logl = nr64(TX_RNG_ERR_LOGL(rp->tx_channel)); in niu_tx_error()
3890 rp->tx_channel, in niu_tx_error()
3895 niu_log_txchan_errors(np, rp, cs); in niu_tx_error()
4092 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_slowpath_interrupt() local
4094 if (rx_vec & (1 << rp->rx_channel)) { in niu_slowpath_interrupt()
4095 int r = niu_rx_error(np, rp); in niu_slowpath_interrupt()
4100 nw64(RX_DMA_CTL_STAT(rp->rx_channel), in niu_slowpath_interrupt()
4110 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_slowpath_interrupt() local
4112 if (tx_vec & (1 << rp->tx_channel)) { in niu_slowpath_interrupt()
4113 int r = niu_tx_error(np, rp); in niu_slowpath_interrupt()
4143 static void niu_rxchan_intr(struct niu *np, struct rx_ring_info *rp, in niu_rxchan_intr() argument
4146 struct rxdma_mailbox *mbox = rp->mbox; in niu_rxchan_intr()
4151 nw64(RX_DMA_CTL_STAT(rp->rx_channel), stat_write); in niu_rxchan_intr()
4157 static void niu_txchan_intr(struct niu *np, struct tx_ring_info *rp, in niu_txchan_intr() argument
4160 rp->tx_cs = nr64(TX_CS(rp->tx_channel)); in niu_txchan_intr()
4163 "%s() cs[%llx]\n", __func__, (unsigned long long)rp->tx_cs); in niu_txchan_intr()
4176 struct rx_ring_info *rp = &np->rx_rings[i]; in __niu_fastpath_interrupt() local
4177 int ldn = LDN_RXDMA(rp->rx_channel); in __niu_fastpath_interrupt()
4183 if (rx_vec & (1 << rp->rx_channel)) in __niu_fastpath_interrupt()
4184 niu_rxchan_intr(np, rp, ldn); in __niu_fastpath_interrupt()
4188 struct tx_ring_info *rp = &np->tx_rings[i]; in __niu_fastpath_interrupt() local
4189 int ldn = LDN_TXDMA(rp->tx_channel); in __niu_fastpath_interrupt()
4195 if (tx_vec & (1 << rp->tx_channel)) in __niu_fastpath_interrupt()
4196 niu_txchan_intr(np, rp, ldn); in __niu_fastpath_interrupt()
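
niu_poll_core() and __niu_fastpath_interrupt() above dispatch an interrupt vector by testing one bit per DMA channel in rx_vec/tx_vec and servicing only the rings whose bit is set. A sketch of that bitmask dispatch follows; splitting the 64-bit vector into a low RX half and a high TX half is an assumption made for illustration:

#include <stdint.h>

#define DEMO_NUM_RX 4
#define DEMO_NUM_TX 4

static void demo_service_rx(int channel) { (void)channel; }     /* niu_rxchan_intr() */
static void demo_service_tx(int channel) { (void)channel; }     /* niu_txchan_intr() */

/* Test one bit per DMA channel and service only the rings whose bit is set. */
static void demo_dispatch(uint64_t v0, const int *rx_channels, const int *tx_channels)
{
        uint32_t rx_vec = (uint32_t)v0;         /* one bit per RX DMA channel (assumed split) */
        uint32_t tx_vec = (uint32_t)(v0 >> 32); /* one bit per TX DMA channel (assumed split) */
        int i;

        for (i = 0; i < DEMO_NUM_RX; i++)
                if (rx_vec & (1u << rx_channels[i]))
                        demo_service_rx(rx_channels[i]);

        for (i = 0; i < DEMO_NUM_TX; i++)
                if (tx_vec & (1u << tx_channels[i]))
                        demo_service_tx(tx_channels[i]);
}
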
4256 static void niu_free_rx_ring_info(struct niu *np, struct rx_ring_info *rp) in niu_free_rx_ring_info() argument
4258 if (rp->mbox) { in niu_free_rx_ring_info()
4261 rp->mbox, rp->mbox_dma); in niu_free_rx_ring_info()
4262 rp->mbox = NULL; in niu_free_rx_ring_info()
4264 if (rp->rcr) { in niu_free_rx_ring_info()
4267 rp->rcr, rp->rcr_dma); in niu_free_rx_ring_info()
4268 rp->rcr = NULL; in niu_free_rx_ring_info()
4269 rp->rcr_table_size = 0; in niu_free_rx_ring_info()
4270 rp->rcr_index = 0; in niu_free_rx_ring_info()
4272 if (rp->rbr) { in niu_free_rx_ring_info()
4273 niu_rbr_free(np, rp); in niu_free_rx_ring_info()
4277 rp->rbr, rp->rbr_dma); in niu_free_rx_ring_info()
4278 rp->rbr = NULL; in niu_free_rx_ring_info()
4279 rp->rbr_table_size = 0; in niu_free_rx_ring_info()
4280 rp->rbr_index = 0; in niu_free_rx_ring_info()
4282 kfree(rp->rxhash); in niu_free_rx_ring_info()
4283 rp->rxhash = NULL; in niu_free_rx_ring_info()
4286 static void niu_free_tx_ring_info(struct niu *np, struct tx_ring_info *rp) in niu_free_tx_ring_info() argument
4288 if (rp->mbox) { in niu_free_tx_ring_info()
4291 rp->mbox, rp->mbox_dma); in niu_free_tx_ring_info()
4292 rp->mbox = NULL; in niu_free_tx_ring_info()
4294 if (rp->descr) { in niu_free_tx_ring_info()
4298 if (rp->tx_buffs[i].skb) in niu_free_tx_ring_info()
4299 (void) release_tx_packet(np, rp, i); in niu_free_tx_ring_info()
4304 rp->descr, rp->descr_dma); in niu_free_tx_ring_info()
4305 rp->descr = NULL; in niu_free_tx_ring_info()
4306 rp->pending = 0; in niu_free_tx_ring_info()
4307 rp->prod = 0; in niu_free_tx_ring_info()
4308 rp->cons = 0; in niu_free_tx_ring_info()
4309 rp->wrap_bit = 0; in niu_free_tx_ring_info()
4319 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_free_channels() local
4321 niu_free_rx_ring_info(np, rp); in niu_free_channels()
4330 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_free_channels() local
4332 niu_free_tx_ring_info(np, rp); in niu_free_channels()
4341 struct rx_ring_info *rp) in niu_alloc_rx_ring_info() argument
4345 rp->rxhash = kcalloc(MAX_RBR_RING_SIZE, sizeof(struct page *), in niu_alloc_rx_ring_info()
4347 if (!rp->rxhash) in niu_alloc_rx_ring_info()
4350 rp->mbox = np->ops->alloc_coherent(np->device, in niu_alloc_rx_ring_info()
4352 &rp->mbox_dma, GFP_KERNEL); in niu_alloc_rx_ring_info()
4353 if (!rp->mbox) in niu_alloc_rx_ring_info()
4355 if ((unsigned long)rp->mbox & (64UL - 1)) { in niu_alloc_rx_ring_info()
4357 rp->mbox); in niu_alloc_rx_ring_info()
4361 rp->rcr = np->ops->alloc_coherent(np->device, in niu_alloc_rx_ring_info()
4363 &rp->rcr_dma, GFP_KERNEL); in niu_alloc_rx_ring_info()
4364 if (!rp->rcr) in niu_alloc_rx_ring_info()
4366 if ((unsigned long)rp->rcr & (64UL - 1)) { in niu_alloc_rx_ring_info()
4368 rp->rcr); in niu_alloc_rx_ring_info()
4371 rp->rcr_table_size = MAX_RCR_RING_SIZE; in niu_alloc_rx_ring_info()
4372 rp->rcr_index = 0; in niu_alloc_rx_ring_info()
4374 rp->rbr = np->ops->alloc_coherent(np->device, in niu_alloc_rx_ring_info()
4376 &rp->rbr_dma, GFP_KERNEL); in niu_alloc_rx_ring_info()
4377 if (!rp->rbr) in niu_alloc_rx_ring_info()
4379 if ((unsigned long)rp->rbr & (64UL - 1)) { in niu_alloc_rx_ring_info()
4381 rp->rbr); in niu_alloc_rx_ring_info()
4384 rp->rbr_table_size = MAX_RBR_RING_SIZE; in niu_alloc_rx_ring_info()
4385 rp->rbr_index = 0; in niu_alloc_rx_ring_info()
4386 rp->rbr_pending = 0; in niu_alloc_rx_ring_info()
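
Both niu_alloc_rx_ring_info() and niu_alloc_tx_ring_info() above verify that every coherent allocation (mailbox, RCR, RBR, descriptor ring) came back 64-byte aligned and fail the setup otherwise. A small sketch of that check:

#include <stdint.h>
#include <stdio.h>

/* The hardware wants mailbox and ring bases 64-byte aligned; the driver simply
 * checks what alloc_coherent() returned and fails the setup otherwise. */
static int demo_check_hw_alignment(const void *buf, const char *what)
{
        if ((uintptr_t)buf & (64UL - 1)) {
                fprintf(stderr, "%s at %p is not 64-byte aligned\n", what, buf);
                return -1;              /* the driver unwinds via niu_free_*_ring_info() */
        }
        return 0;
}
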
4391 static void niu_set_max_burst(struct niu *np, struct tx_ring_info *rp) in niu_set_max_burst() argument
4398 rp->max_burst = mtu + 32; in niu_set_max_burst()
4399 if (rp->max_burst > 4096) in niu_set_max_burst()
4400 rp->max_burst = 4096; in niu_set_max_burst()
4404 struct tx_ring_info *rp) in niu_alloc_tx_ring_info() argument
4408 rp->mbox = np->ops->alloc_coherent(np->device, in niu_alloc_tx_ring_info()
4410 &rp->mbox_dma, GFP_KERNEL); in niu_alloc_tx_ring_info()
4411 if (!rp->mbox) in niu_alloc_tx_ring_info()
4413 if ((unsigned long)rp->mbox & (64UL - 1)) { in niu_alloc_tx_ring_info()
4415 rp->mbox); in niu_alloc_tx_ring_info()
4419 rp->descr = np->ops->alloc_coherent(np->device, in niu_alloc_tx_ring_info()
4421 &rp->descr_dma, GFP_KERNEL); in niu_alloc_tx_ring_info()
4422 if (!rp->descr) in niu_alloc_tx_ring_info()
4424 if ((unsigned long)rp->descr & (64UL - 1)) { in niu_alloc_tx_ring_info()
4426 rp->descr); in niu_alloc_tx_ring_info()
4430 rp->pending = MAX_TX_RING_SIZE; in niu_alloc_tx_ring_info()
4431 rp->prod = 0; in niu_alloc_tx_ring_info()
4432 rp->cons = 0; in niu_alloc_tx_ring_info()
4433 rp->wrap_bit = 0; in niu_alloc_tx_ring_info()
4436 rp->mark_freq = rp->pending / 4; in niu_alloc_tx_ring_info()
4438 niu_set_max_burst(np, rp); in niu_alloc_tx_ring_info()
4443 static void niu_size_rbr(struct niu *np, struct rx_ring_info *rp) in niu_size_rbr() argument
4449 rp->rbr_block_size = 1 << bss; in niu_size_rbr()
4450 rp->rbr_blocks_per_page = 1 << (PAGE_SHIFT-bss); in niu_size_rbr()
4452 rp->rbr_sizes[0] = 256; in niu_size_rbr()
4453 rp->rbr_sizes[1] = 1024; in niu_size_rbr()
4457 rp->rbr_sizes[2] = 4096; in niu_size_rbr()
4461 rp->rbr_sizes[2] = 8192; in niu_size_rbr()
4465 rp->rbr_sizes[2] = 2048; in niu_size_rbr()
4467 rp->rbr_sizes[3] = rp->rbr_block_size; in niu_size_rbr()
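
niu_size_rbr() above derives the RBR geometry from a block-size shift: block_size = 1 << bss, blocks_per_page = 1 << (PAGE_SHIFT - bss), plus a table of four buffer sizes whose third entry depends on MTU and page size. A sketch assuming a fixed 4 KiB page, with the 2048-byte value chosen arbitrarily as the default:

#define DEMO_PAGE_SHIFT 12              /* 4 KiB pages; PAGE_SHIFT in the kernel */

struct demo_rbr_geometry {
        unsigned int block_size;        /* bytes the hardware treats as one RBR block */
        unsigned int blocks_per_page;   /* blocks one page is carved into */
        unsigned int sizes[4];          /* the four packet-buffer sizes offered to hardware */
};

/* Derive the RBR geometry from a block-size shift, as niu_size_rbr() does.
 * sizes[2] really depends on MTU and page size; 2048 is just the default here. */
static void demo_size_rbr(struct demo_rbr_geometry *g, unsigned int bss)
{
        g->block_size = 1u << bss;
        g->blocks_per_page = 1u << (DEMO_PAGE_SHIFT - bss);
        g->sizes[0] = 256;
        g->sizes[1] = 1024;
        g->sizes[2] = 2048;
        g->sizes[3] = g->block_size;    /* the largest buffer is one whole block */
}
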
4502 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_alloc_channels() local
4504 rp->np = np; in niu_alloc_channels()
4505 rp->rx_channel = first_rx_channel + i; in niu_alloc_channels()
4507 err = niu_alloc_rx_ring_info(np, rp); in niu_alloc_channels()
4511 niu_size_rbr(np, rp); in niu_alloc_channels()
4514 rp->nonsyn_window = 64; in niu_alloc_channels()
4515 rp->nonsyn_threshold = rp->rcr_table_size - 64; in niu_alloc_channels()
4516 rp->syn_window = 64; in niu_alloc_channels()
4517 rp->syn_threshold = rp->rcr_table_size - 64; in niu_alloc_channels()
4518 rp->rcr_pkt_threshold = 16; in niu_alloc_channels()
4519 rp->rcr_timeout = 8; in niu_alloc_channels()
4520 rp->rbr_kick_thresh = RBR_REFILL_MIN; in niu_alloc_channels()
4521 if (rp->rbr_kick_thresh < rp->rbr_blocks_per_page) in niu_alloc_channels()
4522 rp->rbr_kick_thresh = rp->rbr_blocks_per_page; in niu_alloc_channels()
4524 err = niu_rbr_fill(np, rp, GFP_KERNEL); in niu_alloc_channels()
4542 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_alloc_channels() local
4544 rp->np = np; in niu_alloc_channels()
4545 rp->tx_channel = first_tx_channel + i; in niu_alloc_channels()
4547 err = niu_alloc_tx_ring_info(np, rp); in niu_alloc_channels()
4673 static int niu_init_one_tx_channel(struct niu *np, struct tx_ring_info *rp) in niu_init_one_tx_channel() argument
4675 int err, channel = rp->tx_channel; in niu_init_one_tx_channel()
4690 nw64(TXC_DMA_MAX(channel), rp->max_burst); in niu_init_one_tx_channel()
4693 if (rp->descr_dma & ~(TX_RNG_CFIG_STADDR_BASE | in niu_init_one_tx_channel()
4696 channel, (unsigned long long)rp->descr_dma); in niu_init_one_tx_channel()
4705 ring_len = (rp->pending / 8); in niu_init_one_tx_channel()
4708 rp->descr_dma); in niu_init_one_tx_channel()
4711 if (((rp->mbox_dma >> 32) & ~TXDMA_MBH_MBADDR) || in niu_init_one_tx_channel()
4712 ((u32)rp->mbox_dma & ~TXDMA_MBL_MBADDR)) { in niu_init_one_tx_channel()
4714 channel, (unsigned long long)rp->mbox_dma); in niu_init_one_tx_channel()
4717 nw64(TXDMA_MBH(channel), rp->mbox_dma >> 32); in niu_init_one_tx_channel()
4718 nw64(TXDMA_MBL(channel), rp->mbox_dma & TXDMA_MBL_MBADDR); in niu_init_one_tx_channel()
4722 rp->last_pkt_cnt = 0; in niu_init_one_tx_channel()
4813 static void niu_rx_channel_wred_init(struct niu *np, struct rx_ring_info *rp) in niu_rx_channel_wred_init() argument
4817 val = (((u64)rp->nonsyn_window << RDC_RED_PARA_WIN_SHIFT) | in niu_rx_channel_wred_init()
4818 ((u64)rp->nonsyn_threshold << RDC_RED_PARA_THRE_SHIFT) | in niu_rx_channel_wred_init()
4819 ((u64)rp->syn_window << RDC_RED_PARA_WIN_SYN_SHIFT) | in niu_rx_channel_wred_init()
4820 ((u64)rp->syn_threshold << RDC_RED_PARA_THRE_SYN_SHIFT)); in niu_rx_channel_wred_init()
4821 nw64(RDC_RED_PARA(rp->rx_channel), val); in niu_rx_channel_wred_init()
4824 static int niu_compute_rbr_cfig_b(struct rx_ring_info *rp, u64 *ret) in niu_compute_rbr_cfig_b() argument
4829 switch (rp->rbr_block_size) { in niu_compute_rbr_cfig_b()
4846 switch (rp->rbr_sizes[2]) { in niu_compute_rbr_cfig_b()
4864 switch (rp->rbr_sizes[1]) { in niu_compute_rbr_cfig_b()
4882 switch (rp->rbr_sizes[0]) { in niu_compute_rbr_cfig_b()
4926 static int niu_init_one_rx_channel(struct niu *np, struct rx_ring_info *rp) in niu_init_one_rx_channel() argument
4928 int err, channel = rp->rx_channel; in niu_init_one_rx_channel()
4939 niu_rx_channel_wred_init(np, rp); in niu_init_one_rx_channel()
4947 nw64(RXDMA_CFIG1(channel), rp->mbox_dma >> 32); in niu_init_one_rx_channel()
4949 ((rp->mbox_dma & RXDMA_CFIG2_MBADDR_L) | in niu_init_one_rx_channel()
4952 ((u64)rp->rbr_table_size << RBR_CFIG_A_LEN_SHIFT) | in niu_init_one_rx_channel()
4953 (rp->rbr_dma & (RBR_CFIG_A_STADDR_BASE | RBR_CFIG_A_STADDR))); in niu_init_one_rx_channel()
4954 err = niu_compute_rbr_cfig_b(rp, &val); in niu_init_one_rx_channel()
4959 ((u64)rp->rcr_table_size << RCRCFIG_A_LEN_SHIFT) | in niu_init_one_rx_channel()
4960 (rp->rcr_dma & (RCRCFIG_A_STADDR_BASE | RCRCFIG_A_STADDR))); in niu_init_one_rx_channel()
4962 ((u64)rp->rcr_pkt_threshold << RCRCFIG_B_PTHRES_SHIFT) | in niu_init_one_rx_channel()
4964 ((u64)rp->rcr_timeout << RCRCFIG_B_TIMEOUT_SHIFT)); in niu_init_one_rx_channel()
4970 nw64(RBR_KICK(channel), rp->rbr_index); in niu_init_one_rx_channel()
5000 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_init_rx_channels() local
5002 err = niu_init_one_rx_channel(np, rp); in niu_init_rx_channels()
5839 static void niu_stop_one_tx_channel(struct niu *np, struct tx_ring_info *rp) in niu_stop_one_tx_channel() argument
5841 (void) niu_tx_channel_stop(np, rp->tx_channel); in niu_stop_one_tx_channel()
5849 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_stop_tx_channels() local
5851 niu_stop_one_tx_channel(np, rp); in niu_stop_tx_channels()
5855 static void niu_reset_one_tx_channel(struct niu *np, struct tx_ring_info *rp) in niu_reset_one_tx_channel() argument
5857 (void) niu_tx_channel_reset(np, rp->tx_channel); in niu_reset_one_tx_channel()
5865 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_reset_tx_channels() local
5867 niu_reset_one_tx_channel(np, rp); in niu_reset_tx_channels()
5871 static void niu_stop_one_rx_channel(struct niu *np, struct rx_ring_info *rp) in niu_stop_one_rx_channel() argument
5873 (void) niu_enable_rx_channel(np, rp->rx_channel, 0); in niu_stop_one_rx_channel()
5881 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_stop_rx_channels() local
5883 niu_stop_one_rx_channel(np, rp); in niu_stop_rx_channels()
5887 static void niu_reset_one_rx_channel(struct niu *np, struct rx_ring_info *rp) in niu_reset_one_rx_channel() argument
5889 int channel = rp->rx_channel; in niu_reset_one_rx_channel()
5902 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_reset_rx_channels() local
5904 niu_reset_one_rx_channel(np, rp); in niu_reset_rx_channels()
5948 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_init_hw() local
5950 err = niu_init_one_tx_channel(np, rp); in niu_init_hw()
6253 struct rx_ring_info *rp = &rx_rings[i]; in niu_get_rx_stats() local
6255 niu_sync_rx_discard_stats(np, rp, 0); in niu_get_rx_stats()
6257 pkts += rp->rx_packets; in niu_get_rx_stats()
6258 bytes += rp->rx_bytes; in niu_get_rx_stats()
6259 dropped += rp->rx_dropped; in niu_get_rx_stats()
6260 errors += rp->rx_errors; in niu_get_rx_stats()
6284 struct tx_ring_info *rp = &tx_rings[i]; in niu_get_tx_stats() local
6286 pkts += rp->tx_packets; in niu_get_tx_stats()
6287 bytes += rp->tx_bytes; in niu_get_tx_stats()
6288 errors += rp->tx_errors; in niu_get_tx_stats()
6460 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_reset_buffers() local
6465 page = rp->rxhash[j]; in niu_reset_buffers()
6471 rp->rbr[k++] = cpu_to_le32(base); in niu_reset_buffers()
6476 err = niu_rbr_add_page(np, rp, GFP_ATOMIC, k); in niu_reset_buffers()
6481 rp->rbr_index = rp->rbr_table_size - 1; in niu_reset_buffers()
6482 rp->rcr_index = 0; in niu_reset_buffers()
6483 rp->rbr_pending = 0; in niu_reset_buffers()
6484 rp->rbr_refill_pending = 0; in niu_reset_buffers()
6489 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_reset_buffers() local
6492 if (rp->tx_buffs[j].skb) in niu_reset_buffers()
6493 (void) release_tx_packet(np, rp, j); in niu_reset_buffers()
6496 rp->pending = MAX_TX_RING_SIZE; in niu_reset_buffers()
6497 rp->prod = 0; in niu_reset_buffers()
6498 rp->cons = 0; in niu_reset_buffers()
6499 rp->wrap_bit = 0; in niu_reset_buffers()
6552 static void niu_set_txd(struct tx_ring_info *rp, int index, in niu_set_txd() argument
6556 __le64 *desc = &rp->descr[index]; in niu_set_txd()
6635 struct tx_ring_info *rp; in niu_start_xmit() local
6643 rp = &np->tx_rings[i]; in niu_start_xmit()
6646 if (niu_tx_avail(rp) <= (skb_shinfo(skb)->nr_frags + 1)) { in niu_start_xmit()
6649 rp->tx_errors++; in niu_start_xmit()
6682 prod = rp->prod; in niu_start_xmit()
6684 rp->tx_buffs[prod].skb = skb; in niu_start_xmit()
6685 rp->tx_buffs[prod].mapping = mapping; in niu_start_xmit()
6688 if (++rp->mark_counter == rp->mark_freq) { in niu_start_xmit()
6689 rp->mark_counter = 0; in niu_start_xmit()
6691 rp->mark_pending++; in niu_start_xmit()
6707 niu_set_txd(rp, prod, mapping, this_len, mrk, nfg); in niu_start_xmit()
6710 prod = NEXT_TX(rp, prod); in niu_start_xmit()
6723 rp->tx_buffs[prod].skb = NULL; in niu_start_xmit()
6724 rp->tx_buffs[prod].mapping = mapping; in niu_start_xmit()
6726 niu_set_txd(rp, prod, mapping, len, 0, 0); in niu_start_xmit()
6728 prod = NEXT_TX(rp, prod); in niu_start_xmit()
6731 if (prod < rp->prod) in niu_start_xmit()
6732 rp->wrap_bit ^= TX_RING_KICK_WRAP; in niu_start_xmit()
6733 rp->prod = prod; in niu_start_xmit()
6735 nw64(TX_RING_KICK(rp->tx_channel), rp->wrap_bit | (prod << 3)); in niu_start_xmit()
6737 if (unlikely(niu_tx_avail(rp) <= (MAX_SKB_FRAGS + 1))) { in niu_start_xmit()
6739 if (niu_tx_avail(rp) > NIU_TX_WAKEUP_THRESH(rp)) in niu_start_xmit()
6747 rp->tx_errors++; in niu_start_xmit()
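
niu_start_xmit() above publishes new descriptors by advancing the producer index with NEXT_TX(), toggling wrap_bit whenever the index rolls over, and writing wrap_bit | (prod << 3) to TX_RING_KICK. A sketch of that publish step with the register write stubbed out; the ring size and the wrap-bit position are illustrative:

#include <stdint.h>
#include <stdio.h>

#define DEMO_RING_SIZE 256              /* illustrative; power of two for the masking */
#define DEMO_KICK_WRAP  (1ULL << 63)    /* illustrative position of TX_RING_KICK_WRAP */

struct demo_tx_prod {
        unsigned int prod;              /* software producer index */
        uint64_t wrap_bit;              /* toggles every time the index rolls over */
};

static void demo_kick_register(uint64_t val)    /* stands in for nw64(TX_RING_KICK(...), val) */
{
        printf("TX_RING_KICK <= %#llx\n", (unsigned long long)val);
}

/* Publish newly filled descriptors, following the niu_start_xmit() fragments:
 * advance the producer, flip the wrap bit when the index wraps past the end,
 * then write wrap_bit | (prod << 3) to the kick register. */
static void demo_tx_kick(struct demo_tx_prod *t, unsigned int ndescs)
{
        unsigned int prod = (t->prod + ndescs) & (DEMO_RING_SIZE - 1);

        if (prod < t->prod)
                t->wrap_bit ^= DEMO_KICK_WRAP;
        t->prod = prod;

        demo_kick_register(t->wrap_bit | ((uint64_t)prod << 3));
}
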
7815 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_get_ethtool_stats() local
7817 niu_sync_rx_discard_stats(np, rp, 0); in niu_get_ethtool_stats()
7819 data[0] = rp->rx_channel; in niu_get_ethtool_stats()
7820 data[1] = rp->rx_packets; in niu_get_ethtool_stats()
7821 data[2] = rp->rx_bytes; in niu_get_ethtool_stats()
7822 data[3] = rp->rx_dropped; in niu_get_ethtool_stats()
7823 data[4] = rp->rx_errors; in niu_get_ethtool_stats()
7827 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_get_ethtool_stats() local
7829 data[0] = rp->tx_channel; in niu_get_ethtool_stats()
7830 data[1] = rp->tx_packets; in niu_get_ethtool_stats()
7831 data[2] = rp->tx_bytes; in niu_get_ethtool_stats()
7832 data[3] = rp->tx_errors; in niu_get_ethtool_stats()