Lines Matching refs:flaglen

356 __le32 flaglen; member
363 __le32 flaglen; member
955 return le32_to_cpu(prd->flaglen) in nv_descr_getlength()
961 return le32_to_cpu(prd->flaglen) & LEN_MASK_V2; in nv_descr_getlength_ex()
1830 np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL); in nv_alloc_rx()
1872 np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL); in nv_alloc_rx_optimized()
1914 np->rx_ring.orig[i].flaglen = 0; in nv_init_rx()
1917 np->rx_ring.ex[i].flaglen = 0; in nv_init_rx()
1948 np->tx_ring.orig[i].flaglen = 0; in nv_init_tx()
1951 np->tx_ring.ex[i].flaglen = 0; in nv_init_tx()
2011 np->tx_ring.orig[i].flaglen = 0; in nv_drain_tx()
2014 np->tx_ring.ex[i].flaglen = 0; in nv_drain_tx()
2042 np->rx_ring.orig[i].flaglen = 0; in nv_drain_rx()
2045 np->rx_ring.ex[i].flaglen = 0; in nv_drain_rx()
2243 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit()
2291 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit()
2303 prev_tx->flaglen |= cpu_to_le32(tx_flags_extra); in nv_start_xmit()
2317 start_tx->flaglen |= cpu_to_le32(tx_flags | tx_flags_extra); in nv_start_xmit()
2393 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit_optimized()
2441 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit_optimized()
2453 prev_tx->flaglen |= cpu_to_le32(NV_TX2_LASTPACKET); in nv_start_xmit_optimized()
2494 start_tx->flaglen |= cpu_to_le32(tx_flags | tx_flags_extra); in nv_start_xmit_optimized()
2514 np->tx_change_owner->first_tx_desc->flaglen |= in nv_tx_flip_ownership()
2540 !((flags = le32_to_cpu(np->get_tx.orig->flaglen)) & NV_TX_VALID) && in nv_tx_done()
2604 !((flags = le32_to_cpu(np->get_tx.ex->flaglen)) & NV_TX2_VALID) && in nv_tx_done_optimized()
2691 le32_to_cpu(np->tx_ring.orig[i].flaglen), in nv_tx_timeout()
2693 le32_to_cpu(np->tx_ring.orig[i+1].flaglen), in nv_tx_timeout()
2695 le32_to_cpu(np->tx_ring.orig[i+2].flaglen), in nv_tx_timeout()
2697 le32_to_cpu(np->tx_ring.orig[i+3].flaglen)); in nv_tx_timeout()
2707 le32_to_cpu(np->tx_ring.ex[i].flaglen), in nv_tx_timeout()
2710 le32_to_cpu(np->tx_ring.ex[i+1].flaglen), in nv_tx_timeout()
2713 le32_to_cpu(np->tx_ring.ex[i+2].flaglen), in nv_tx_timeout()
2716 le32_to_cpu(np->tx_ring.ex[i+3].flaglen)); in nv_tx_timeout()
2807 !((flags = le32_to_cpu(np->get_rx.orig->flaglen)) & NV_RX_AVAIL) && in nv_rx_process()
2913 !((flags = le32_to_cpu(np->get_rx.ex->flaglen)) & NV_RX2_AVAIL) && in nv_rx_process_optimized()
5089 np->tx_ring.orig[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5093 np->tx_ring.ex[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5102 flags = le32_to_cpu(np->rx_ring.orig[0].flaglen); in nv_loopback_test()
5106 flags = le32_to_cpu(np->rx_ring.ex[0].flaglen); in nv_loopback_test()
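
Every reference above follows the same pattern: flaglen is a single little-endian descriptor word that packs the buffer length in its low bits together with ownership/status flags (NV_RX_AVAIL, NV_TX_VALID, NV_TX2_LASTPACKET, ...), so the driver writes it with cpu_to_le32() and reads it back with le32_to_cpu() plus a mask such as LEN_MASK_V2. Below is a minimal sketch of that pattern; the struct and helper names are illustrative assumptions about the layout behind the two "member" hits at lines 356 and 363 (only the constants and the byte-order helpers actually appear in the hits), not the driver's own definitions.

    /*
     * Sketch only: the flag/length packing implied by the references above,
     * reusing the driver's NV_RX_AVAIL and LEN_MASK_V2 constants.  Struct and
     * function names are hypothetical.
     */
    #include <linux/types.h>
    #include <asm/byteorder.h>

    struct sketch_ring_desc {          /* assumed layout for the line-356 member */
            __le32 buf;                /* DMA address of the packet buffer        */
            __le32 flaglen;            /* length in the low bits, flags above     */
    };

    /* Hand an rx descriptor back to the NIC, as in nv_alloc_rx() (line 1830). */
    static inline void sketch_post_rx(struct sketch_ring_desc *d, u32 buf_sz)
    {
            d->flaglen = cpu_to_le32(buf_sz | NV_RX_AVAIL);
    }

    /* Recover the packet length, as in nv_descr_getlength_ex() (line 961). */
    static inline u32 sketch_getlength(const struct sketch_ring_desc *d)
    {
            return le32_to_cpu(d->flaglen) & LEN_MASK_V2;
    }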