cause_rx_tx  405 drivers/net/ethernet/marvell/mvneta.c u32 cause_rx_tx;
cause_rx_tx  427 drivers/net/ethernet/marvell/mvneta.c u32 cause_rx_tx;
cause_rx_tx 2776 drivers/net/ethernet/marvell/mvneta.c u32 cause_rx_tx;
cause_rx_tx 2787 drivers/net/ethernet/marvell/mvneta.c cause_rx_tx = mvreg_read(pp, MVNETA_INTR_NEW_CAUSE);
cause_rx_tx 2788 drivers/net/ethernet/marvell/mvneta.c if (cause_rx_tx & MVNETA_MISCINTR_INTR_MASK) {
cause_rx_tx 2799 drivers/net/ethernet/marvell/mvneta.c if (cause_rx_tx & MVNETA_TX_INTR_MASK_ALL) {
cause_rx_tx 2800 drivers/net/ethernet/marvell/mvneta.c mvneta_tx_done_gbe(pp, (cause_rx_tx & MVNETA_TX_INTR_MASK_ALL));
cause_rx_tx 2801 drivers/net/ethernet/marvell/mvneta.c cause_rx_tx &= ~MVNETA_TX_INTR_MASK_ALL;
cause_rx_tx 2807 drivers/net/ethernet/marvell/mvneta.c cause_rx_tx |= pp->neta_armada3700 ? pp->cause_rx_tx :
cause_rx_tx 2808 drivers/net/ethernet/marvell/mvneta.c port->cause_rx_tx;
cause_rx_tx 2810 drivers/net/ethernet/marvell/mvneta.c rx_queue = fls(((cause_rx_tx >> 8) & 0xff));
cause_rx_tx 2822 drivers/net/ethernet/marvell/mvneta.c cause_rx_tx = 0;
cause_rx_tx 2840 drivers/net/ethernet/marvell/mvneta.c pp->cause_rx_tx = cause_rx_tx;
cause_rx_tx 2842 drivers/net/ethernet/marvell/mvneta.c port->cause_rx_tx = cause_rx_tx;
cause_rx_tx 3331 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c u32 cause_rx_tx, cause_rx, cause_tx, cause_misc;
cause_rx_tx 3349 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c cause_rx_tx = mvpp2_thread_read_relaxed(port->priv, qv->sw_thread_id,
cause_rx_tx 3352 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c cause_misc = cause_rx_tx & MVPP2_CAUSE_MISC_SUM_MASK;
cause_rx_tx 3360 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c cause_rx_tx & ~MVPP2_CAUSE_MISC_SUM_MASK);
cause_rx_tx 3364 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c cause_tx = cause_rx_tx & MVPP2_CAUSE_TXQ_OCCUP_DESC_ALL_MASK;
cause_rx_tx 3372 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c cause_rx = cause_rx_tx &
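The mvneta occurrences above all sit in the driver's NAPI poll path: the combined cause word is read from MVNETA_INTR_NEW_CAUSE (line 2787), the miscellaneous and TX-done bits are peeled off (lines 2788, 2799-2801), leftover RX work saved by the previous poll is OR-ed back in (lines 2807-2808), and fls() picks the highest pending RX queue (line 2810). The mvpp2 occurrences follow the same pattern with per-thread register reads and MVPP2_CAUSE_* masks. Below is a minimal, self-contained user-space sketch of that bit-splitting, not driver code: the DEMO_* mask values, the bit layout, and demo_fls() are illustrative assumptions standing in for the real MVNETA_* definitions and the kernel's fls().

/*
 * Standalone sketch: split a combined RX/TX/misc interrupt cause word
 * the way the fragments above suggest. Masks and layout are assumed,
 * not taken from the driver.
 */
#include <stdio.h>
#include <stdint.h>

/* Assumed layout: TX queue bits 0..7, RX queue bits 8..15, misc bit 31. */
#define DEMO_TX_INTR_MASK_ALL   0x000000ffu
#define DEMO_MISCINTR_INTR_MASK 0x80000000u

/* find-last-set, 1-based, 0 when no bit is set (like the kernel's fls()) */
static int demo_fls(uint32_t x)
{
	int pos = 0;

	while (x) {
		pos++;
		x >>= 1;
	}
	return pos;
}

int main(void)
{
	/* Pretend this value came from a cause-register read plus leftover
	 * work carried over from the previous poll (pp->cause_rx_tx). */
	uint32_t cause_rx_tx = 0x80000204u; /* misc + RX queue 1 + TX queue 2 */
	int rx_queue;

	if (cause_rx_tx & DEMO_MISCINTR_INTR_MASK)
		printf("misc event pending\n");

	/* Handle TX-done work first, then clear those bits (cf. 2799-2801). */
	if (cause_rx_tx & DEMO_TX_INTR_MASK_ALL) {
		printf("tx-done work on queue bitmap 0x%02x\n",
		       (unsigned int)(cause_rx_tx & DEMO_TX_INTR_MASK_ALL));
		cause_rx_tx &= ~DEMO_TX_INTR_MASK_ALL;
	}

	/* RX bits 8..15 hold one bit per queue: shift them down and take the
	 * last set bit to find the highest pending queue (cf. line 2810). */
	rx_queue = demo_fls((cause_rx_tx >> 8) & 0xff);
	if (rx_queue > 0)
		printf("highest pending RX queue: %d\n", rx_queue - 1);

	return 0;
}

Compiled and run, this prints the misc flag, the TX queue bitmap 0x04, and RX queue 1; in the real driver the remaining cause bits are saved back into pp->cause_rx_tx or port->cause_rx_tx (lines 2840, 2842) so unfinished RX work carries into the next poll.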