tc_to_txq 8513 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c txq = vdev->tc_to_txq[tc].offset;
tc_to_txq 8515 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c vdev->tc_to_txq[tc].count);
tc_to_txq  449 drivers/net/ethernet/sfc/falcon/tx.c net_dev->tc_to_txq[tc].offset = tc * efx->n_tx_channels;
tc_to_txq  450 drivers/net/ethernet/sfc/falcon/tx.c net_dev->tc_to_txq[tc].count = efx->n_tx_channels;
tc_to_txq  706 drivers/net/ethernet/sfc/tx.c net_dev->tc_to_txq[tc].offset = tc * efx->n_tx_channels;
tc_to_txq  707 drivers/net/ethernet/sfc/tx.c net_dev->tc_to_txq[tc].count = efx->n_tx_channels;
tc_to_txq 2052 include/linux/netdevice.h struct netdev_tc_txq tc_to_txq[TC_MAX_QUEUE];
tc_to_txq 2019 net/core/dev.c struct netdev_tc_txq *tc = &dev->tc_to_txq[0];
tc_to_txq 2032 net/core/dev.c tc = &dev->tc_to_txq[q];
tc_to_txq 2044 net/core/dev.c struct netdev_tc_txq *tc = &dev->tc_to_txq[0];
tc_to_txq 2472 net/core/dev.c memset(dev->tc_to_txq, 0, sizeof(dev->tc_to_txq));
tc_to_txq 2485 net/core/dev.c dev->tc_to_txq[tc].count = count;
tc_to_txq 2486 net/core/dev.c dev->tc_to_txq[tc].offset = offset;
tc_to_txq 2514 net/core/dev.c memset(sb_dev->tc_to_txq, 0, sizeof(sb_dev->tc_to_txq));
tc_to_txq 2537 net/core/dev.c sb_dev->tc_to_txq[tc].count = count;
tc_to_txq 2538 net/core/dev.c sb_dev->tc_to_txq[tc].offset = offset;
tc_to_txq 2792 net/core/dev.c qoffset = sb_dev->tc_to_txq[tc].offset;
tc_to_txq 2793 net/core/dev.c qcount = sb_dev->tc_to_txq[tc].count;
tc_to_txq  433 net/sched/sch_mqprio.c opt.count[tc] = dev->tc_to_txq[tc].count;
tc_to_txq  434 net/sched/sch_mqprio.c opt.offset[tc] = dev->tc_to_txq[tc].offset;
tc_to_txq  519 net/sched/sch_mqprio.c struct netdev_tc_txq tc = dev->tc_to_txq[cl & TC_BITMASK];
tc_to_txq 1368 net/sched/sch_taprio.c if (dev->tc_to_txq[i].count != mqprio->count[i] ||
tc_to_txq 1369 net/sched/sch_taprio.c dev->tc_to_txq[i].offset != mqprio->offset[i])
tc_to_txq 1794 net/sched/sch_taprio.c opt.count[i] = dev->tc_to_txq[i].count;
tc_to_txq 1795 net/sched/sch_taprio.c opt.offset[i] = dev->tc_to_txq[i].offset;