Lines matching refs: q_vector (all hits are in drivers/net/ethernet/intel/i40e/i40e_main.c, per the in-function annotations)
352 I40E_PFINT_DYN_CTLN(tx_ring->q_vector->v_idx + in i40e_tx_timeout()
2699 if (!ring->q_vector || !ring->netdev) in i40e_config_xps_tx_ring()
2706 &ring->q_vector->affinity_mask, in i40e_config_xps_tx_ring()
3088 struct i40e_q_vector *q_vector = vsi->q_vectors[i]; in i40e_vsi_configure_msix() local
3090 q_vector->itr_countdown = ITR_COUNTDOWN_START; in i40e_vsi_configure_msix()
3091 q_vector->rx.itr = ITR_TO_REG(vsi->rx_itr_setting); in i40e_vsi_configure_msix()
3092 q_vector->rx.latency_range = I40E_LOW_LATENCY; in i40e_vsi_configure_msix()
3094 q_vector->rx.itr); in i40e_vsi_configure_msix()
3095 q_vector->tx.itr = ITR_TO_REG(vsi->tx_itr_setting); in i40e_vsi_configure_msix()
3096 q_vector->tx.latency_range = I40E_LOW_LATENCY; in i40e_vsi_configure_msix()
3098 q_vector->tx.itr); in i40e_vsi_configure_msix()
3104 for (q = 0; q < q_vector->num_ringpairs; q++) { in i40e_vsi_configure_msix()
3124 if (q == (q_vector->num_ringpairs - 1)) in i40e_vsi_configure_msix()
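
Taken together, the i40e_vsi_configure_msix() hits outline this per-vector setup. The following is a hedged reconstruction, not the driver's verbatim code: the QINT register programming inside the ring-pair loop is elided, and vsi->num_q_vectors as the loop bound is an assumption.

	int i, q;

	for (i = 0; i < vsi->num_q_vectors; i++) {
		struct i40e_q_vector *q_vector = vsi->q_vectors[i];

		/* seed both ITRs from the VSI settings before enabling */
		q_vector->itr_countdown = ITR_COUNTDOWN_START;
		q_vector->rx.itr = ITR_TO_REG(vsi->rx_itr_setting);
		q_vector->rx.latency_range = I40E_LOW_LATENCY;
		q_vector->tx.itr = ITR_TO_REG(vsi->tx_itr_setting);
		q_vector->tx.latency_range = I40E_LOW_LATENCY;

		/* chain every ring pair owned by this vector into the
		 * hardware's queue-interrupt list; the pair at
		 * num_ringpairs - 1 terminates the chain
		 */
		for (q = 0; q < q_vector->num_ringpairs; q++) {
			/* wr32() the QINT RQCTL/TQCTL entries here (elided) */
		}
	}
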
3180 struct i40e_q_vector *q_vector = vsi->q_vectors[0]; in i40e_configure_msi_and_legacy() local
3186 q_vector->itr_countdown = ITR_COUNTDOWN_START; in i40e_configure_msi_and_legacy()
3187 q_vector->rx.itr = ITR_TO_REG(vsi->rx_itr_setting); in i40e_configure_msi_and_legacy()
3188 q_vector->rx.latency_range = I40E_LOW_LATENCY; in i40e_configure_msi_and_legacy()
3189 wr32(hw, I40E_PFINT_ITR0(I40E_RX_ITR), q_vector->rx.itr); in i40e_configure_msi_and_legacy()
3190 q_vector->tx.itr = ITR_TO_REG(vsi->tx_itr_setting); in i40e_configure_msi_and_legacy()
3191 q_vector->tx.latency_range = I40E_LOW_LATENCY; in i40e_configure_msi_and_legacy()
3192 wr32(hw, I40E_PFINT_ITR0(I40E_TX_ITR), q_vector->tx.itr); in i40e_configure_msi_and_legacy()
3267 struct i40e_q_vector *q_vector = data; in i40e_msix_clean_rings() local
3269 if (!q_vector->tx.ring && !q_vector->rx.ring) in i40e_msix_clean_rings()
3272 napi_schedule_irqoff(&q_vector->napi); in i40e_msix_clean_rings()
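
The i40e_msix_clean_rings() hits are nearly the whole handler; filling in the gaps (the IRQ_HANDLED returns are assumptions, though standard for this pattern) gives:

	static irqreturn_t i40e_msix_clean_rings(int irq, void *data)
	{
		struct i40e_q_vector *q_vector = data;

		/* nothing to do for a vector that owns no rings */
		if (!q_vector->tx.ring && !q_vector->rx.ring)
			return IRQ_HANDLED;

		/* defer the real work to NAPI; _irqoff is safe because we
		 * are in hard-IRQ context with interrupts already disabled
		 */
		napi_schedule_irqoff(&q_vector->napi);

		return IRQ_HANDLED;
	}
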
3294 struct i40e_q_vector *q_vector = vsi->q_vectors[vector]; in i40e_vsi_request_irq_msix() local
3296 if (q_vector->tx.ring && q_vector->rx.ring) { in i40e_vsi_request_irq_msix()
3297 snprintf(q_vector->name, sizeof(q_vector->name) - 1, in i40e_vsi_request_irq_msix()
3300 } else if (q_vector->rx.ring) { in i40e_vsi_request_irq_msix()
3301 snprintf(q_vector->name, sizeof(q_vector->name) - 1, in i40e_vsi_request_irq_msix()
3303 } else if (q_vector->tx.ring) { in i40e_vsi_request_irq_msix()
3304 snprintf(q_vector->name, sizeof(q_vector->name) - 1, in i40e_vsi_request_irq_msix()
3313 q_vector->name, in i40e_vsi_request_irq_msix()
3314 q_vector); in i40e_vsi_request_irq_msix()
3322 &q_vector->affinity_mask); in i40e_vsi_request_irq_msix()
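
i40e_vsi_request_irq_msix() names each vector after the rings it carries before requesting the IRQ and hinting its affinity. A fragment sketch of that loop follows; basename, the msix_entries indexing via base, the counter variables, and the free_queue_irqs unwind path are assumptions, with surrounding declarations elided.

	for (vector = 0; vector < q_vectors; vector++) {
		struct i40e_q_vector *q_vector = vsi->q_vectors[vector];

		/* encode the ring mix into the name seen in /proc/interrupts */
		if (q_vector->tx.ring && q_vector->rx.ring)
			snprintf(q_vector->name, sizeof(q_vector->name) - 1,
				 "%s-TxRx-%d", basename, rx_int_idx++);
		else if (q_vector->rx.ring)
			snprintf(q_vector->name, sizeof(q_vector->name) - 1,
				 "%s-rx-%d", basename, rx_int_idx++);
		else if (q_vector->tx.ring)
			snprintf(q_vector->name, sizeof(q_vector->name) - 1,
				 "%s-tx-%d", basename, tx_int_idx++);
		else
			continue;	/* vector owns no rings; skip it */

		err = request_irq(pf->msix_entries[base + vector].vector,
				  vsi->irq_handler, 0, q_vector->name,
				  q_vector);
		if (err)
			goto free_queue_irqs;

		/* bias the vector toward its preferred CPU */
		irq_set_affinity_hint(pf->msix_entries[base + vector].vector,
				      &q_vector->affinity_mask);
	}
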
3442 struct i40e_q_vector *q_vector = vsi->q_vectors[0]; in i40e_intr() local
3455 napi_schedule_irqoff(&q_vector->napi); in i40e_intr()
3617 i40e_irq_dynamic_enable(vsi, tx_ring->q_vector->v_idx); in i40e_clean_fdir_tx_irq()
3629 struct i40e_q_vector *q_vector = data; in i40e_fdir_clean_ring() local
3632 if (!q_vector->tx.ring) in i40e_fdir_clean_ring()
3635 vsi = q_vector->tx.ring->vsi; in i40e_fdir_clean_ring()
3636 i40e_clean_fdir_tx_irq(q_vector->tx.ring, vsi->work_limit); in i40e_fdir_clean_ring()
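
The flow-director path gets the same treatment in miniature: no NAPI, the FDIR Tx ring is cleaned directly under the VSI's work limit. i40e_fdir_clean_ring() reconstructs almost entirely from its hits (the irqreturn_t framing is an assumption):

	static irqreturn_t i40e_fdir_clean_ring(int irq, void *data)
	{
		struct i40e_q_vector *q_vector = data;
		struct i40e_vsi *vsi;

		if (!q_vector->tx.ring)
			return IRQ_HANDLED;

		/* clean the FDIR ring inline, bounded by the work limit */
		vsi = q_vector->tx.ring->vsi;
		i40e_clean_fdir_tx_irq(q_vector->tx.ring, vsi->work_limit);

		return IRQ_HANDLED;
	}
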
3649 struct i40e_q_vector *q_vector = vsi->q_vectors[v_idx]; in i40e_map_vector_to_qp() local
3653 tx_ring->q_vector = q_vector; in i40e_map_vector_to_qp()
3654 tx_ring->next = q_vector->tx.ring; in i40e_map_vector_to_qp()
3655 q_vector->tx.ring = tx_ring; in i40e_map_vector_to_qp()
3656 q_vector->tx.count++; in i40e_map_vector_to_qp()
3658 rx_ring->q_vector = q_vector; in i40e_map_vector_to_qp()
3659 rx_ring->next = q_vector->rx.ring; in i40e_map_vector_to_qp()
3660 q_vector->rx.ring = rx_ring; in i40e_map_vector_to_qp()
3661 q_vector->rx.count++; in i40e_map_vector_to_qp()
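
The i40e_map_vector_to_qp() hits show the core mapping trick: each ring is pushed onto the head of the vector's singly linked ring list through ring->next, so one vector can service any number of queue pairs. Reconstructed below; the tx_rings/rx_rings array lookups are assumptions consistent with the VSI layout.

	static void i40e_map_vector_to_qp(struct i40e_vsi *vsi, int v_idx,
					  int qp_idx)
	{
		struct i40e_q_vector *q_vector = vsi->q_vectors[v_idx];
		struct i40e_ring *tx_ring = vsi->tx_rings[qp_idx];
		struct i40e_ring *rx_ring = vsi->rx_rings[qp_idx];

		/* head insertion: the new ring points at the old list head */
		tx_ring->q_vector = q_vector;
		tx_ring->next = q_vector->tx.ring;
		q_vector->tx.ring = tx_ring;
		q_vector->tx.count++;

		rx_ring->q_vector = q_vector;
		rx_ring->next = q_vector->rx.ring;
		q_vector->rx.ring = rx_ring;
		q_vector->rx.count++;
	}
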
3689 struct i40e_q_vector *q_vector = vsi->q_vectors[v_start]; in i40e_vsi_map_rings_to_vectors() local
3693 q_vector->num_ringpairs = num_ringpairs; in i40e_vsi_map_rings_to_vectors()
3695 q_vector->rx.count = 0; in i40e_vsi_map_rings_to_vectors()
3696 q_vector->tx.count = 0; in i40e_vsi_map_rings_to_vectors()
3697 q_vector->rx.ring = NULL; in i40e_vsi_map_rings_to_vectors()
3698 q_vector->tx.ring = NULL; in i40e_vsi_map_rings_to_vectors()
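
Its caller, i40e_vsi_map_rings_to_vectors(), zeroes each vector's ring lists before handing it a share of the queue pairs. A sketch of the distribution follows; the DIV_ROUND_UP split over the remaining vectors is an assumption, though it matches num_ringpairs being set per vector.

	int qp_remaining = vsi->num_queue_pairs;
	int q_vectors = vsi->num_q_vectors;
	int qp_idx = 0, v_start;

	for (v_start = 0; v_start < q_vectors; v_start++) {
		struct i40e_q_vector *q_vector = vsi->q_vectors[v_start];
		int num_ringpairs = DIV_ROUND_UP(qp_remaining,
						 q_vectors - v_start);

		/* spread the remaining pairs evenly over the remaining
		 * vectors, then rebuild this vector's lists from scratch
		 */
		q_vector->num_ringpairs = num_ringpairs;
		q_vector->rx.count = 0;
		q_vector->tx.count = 0;
		q_vector->rx.ring = NULL;
		q_vector->tx.ring = NULL;

		while (num_ringpairs--) {
			i40e_map_vector_to_qp(vsi, v_start, qp_idx);
			qp_idx++;
			qp_remaining--;
		}
	}
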
4078 struct i40e_q_vector *q_vector = vsi->q_vectors[v_idx]; in i40e_free_q_vector() local
4081 if (!q_vector) in i40e_free_q_vector()
4085 i40e_for_each_ring(ring, q_vector->tx) in i40e_free_q_vector()
4086 ring->q_vector = NULL; in i40e_free_q_vector()
4088 i40e_for_each_ring(ring, q_vector->rx) in i40e_free_q_vector()
4089 ring->q_vector = NULL; in i40e_free_q_vector()
4093 netif_napi_del(&q_vector->napi); in i40e_free_q_vector()
4097 kfree_rcu(q_vector, rcu); in i40e_free_q_vector()
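
Teardown in i40e_free_q_vector() mirrors the mapping: every ring that pointed at the vector is detached first, and the vector itself is freed through RCU so a concurrent reader still walking ring->q_vector cannot touch freed memory. A reconstruction (the vsi->netdev guard around netif_napi_del() is an assumption):

	static void i40e_free_q_vector(struct i40e_vsi *vsi, int v_idx)
	{
		struct i40e_q_vector *q_vector = vsi->q_vectors[v_idx];
		struct i40e_ring *ring;

		if (!q_vector)
			return;

		/* disassociate the rings from this vector */
		i40e_for_each_ring(ring, q_vector->tx)
			ring->q_vector = NULL;

		i40e_for_each_ring(ring, q_vector->rx)
			ring->q_vector = NULL;

		/* only VSIs with a netdev registered a NAPI context */
		if (vsi->netdev)
			netif_napi_del(&q_vector->napi);

		vsi->q_vectors[v_idx] = NULL;
		kfree_rcu(q_vector, rcu);
	}
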
4358 I40E_PFINT_DYN_CTLN(tx_ring->q_vector->v_idx + in i40e_detect_recover_hung_queue()
4373 i40e_force_wb(vsi, tx_ring->q_vector); in i40e_detect_recover_hung_queue()
7620 struct i40e_q_vector *q_vector; in i40e_vsi_alloc_q_vector() local
7623 q_vector = kzalloc(sizeof(struct i40e_q_vector), GFP_KERNEL); in i40e_vsi_alloc_q_vector()
7624 if (!q_vector) in i40e_vsi_alloc_q_vector()
7627 q_vector->vsi = vsi; in i40e_vsi_alloc_q_vector()
7628 q_vector->v_idx = v_idx; in i40e_vsi_alloc_q_vector()
7629 cpumask_set_cpu(v_idx, &q_vector->affinity_mask); in i40e_vsi_alloc_q_vector()
7631 netif_napi_add(vsi->netdev, &q_vector->napi, in i40e_vsi_alloc_q_vector()
7634 q_vector->rx.latency_range = I40E_LOW_LATENCY; in i40e_vsi_alloc_q_vector()
7635 q_vector->tx.latency_range = I40E_LOW_LATENCY; in i40e_vsi_alloc_q_vector()
7638 vsi->q_vectors[v_idx] = q_vector; in i40e_vsi_alloc_q_vector()
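
Finally, i40e_vsi_alloc_q_vector() creates one vector per interrupt slot with a default affinity of CPU v_idx and both ITR ranges starting at low latency. A sketch; the i40e_napi_poll callback, NAPI_POLL_WEIGHT, the netdev guard, and the -ENOMEM return are assumptions consistent with drivers of this era.

	static int i40e_vsi_alloc_q_vector(struct i40e_vsi *vsi, int v_idx)
	{
		struct i40e_q_vector *q_vector;

		q_vector = kzalloc(sizeof(struct i40e_q_vector), GFP_KERNEL);
		if (!q_vector)
			return -ENOMEM;

		q_vector->vsi = vsi;
		q_vector->v_idx = v_idx;
		/* default hint: vector N prefers CPU N */
		cpumask_set_cpu(v_idx, &q_vector->affinity_mask);

		if (vsi->netdev)
			netif_napi_add(vsi->netdev, &q_vector->napi,
				       i40e_napi_poll, NAPI_POLL_WEIGHT);

		q_vector->rx.latency_range = I40E_LOW_LATENCY;
		q_vector->tx.latency_range = I40E_LOW_LATENCY;

		/* tie the vector and the VSI together */
		vsi->q_vectors[v_idx] = q_vector;

		return 0;
	}
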