Lines matching refs: q_vector
221 static inline void ixgbevf_qv_init_lock(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_init_lock() argument
223 spin_lock_init(&q_vector->lock); in ixgbevf_qv_init_lock()
224 q_vector->state = IXGBEVF_QV_STATE_IDLE; in ixgbevf_qv_init_lock()
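
These matches read like the busy-poll (CONFIG_NET_RX_BUSY_POLL) locking helpers from drivers/net/ethernet/intel/ixgbevf/ixgbevf.h. ixgbevf_qv_init_lock() runs when a vector is brought up: it initializes the per-vector spinlock and parks the state machine in IXGBEVF_QV_STATE_IDLE. The state word is a small bit mask shared between NAPI and socket busy polling; the layout below is a sketch of the flag definitions that sit just above these helpers in that header, with the exact values quoted from memory and worth re-checking against the tree.

    #define IXGBEVF_QV_STATE_IDLE        0
    #define IXGBEVF_QV_STATE_NAPI        1   /* NAPI owns this q_vector */
    #define IXGBEVF_QV_STATE_POLL        2   /* busy-poll owns this q_vector */
    #define IXGBEVF_QV_STATE_DISABLED    4   /* q_vector is being disabled */
    #define IXGBEVF_QV_OWNED     (IXGBEVF_QV_STATE_NAPI | IXGBEVF_QV_STATE_POLL)
    #define IXGBEVF_QV_LOCKED    (IXGBEVF_QV_OWNED | IXGBEVF_QV_STATE_DISABLED)
    #define IXGBEVF_QV_STATE_NAPI_YIELD  8   /* NAPI yielded this q_vector */
    #define IXGBEVF_QV_STATE_POLL_YIELD  16  /* busy-poll yielded this q_vector */
    #define IXGBEVF_QV_YIELD     (IXGBEVF_QV_STATE_NAPI_YIELD | IXGBEVF_QV_STATE_POLL_YIELD)
    #define IXGBEVF_QV_USER_PEND (IXGBEVF_QV_STATE_POLL | IXGBEVF_QV_STATE_POLL_YIELD)

Because IDLE is 0, the unlock helpers can drop the ownership and yield bits in one step with state &= IXGBEVF_QV_STATE_DISABLED, preserving only a pending disable (see lines 260 and 296 below).
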
228 static inline bool ixgbevf_qv_lock_napi(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_lock_napi() argument
232 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_lock_napi()
233 if (q_vector->state & IXGBEVF_QV_LOCKED) { in ixgbevf_qv_lock_napi()
234 WARN_ON(q_vector->state & IXGBEVF_QV_STATE_NAPI); in ixgbevf_qv_lock_napi()
235 q_vector->state |= IXGBEVF_QV_STATE_NAPI_YIELD; in ixgbevf_qv_lock_napi()
238 q_vector->tx.ring->stats.yields++; in ixgbevf_qv_lock_napi()
242 q_vector->state = IXGBEVF_QV_STATE_NAPI; in ixgbevf_qv_lock_napi()
244 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_lock_napi()
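
The matches above show only the contended branch of ixgbevf_qv_lock_napi(). A minimal sketch of the whole helper follows; the rc local, the else arm, and the return are reconstructions from context, and in the in-tree code the yield counter is additionally guarded by BP_EXTENDED_STATS.

    /* Called from the NAPI poll routine to take ownership of a q_vector.
     * Sketch reconstructed around the matched lines above.
     */
    static inline bool ixgbevf_qv_lock_napi(struct ixgbevf_q_vector *q_vector)
    {
        bool rc = true;

        spin_lock_bh(&q_vector->lock);
        if (q_vector->state & IXGBEVF_QV_LOCKED) {
            /* a busy-poll user (or a disable) holds the vector: yield */
            WARN_ON(q_vector->state & IXGBEVF_QV_STATE_NAPI);
            q_vector->state |= IXGBEVF_QV_STATE_NAPI_YIELD;
            q_vector->tx.ring->stats.yields++;
            rc = false;
        } else {
            /* uncontended: take ownership, clearing any stale yield bits */
            q_vector->state = IXGBEVF_QV_STATE_NAPI;
        }
        spin_unlock_bh(&q_vector->lock);
        return rc;
    }

ixgbevf_qv_lock_poll() (lines 266-281) is the mirror image on the busy-poll side: it ORs in IXGBEVF_QV_STATE_POLL_YIELD or IXGBEVF_QV_STATE_POLL and bumps the Rx ring's yield counter instead of the Tx ring's.
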
249 static inline bool ixgbevf_qv_unlock_napi(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_unlock_napi() argument
253 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_unlock_napi()
254 WARN_ON(q_vector->state & (IXGBEVF_QV_STATE_POLL | in ixgbevf_qv_unlock_napi()
257 if (q_vector->state & IXGBEVF_QV_STATE_POLL_YIELD) in ixgbevf_qv_unlock_napi()
260 q_vector->state &= IXGBEVF_QV_STATE_DISABLED; in ixgbevf_qv_unlock_napi()
261 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_unlock_napi()
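
The WARN_ON at line 254 is cut short by the per-line match; its second operand is IXGBEVF_QV_STATE_NAPI_YIELD. With the return value filled in (again a reconstruction), the unlock side reads roughly:

    /* Returns true if busy polling tried to grab the vector while NAPI held it. */
    static inline bool ixgbevf_qv_unlock_napi(struct ixgbevf_q_vector *q_vector)
    {
        bool rc = false;

        spin_lock_bh(&q_vector->lock);
        WARN_ON(q_vector->state & (IXGBEVF_QV_STATE_POLL |
                                   IXGBEVF_QV_STATE_NAPI_YIELD));

        if (q_vector->state & IXGBEVF_QV_STATE_POLL_YIELD)
            rc = true;
        /* back to IDLE (0) unless a disable is pending */
        q_vector->state &= IXGBEVF_QV_STATE_DISABLED;
        spin_unlock_bh(&q_vector->lock);
        return rc;
    }

ixgbevf_qv_unlock_poll() (lines 286-297) is nearly identical, warning on IXGBEVF_QV_STATE_NAPI instead.
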
266 static inline bool ixgbevf_qv_lock_poll(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_lock_poll() argument
270 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_lock_poll()
271 if ((q_vector->state & IXGBEVF_QV_LOCKED)) { in ixgbevf_qv_lock_poll()
272 q_vector->state |= IXGBEVF_QV_STATE_POLL_YIELD; in ixgbevf_qv_lock_poll()
275 q_vector->rx.ring->stats.yields++; in ixgbevf_qv_lock_poll()
279 q_vector->state |= IXGBEVF_QV_STATE_POLL; in ixgbevf_qv_lock_poll()
281 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_lock_poll()
286 static inline bool ixgbevf_qv_unlock_poll(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_unlock_poll() argument
290 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_unlock_poll()
291 WARN_ON(q_vector->state & (IXGBEVF_QV_STATE_NAPI)); in ixgbevf_qv_unlock_poll()
293 if (q_vector->state & IXGBEVF_QV_STATE_POLL_YIELD) in ixgbevf_qv_unlock_poll()
296 q_vector->state &= IXGBEVF_QV_STATE_DISABLED; in ixgbevf_qv_unlock_poll()
297 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_unlock_poll()
302 static inline bool ixgbevf_qv_busy_polling(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_busy_polling() argument
304 WARN_ON(!(q_vector->state & IXGBEVF_QV_OWNED)); in ixgbevf_qv_busy_polling()
305 return q_vector->state & IXGBEVF_QV_USER_PEND; in ixgbevf_qv_busy_polling()
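
ixgbevf_qv_busy_polling() only reads the state (USER_PEND covers both POLL and POLL_YIELD), so the Rx cleanup path can ask whether a socket is busy-polling this vector right now and hand the skb over directly instead of going through GRO. A sketch of how it is consumed, modeled on the driver's Rx-skb hand-off; the exact function body is reconstructed, not quoted from the listing.

    static void ixgbevf_rx_skb(struct ixgbevf_q_vector *q_vector,
                               struct sk_buff *skb)
    {
    #ifdef CONFIG_NET_RX_BUSY_POLL
        skb_mark_napi_id(skb, &q_vector->napi);

        if (ixgbevf_qv_busy_polling(q_vector)) {
            /* deliver straight to the busy-polling socket, bypassing GRO */
            netif_receive_skb(skb);
            return;
        }
    #endif
        napi_gro_receive(&q_vector->napi, skb);
    }
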
309 static inline bool ixgbevf_qv_disable(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_disable() argument
313 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_disable()
314 if (q_vector->state & IXGBEVF_QV_OWNED) in ixgbevf_qv_disable()
316 q_vector->state |= IXGBEVF_QV_STATE_DISABLED; in ixgbevf_qv_disable()
317 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_disable()
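
Taken together, the helpers arbitrate a q_vector between three parties: the NAPI poll loop, a busy-polling socket, and teardown. The sketch below shows the typical call pattern; the function names ixgbevf_poll(), ixgbevf_busy_poll_recv(), and ixgbevf_napi_disable_all(), the loop bounds, and the elided ring-cleaning code are illustrative reconstructions rather than quotes from this listing.

    /* Illustrative: names and surrounding details reconstructed, not quoted. */

    /* NAPI poll: back off and stay scheduled if busy polling owns the vector */
    static int ixgbevf_poll(struct napi_struct *napi, int budget)
    {
        struct ixgbevf_q_vector *q_vector =
            container_of(napi, struct ixgbevf_q_vector, napi);

        if (!ixgbevf_qv_lock_napi(q_vector))
            return budget;

        /* ... clean Tx and Rx rings ... */

        ixgbevf_qv_unlock_napi(q_vector);
        return 0;
    }

    /* ndo_busy_poll hook: low-latency receive from process context */
    static int ixgbevf_busy_poll_recv(struct napi_struct *napi)
    {
        struct ixgbevf_q_vector *q_vector =
            container_of(napi, struct ixgbevf_q_vector, napi);
        int found = 0;

        if (!ixgbevf_qv_lock_poll(q_vector))
            return LL_FLUSH_BUSY;   /* NAPI owns the vector right now */

        /* ... poll the Rx rings, accumulating packets into 'found' ... */

        ixgbevf_qv_unlock_poll(q_vector);
        return found;
    }

    /* Teardown: mark each vector disabled, waiting out any current owner */
    static void ixgbevf_napi_disable_all(struct ixgbevf_adapter *adapter)
    {
        int q_idx;
        int q_vectors = adapter->num_msix_vectors - NON_Q_VECTORS;

        for (q_idx = 0; q_idx < q_vectors; q_idx++) {
            napi_disable(&adapter->q_vector[q_idx]->napi);
            while (!ixgbevf_qv_disable(adapter->q_vector[q_idx]))
                usleep_range(1000, 20000);
        }
    }

LL_FLUSH_BUSY comes from <net/busy_poll.h>; returning it signals that the vector is momentarily owned by NAPI, so the busy-poll core should simply retry rather than treat the miss as an error.
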
382 struct ixgbevf_q_vector *q_vector[MAX_MSIX_Q_VECTORS]; member
502 extern void ixgbevf_write_eitr(struct ixgbevf_q_vector *q_vector);
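
The last two matches are the per-adapter vector array (line 382) and the declaration of ixgbevf_write_eitr(), which programs a vector's interrupt throttle rate register. The array is what the lock helpers are applied across during setup; below is a minimal, hypothetical iteration sketch, where the wrapper name and the NULL check are illustrative and not taken from the driver.

    /* Hypothetical helper: park every allocated vector's busy-poll lock in IDLE. */
    static void ixgbevf_init_qv_locks(struct ixgbevf_adapter *adapter)
    {
        int v_idx;

        for (v_idx = 0; v_idx < MAX_MSIX_Q_VECTORS; v_idx++) {
            struct ixgbevf_q_vector *q_vector = adapter->q_vector[v_idx];

            if (!q_vector)      /* not every slot is necessarily populated */
                continue;
            ixgbevf_qv_init_lock(q_vector);
        }
    }

In the tree itself the init call is made per vector as NAPI is brought up rather than from a dedicated helper like this one.
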