Lines Matching refs:q_vector

219 static inline void ixgbevf_qv_init_lock(struct ixgbevf_q_vector *q_vector)  in ixgbevf_qv_init_lock()  argument
221 spin_lock_init(&q_vector->lock); in ixgbevf_qv_init_lock()
222 q_vector->state = IXGBEVF_QV_STATE_IDLE; in ixgbevf_qv_init_lock()
226 static inline bool ixgbevf_qv_lock_napi(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_lock_napi() argument
230 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_lock_napi()
231 if (q_vector->state & IXGBEVF_QV_LOCKED) { in ixgbevf_qv_lock_napi()
232 WARN_ON(q_vector->state & IXGBEVF_QV_STATE_NAPI); in ixgbevf_qv_lock_napi()
233 q_vector->state |= IXGBEVF_QV_STATE_NAPI_YIELD; in ixgbevf_qv_lock_napi()
236 q_vector->tx.ring->stats.yields++; in ixgbevf_qv_lock_napi()
240 q_vector->state = IXGBEVF_QV_STATE_NAPI; in ixgbevf_qv_lock_napi()
242 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_lock_napi()
247 static inline bool ixgbevf_qv_unlock_napi(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_unlock_napi() argument
251 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_unlock_napi()
252 WARN_ON(q_vector->state & (IXGBEVF_QV_STATE_POLL | in ixgbevf_qv_unlock_napi()
255 if (q_vector->state & IXGBEVF_QV_STATE_POLL_YIELD) in ixgbevf_qv_unlock_napi()
258 q_vector->state &= IXGBEVF_QV_STATE_DISABLED; in ixgbevf_qv_unlock_napi()
259 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_unlock_napi()
264 static inline bool ixgbevf_qv_lock_poll(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_lock_poll() argument
268 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_lock_poll()
269 if ((q_vector->state & IXGBEVF_QV_LOCKED)) { in ixgbevf_qv_lock_poll()
270 q_vector->state |= IXGBEVF_QV_STATE_POLL_YIELD; in ixgbevf_qv_lock_poll()
273 q_vector->rx.ring->stats.yields++; in ixgbevf_qv_lock_poll()
277 q_vector->state |= IXGBEVF_QV_STATE_POLL; in ixgbevf_qv_lock_poll()
279 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_lock_poll()
284 static inline bool ixgbevf_qv_unlock_poll(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_unlock_poll() argument
288 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_unlock_poll()
289 WARN_ON(q_vector->state & (IXGBEVF_QV_STATE_NAPI)); in ixgbevf_qv_unlock_poll()
291 if (q_vector->state & IXGBEVF_QV_STATE_POLL_YIELD) in ixgbevf_qv_unlock_poll()
294 q_vector->state &= IXGBEVF_QV_STATE_DISABLED; in ixgbevf_qv_unlock_poll()
295 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_unlock_poll()
300 static inline bool ixgbevf_qv_busy_polling(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_busy_polling() argument
302 WARN_ON(!(q_vector->state & IXGBEVF_QV_OWNED)); in ixgbevf_qv_busy_polling()
303 return q_vector->state & IXGBEVF_QV_USER_PEND; in ixgbevf_qv_busy_polling()
307 static inline bool ixgbevf_qv_disable(struct ixgbevf_q_vector *q_vector) in ixgbevf_qv_disable() argument
311 spin_lock_bh(&q_vector->lock); in ixgbevf_qv_disable()
312 if (q_vector->state & IXGBEVF_QV_OWNED) in ixgbevf_qv_disable()
314 q_vector->state |= IXGBEVF_QV_STATE_DISABLED; in ixgbevf_qv_disable()
315 spin_unlock_bh(&q_vector->lock); in ixgbevf_qv_disable()
380 struct ixgbevf_q_vector *q_vector[MAX_MSIX_Q_VECTORS]; member
491 extern void ixgbevf_write_eitr(struct ixgbevf_q_vector *q_vector);
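Taken together, the matches above come from the busy-poll locking helpers in the ixgbevf driver header: the NAPI poll loop and the socket busy-poll path each try to take exclusive ownership of a q_vector by setting state bits under q_vector->lock, and record a "yield" when the other side already holds it. The sketch below reconstructs the NAPI-side pair from the matched lines so the state machine is easier to follow; the state-bit values, the BP_EXTENDED_STATS guard, and the comments are assumptions modeled on the sibling ixgbe driver, not a verbatim copy of this source.

/* Assumed state bits, mirroring the ixgbe driver's busy-poll helpers. */
#define IXGBEVF_QV_STATE_IDLE		0
#define IXGBEVF_QV_STATE_NAPI		1	/* NAPI owns this q_vector */
#define IXGBEVF_QV_STATE_POLL		2	/* busy-poll owns this q_vector */
#define IXGBEVF_QV_STATE_DISABLED	4	/* q_vector is disabled */
#define IXGBEVF_QV_OWNED	(IXGBEVF_QV_STATE_NAPI | IXGBEVF_QV_STATE_POLL)
#define IXGBEVF_QV_LOCKED	(IXGBEVF_QV_OWNED | IXGBEVF_QV_STATE_DISABLED)
#define IXGBEVF_QV_STATE_NAPI_YIELD	8	/* NAPI had to give way */
#define IXGBEVF_QV_STATE_POLL_YIELD	16	/* busy-poll had to give way */
#define IXGBEVF_QV_USER_PEND	(IXGBEVF_QV_STATE_POLL | IXGBEVF_QV_STATE_POLL_YIELD)

/* Called from the NAPI poll routine to take ownership of a q_vector. */
static inline bool ixgbevf_qv_lock_napi(struct ixgbevf_q_vector *q_vector)
{
	bool rc = true;

	spin_lock_bh(&q_vector->lock);
	if (q_vector->state & IXGBEVF_QV_LOCKED) {
		/* Busy-poll (or a pending disable) already owns the vector:
		 * note that NAPI yielded so the owner knows to reschedule it.
		 */
		WARN_ON(q_vector->state & IXGBEVF_QV_STATE_NAPI);
		q_vector->state |= IXGBEVF_QV_STATE_NAPI_YIELD;
		rc = false;
#ifdef BP_EXTENDED_STATS
		q_vector->tx.ring->stats.yields++;
#endif
	} else {
		/* We don't care if someone yielded to us earlier. */
		q_vector->state = IXGBEVF_QV_STATE_NAPI;
	}
	spin_unlock_bh(&q_vector->lock);
	return rc;
}

/* Called when NAPI processing is done; returns true if busy-poll had to
 * yield while NAPI held the vector (i.e. more work is pending).
 */
static inline bool ixgbevf_qv_unlock_napi(struct ixgbevf_q_vector *q_vector)
{
	bool rc = false;

	spin_lock_bh(&q_vector->lock);
	WARN_ON(q_vector->state & (IXGBEVF_QV_STATE_POLL |
				   IXGBEVF_QV_STATE_NAPI_YIELD));

	if (q_vector->state & IXGBEVF_QV_STATE_POLL_YIELD)
		rc = true;
	/* Drop back to idle, but keep a pending DISABLED bit if one is set. */
	q_vector->state &= IXGBEVF_QV_STATE_DISABLED;
	spin_unlock_bh(&q_vector->lock);
	return rc;
}

The poll-side pair (ixgbevf_qv_lock_poll/ixgbevf_qv_unlock_poll) is symmetric, using IXGBEVF_QV_STATE_POLL and the rx ring's yield counter, while ixgbevf_qv_disable sets IXGBEVF_QV_STATE_DISABLED so neither side can re-acquire the vector once teardown starts; masking the state with IXGBEVF_QV_STATE_DISABLED on unlock is what preserves that bit across a release.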