Lines Matching refs:tx_queue
283 static inline void efx_farch_notify_tx_desc(struct efx_tx_queue *tx_queue) in efx_farch_notify_tx_desc() argument
288 write_ptr = tx_queue->write_count & tx_queue->ptr_mask; in efx_farch_notify_tx_desc()
290 efx_writed_page(tx_queue->efx, &reg, in efx_farch_notify_tx_desc()
291 FR_AZ_TX_DESC_UPD_DWORD_P0, tx_queue->queue); in efx_farch_notify_tx_desc()
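The matches above only show the pointer computation and the doorbell write. A minimal sketch of how they fit together, with the descriptor-write-pointer field name and the EFX_POPULATE_DWORD_1 helper assumed from the upstream sfc driver rather than taken from this listing:

	/* Ring the TX doorbell: tell the NIC how far write_count has
	 * advanced. ptr_mask wraps the count onto the descriptor ring. */
	static inline void efx_farch_notify_tx_desc(struct efx_tx_queue *tx_queue)
	{
		unsigned int write_ptr;
		efx_dword_t reg;

		write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
		/* Field name assumed from the upstream driver */
		EFX_POPULATE_DWORD_1(reg, FRF_AZ_TX_DESC_WPTR_DWORD, write_ptr);
		efx_writed_page(tx_queue->efx, &reg,
				FR_AZ_TX_DESC_UPD_DWORD_P0, tx_queue->queue);
	}
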
295 static inline void efx_farch_push_tx_desc(struct efx_tx_queue *tx_queue, in efx_farch_push_tx_desc() argument
304 write_ptr = tx_queue->write_count & tx_queue->ptr_mask; in efx_farch_push_tx_desc()
308 efx_writeo_page(tx_queue->efx, &reg, in efx_farch_push_tx_desc()
309 FR_BZ_TX_DESC_UPD_P0, tx_queue->queue); in efx_farch_push_tx_desc()
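The push path writes the first new descriptor together with the updated write pointer in one register write, so the NIC can start fetching without a separate descriptor read. A sketch, with the FRF_AZ_TX_DESC_PUSH_CMD/WPTR field names assumed from the upstream driver:

	static inline void efx_farch_push_tx_desc(struct efx_tx_queue *tx_queue,
						  const efx_qword_t *txd)
	{
		unsigned int write_ptr;
		efx_oword_t reg;

		write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
		EFX_POPULATE_OWORD_2(reg, FRF_AZ_TX_DESC_PUSH_CMD, true,
				     FRF_AZ_TX_DESC_WPTR, write_ptr);
		reg.qword[0] = *txd;	/* descriptor travels in the low qword */
		efx_writeo_page(tx_queue->efx, &reg,
				FR_BZ_TX_DESC_UPD_P0, tx_queue->queue);
	}
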
317 void efx_farch_tx_write(struct efx_tx_queue *tx_queue) in efx_farch_tx_write() argument
322 unsigned old_write_count = tx_queue->write_count; in efx_farch_tx_write()
324 tx_queue->xmit_more_available = false; in efx_farch_tx_write()
325 if (unlikely(tx_queue->write_count == tx_queue->insert_count)) in efx_farch_tx_write()
329 write_ptr = tx_queue->write_count & tx_queue->ptr_mask; in efx_farch_tx_write()
330 buffer = &tx_queue->buffer[write_ptr]; in efx_farch_tx_write()
331 txd = efx_tx_desc(tx_queue, write_ptr); in efx_farch_tx_write()
332 ++tx_queue->write_count; in efx_farch_tx_write()
344 } while (tx_queue->write_count != tx_queue->insert_count); in efx_farch_tx_write()
348 if (efx_nic_may_push_tx_desc(tx_queue, old_write_count)) { in efx_farch_tx_write()
349 txd = efx_tx_desc(tx_queue, in efx_farch_tx_write()
350 old_write_count & tx_queue->ptr_mask); in efx_farch_tx_write()
351 efx_farch_push_tx_desc(tx_queue, txd); in efx_farch_tx_write()
352 ++tx_queue->pushes; in efx_farch_tx_write()
354 efx_farch_notify_tx_desc(tx_queue); in efx_farch_tx_write()
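Put in order, the efx_farch_tx_write() matches give the usual "fill descriptors, then push or ring" shape. In the sketch below the descriptor field names (FSF_AZ_TX_KER_*) and the write barrier are assumed from the upstream driver; only the matched lines are verbatim:

	void efx_farch_tx_write(struct efx_tx_queue *tx_queue)
	{
		struct efx_tx_buffer *buffer;
		efx_qword_t *txd;
		unsigned int write_ptr;
		unsigned int old_write_count = tx_queue->write_count;

		tx_queue->xmit_more_available = false;
		if (unlikely(tx_queue->write_count == tx_queue->insert_count))
			return;		/* nothing new to hand to the NIC */

		do {
			write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
			buffer = &tx_queue->buffer[write_ptr];
			txd = efx_tx_desc(tx_queue, write_ptr);
			++tx_queue->write_count;

			/* Build one hardware descriptor per software buffer;
			 * field names assumed from the upstream driver */
			EFX_POPULATE_QWORD_4(*txd,
					     FSF_AZ_TX_KER_CONT,
					     buffer->flags & EFX_TX_BUF_CONT,
					     FSF_AZ_TX_KER_BYTE_COUNT, buffer->len,
					     FSF_AZ_TX_KER_BUF_REGION, 0,
					     FSF_AZ_TX_KER_BUF_ADDR, buffer->dma_addr);
		} while (tx_queue->write_count != tx_queue->insert_count);

		wmb();	/* descriptors must be visible before the doorbell */

		if (efx_nic_may_push_tx_desc(tx_queue, old_write_count)) {
			/* Queue was empty: push the first descriptor inline */
			txd = efx_tx_desc(tx_queue,
					  old_write_count & tx_queue->ptr_mask);
			efx_farch_push_tx_desc(tx_queue, txd);
			++tx_queue->pushes;
		} else {
			efx_farch_notify_tx_desc(tx_queue);
		}
	}
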
359 int efx_farch_tx_probe(struct efx_tx_queue *tx_queue) in efx_farch_tx_probe() argument
361 struct efx_nic *efx = tx_queue->efx; in efx_farch_tx_probe()
364 entries = tx_queue->ptr_mask + 1; in efx_farch_tx_probe()
365 return efx_alloc_special_buffer(efx, &tx_queue->txd, in efx_farch_tx_probe()
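The probe match allocates one hardware descriptor per ring entry; sizing by sizeof(efx_qword_t) is an assumption based on the 8-byte Falcon TX descriptor format:

	int efx_farch_tx_probe(struct efx_tx_queue *tx_queue)
	{
		struct efx_nic *efx = tx_queue->efx;
		unsigned int entries;

		/* ptr_mask is (ring size - 1), so the ring size is mask + 1 */
		entries = tx_queue->ptr_mask + 1;
		return efx_alloc_special_buffer(efx, &tx_queue->txd,
						entries * sizeof(efx_qword_t));
	}
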
369 void efx_farch_tx_init(struct efx_tx_queue *tx_queue) in efx_farch_tx_init() argument
371 struct efx_nic *efx = tx_queue->efx; in efx_farch_tx_init()
375 efx_init_special_buffer(efx, &tx_queue->txd); in efx_farch_tx_init()
382 FRF_AZ_TX_DESCQ_BUF_BASE_ID, tx_queue->txd.index, in efx_farch_tx_init()
384 tx_queue->channel->channel, in efx_farch_tx_init()
386 FRF_AZ_TX_DESCQ_LABEL, tx_queue->queue, in efx_farch_tx_init()
388 __ffs(tx_queue->txd.entries), in efx_farch_tx_init()
393 int csum = tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD; in efx_farch_tx_init()
400 tx_queue->queue); in efx_farch_tx_init()
407 if (tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD) in efx_farch_tx_init()
408 __clear_bit_le(tx_queue->queue, &reg); in efx_farch_tx_init()
410 __set_bit_le(tx_queue->queue, &reg); in efx_farch_tx_init()
417 (tx_queue->queue & EFX_TXQ_TYPE_HIGHPRI) ? in efx_farch_tx_init()
421 tx_queue->queue); in efx_farch_tx_init()
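A condensed sketch of efx_farch_tx_init() built around the matches above. It shows only the matched fields of the TX_DESC_PTR_TBL entry (the real driver sets several more, including the enable bit), and the checksum/pacing register and field names (FRF_BZ_TX_*_CHKSM_DIS, FR_AA_TX_CHKSM_CFG, FR_BZ_TX_PACE_TBL, FFE_BZ_TX_PACE_*) are assumed from the upstream driver:

	void efx_farch_tx_init(struct efx_tx_queue *tx_queue)	/* condensed */
	{
		struct efx_nic *efx = tx_queue->efx;
		efx_oword_t reg;

		/* Pin the descriptor ring allocated by efx_farch_tx_probe() */
		efx_init_special_buffer(efx, &tx_queue->txd);

		/* Only the matched fields are shown here */
		EFX_POPULATE_OWORD_4(reg,
				     FRF_AZ_TX_DESCQ_BUF_BASE_ID, tx_queue->txd.index,
				     FRF_AZ_TX_DESCQ_EVQ_ID, tx_queue->channel->channel,
				     FRF_AZ_TX_DESCQ_LABEL, tx_queue->queue,
				     FRF_AZ_TX_DESCQ_SIZE, __ffs(tx_queue->txd.entries));

		/* B0 and later: checksum offload is controlled per queue
		 * by fields in the table entry itself */
		if (efx_nic_rev(efx) >= EFX_REV_FALCON_B0) {
			int csum = tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD;

			EFX_SET_OWORD_FIELD(reg, FRF_BZ_TX_IP_CHKSM_DIS, !csum);
			EFX_SET_OWORD_FIELD(reg, FRF_BZ_TX_TCP_CHKSM_DIS, !csum);
		}

		efx_writeo_table(efx, &reg, efx->type->txd_ptr_tbl_base,
				 tx_queue->queue);

		/* Pre-B0: one checksum-enable bit per queue in a global register */
		if (efx_nic_rev(efx) < EFX_REV_FALCON_B0) {
			efx_reado(efx, &reg, FR_AA_TX_CHKSM_CFG);
			if (tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD)
				__clear_bit_le(tx_queue->queue, &reg);
			else
				__set_bit_le(tx_queue->queue, &reg);
			efx_writeo(efx, &reg, FR_AA_TX_CHKSM_CFG);
		}

		/* High-priority queues are exempt from TX pacing */
		if (efx_nic_rev(efx) >= EFX_REV_FALCON_B0) {
			EFX_POPULATE_OWORD_1(reg, FRF_BZ_TX_PACE,
					     (tx_queue->queue & EFX_TXQ_TYPE_HIGHPRI) ?
					     FFE_BZ_TX_PACE_OFF :
					     FFE_BZ_TX_PACE_RESERVED);
			efx_writeo_table(efx, &reg, FR_BZ_TX_PACE_TBL,
					 tx_queue->queue);
		}
	}
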
425 static void efx_farch_flush_tx_queue(struct efx_tx_queue *tx_queue) in efx_farch_flush_tx_queue() argument
427 struct efx_nic *efx = tx_queue->efx; in efx_farch_flush_tx_queue()
430 WARN_ON(atomic_read(&tx_queue->flush_outstanding)); in efx_farch_flush_tx_queue()
431 atomic_set(&tx_queue->flush_outstanding, 1); in efx_farch_flush_tx_queue()
435 FRF_AZ_TX_FLUSH_DESCQ, tx_queue->queue); in efx_farch_flush_tx_queue()
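The flush request is a single register write; the FRF_AZ_TX_FLUSH_DESCQ_CMD field and FR_AZ_TX_FLUSH_DESCQ register are assumed from the upstream driver. efx_farch_do_flush() (matched further down) issues this for every TX queue on every channel:

	static void efx_farch_flush_tx_queue(struct efx_tx_queue *tx_queue)
	{
		struct efx_nic *efx = tx_queue->efx;
		efx_oword_t tx_flush_descq;

		/* Only one flush may be outstanding per queue; the flush-done
		 * event (or efx_check_tx_flush_complete()) clears the flag. */
		WARN_ON(atomic_read(&tx_queue->flush_outstanding));
		atomic_set(&tx_queue->flush_outstanding, 1);

		EFX_POPULATE_OWORD_2(tx_flush_descq,
				     FRF_AZ_TX_FLUSH_DESCQ_CMD, 1,
				     FRF_AZ_TX_FLUSH_DESCQ, tx_queue->queue);
		efx_writeo(efx, &tx_flush_descq, FR_AZ_TX_FLUSH_DESCQ);
	}
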
439 void efx_farch_tx_fini(struct efx_tx_queue *tx_queue) in efx_farch_tx_fini() argument
441 struct efx_nic *efx = tx_queue->efx; in efx_farch_tx_fini()
447 tx_queue->queue); in efx_farch_tx_fini()
450 efx_fini_special_buffer(efx, &tx_queue->txd); in efx_farch_tx_fini()
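Teardown zeroes the queue's TX_DESC_PTR_TBL entry and then unpins the ring; a sketch assuming EFX_ZERO_OWORD and efx_writeo_table behave as in the upstream driver:

	void efx_farch_tx_fini(struct efx_tx_queue *tx_queue)
	{
		struct efx_nic *efx = tx_queue->efx;
		efx_oword_t tx_desc_ptr;

		/* Remove the TX descriptor ring from the hardware table */
		EFX_ZERO_OWORD(tx_desc_ptr);
		efx_writeo_table(efx, &tx_desc_ptr, efx->type->txd_ptr_tbl_base,
				 tx_queue->queue);

		/* Unpin the ring; efx_farch_tx_remove() frees the buffer later */
		efx_fini_special_buffer(efx, &tx_queue->txd);
	}
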
454 void efx_farch_tx_remove(struct efx_tx_queue *tx_queue) in efx_farch_tx_remove() argument
456 efx_free_special_buffer(tx_queue->efx, &tx_queue->txd); in efx_farch_tx_remove()
615 struct efx_tx_queue *tx_queue; in efx_check_tx_flush_complete() local
618 efx_for_each_channel_tx_queue(tx_queue, channel) { in efx_check_tx_flush_complete()
620 FR_BZ_TX_DESC_PTR_TBL, tx_queue->queue); in efx_check_tx_flush_complete()
627 tx_queue->queue); in efx_check_tx_flush_complete()
629 } else if (atomic_cmpxchg(&tx_queue->flush_outstanding, in efx_check_tx_flush_complete()
636 "the queue\n", tx_queue->queue); in efx_check_tx_flush_complete()
643 tx_queue)); in efx_check_tx_flush_complete()
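These matches read back each queue's TX_DESC_PTR_TBL entry and fall back to a synthetic drain event when the hardware has flushed but no completion event arrived. A sketch, with the FRF_AZ_TX_DESCQ_FLUSH/EN field names assumed from the upstream driver:

	static bool efx_check_tx_flush_complete(struct efx_nic *efx)
	{
		bool complete = true;
		efx_oword_t txd_ptr_tbl;
		struct efx_channel *channel;
		struct efx_tx_queue *tx_queue;

		efx_for_each_channel(channel, efx) {
			efx_for_each_channel_tx_queue(tx_queue, channel) {
				efx_reado_table(efx, &txd_ptr_tbl,
						FR_BZ_TX_DESC_PTR_TBL,
						tx_queue->queue);
				if (EFX_OWORD_FIELD(txd_ptr_tbl,
						    FRF_AZ_TX_DESCQ_FLUSH) ||
				    EFX_OWORD_FIELD(txd_ptr_tbl,
						    FRF_AZ_TX_DESCQ_EN)) {
					/* Still flushing or still enabled */
					netif_dbg(efx, hw, efx->net_dev,
						  "flush did not complete on TXQ %d\n",
						  tx_queue->queue);
					complete = false;
				} else if (atomic_cmpxchg(&tx_queue->flush_outstanding,
							  1, 0)) {
					/* Flushed, but the completion event was
					 * lost: drain the queue ourselves. */
					netif_dbg(efx, hw, efx->net_dev,
						  "flush complete on TXQ %d, so drain "
						  "the queue\n", tx_queue->queue);
					efx_farch_magic_event(channel,
						EFX_CHANNEL_MAGIC_TX_DRAIN(tx_queue));
				}
			}
		}
		return complete;
	}
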
659 struct efx_tx_queue *tx_queue; in efx_farch_do_flush() local
663 efx_for_each_channel_tx_queue(tx_queue, channel) { in efx_farch_do_flush()
664 efx_farch_flush_tx_queue(tx_queue); in efx_farch_do_flush()
727 struct efx_tx_queue *tx_queue; in efx_farch_fini_dmaq() local
743 efx_for_each_channel_tx_queue(tx_queue, channel) in efx_farch_fini_dmaq()
744 efx_farch_tx_fini(tx_queue); in efx_farch_fini_dmaq()
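The fini_dmaq matches use the same per-channel iterator pattern; the TX half, as a fragment of efx_farch_fini_dmaq():

	/* Tear down each TX queue's hardware state (TX half only; the RX
	 * queues are handled by a matching loop in the same function). */
	efx_for_each_channel(channel, efx) {
		efx_for_each_channel_tx_queue(tx_queue, channel)
			efx_farch_tx_fini(tx_queue);
	}
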
837 struct efx_tx_queue *tx_queue; in efx_farch_handle_tx_event() local
848 tx_queue = efx_channel_get_tx_queue( in efx_farch_handle_tx_event()
850 tx_packets = ((tx_ev_desc_ptr - tx_queue->read_count) & in efx_farch_handle_tx_event()
851 tx_queue->ptr_mask); in efx_farch_handle_tx_event()
852 efx_xmit_done(tx_queue, tx_ev_desc_ptr); in efx_farch_handle_tx_event()
856 tx_queue = efx_channel_get_tx_queue( in efx_farch_handle_tx_event()
860 efx_farch_notify_tx_desc(tx_queue); in efx_farch_handle_tx_event()
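The TX event matches cover the two interesting branches: a completion, where the number of finished packets is the masked ring-delta between the event's descriptor pointer and read_count, and a "write-pointer FIFO full" event, where the pointer is simply re-notified. A sketch with the FSF_AZ_TX_EV_* field names assumed from the upstream driver (error branches omitted):

	static int efx_farch_handle_tx_event(struct efx_channel *channel,
					     efx_qword_t *event)
	{
		unsigned int tx_ev_desc_ptr, tx_ev_q_label;
		struct efx_tx_queue *tx_queue;
		int tx_packets = 0;

		if (likely(EFX_QWORD_FIELD(*event, FSF_AZ_TX_EV_COMP))) {
			/* Transmit completion */
			tx_ev_desc_ptr = EFX_QWORD_FIELD(*event,
							 FSF_AZ_TX_EV_DESC_PTR);
			tx_ev_q_label = EFX_QWORD_FIELD(*event,
							FSF_AZ_TX_EV_Q_LABEL);
			tx_queue = efx_channel_get_tx_queue(
				channel, tx_ev_q_label % EFX_TXQ_TYPES);
			/* Masked ring-delta = packets completed by this event */
			tx_packets = ((tx_ev_desc_ptr - tx_queue->read_count) &
				      tx_queue->ptr_mask);
			efx_xmit_done(tx_queue, tx_ev_desc_ptr);
		} else if (EFX_QWORD_FIELD(*event, FSF_AZ_TX_EV_WQ_FF_FULL)) {
			/* NIC's write-pointer FIFO overflowed: rewrite it */
			tx_ev_q_label = EFX_QWORD_FIELD(*event,
							FSF_AZ_TX_EV_Q_LABEL);
			tx_queue = efx_channel_get_tx_queue(
				channel, tx_ev_q_label % EFX_TXQ_TYPES);

			netif_tx_lock(channel->efx->net_dev);
			efx_farch_notify_tx_desc(tx_queue);
			netif_tx_unlock(channel->efx->net_dev);
		}

		return tx_packets;
	}
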
1100 struct efx_tx_queue *tx_queue; in efx_farch_handle_tx_flush_done() local
1105 tx_queue = efx_get_tx_queue(efx, qid / EFX_TXQ_TYPES, in efx_farch_handle_tx_flush_done()
1107 if (atomic_cmpxchg(&tx_queue->flush_outstanding, 1, 0)) { in efx_farch_handle_tx_flush_done()
1108 efx_farch_magic_event(tx_queue->channel, in efx_farch_handle_tx_flush_done()
1109 EFX_CHANNEL_MAGIC_TX_DRAIN(tx_queue)); in efx_farch_handle_tx_flush_done()
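Finally, the flush-done handler decodes the queue id from the driver event, clears flush_outstanding exactly once with atomic_cmpxchg(), and forwards a synthetic drain event to the owning channel. The FSF_AZ_DRIVER_EV_SUBDATA field name and the qid bounds check are assumed from the upstream driver:

	static void
	efx_farch_handle_tx_flush_done(struct efx_nic *efx, efx_qword_t *event)
	{
		struct efx_tx_queue *tx_queue;
		int qid;

		qid = EFX_QWORD_FIELD(*event, FSF_AZ_DRIVER_EV_SUBDATA);
		if (qid < EFX_TXQ_TYPES * efx->n_tx_channels) {
			/* qid encodes channel * EFX_TXQ_TYPES + queue type */
			tx_queue = efx_get_tx_queue(efx, qid / EFX_TXQ_TYPES,
						    qid % EFX_TXQ_TYPES);
			/* cmpxchg ensures the drain event is sent at most once,
			 * even if efx_check_tx_flush_complete() races with us. */
			if (atomic_cmpxchg(&tx_queue->flush_outstanding, 1, 0))
				efx_farch_magic_event(tx_queue->channel,
					EFX_CHANNEL_MAGIC_TX_DRAIN(tx_queue));
		}
	}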