/linux-4.1.27/drivers/net/ethernet/sfc/ |
D | tx.c |
    37   efx_tx_queue_get_insert_index(const struct efx_tx_queue *tx_queue)   in efx_tx_queue_get_insert_index() argument
    39   return tx_queue->insert_count & tx_queue->ptr_mask;   in efx_tx_queue_get_insert_index()
    43   __efx_tx_queue_get_insert_buffer(const struct efx_tx_queue *tx_queue)   in __efx_tx_queue_get_insert_buffer() argument
    45   return &tx_queue->buffer[efx_tx_queue_get_insert_index(tx_queue)];   in __efx_tx_queue_get_insert_buffer()
    49   efx_tx_queue_get_insert_buffer(const struct efx_tx_queue *tx_queue)   in efx_tx_queue_get_insert_buffer() argument
    52   __efx_tx_queue_get_insert_buffer(tx_queue);   in efx_tx_queue_get_insert_buffer()
    61   static void efx_dequeue_buffer(struct efx_tx_queue *tx_queue,   in efx_dequeue_buffer() argument
    67   struct device *dma_dev = &tx_queue->efx->pci_dev->dev;   in efx_dequeue_buffer()
    82   netif_vdbg(tx_queue->efx, tx_done, tx_queue->efx->net_dev,   in efx_dequeue_buffer()
    84   tx_queue->queue, tx_queue->read_count);   in efx_dequeue_buffer()
    [all …]
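Note: the tx.c accessors above turn a free-running insert_count into a ring slot by masking with ptr_mask, which only works because the descriptor ring size is a power of two. A minimal standalone sketch of that indexing pattern (the struct and the numbers here are illustrative, not the driver's own):

#include <assert.h>
#include <stdio.h>

/* Hypothetical stand-in for the driver's queue state. */
struct txq {
	unsigned int insert_count;  /* free-running producer counter */
	unsigned int read_count;    /* free-running consumer counter */
	unsigned int ptr_mask;      /* ring_size - 1, ring_size a power of two */
};

/* Same idea as efx_tx_queue_get_insert_index(): counter & mask == counter % ring_size. */
static unsigned int txq_insert_index(const struct txq *q)
{
	return q->insert_count & q->ptr_mask;
}

int main(void)
{
	struct txq q = { .insert_count = 1027, .read_count = 1020, .ptr_mask = 1023 };

	assert(txq_insert_index(&q) == 1027 % 1024);
	printf("insert slot %u, %u descriptors in flight\n",
	       txq_insert_index(&q), q.insert_count - q.read_count);
	return 0;
}

The mask replaces a modulo, and the number of descriptors in flight falls out of plain counter subtraction even after the counters wrap.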
|
D | nic.h |
    69   efx_tx_desc(struct efx_tx_queue *tx_queue, unsigned int index)   in efx_tx_desc() argument
    71   return ((efx_qword_t *) (tx_queue->txd.buf.addr)) + index;   in efx_tx_desc()
    75   static struct efx_tx_queue *efx_tx_queue_partner(struct efx_tx_queue *tx_queue)   in efx_tx_queue_partner() argument
    77   if (tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD)   in efx_tx_queue_partner()
    78   return tx_queue - EFX_TXQ_TYPE_OFFLOAD;   in efx_tx_queue_partner()
    80   return tx_queue + EFX_TXQ_TYPE_OFFLOAD;   in efx_tx_queue_partner()
    86   static inline bool __efx_nic_tx_is_empty(struct efx_tx_queue *tx_queue,   in __efx_nic_tx_is_empty() argument
    89   unsigned int empty_read_count = ACCESS_ONCE(tx_queue->empty_read_count);   in __efx_nic_tx_is_empty()
   103   static inline bool efx_nic_may_tx_pio(struct efx_tx_queue *tx_queue)   in efx_nic_may_tx_pio() argument
   105   struct efx_tx_queue *partner = efx_tx_queue_partner(tx_queue);   in efx_nic_may_tx_pio()
    [all …]
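Note: efx_tx_queue_partner() above pairs each checksum-offload queue with its non-offload twin by adding or subtracting EFX_TXQ_TYPE_OFFLOAD, which works because the type bit is encoded in the queue number and the two queues are adjacent in the per-channel array. A hedged sketch of the same indexing trick on a plain array (the names and the value of the flag are assumptions for the example, not taken from the header):

#include <assert.h>

#define TXQ_TYPE_OFFLOAD 1u   /* assumed flag bit encoded in the queue number */

struct txq { unsigned int queue; };

/* Map a queue to its partner of the opposite offload type, as efx_tx_queue_partner() does. */
static struct txq *txq_partner(struct txq *q)
{
	if (q->queue & TXQ_TYPE_OFFLOAD)
		return q - TXQ_TYPE_OFFLOAD;   /* offload -> plain */
	return q + TXQ_TYPE_OFFLOAD;           /* plain -> offload */
}

int main(void)
{
	struct txq queues[2] = { { .queue = 4 }, { .queue = 5 } };  /* even/odd pair */

	assert(txq_partner(&queues[0]) == &queues[1]);
	assert(txq_partner(&queues[1]) == &queues[0]);
	return 0;
}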
|
D | farch.c |
   281   static inline void efx_farch_notify_tx_desc(struct efx_tx_queue *tx_queue)   in efx_farch_notify_tx_desc() argument
   286   write_ptr = tx_queue->write_count & tx_queue->ptr_mask;   in efx_farch_notify_tx_desc()
   288   efx_writed_page(tx_queue->efx, &reg,   in efx_farch_notify_tx_desc()
   289   FR_AZ_TX_DESC_UPD_DWORD_P0, tx_queue->queue);   in efx_farch_notify_tx_desc()
   293   static inline void efx_farch_push_tx_desc(struct efx_tx_queue *tx_queue,   in efx_farch_push_tx_desc() argument
   302   write_ptr = tx_queue->write_count & tx_queue->ptr_mask;   in efx_farch_push_tx_desc()
   306   efx_writeo_page(tx_queue->efx, &reg,   in efx_farch_push_tx_desc()
   307   FR_BZ_TX_DESC_UPD_P0, tx_queue->queue);   in efx_farch_push_tx_desc()
   315   void efx_farch_tx_write(struct efx_tx_queue *tx_queue)   in efx_farch_tx_write() argument
   320   unsigned old_write_count = tx_queue->write_count;   in efx_farch_tx_write()
    [all …]
|
D | selftest.c |
   396   static int efx_begin_loopback(struct efx_tx_queue *tx_queue)   in efx_begin_loopback() argument
   398   struct efx_nic *efx = tx_queue->efx;   in efx_begin_loopback()
   427   rc = efx_enqueue_skb(tx_queue, skb);   in efx_begin_loopback()
   433   "%d in %s loopback test\n", tx_queue->queue,   in efx_begin_loopback()
   453   static int efx_end_loopback(struct efx_tx_queue *tx_queue,   in efx_end_loopback() argument
   456   struct efx_nic *efx = tx_queue->efx;   in efx_end_loopback()
   485   tx_queue->queue, tx_done, state->packet_count,   in efx_end_loopback()
   496   tx_queue->queue, rx_good, state->packet_count,   in efx_end_loopback()
   503   lb_tests->tx_sent[tx_queue->queue] += state->packet_count;   in efx_end_loopback()
   504   lb_tests->tx_done[tx_queue->queue] += tx_done;   in efx_end_loopback()
    [all …]
|
D | efx.h |
    21   int efx_probe_tx_queue(struct efx_tx_queue *tx_queue);
    22   void efx_remove_tx_queue(struct efx_tx_queue *tx_queue);
    23   void efx_init_tx_queue(struct efx_tx_queue *tx_queue);
    24   void efx_init_tx_queue_core_txq(struct efx_tx_queue *tx_queue);
    25   void efx_fini_tx_queue(struct efx_tx_queue *tx_queue);
    28   netdev_tx_t efx_enqueue_skb(struct efx_tx_queue *tx_queue, struct sk_buff *skb);
    29   void efx_xmit_done(struct efx_tx_queue *tx_queue, unsigned int index);
|
D | ef10.c |
   359   struct efx_tx_queue *tx_queue;   in efx_ef10_link_piobufs() local
   389   efx_for_each_channel_tx_queue(tx_queue, channel) {   in efx_ef10_link_piobufs()
   395   tx_queue->channel->channel - 1) *   in efx_ef10_link_piobufs()
   405   if (tx_queue->queue == nic_data->pio_write_vi_base) {   in efx_ef10_link_piobufs()
   414   tx_queue->queue);   in efx_ef10_link_piobufs()
   426   tx_queue->queue, index, rc);   in efx_ef10_link_piobufs()
   427   tx_queue->piobuf = NULL;   in efx_ef10_link_piobufs()
   429   tx_queue->piobuf =   in efx_ef10_link_piobufs()
   432   tx_queue->piobuf_offset = offset;   in efx_ef10_link_piobufs()
   435   tx_queue->queue, index,   in efx_ef10_link_piobufs()
    [all …]
|
D | net_driver.h |
   474   struct efx_tx_queue tx_queue[EFX_TXQ_TYPES];   member
  1278   int (*tx_probe)(struct efx_tx_queue *tx_queue);
  1279   void (*tx_init)(struct efx_tx_queue *tx_queue);
  1280   void (*tx_remove)(struct efx_tx_queue *tx_queue);
  1281   void (*tx_write)(struct efx_tx_queue *tx_queue);
  1394   return &efx->channel[efx->tx_channel_offset + index]->tx_queue[type];   in efx_get_tx_queue()
  1408   return &channel->tx_queue[type];   in efx_channel_get_tx_queue()
  1411   static inline bool efx_tx_queue_used(struct efx_tx_queue *tx_queue)   in efx_tx_queue_used() argument
  1413   return !(tx_queue->efx->net_dev->num_tc < 2 &&   in efx_tx_queue_used()
  1414   tx_queue->queue & EFX_TXQ_TYPE_HIGHPRI);   in efx_tx_queue_used()
    [all …]
|
D | efx.c |
   422   struct efx_tx_queue *tx_queue;   in efx_alloc_channel() local
   434   tx_queue = &channel->tx_queue[j];   in efx_alloc_channel()
   435   tx_queue->efx = efx;   in efx_alloc_channel()
   436   tx_queue->queue = i * EFX_TXQ_TYPES + j;   in efx_alloc_channel()
   437   tx_queue->channel = channel;   in efx_alloc_channel()
   456   struct efx_tx_queue *tx_queue;   in efx_copy_channel() local
   469   tx_queue = &channel->tx_queue[j];   in efx_copy_channel()
   470   if (tx_queue->channel)   in efx_copy_channel()
   471   tx_queue->channel = channel;   in efx_copy_channel()
   472   tx_queue->buffer = NULL;   in efx_copy_channel()
    [all …]
|
D | ethtool.c |
    64   EFX_ETHTOOL_STAT(tx_##field, tx_queue, field, \
   265   struct efx_tx_queue *tx_queue;   in efx_fill_loopback_test() local
   267   efx_for_each_channel_tx_queue(tx_queue, channel) {   in efx_fill_loopback_test()
   269   &lb_tests->tx_sent[tx_queue->queue],   in efx_fill_loopback_test()
   270   EFX_TX_QUEUE_NAME(tx_queue),   in efx_fill_loopback_test()
   273   &lb_tests->tx_done[tx_queue->queue],   in efx_fill_loopback_test()
   274   EFX_TX_QUEUE_NAME(tx_queue),   in efx_fill_loopback_test()
   373   channel->tx_queue[0].queue /   in efx_describe_per_queue_stats()
   445   struct efx_tx_queue *tx_queue;   in efx_ethtool_get_stats() local
   470   efx_for_each_channel_tx_queue(tx_queue, channel)   in efx_ethtool_get_stats()
    [all …]
|
/linux-4.1.27/drivers/net/ethernet/freescale/ |
D | gianfar.c |
   143   static void gfar_clean_tx_ring(struct gfar_priv_tx_q *tx_queue);
   176   struct gfar_priv_tx_q *tx_queue = NULL;   in gfar_init_bds() local
   185   tx_queue = priv->tx_queue[i];   in gfar_init_bds()
   187   tx_queue->num_txbdfree = tx_queue->tx_ring_size;   in gfar_init_bds()
   188   tx_queue->dirty_tx = tx_queue->tx_bd_base;   in gfar_init_bds()
   189   tx_queue->cur_tx = tx_queue->tx_bd_base;   in gfar_init_bds()
   190   tx_queue->skb_curtx = 0;   in gfar_init_bds()
   191   tx_queue->skb_dirtytx = 0;   in gfar_init_bds()
   194   txbdp = tx_queue->tx_bd_base;   in gfar_init_bds()
   195   for (j = 0; j < tx_queue->tx_ring_size; j++) {   in gfar_init_bds()
    [all …]
|
D | gianfar_ethtool.c |
   206   struct gfar_priv_tx_q *tx_queue = NULL;   in gfar_gsettings() local
   210   tx_queue = priv->tx_queue[0];   in gfar_gsettings()
   215   cmd->maxtxpkt = get_icft_value(tx_queue->txic);   in gfar_gsettings()
   298   struct gfar_priv_tx_q *tx_queue = NULL;   in gfar_gcoalesce() local
   311   tx_queue = priv->tx_queue[0];   in gfar_gcoalesce()
   315   txtime = get_ictt_value(tx_queue->txic);   in gfar_gcoalesce()
   316   txcount = get_icft_value(tx_queue->txic);   in gfar_gcoalesce()
   420   priv->tx_queue[i]->txcoalescing = 0;   in gfar_scoalesce()
   423   priv->tx_queue[i]->txcoalescing = 1;   in gfar_scoalesce()
   427   priv->tx_queue[i]->txic = mk_ic_value(   in gfar_scoalesce()
    [all …]
|
D | fec_main.c |
   226   struct fec_enet_priv_tx_q *txq = fep->tx_queue[queue_id];   in fec_enet_get_nextdesc()
   257   struct fec_enet_priv_tx_q *txq = fep->tx_queue[queue_id];   in fec_enet_get_prevdesc()
   326   txq = fep->tx_queue[0];   in fec_dump()
   784   txq = fep->tx_queue[queue];   in fec_enet_start_xmit()
   836   txq = fep->tx_queue[q];   in fec_enet_bd_init()
   886   txq = fep->tx_queue[i];   in fec_enet_enable_ring()
   903   txq = fep->tx_queue[i];   in fec_enet_reset_skb()
  1207   txq = fep->tx_queue[queue_id];   in fec_enet_tx_queue()
  2586   txq = fep->tx_queue[q];   in fec_enet_free_buffers()
  2605   if (fep->tx_queue[i] && fep->tx_queue[i]->tso_hdrs) {   in fec_enet_free_queue()
    [all …]
|
D | fec.h | 487 struct fec_enet_priv_tx_q *tx_queue[FEC_ENET_MAX_TX_QS]; member
|
D | gianfar.h | 1070 struct gfar_priv_tx_q *tx_queue; member 1122 struct gfar_priv_tx_q *tx_queue[MAX_TX_QS]; member
|
/linux-4.1.27/drivers/net/wireless/rsi/ |
D | rsi_91x_core.c |
    34   q_len = skb_queue_len(&common->tx_queue[ii]);   in rsi_determine_min_weight_queue()
    58   q_len = skb_queue_len(&common->tx_queue[ii]);   in rsi_recalculate_weights()
   102   if (skb_queue_len(&common->tx_queue[q_num]))   in rsi_get_num_pkts_dequeue()
   103   skb = skb_peek(&common->tx_queue[q_num]);   in rsi_get_num_pkts_dequeue()
   115   if (skb_queue_len(&common->tx_queue[q_num]) - pkt_cnt)   in rsi_get_num_pkts_dequeue()
   139   if (skb_queue_len(&common->tx_queue[MGMT_SOFT_Q])) {   in rsi_core_determine_hal_queue()
   162   q_len = skb_queue_len(&common->tx_queue[ii]);   in rsi_core_determine_hal_queue()
   177   q_len = skb_queue_len(&common->tx_queue[q_num]);   in rsi_core_determine_hal_queue()
   191   q_len = skb_queue_len(&common->tx_queue[q_num]);   in rsi_core_determine_hal_queue()
   220   skb_queue_tail(&common->tx_queue[q_num], skb);   in rsi_core_queue_pkt()
    [all …]
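Note: rsi_91x_core.c schedules the per-access-category soft queues by weight, with management frames (MGMT_SOFT_Q) always served first. The sketch below is a simplified model of "pick the non-empty queue with the lowest weight"; it is not the driver's exact weighting arithmetic, and the queue indices and values are invented:

#include <stdio.h>

#define NUM_SOFT_QUEUES 5
#define MGMT_Q          4            /* assumed index of the management queue */
#define INVALID_QUEUE  -1

/* Simplified model: per-queue packet counts and scheduling weights. */
static int pick_queue(const unsigned int len[NUM_SOFT_QUEUES],
		      const unsigned int weight[NUM_SOFT_QUEUES])
{
	int q_num = INVALID_QUEUE;
	unsigned int best = ~0u;

	if (len[MGMT_Q])                   /* management frames always go first */
		return MGMT_Q;

	for (int ii = 0; ii < NUM_SOFT_QUEUES; ii++) {
		if (ii == MGMT_Q || !len[ii])
			continue;
		if (weight[ii] < best) {   /* lowest weight wins this round */
			best = weight[ii];
			q_num = ii;
		}
	}
	return q_num;
}

int main(void)
{
	unsigned int len[NUM_SOFT_QUEUES]    = { 3, 0, 7, 2, 0 };
	unsigned int weight[NUM_SOFT_QUEUES] = { 8, 4, 2, 6, 0 };

	printf("dequeue from soft queue %d\n", pick_queue(len, weight)); /* -> 2 */
	return 0;
}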
|
D | rsi_91x_debugfs.c | 148 skb_queue_len(&common->tx_queue[MGMT_SOFT_Q])); in rsi_stats_read() 156 skb_queue_len(&common->tx_queue[VO_Q])); in rsi_stats_read() 162 skb_queue_len(&common->tx_queue[VI_Q])); in rsi_stats_read() 168 skb_queue_len(&common->tx_queue[BE_Q])); in rsi_stats_read() 174 skb_queue_len(&common->tx_queue[BK_Q])); in rsi_stats_read()
|
D | rsi_91x_main.c | 212 skb_queue_head_init(&common->tx_queue[ii]); in rsi_91x_init() 252 skb_queue_purge(&common->tx_queue[ii]); in rsi_91x_deinit()
|
D | rsi_main.h | 162 struct sk_buff_head tx_queue[NUM_EDCA_QUEUES + 1]; member
|
D | rsi_91x_mgmt.c | 264 skb_queue_tail(&common->tx_queue[MGMT_SOFT_Q], skb); in rsi_send_internal_mgmt_frame()
|
/linux-4.1.27/drivers/net/wireless/ath/ath5k/ |
D | dma.c |
   132   u32 tx_queue;   in ath5k_hw_start_tx_dma() local
   141   tx_queue = ath5k_hw_reg_read(ah, AR5K_CR);   in ath5k_hw_start_tx_dma()
   148   tx_queue |= AR5K_CR_TXE0 & ~AR5K_CR_TXD0;   in ath5k_hw_start_tx_dma()
   151   tx_queue |= AR5K_CR_TXE1 & ~AR5K_CR_TXD1;   in ath5k_hw_start_tx_dma()
   156   tx_queue |= AR5K_CR_TXE1 & ~AR5K_CR_TXD1;   in ath5k_hw_start_tx_dma()
   164   ath5k_hw_reg_write(ah, tx_queue, AR5K_CR);   in ath5k_hw_start_tx_dma()
   191   u32 tx_queue, pending;   in ath5k_hw_stop_tx_dma() local
   200   tx_queue = ath5k_hw_reg_read(ah, AR5K_CR);   in ath5k_hw_stop_tx_dma()
   207   tx_queue |= AR5K_CR_TXD0 & ~AR5K_CR_TXE0;   in ath5k_hw_stop_tx_dma()
   212   tx_queue |= AR5K_CR_TXD1 & ~AR5K_CR_TXD1;   in ath5k_hw_stop_tx_dma()
    [all …]
|
/linux-4.1.27/drivers/net/wireless/p54/ |
D | txrx.c |
    41   spin_lock_irqsave(&priv->tx_queue.lock, flags);   in p54_dump_tx_queue()
    43   skb_queue_len(&priv->tx_queue));   in p54_dump_tx_queue()
    46   skb_queue_walk(&priv->tx_queue, skb) {   in p54_dump_tx_queue()
    69   spin_unlock_irqrestore(&priv->tx_queue.lock, flags);   in p54_dump_tx_queue()
    97   spin_lock_irqsave(&priv->tx_queue.lock, flags);   in p54_assign_address()
    98   if (unlikely(skb_queue_len(&priv->tx_queue) == 32)) {   in p54_assign_address()
   104   spin_unlock_irqrestore(&priv->tx_queue.lock, flags);   in p54_assign_address()
   108   skb_queue_walk(&priv->tx_queue, entry) {   in p54_assign_address()
   124   target_skb = priv->tx_queue.prev;   in p54_assign_address()
   125   if (!skb_queue_empty(&priv->tx_queue)) {   in p54_assign_address()
    [all …]
|
D | p54.h | 174 struct sk_buff_head tx_queue; member
|
D | main.c | 215 skb_queue_purge(&priv->tx_queue); in p54_stop() 749 skb_queue_head_init(&priv->tx_queue); in p54_init_common()
|
/linux-4.1.27/net/nfc/ |
D | llcp_commands.c |
   360   skb_queue_tail(&local->tx_queue, skb);   in nfc_llcp_send_disconnect()
   448   skb_queue_tail(&local->tx_queue, skb);   in nfc_llcp_send_connect()
   499   skb_queue_tail(&local->tx_queue, skb);   in nfc_llcp_send_cc()
   560   skb_queue_tail(&local->tx_queue, skb);   in nfc_llcp_send_snl_sdres()
   595   skb_queue_tail(&local->tx_queue, skb);   in nfc_llcp_send_snl_sdreq()
   628   skb_queue_head(&local->tx_queue, skb);   in nfc_llcp_send_dm()
   652   skb_queue_len(&sock->tx_queue) >= 2 * sock->remote_rw)) {   in nfc_llcp_send_i_frame()
   660   skb_queue_len(&sock->tx_queue) >= 2 * sock->remote_rw)) {   in nfc_llcp_send_i_frame()
   662   skb_queue_len(&sock->tx_queue));   in nfc_llcp_send_i_frame()
   699   skb_queue_tail(&sock->tx_queue, pdu);   in nfc_llcp_send_i_frame()
    [all …]
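Note: nfc_llcp_send_i_frame() refuses to queue more than twice the remote receive window (2 * sock->remote_rw) of I-frames at once. A small model of that admission check (the error code and struct are assumptions for illustration, not the LLCP code itself):

#include <errno.h>
#include <stdio.h>

/* Hypothetical per-socket state for the sketch. */
struct llcp_sock_model {
	unsigned int queued;      /* I-frames sitting in tx_queue         */
	unsigned int remote_rw;   /* peer's advertised receive window     */
};

/* Admission check mirroring the 2 * remote_rw bound seen above. */
static int i_frame_may_queue(const struct llcp_sock_model *s)
{
	if (s->queued >= 2 * s->remote_rw)
		return -EWOULDBLOCK;   /* caller must wait for the peer to ack */
	return 0;
}

int main(void)
{
	struct llcp_sock_model s = { .queued = 4, .remote_rw = 2 };

	printf("may queue: %d\n", i_frame_may_queue(&s));   /* refused */
	s.queued = 1;
	printf("may queue: %d\n", i_frame_may_queue(&s));   /* accepted */
	return 0;
}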
|
D | llcp_core.c |
    62   skb_queue_purge(&sock->tx_queue);   in nfc_llcp_socket_purge()
    69   skb_queue_walk_safe(&local->tx_queue, s, tmp) {   in nfc_llcp_socket_purge()
    73   skb_unlink(s, &local->tx_queue);   in nfc_llcp_socket_purge()
    85   skb_queue_purge(&local->tx_queue);   in nfc_llcp_socket_release()
   169   skb_queue_purge(&local->tx_queue);   in local_cleanup()
   718   skb = skb_dequeue(&local->tx_queue);   in nfc_llcp_tx_work()
   727   skb_queue_head(&local->tx_queue, skb);   in nfc_llcp_tx_work()
  1010   pdu = skb_dequeue(&sock->tx_queue);   in nfc_llcp_queue_i_frames()
  1017   skb_queue_tail(&local->tx_queue, pdu);   in nfc_llcp_queue_i_frames()
  1095   skb_queue_head(&local->tx_queue, s);   in nfc_llcp_recv_hdlc()
    [all …]
|
D | llcp.h | 72 struct sk_buff_head tx_queue; member 147 struct sk_buff_head tx_queue; member
|
D | llcp_sock.c | 971 skb_queue_head_init(&llcp_sock->tx_queue); in nfc_llcp_sock_alloc() 985 skb_queue_purge(&sock->tx_queue); in nfc_llcp_sock_free()
|
/linux-4.1.27/drivers/bluetooth/ |
D | btmrvl_main.c |
   201   skb_queue_head(&priv->adapter->tx_queue, skb);   in btmrvl_send_sync_cmd()
   416   skb_queue_head_init(&priv->adapter->tx_queue);   in btmrvl_init_adapter()
   439   skb_queue_purge(&priv->adapter->tx_queue);   in btmrvl_free_adapter()
   474   skb_queue_tail(&priv->adapter->tx_queue, skb);   in btmrvl_send_frame()
   485   skb_queue_purge(&priv->adapter->tx_queue);   in btmrvl_flush()
   497   skb_queue_purge(&priv->adapter->tx_queue);   in btmrvl_close()
   635   skb_queue_empty(&adapter->tx_queue)))) {   in btmrvl_service_main_thread()
   657   !skb_queue_empty(&adapter->tx_queue)) {   in btmrvl_service_main_thread()
   672   skb = skb_dequeue(&adapter->tx_queue);   in btmrvl_service_main_thread()
|
D | btmrvl_drv.h | 83 struct sk_buff_head tx_queue; member
|
D | btmrvl_sdio.c | 1510 skb_queue_purge(&priv->adapter->tx_queue); in btmrvl_sdio_suspend()
|
/linux-4.1.27/drivers/net/ethernet/fujitsu/ |
D | fmvj18x_cs.c |
   113   uint tx_queue;   member
   744   if (lp->tx_queue) {   in fjn_interrupt()
   745   outb(DO_TX | lp->tx_queue, ioaddr + TX_START);   in fjn_interrupt()
   746   lp->sent = lp->tx_queue ;   in fjn_interrupt()
   747   lp->tx_queue = 0;   in fjn_interrupt()
   794   lp->tx_queue = 0;   in fjn_tx_timeout()
   840   lp->tx_queue++;   in fjn_start_xmit()
   845   outb(DO_TX | lp->tx_queue, ioaddr + TX_START);   in fjn_start_xmit()
   846   lp->sent = lp->tx_queue ;   in fjn_start_xmit()
   847   lp->tx_queue = 0;   in fjn_start_xmit()
    [all …]
|
/linux-4.1.27/drivers/net/vmxnet3/ |
D | vmxnet3_ethtool.c |
   137   drvTxStats = &adapter->tx_queue[i].stats;   in vmxnet3_get_stats64()
   320   base = (u8 *)&adapter->tx_queue[j].stats;   in vmxnet3_get_ethtool_stats()
   360   buf[j++] = adapter->tx_queue[i].tx_ring.next2fill;   in vmxnet3_get_regs()
   361   buf[j++] = adapter->tx_queue[i].tx_ring.next2comp;   in vmxnet3_get_regs()
   362   buf[j++] = adapter->tx_queue[i].tx_ring.gen;   in vmxnet3_get_regs()
   365   buf[j++] = adapter->tx_queue[i].comp_ring.next2proc;   in vmxnet3_get_regs()
   366   buf[j++] = adapter->tx_queue[i].comp_ring.gen;   in vmxnet3_get_regs()
   367   buf[j++] = adapter->tx_queue[i].stopped;   in vmxnet3_get_regs()
|
D | vmxnet3_drv.c |
   113   netif_start_subqueue(adapter->netdev, tq - adapter->tx_queue);   in vmxnet3_tq_start()
   121   netif_wake_subqueue(adapter->netdev, (tq - adapter->tx_queue));   in vmxnet3_tq_wake()
   130   netif_stop_subqueue(adapter->netdev, (tq - adapter->tx_queue));   in vmxnet3_tq_stop()
   157   vmxnet3_tq_start(&adapter->tx_queue[i],   in vmxnet3_check_link()
   166   vmxnet3_tq_stop(&adapter->tx_queue[i], adapter);   in vmxnet3_check_link()
   465   vmxnet3_tq_destroy(&adapter->tx_queue[i], adapter);   in vmxnet3_tq_destroy_all()
   551   vmxnet3_tq_cleanup(&adapter->tx_queue[i], adapter);   in vmxnet3_tq_cleanup_all()
   795   vmxnet3_tq_init(&adapter->tx_queue[i], adapter);   in vmxnet3_tq_init_all()
  1104   &adapter->tx_queue[skb->queue_mapping],   in vmxnet3_xmit_frame()
  1602   vmxnet3_tq_tx_complete(&adapter->tx_queue[i], adapter);   in vmxnet3_do_poll()
    [all …]
|
D | vmxnet3_int.h | 317 struct vmxnet3_tx_queue tx_queue[VMXNET3_DEVICE_MAX_TX_QUEUES]; member
|
/linux-4.1.27/net/irda/ |
D | irttp.c |
   180   if (skb_queue_empty(&self->tx_queue)) {   in irttp_todo_expired()
   216   while ((skb = skb_dequeue(&self->tx_queue)) != NULL)   in irttp_flush_queues()
   327   skb_queue_tail(&self->tx_queue, frag);   in irttp_fragment_skb()
   336   skb_queue_tail(&self->tx_queue, skb);   in irttp_fragment_skb()
   380   skb_queue_head_init(&tsap->tx_queue);   in irttp_init_tsap()
   602   skb_queue_len(&self->tx_queue));   in irttp_data_request()
   643   if (skb_queue_len(&self->tx_queue) >= TTP_TX_MAX_QUEUE) {   in irttp_data_request()
   662   skb_queue_tail(&self->tx_queue, skb);   in irttp_data_request()
   675   (skb_queue_len(&self->tx_queue) > TTP_TX_HIGH_THRESHOLD)) {   in irttp_data_request()
   716   self->send_credit, skb_queue_len(&self->tx_queue));   in irttp_run_tx_queue()
    [all …]
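Note: irttp_data_request() rejects a frame outright once tx_queue holds TTP_TX_MAX_QUEUE entries and asks the upper layer to slow down once it crosses TTP_TX_HIGH_THRESHOLD. A compact sketch of that two-level watermark policy (the constants below are chosen arbitrarily for the example, not taken from the IrTTP headers):

#include <stdbool.h>
#include <stdio.h>

#define TX_MAX_QUEUE      14   /* hard limit: refuse new frames beyond this */
#define TX_HIGH_THRESHOLD 10   /* soft limit: throttle the producer         */

struct ttp_model {
	unsigned int queued;
	bool flow_stopped;
};

static bool ttp_enqueue(struct ttp_model *t)
{
	if (t->queued >= TX_MAX_QUEUE)
		return false;                 /* refuse: queue is full */

	t->queued++;
	if (t->queued > TX_HIGH_THRESHOLD)
		t->flow_stopped = true;       /* tell the client to stop sending */
	return true;
}

int main(void)
{
	struct ttp_model t = { 0 };

	for (int i = 0; i < 16; i++)
		ttp_enqueue(&t);
	printf("queued=%u flow_stopped=%d\n", t.queued, t.flow_stopped); /* 14, 1 */
	return 0;
}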
|
/linux-4.1.27/drivers/staging/ozwpan/ |
D | ozpd.c |
   121   INIT_LIST_HEAD(&pd->tx_queue);   in oz_pd_alloc()
   123   pd->last_sent_frame = &pd->tx_queue;   in oz_pd_alloc()
   157   list_for_each_safe(e, n, &pd->tx_queue) {   in oz_pd_free()
   399   list_add_tail(&f->link, &pd->tx_queue);   in oz_prepare_frame()
   478   if (e == &pd->tx_queue) {   in oz_send_next_queued_frame()
   618   list_for_each_entry(f, &pd->tx_queue, link) {   in oz_retire_tx_frames()
   629   list_cut_position(&list, &pd->tx_queue, &tmp->link);   in oz_retire_tx_frames()
   630   pd->last_sent_frame = &pd->tx_queue;   in oz_retire_tx_frames()
   790   list_for_each_entry(f, &pd->tx_queue, link) {   in oz_send_isoc_unit()
   804   list_add_tail(&isoc_unit->link, &pd->tx_queue);   in oz_send_isoc_unit()
|
D | ozpd.h | 95 struct list_head tx_queue; member
|
D | ozproto.c | 379 pd->last_sent_frame = &pd->tx_queue; in oz_rx_frame()
|
/linux-4.1.27/drivers/net/ethernet/mellanox/mlx4/ |
D | en_tx.c |
   377   netdev_tx_reset_queue(ring->tx_queue);   in mlx4_en_free_tx_buf()
   412   netdev_txq_bql_complete_prefetchw(ring->tx_queue);   in mlx4_en_process_tx_cq()
   481   netdev_tx_completed_queue(ring->tx_queue, packets, bytes);   in mlx4_en_process_tx_cq()
   485   if (netif_tx_queue_stopped(ring->tx_queue) &&   in mlx4_en_process_tx_cq()
   487   netif_tx_wake_queue(ring->tx_queue);   in mlx4_en_process_tx_cq()
   757   netdev_txq_bql_enqueue_prefetchw(ring->tx_queue);   in mlx4_en_xmit()
   906   netdev_tx_sent_queue(ring->tx_queue, tx_info->nr_bytes);   in mlx4_en_xmit()
   932   netif_tx_stop_queue(ring->tx_queue);   in mlx4_en_xmit()
   935   send_doorbell = !skb->xmit_more || netif_xmit_stopped(ring->tx_queue);   in mlx4_en_xmit()
  1000   netif_tx_wake_queue(ring->tx_queue);   in mlx4_en_xmit()
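Note: en_tx.c stops the netdev queue when the descriptor ring is nearly full and wakes it again from the completion path once descriptors have been reclaimed, while the netdev_tx_sent_queue()/netdev_tx_completed_queue() calls feed byte-queue limits. A bare-bones model of the stop/wake half only (the threshold and field names are invented for the sketch):

#include <stdbool.h>
#include <stdio.h>

#define RING_SIZE      8u
#define WAKE_THRESHOLD 4u   /* free slots needed before waking the queue */

struct tx_ring_model {
	unsigned int prod, cons;  /* free-running producer/consumer counters */
	bool stopped;             /* mirrors netif_tx_queue_stopped()        */
};

static unsigned int ring_free(const struct tx_ring_model *r)
{
	return RING_SIZE - (r->prod - r->cons);
}

static bool ring_xmit(struct tx_ring_model *r)
{
	if (!ring_free(r)) {
		r->stopped = true;        /* netif_tx_stop_queue() */
		return false;
	}
	r->prod++;
	return true;
}

static void ring_complete(struct tx_ring_model *r, unsigned int n)
{
	r->cons += n;
	if (r->stopped && ring_free(r) >= WAKE_THRESHOLD)
		r->stopped = false;       /* netif_tx_wake_queue() */
}

int main(void)
{
	struct tx_ring_model r = { 0 };

	for (int i = 0; i < 10; i++)
		ring_xmit(&r);
	printf("stopped=%d free=%u\n", r.stopped, ring_free(&r));  /* stopped after 8 */
	ring_complete(&r, 5);
	printf("stopped=%d free=%u\n", r.stopped, ring_free(&r));  /* woken again */
	return 0;
}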
|
D | mlx4_en.h | 296 struct netdev_queue *tx_queue; member
|
D | en_netdev.c | 1637 tx_ring->tx_queue = netdev_get_tx_queue(dev, i); in mlx4_en_start_port()
|
/linux-4.1.27/drivers/net/ethernet/marvell/ |
D | mv643xx_eth.c |
   353   struct tx_queue {   struct
   423   struct tx_queue txq[8];
   461   static struct mv643xx_eth_private *txq_to_mp(struct tx_queue *txq)   in txq_to_mp()
   482   static void txq_reset_hw_ptr(struct tx_queue *txq)   in txq_reset_hw_ptr()
   492   static void txq_enable(struct tx_queue *txq)   in txq_enable()
   498   static void txq_disable(struct tx_queue *txq)   in txq_disable()
   508   static void txq_maybe_wake(struct tx_queue *txq)   in txq_maybe_wake()
   746   txq_put_data_tso(struct net_device *dev, struct tx_queue *txq,   in txq_put_data_tso()
   782   txq_put_hdr_tso(struct sk_buff *skb, struct tx_queue *txq, int length)   in txq_put_hdr_tso()
   814   static int txq_submit_tso(struct tx_queue *txq, struct sk_buff *skb,   in txq_submit_tso()
    [all …]
|
/linux-4.1.27/drivers/net/usb/ |
D | cdc-phonet.c | 47 unsigned tx_queue; member 84 pnd->tx_queue++; in usbpn_xmit() 85 if (pnd->tx_queue >= dev->tx_queue_len) in usbpn_xmit() 119 pnd->tx_queue--; in tx_complete()
|
D | r8152.c |
   590   struct sk_buff_head tx_queue, rx_queue;   member
  1147   if (!skb_queue_empty(&tp->tx_queue))   in write_bulk_callback()
  1266   skb_queue_head_init(&tp->tx_queue);   in alloc_all_mem()
  1535   struct sk_buff_head skb_head, *tx_queue = &tp->tx_queue;   in r8152_tx_agg_fill() local
  1540   spin_lock(&tx_queue->lock);   in r8152_tx_agg_fill()
  1541   skb_queue_splice_init(tx_queue, &skb_head);   in r8152_tx_agg_fill()
  1542   spin_unlock(&tx_queue->lock);   in r8152_tx_agg_fill()
  1600   spin_lock(&tx_queue->lock);   in r8152_tx_agg_fill()
  1601   skb_queue_splice(&skb_head, tx_queue);   in r8152_tx_agg_fill()
  1602   spin_unlock(&tx_queue->lock);   in r8152_tx_agg_fill()
    [all …]
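Note: r8152_tx_agg_fill() drains the shared tx_queue by splicing it into a private list while holding the queue lock, builds the USB aggregate with the lock released, and splices any leftovers back afterwards. A userspace sketch of that splice-drain-requeue shape using a mutex-protected singly linked list (all names here are invented for the example; the kernel uses sk_buff_head and skb_queue_splice_init()):

#include <pthread.h>
#include <stdio.h>

struct node { struct node *next; int id; };

struct locked_list {
	pthread_mutex_t lock;
	struct node *head;
};

/* Take the whole list in one shot while holding the lock (splice + re-init). */
static struct node *splice_init(struct locked_list *q)
{
	pthread_mutex_lock(&q->lock);
	struct node *batch = q->head;
	q->head = NULL;
	pthread_mutex_unlock(&q->lock);
	return batch;
}

/* Put unprocessed entries back at the head, again under the lock. */
static void splice_back(struct locked_list *q, struct node *rest)
{
	if (!rest)
		return;
	struct node *tail = rest;
	while (tail->next)
		tail = tail->next;
	pthread_mutex_lock(&q->lock);
	tail->next = q->head;
	q->head = rest;
	pthread_mutex_unlock(&q->lock);
}

int main(void)
{
	struct locked_list q = { PTHREAD_MUTEX_INITIALIZER, NULL };
	struct node n2 = { NULL, 2 }, n1 = { &n2, 1 };
	q.head = &n1;

	struct node *batch = splice_init(&q);
	printf("processed %d\n", batch->id);  /* pretend only the first entry fit */
	splice_back(&q, batch->next);
	printf("requeued head id = %d\n", q.head ? q.head->id : -1);
	return 0;
}

Doing the real work outside the lock keeps the xmit path from contending with the aggregation worker; only the two splice operations need the lock.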
|
/linux-4.1.27/drivers/net/wireless/ti/wl1251/ |
D | tx.c | 359 while ((skb = skb_dequeue(&wl->tx_queue))) { in wl1251_tx_work() 369 skb_queue_head(&wl->tx_queue, skb); in wl1251_tx_work() 493 queue_len = skb_queue_len(&wl->tx_queue); in wl1251_tx_complete() 571 while ((skb = skb_dequeue(&wl->tx_queue))) { in wl1251_tx_flush()
|
D | wl1251.h | 319 struct sk_buff_head tx_queue; member
|
D | main.c | 365 skb_queue_tail(&wl->tx_queue, skb); in wl1251_op_tx() 378 if (skb_queue_len(&wl->tx_queue) >= WL1251_TX_QUEUE_HIGH_WATERMARK) { in wl1251_op_tx() 1530 skb_queue_head_init(&wl->tx_queue); in wl1251_alloc_hw()
|
D | debugfs.c | 224 queue_len = skb_queue_len(&wl->tx_queue); in tx_queue_len_read()
|
/linux-4.1.27/include/linux/ |
D | scc.h | 73 struct sk_buff_head tx_queue; /* next tx buffer */ member
|
/linux-4.1.27/drivers/atm/ |
D | zatm.h | 49 struct sk_buff_head tx_queue; /* list of buffers in transit */ member
|
D | eni.h | 86 struct sk_buff_head tx_queue; /* PDUs currently being TX DMAed*/ member
|
D | solos-pci.c |
   127   struct sk_buff_head tx_queue[4];   member
   958   skb_queue_walk_safe(&card->tx_queue[port], skb, tmpskb) {   in pclose()
   960   skb_unlink(skb, &card->tx_queue[port]);   in pclose()
  1035   old_len = skb_queue_len(&card->tx_queue[port]);   in fpga_queue()
  1036   skb_queue_tail(&card->tx_queue[port], skb);   in fpga_queue()
  1077   skb = skb_dequeue(&card->tx_queue[port]);   in fpga_tx()
  1359   skb_queue_head_init(&card->tx_queue[i]);   in atm_init()
  1424   while ((skb = skb_dequeue(&card->tx_queue[i])))   in atm_remove()
|
D | zatm.c | 697 skb_queue_tail(&zatm_vcc->tx_queue,skb); in do_tx() 716 skb = skb_dequeue(&zatm_vcc->tx_queue); in dequeue_tx() 885 if (skb_peek(&zatm_vcc->tx_queue)) { in close_tx() 888 wait_event(zatm_vcc->tx_wait, !skb_peek(&zatm_vcc->tx_queue)); in close_tx() 953 skb_queue_head_init(&zatm_vcc->tx_queue); in open_tx_first()
|
D | horizon.h | 423 wait_queue_head_t tx_queue; member
|
D | eni.c | 1176 skb_queue_tail(&eni_dev->tx_queue,skb); in do_tx() 1216 while ((skb = skb_dequeue(&eni_dev->tx_queue))) { in dequeue_tx() 1225 skb_queue_head(&eni_dev->tx_queue,skb); in dequeue_tx() 1444 skb_queue_head_init(&eni_dev->tx_queue); in start_tx() 1980 skb_queue_walk(&eni_dev->tx_queue, skb) { in eni_change_qos()
|
D | horizon.c | 1079 wait_event_interruptible(dev->tx_queue, (!test_and_set_bit(tx_busy, &dev->flags))); in tx_hold() 1092 wake_up_interruptible (&dev->tx_queue); in tx_release() 2792 init_waitqueue_head(&dev->tx_queue); in hrz_probe()
|
/linux-4.1.27/drivers/tty/ipwireless/ |
D | hardware.c |
   242   struct list_head tx_queue[NL_NUM_OF_PRIORITIES];   member
   509   list_add(&packet->queue, &hw->tx_queue[0]);   in do_send_packet()
   966   if (!list_empty(&hw->tx_queue[priority])) {   in send_pending_packet()
   968   &hw->tx_queue[priority],   in send_pending_packet()
   992   if (!list_empty(&hw->tx_queue[priority])) {   in send_pending_packet()
  1242   list_add_tail(&packet->queue, &hw->tx_queue[priority]);   in send_packet()
  1625   INIT_LIST_HEAD(&hw->tx_queue[i]);   in ipwireless_hardware_create()
  1738   list_for_each_entry_safe(tp, tq, &hw->tx_queue[i], queue) {   in ipwireless_hardware_free()
|
/linux-4.1.27/drivers/net/wan/ |
D | hdlc_ppp.c | 103 static struct sk_buff_head tx_queue; /* used when holding the spin lock */ variable 203 while ((skb = skb_dequeue(&tx_queue)) != NULL) in ppp_tx_flush() 258 skb_queue_tail(&tx_queue, skb); in ppp_tx_cp() 700 skb_queue_head_init(&tx_queue); in mod_init()
|
/linux-4.1.27/drivers/isdn/hisax/ |
D | hfc4s8s_l1.c |
   138   struct sk_buff_head tx_queue;   member
   397   skb_queue_tail(&bch->tx_queue, skb);   in bch_l2l1()
   562   skb_queue_purge(&bch->tx_queue);   in bch_l2l1()
   921   if (!(skb = skb_dequeue(&bch->tx_queue))) {   in tx_b_frame()
  1356   skb_queue_purge(&hw->l1[i].b_ch[0].tx_queue);   in hfc_hardware_enable()
  1357   skb_queue_purge(&hw->l1[i].b_ch[1].tx_queue);   in hfc_hardware_enable()
  1413   skb_queue_head_init(&l1p->b_ch[0].tx_queue);   in setup_instance()
  1421   skb_queue_head_init(&l1p->b_ch[1].tx_queue);   in setup_instance()
|
/linux-4.1.27/drivers/net/wireless/cw1200/ |
D | main.c | 388 if (cw1200_queue_init(&priv->tx_queue[i], in cw1200_init_common() 392 cw1200_queue_deinit(&priv->tx_queue[i - 1]); in cw1200_init_common() 471 cw1200_queue_deinit(&priv->tx_queue[i]); in cw1200_unregister_common()
|
D | cw1200.h | 133 struct cw1200_queue tx_queue[4]; member
|
D | wsm.c |
  1208   &priv->tx_queue[i],   in wsm_flush_tx()
  1596   queued = cw1200_queue_get_num_queued(&priv->tx_queue[i],   in cw1200_get_prio_queue()
  1615   &priv->tx_queue[winner],   in cw1200_get_prio_queue()
  1618   &priv->tx_queue[priv->tx_burst_idx],   in cw1200_get_prio_queue()
  1660   *queue_p = &priv->tx_queue[idx];   in wsm_get_tx_queue_and_mask()
  1698   queue_num = queue - priv->tx_queue;   in wsm_get_tx()
|
D | txrx.c | 37 cw1200_queue_lock(&priv->tx_queue[i]); in cw1200_tx_queues_lock() 44 cw1200_queue_unlock(&priv->tx_queue[i]); in cw1200_tx_queues_unlock() 774 BUG_ON(cw1200_queue_put(&priv->tx_queue[t.queue], in cw1200_tx() 839 if (cw1200_queue_get_num_queued(&priv->tx_queue[i], in cw1200_handle_pspoll() 858 struct cw1200_queue *queue = &priv->tx_queue[queue_id]; in cw1200_tx_confirm_cb()
|
D | debug.c | 210 cw1200_queue_status_show(seq, &priv->tx_queue[i]); in cw1200_status_show()
|
D | bh.c | 492 &priv->tx_queue[i], in cw1200_bh()
|
D | scan.c | 358 struct cw1200_queue *queue = &priv->tx_queue[queue_id]; in cw1200_probe_work()
|
D | sta.c | 134 cw1200_queue_clear(&priv->tx_queue[i]); in cw1200_stop() 853 struct cw1200_queue *queue = &priv->tx_queue[queue_id]; in cw1200_wep_key_work() 914 cw1200_queue_clear(&priv->tx_queue[i]); in __cw1200_flush()
|
/linux-4.1.27/drivers/net/hamradio/ |
D | scc.c |
   306   while (!skb_queue_empty(&scc->tx_queue))   in scc_discard_buffers()
   307   dev_kfree_skb(skb_dequeue(&scc->tx_queue));   in scc_discard_buffers()
   377   skb = skb_dequeue(&scc->tx_queue);   in scc_txint()
  1130   if (skb_queue_empty(&scc->tx_queue)) { /* nothing to send */   in t_dwait()
  1586   skb_queue_head_init(&scc->tx_queue);   in scc_net_open()
  1666   if (skb_queue_len(&scc->tx_queue) > scc->dev->tx_queue_len) {   in scc_net_tx()
  1668   skb_del = skb_dequeue(&scc->tx_queue);   in scc_net_tx()
  1671   skb_queue_tail(&scc->tx_queue, skb);   in scc_net_tx()
  1889   skb_queue_head_init(&scc->tx_queue);   in scc_net_ioctl()
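Note: scc_net_tx() keeps the soft queue bounded by the device's tx_queue_len: when the queue is already over the limit, the oldest queued frame is dropped and the new one is still appended. A small model of that drop-oldest policy, using a plain counter instead of sk_buff bookkeeping (limit value invented for the example):

#include <stdio.h>

#define TX_QUEUE_LEN 3   /* stand-in for scc->dev->tx_queue_len */

struct q_model {
	unsigned int len;
	unsigned int dropped;
};

static void q_model_tx(struct q_model *q)
{
	if (q->len > TX_QUEUE_LEN) {   /* same "already over the limit" test as scc_net_tx() */
		q->len--;              /* dev_kfree_skb(skb_dequeue(...)): drop the oldest */
		q->dropped++;
	}
	q->len++;                      /* skb_queue_tail(): the new frame always goes in */
}

int main(void)
{
	struct q_model q = { 0, 0 };

	for (int i = 0; i < 6; i++)
		q_model_tx(&q);
	printf("queued=%u dropped=%u\n", q.len, q.dropped);
	return 0;
}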
|
/linux-4.1.27/net/bluetooth/rfcomm/ |
D | core.c |
   312   skb_queue_head_init(&d->tx_queue);   in rfcomm_dlc_alloc()
   327   skb_queue_purge(&d->tx_queue);   in rfcomm_dlc_free()
   442   if (skb_queue_empty(&d->tx_queue)) {   in __rfcomm_dlc_disconn()
   495   skb_queue_purge(&d->tx_queue);   in __rfcomm_dlc_close()
   568   skb_queue_tail(&d->tx_queue, skb);   in rfcomm_dlc_send()
   582   skb_queue_tail(&d->tx_queue, skb);   in rfcomm_dlc_send_noerror()
   876   skb_queue_tail(&d->tx_queue, skb);   in rfcomm_queue_disc()
  1823   return skb_queue_len(&d->tx_queue);   in rfcomm_process_tx()
  1825   while (d->tx_credits && (skb = skb_dequeue(&d->tx_queue))) {   in rfcomm_process_tx()
  1828   skb_queue_head(&d->tx_queue, skb);   in rfcomm_process_tx()
    [all …]
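Note: rfcomm_process_tx() only dequeues frames while d->tx_credits is non-zero, consuming one credit per frame, and puts a frame back at the head of tx_queue if sending fails. A minimal model of credit-based draining (the struct and numbers are illustrative; the failure/requeue path is left out):

#include <stdio.h>

struct dlc_model {
	unsigned int tx_credits;   /* frames the peer has agreed to accept */
	unsigned int queued;       /* frames waiting in tx_queue           */
};

/* Drain as many frames as the current credit count allows. */
static unsigned int process_tx(struct dlc_model *d)
{
	unsigned int sent = 0;

	while (d->tx_credits && d->queued) {
		d->queued--;        /* skb_dequeue()          */
		d->tx_credits--;    /* one credit per frame   */
		sent++;
	}
	return sent;
}

int main(void)
{
	struct dlc_model d = { .tx_credits = 2, .queued = 5 };

	printf("sent %u, still queued %u\n", process_tx(&d), d.queued); /* 2, 3 */
	d.tx_credits = 10;                   /* peer granted more credits */
	printf("sent %u, still queued %u\n", process_tx(&d), d.queued); /* 3, 0 */
	return 0;
}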
|
D | tty.c | 700 skb_queue_purge(&dev->dlc->tx_queue); in rfcomm_tty_cleanup() 1036 if (!skb_queue_empty(&dev->dlc->tx_queue)) in rfcomm_tty_chars_in_buffer() 1051 skb_queue_purge(&dev->dlc->tx_queue); in rfcomm_tty_flush_buffer()
|
/linux-4.1.27/include/net/irda/ |
D | irttp.h | 120 struct sk_buff_head tx_queue; /* Frames to be transmitted */ member
|
/linux-4.1.27/drivers/net/can/mscan/ |
D | mscan.c | 281 list_add_tail(&priv->tx_queue[buf_id].list, &priv->tx_head); in mscan_start_xmit() 708 priv->tx_queue[i].id = i; in alloc_mscandev() 709 priv->tx_queue[i].mask = 1 << i; in alloc_mscandev()
|
D | mscan.h | 295 struct tx_queue_entry tx_queue[TX_QUEUE_SIZE]; member
|
/linux-4.1.27/drivers/media/rc/ |
D | ite-cir.h | 136 wait_queue_head_t tx_queue, tx_ended; member
|
D | ite-cir.c | 324 wake_up_interruptible(&dev->tx_queue); in ite_cir_isr() 479 …wait_event_interruptible(dev->tx_queue, (fifo_avail = ITE_TX_FIFO_LEN - dev->params.get_tx_used_sl… in ite_tx_ir() 1524 init_waitqueue_head(&itdev->tx_queue); in ite_probe()
|
/linux-4.1.27/drivers/net/fddi/skfp/h/ |
D | supern_2.h | 228 struct tx_queue { struct 229 struct tx_queue *tq_next ; argument
|
/linux-4.1.27/drivers/net/phy/ |
D | dp83640.c |
   130   struct sk_buff_head tx_queue;   member
   887   skb = skb_dequeue(&dp83640->tx_queue);   in decode_txts()
   899   skb = skb_dequeue(&dp83640->tx_queue);   in decode_txts()
  1134   skb_queue_head_init(&dp83640->tx_queue);   in dp83640_probe()
  1173   skb_queue_purge(&dp83640->tx_queue);   in dp83640_remove()
  1443   skb_queue_tail(&dp83640->tx_queue, skb);   in dp83640_txtstamp()
|
/linux-4.1.27/drivers/net/xen-netback/ |
D | common.h | 141 struct sk_buff_head tx_queue; member
|
D | netback.c | 1187 while (skb_queue_len(&queue->tx_queue) < budget) { in xenvif_tx_build_gops() 1326 __skb_queue_tail(&queue->tx_queue, skb); in xenvif_tx_build_gops() 1416 while ((skb = __skb_dequeue(&queue->tx_queue)) != NULL) { in xenvif_tx_submit()
|
D | interface.c | 472 skb_queue_head_init(&queue->tx_queue); in xenvif_init_queue()
|
D | xenbus.c | 111 skb_queue_len(&queue->tx_queue), in xenvif_read_io_ring()
|
/linux-4.1.27/drivers/net/wireless/ti/wlcore/ |
D | tx.c | 529 skb = skb_dequeue(&lnk->tx_queue[q]); in wlcore_lnk_dequeue() 552 !skb_queue_empty(&lnk->tx_queue[ac]) && in wlcore_lnk_dequeue_high_prio() 697 skb_queue_head(&wl->links[hlid].tx_queue[q], skb); in wl1271_skb_queue_head() 1042 while ((skb = skb_dequeue(&lnk->tx_queue[i]))) { in wl1271_tx_reset_link_queues()
|
D | ps.c | 249 while ((skb = skb_dequeue(&lnk->tx_queue[i]))) { in wl1271_ps_filter_frames()
|
D | wlcore_i.h | 265 struct sk_buff_head tx_queue[NUM_TX_QUEUES]; member
|
D | main.c | 1295 skb_queue_tail(&wl->links[hlid].tx_queue[q], skb); in wl1271_op_tx() 6220 skb_queue_head_init(&wl->links[j].tx_queue[i]); in wlcore_alloc_hw()
|
/linux-4.1.27/include/net/bluetooth/ |
D | rfcomm.h | 171 struct sk_buff_head tx_queue; member
|
/linux-4.1.27/drivers/scsi/fnic/ |
D | fnic_main.c | 883 skb_queue_head_init(&fnic->tx_queue); in fnic_probe() 972 skb_queue_purge(&fnic->tx_queue); in fnic_remove() 1004 BUG_ON(!skb_queue_empty(&fnic->tx_queue)); in fnic_remove()
|
D | fnic.h | 283 struct sk_buff_head tx_queue; member
|
D | fnic_fcs.c | 1120 skb_queue_tail(&fnic->tx_queue, fp_skb(fp)); in fnic_send() 1144 while ((skb = skb_dequeue(&fnic->tx_queue))) { in fnic_flush_tx()
|
D | fnic_scsi.c | 216 skb_queue_purge(&fnic->tx_queue); in fnic_fw_reset_handler() 656 skb_queue_purge(&fnic->tx_queue); in DEF_SCSI_QCMD()
|
/linux-4.1.27/drivers/net/ethernet/intel/igbvf/ |
D | netdev.c | 944 int tx_queue, int msix_vector) in igbvf_assign_vector() argument 969 if (tx_queue > IGBVF_NO_QUEUE) { in igbvf_assign_vector() 970 index = (tx_queue >> 1); in igbvf_assign_vector() 972 if (tx_queue & 0x1) { in igbvf_assign_vector() 981 adapter->tx_ring[tx_queue].eims_value = 1 << msix_vector; in igbvf_assign_vector()
|
/linux-4.1.27/drivers/net/wireless/b43legacy/ |
D | main.c |
  2495   while (skb_queue_len(&wl->tx_queue[queue_num])) {   in b43legacy_tx_work()
  2496   skb = skb_dequeue(&wl->tx_queue[queue_num]);   in b43legacy_tx_work()
  2504   skb_queue_head(&wl->tx_queue[queue_num], skb);   in b43legacy_tx_work()
  2532   skb_queue_tail(&wl->tx_queue[skb->queue_mapping], skb);   in b43legacy_op_tx()
  2978   while (skb_queue_len(&wl->tx_queue[queue_num]))   in b43legacy_wireless_core_stop()
  2979   dev_kfree_skb(skb_dequeue(&wl->tx_queue[queue_num]));   in b43legacy_wireless_core_stop()
  3867   skb_queue_head_init(&wl->tx_queue[queue_num]);   in b43legacy_wireless_init()
|
D | b43legacy.h | 633 struct sk_buff_head tx_queue[B43legacy_QOS_QUEUE_NUM]; member
|
/linux-4.1.27/drivers/net/wireless/rt2x00/ |
D | rt2x00queue.c | 991 bool tx_queue = in rt2x00queue_flush_queue() local 1004 if (!drop && tx_queue) in rt2x00queue_flush_queue()
|
/linux-4.1.27/drivers/net/wireless/ipw2x00/ |
D | ipw2100.c |
  2820   struct ipw2100_bd_queue *txq = &priv->tx_queue;   in __ipw2100_tx_process()
  3012   struct ipw2100_bd_queue *txq = &priv->tx_queue;   in ipw2100_tx_send_commands()
  3081   struct ipw2100_bd_queue *txq = &priv->tx_queue;   in ipw2100_tx_send_data()
  4431   err = bd_queue_allocate(priv, &priv->tx_queue, TX_QUEUE_LENGTH);   in ipw2100_tx_allocate()
  4442   bd_queue_free(priv, &priv->tx_queue);   in ipw2100_tx_allocate()
  4516   priv->tx_queue.oldest = 0;   in ipw2100_tx_initialize()
  4517   priv->tx_queue.available = priv->tx_queue.entries;   in ipw2100_tx_initialize()
  4518   priv->tx_queue.next = 0;   in ipw2100_tx_initialize()
  4520   SET_STAT(&priv->txq_stat, priv->tx_queue.available);   in ipw2100_tx_initialize()
  4522   bd_queue_initialize(priv, &priv->tx_queue,   in ipw2100_tx_initialize()
    [all …]
|
D | ipw2100.h | 544 struct ipw2100_bd_queue tx_queue; member
|
/linux-4.1.27/drivers/net/ethernet/intel/igb/ |
D | igb_main.c |
   798   int tx_queue = IGB_N0_QUEUE;   in igb_assign_vector() local
   804   tx_queue = q_vector->tx.ring->reg_idx;   in igb_assign_vector()
   815   if (tx_queue > IGB_N0_QUEUE)   in igb_assign_vector()
   816   msixbm |= E1000_EICR_TX_QUEUE0 << tx_queue;   in igb_assign_vector()
   832   if (tx_queue > IGB_N0_QUEUE)   in igb_assign_vector()
   834   tx_queue & 0x7,   in igb_assign_vector()
   835   ((tx_queue & 0x8) << 1) + 8);   in igb_assign_vector()
   853   if (tx_queue > IGB_N0_QUEUE)   in igb_assign_vector()
   855   tx_queue >> 1,   in igb_assign_vector()
   856   ((tx_queue & 0x1) << 4) + 8);   in igb_assign_vector()
|
/linux-4.1.27/drivers/net/wireless/b43/ |
D | b43.h | 960 struct sk_buff_head tx_queue[B43_QOS_QUEUE_NUM]; member
|
D | main.c |
  3616   while (skb_queue_len(&wl->tx_queue[queue_num])) {   in b43_tx_work()
  3617   skb = skb_dequeue(&wl->tx_queue[queue_num]);   in b43_tx_work()
  3625   skb_queue_head(&wl->tx_queue[queue_num], skb);   in b43_tx_work()
  3656   skb_queue_tail(&wl->tx_queue[skb->queue_mapping], skb);   in b43_op_tx()
  4402   while (skb_queue_len(&wl->tx_queue[queue_num])) {   in b43_wireless_core_stop()
  4405   skb = skb_dequeue(&wl->tx_queue[queue_num]);   in b43_wireless_core_stop()
  5647   skb_queue_head_init(&wl->tx_queue[queue_num]);   in b43_wireless_init()
|
/linux-4.1.27/net/irda/irnet/ |
D | irnet_ppp.c | 981 skb_queue_len(&self->tsap->tx_queue)); in ppp_irnet_send()
|
/linux-4.1.27/drivers/net/wireless/iwlegacy/ |
D | debug.c | 1330 DEBUGFS_READ_FILE_OPS(tx_queue); 1383 DEBUGFS_ADD_FILE(tx_queue, dir_debug, S_IRUSR); in il_dbgfs_register()
|
/linux-4.1.27/include/net/ |
D | sock.h | 1645 static inline void sk_tx_queue_set(struct sock *sk, int tx_queue) in sk_tx_queue_set() argument 1647 sk->sk_tx_queue_mapping = tx_queue; in sk_tx_queue_set()
|
/linux-4.1.27/drivers/net/wireless/iwlwifi/pcie/ |
D | trans.c | 1998 DEBUGFS_READ_FILE_OPS(tx_queue); 2009 DEBUGFS_ADD_FILE(tx_queue, dir, S_IRUSR); in iwl_trans_pcie_dbgfs_register()
|