/linux-4.1.27/drivers/staging/lustre/lnet/klnds/socklnd/

    socklnd_proto.c
        48    list_add_tail(&tx_msg->tx_list, &conn->ksnc_tx_queue);    in ksocknal_queue_tx_msg_v1()
        62    if (tx->tx_list.next == &conn->ksnc_tx_queue) {    in ksocknal_next_tx_carrier()
        66    conn->ksnc_tx_carrier = list_entry(tx->tx_list.next,    in ksocknal_next_tx_carrier()
        67    ksock_tx_t, tx_list);    in ksocknal_next_tx_carrier()
        90    list_add_tail(&tx_ack->tx_list,    in ksocknal_queue_tx_zcack_v2()
        100   list_add_tail(&tx_ack->tx_list,    in ksocknal_queue_tx_zcack_v2()
        132   list_add_tail(&tx_msg->tx_list, &conn->ksnc_tx_queue);    in ksocknal_queue_tx_msg_v2()
        138   list_add_tail(&tx_msg->tx_list, &conn->ksnc_tx_queue);    in ksocknal_queue_tx_msg_v2()
        149   list_add(&tx_msg->tx_list, &tx->tx_list);    in ksocknal_queue_tx_msg_v2()
        150   list_del(&tx->tx_list);    in ksocknal_queue_tx_msg_v2()
        [all …]

    socklnd_cb.c
        42    next, ksock_tx_t, tx_list);    in ksocknal_alloc_tx()
        44    list_del(&tx->tx_list);    in ksocknal_alloc_tx()
        102   list_add(&tx->tx_list, &ksocknal_data.ksnd_idle_noop_txs);    in ksocknal_free_tx()
        413   tx = list_entry (txlist->next, ksock_tx_t, tx_list);    in ksocknal_txlist_done()
        425   list_del (&tx->tx_list);    in ksocknal_txlist_done()
        760   list_add_tail(&ztx->tx_list, &sched->kss_zombie_noop_txs);    in ksocknal_queue_tx_locked()
        910   list_add_tail (&tx->tx_list, &peer->ksnp_tx_queue);    in ksocknal_launch_packet()
        1475  ksock_tx_t, tx_list);    in ksocknal_scheduler()
        1481  list_del(&tx->tx_list);    in ksocknal_scheduler()
        1501  list_add(&tx->tx_list,    in ksocknal_scheduler()
        [all …]

    socklnd.c
        1271  list_for_each_entry_safe(tx, txtmp, &peer->ksnp_tx_queue, tx_list) {    in ksocknal_create_conn()
        1275  list_del(&tx->tx_list);    in ksocknal_create_conn()
        1451  tx_list)    in ksocknal_close_conn_locked()
        2238  tx = list_entry(zlist.next, ksock_tx_t, tx_list);    in ksocknal_free_buffers()
        2239  list_del(&tx->tx_list);    in ksocknal_free_buffers()

    socklnd.h
        210   struct list_head tx_list;    /* queue on conn for transmission etc */    (member)
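All of the socklnd call sites above use the same intrusive-list idiom: each ksock_tx_t embeds a struct list_head tx_list, list_add_tail() links the tx onto a connection's ksnc_tx_queue, and the consumer recovers the containing tx with list_entry() before unlinking it with list_del(). The sketch below illustrates that idiom with cut-down stand-in types; only the tx_list and ksnc_tx_queue names come from the listing, and the locking the real code performs around these operations is omitted.

    /*
     * Minimal sketch of the socklnd queueing idiom, with simplified
     * stand-in types.  The real code holds the appropriate lock
     * around both helpers.
     */
    #include <linux/list.h>

    struct ksock_tx_sketch {
        struct list_head tx_list;        /* links the tx onto a queue */
        int tx_niov;                     /* illustrative payload field */
    };

    struct ksock_conn_sketch {
        struct list_head ksnc_tx_queue;  /* txs waiting to be sent */
    };

    /* Queue a tx at the tail of the connection's send queue. */
    static void queue_tx(struct ksock_conn_sketch *conn,
                         struct ksock_tx_sketch *tx)
    {
        list_add_tail(&tx->tx_list, &conn->ksnc_tx_queue);
    }

    /* Pop the oldest queued tx, or return NULL if the queue is empty. */
    static struct ksock_tx_sketch *next_tx(struct ksock_conn_sketch *conn)
    {
        struct ksock_tx_sketch *tx;

        if (list_empty(&conn->ksnc_tx_queue))
            return NULL;

        tx = list_entry(conn->ksnc_tx_queue.next,
                        struct ksock_tx_sketch, tx_list);
        list_del(&tx->tx_list);
        return tx;
    }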
/linux-4.1.27/sound/soc/intel/common/

    sst-ipc.c
        111   list_add_tail(&msg->list, &ipc->tx_list);    in ipc_tx_message()
        149   if (list_empty(&ipc->tx_list) || ipc->pending) {    in ipc_tx_msgs()
        162   msg = list_first_entry(&ipc->tx_list, struct ipc_message, list);    in ipc_tx_msgs()
        233   list_for_each_entry_safe(msg, tmp, &ipc->tx_list, list) {    in sst_ipc_drop_all()
        255   INIT_LIST_HEAD(&ipc->tx_list);    in sst_ipc_init()

    sst-ipc.h
        62    struct list_head tx_list;    (member)
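Here tx_list is a simple FIFO of pending IPC messages: ipc_tx_message() appends with list_add_tail(), the tx worker pops the head with list_first_entry(), and sst_ipc_drop_all() walks the list with the _safe iterator. A rough sketch of the producer/consumer half of that pattern follows; the lock name and message layout are assumptions, not the driver's actual definitions.

    #include <linux/list.h>
    #include <linux/spinlock.h>

    struct ipc_msg_sketch {
        struct list_head list;     /* links the message onto tx_list */
        void *tx_data;             /* illustrative payload pointer */
    };

    struct ipc_sketch {
        spinlock_t lock;           /* assumed name; protects tx_list */
        struct list_head tx_list;  /* messages waiting to be sent */
    };

    /* Producer side: append a message to the back of the queue. */
    static void ipc_queue_tx(struct ipc_sketch *ipc, struct ipc_msg_sketch *msg)
    {
        unsigned long flags;

        spin_lock_irqsave(&ipc->lock, flags);
        list_add_tail(&msg->list, &ipc->tx_list);
        spin_unlock_irqrestore(&ipc->lock, flags);
    }

    /* Worker side: take the oldest pending message, or NULL if none. */
    static struct ipc_msg_sketch *ipc_pop_tx(struct ipc_sketch *ipc)
    {
        struct ipc_msg_sketch *msg = NULL;
        unsigned long flags;

        spin_lock_irqsave(&ipc->lock, flags);
        if (!list_empty(&ipc->tx_list)) {
            msg = list_first_entry(&ipc->tx_list, struct ipc_msg_sketch, list);
            list_del(&msg->list);
        }
        spin_unlock_irqrestore(&ipc->lock, flags);
        return msg;
    }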
/linux-4.1.27/drivers/ps3/

    ps3-vuart.c
        84    } tx_list;    (member)
        498   spin_lock_irqsave(&priv->tx_list.lock, flags);    in ps3_vuart_write()
        500   if (list_empty(&priv->tx_list.head)) {    in ps3_vuart_write()
        505   spin_unlock_irqrestore(&priv->tx_list.lock, flags);    in ps3_vuart_write()
        523   spin_unlock_irqrestore(&priv->tx_list.lock, flags);    in ps3_vuart_write()
        535   spin_lock_irqsave(&priv->tx_list.lock, flags);    in ps3_vuart_write()
        536   list_add_tail(&lb->link, &priv->tx_list.head);    in ps3_vuart_write()
        538   spin_unlock_irqrestore(&priv->tx_list.lock, flags);    in ps3_vuart_write()
        740   spin_lock_irqsave(&priv->tx_list.lock, flags);    in ps3_vuart_handle_interrupt_tx()
        742   list_for_each_entry_safe(lb, n, &priv->tx_list.head, link) {    in ps3_vuart_handle_interrupt_tx()
        [all …]
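Unlike most of the other hits, ps3-vuart's tx_list is an anonymous struct that bundles the list head with its own spinlock (the "} tx_list;" definition at line 84), so the call sites lock priv->tx_list.lock and operate on priv->tx_list.head. A cut-down sketch of that shape, assuming field types the listing does not show:

    #include <linux/list.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    struct vuart_buf_sketch {
        struct list_head link;       /* chained on tx_list.head */
        size_t bytes;                /* illustrative payload size */
    };

    struct vuart_priv_sketch {
        struct {
            spinlock_t lock;         /* protects head */
            struct list_head head;   /* queued tx buffers */
        } tx_list;
    };

    static void vuart_tx_list_init(struct vuart_priv_sketch *priv)
    {
        spin_lock_init(&priv->tx_list.lock);
        INIT_LIST_HEAD(&priv->tx_list.head);
    }

    /* Append a buffer to the tx queue, as ps3_vuart_write() does. */
    static void vuart_queue_tx(struct vuart_priv_sketch *priv,
                               struct vuart_buf_sketch *lb)
    {
        unsigned long flags;

        spin_lock_irqsave(&priv->tx_list.lock, flags);
        list_add_tail(&lb->link, &priv->tx_list.head);
        spin_unlock_irqrestore(&priv->tx_list.lock, flags);
    }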
/linux-4.1.27/drivers/dma/

    xgene-dma.c
        282   struct list_head tx_list;    (member)
        571   list_splice_tail_init(&desc->tx_list, &chan->ld_pending);    in xgene_dma_tx_submit()
        600   INIT_LIST_HEAD(&desc->tx_list);    in xgene_dma_alloc_descriptor()
        975   list_add_tail(&new->node, &first->tx_list);    in xgene_dma_prep_memcpy()
        980   list_splice(&first->tx_list, &new->tx_list);    in xgene_dma_prep_memcpy()
        988   xgene_dma_free_tx_desc_list(chan, &first->tx_list);    in xgene_dma_prep_memcpy()
        1050  list_add_tail(&new->node, &first->tx_list);    in xgene_dma_prep_sg()
        1089  list_splice(&first->tx_list, &new->tx_list);    in xgene_dma_prep_sg()
        1096  xgene_dma_free_tx_desc_list(chan, &first->tx_list);    in xgene_dma_prep_sg()
        1131  list_add_tail(&new->node, &first->tx_list);    in xgene_dma_prep_xor()
        [all …]

    mmp_pdma.c
        88    struct list_head tx_list;    (member)
        350   list_for_each_entry(child, &desc->tx_list, node) {    in mmp_pdma_tx_submit()
        355   list_splice_tail_init(&desc->tx_list, &chan->chain_pending);    in mmp_pdma_tx_submit()
        375   INIT_LIST_HEAD(&desc->tx_list);    in mmp_pdma_alloc_descriptor()
        505   list_add_tail(&new->node, &first->tx_list);    in mmp_pdma_prep_memcpy()
        521   mmp_pdma_free_desc_list(chan, &first->tx_list);    in mmp_pdma_prep_memcpy()
        577   list_add_tail(&new->node, &first->tx_list);    in mmp_pdma_prep_slave_sg()
        599   mmp_pdma_free_desc_list(chan, &first->tx_list);    in mmp_pdma_prep_slave_sg()
        671   list_add_tail(&new->node, &first->tx_list);    in mmp_pdma_prep_dma_cyclic()
        685   mmp_pdma_free_desc_list(chan, &first->tx_list);    in mmp_pdma_prep_dma_cyclic()

    txx9dmac.c
        185   if (!list_empty(&desc->tx_list))    in txx9dmac_last_child()
        186   desc = list_entry(desc->tx_list.prev, typeof(*desc), desc_node);    in txx9dmac_last_child()
        201   INIT_LIST_HEAD(&desc->tx_list);    in txx9dmac_desc_alloc()
        250   list_for_each_entry(child, &desc->tx_list, desc_node)    in txx9dmac_sync_desc_for_cpu()
        272   list_for_each_entry(child, &desc->tx_list, desc_node)    in txx9dmac_desc_put()
        276   list_splice_init(&desc->tx_list, &dc->free_list);    in txx9dmac_desc_put()
        418   list_splice_init(&desc->tx_list, &dc->free_list);    in txx9dmac_descriptor_complete()
        537   list_for_each_entry(child, &bad_desc->tx_list, desc_node)    in txx9dmac_handle_error()
        579   list_for_each_entry(child, &desc->tx_list, desc_node)    in txx9dmac_scan_descriptors()
        789   list_add_tail(&desc->desc_node, &first->tx_list);    in txx9dmac_prep_dma_memcpy()
        [all …]

    ep93xx_dma.c
        128   struct list_head tx_list;    (member)
        244   while (!list_empty(&desc->tx_list)) {    in ep93xx_dma_set_active()
        245   struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list,    in ep93xx_dma_set_active()
        700   list_splice_init(&desc->tx_list, &edmac->free_list);    in ep93xx_dma_desc_put()
        915   INIT_LIST_HEAD(&desc->tx_list);    in ep93xx_dma_alloc_chan_resources()
        1001  list_add_tail(&desc->node, &first->tx_list);    in ep93xx_dma_prep_dma_memcpy()
        1074  list_add_tail(&desc->node, &first->tx_list);    in ep93xx_dma_prep_slave_sg()
        1153  list_add_tail(&desc->node, &first->tx_list);    in ep93xx_dma_prep_dma_cyclic()

    fsldma.c
        405   list_splice_tail_init(&desc->tx_list, &chan->ld_pending);    in append_ld_queue()
        429   list_for_each_entry(child, &desc->tx_list, node) {    in fsl_dma_tx_submit()
        472   INIT_LIST_HEAD(&desc->tx_list);    in fsl_dma_alloc_descriptor()
        809   list_add_tail(&new->node, &first->tx_list);    in fsl_dma_prep_memcpy()
        824   fsldma_free_desc_list_reverse(chan, &first->tx_list);    in fsl_dma_prep_memcpy()
        888   list_add_tail(&new->node, &first->tx_list);    in fsl_dma_prep_sg()
        940   fsldma_free_desc_list_reverse(chan, &first->tx_list);    in fsl_dma_prep_sg()

    pch_dma.c
        99    struct list_head tx_list;    (member)
        348   if (list_empty(&desc->tx_list)) {    in pdc_dostart()
        367   list_splice_init(&desc->tx_list, &pd_chan->free_list);    in pdc_chain_complete()
        450   INIT_LIST_HEAD(&desc->tx_list);    in pdc_alloc_desc()
        499   list_splice_init(&desc->tx_list, &pd_chan->free_list);    in pdc_desc_put()
        646   list_add_tail(&desc->desc_node, &first->tx_list);    in pd_prep_slave_sg()

    tegra20-apb-dma.c
        174   struct list_head tx_list;    (member)
        313   if (!list_empty(&dma_desc->tx_list))    in tegra_dma_desc_put()
        314   list_splice_init(&dma_desc->tx_list, &tdc->free_sg_req);    in tegra_dma_desc_put()
        693   list_splice_tail_init(&dma_desc->tx_list, &tdc->pending_sg_req);    in tegra_dma_tx_submit()
        970   INIT_LIST_HEAD(&dma_desc->tx_list);    in tegra_dma_prep_slave_sg()
        1013  list_add_tail(&sg_req->node, &dma_desc->tx_list);    in tegra_dma_prep_slave_sg()
        1114  INIT_LIST_HEAD(&dma_desc->tx_list);    in tegra_dma_prep_dma_cyclic()
        1144  list_add_tail(&sg_req->node, &dma_desc->tx_list);    in tegra_dma_prep_dma_cyclic()

    at_hdmac.c
        115   INIT_LIST_HEAD(&desc->tx_list);    in atc_alloc_descriptor()
        181   list_for_each_entry(child, &desc->tx_list, desc_node)    in atc_desc_put()
        185   list_splice_init(&desc->tx_list, &atchan->free_list);    in atc_desc_put()
        211   &(*first)->tx_list);    in atc_desc_chain()
        356   list_for_each_entry(desc, &desc_first->tx_list, desc_node) {    in atc_get_bytes_left()
        398   list_splice_init(&desc->tx_list, &atchan->free_list);    in atc_chain_complete()
        514   list_for_each_entry(child, &bad_desc->tx_list, desc_node)    in atc_handle_error()

    txx9dmac.h
        233   struct list_head tx_list;    (member)

    fsldma.h
        104   struct list_head tx_list;    (member)

    iop-adma.c
        343   list_splice(&chain, &alloc_tail->tx_list);    in iop_adma_alloc_slots()
        390   list_splice_init(&sw_desc->tx_list,    in iop_adma_tx_submit()
        457   INIT_LIST_HEAD(&slot->tx_list);    in iop_adma_alloc_chan_resources()
        1460  list_splice_init(&sw_desc->tx_list, &iop_chan->chain);    in iop_chan_start_null_memcpy()
        1512  list_splice_init(&sw_desc->tx_list, &iop_chan->chain);    in iop_chan_start_null_xor()

    at_hdmac_regs.h
        193   struct list_head tx_list;    (member)
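Almost every dmaengine driver in this directory uses tx_list the same way: the first software descriptor of a transfer collects the remaining segments on its tx_list during a prep_*() call, and tx_submit() later splices the whole chain onto the channel's pending list in one operation, with INIT_LIST_HEAD() at allocation time and a free-list splice on teardown. A condensed sketch of that life cycle follows, with invented type names; only the list calls mirror the call sites above. The appeal of the splice is that publishing an N-segment transfer to the channel costs a constant number of pointer updates rather than N individual insertions.

    #include <linux/list.h>
    #include <linux/slab.h>

    struct dma_desc_sketch {
        struct list_head node;       /* position in a chain or queue */
        struct list_head tx_list;    /* children of the first descriptor */
    };

    struct dma_chan_sketch {
        struct list_head ld_pending; /* descriptors awaiting hardware */
    };

    static struct dma_desc_sketch *desc_alloc(void)
    {
        struct dma_desc_sketch *desc = kzalloc(sizeof(*desc), GFP_KERNEL);

        if (desc)
            INIT_LIST_HEAD(&desc->tx_list);
        return desc;
    }

    /* prep: chain each additional segment onto the first descriptor. */
    static void desc_chain(struct dma_desc_sketch *first,
                           struct dma_desc_sketch *new)
    {
        list_add_tail(&new->node, &first->tx_list);
    }

    /* submit: move first and its children to the channel's pending list. */
    static void desc_submit(struct dma_chan_sketch *chan,
                            struct dma_desc_sketch *first)
    {
        list_add_tail(&first->node, &chan->ld_pending);
        list_splice_tail_init(&first->tx_list, &chan->ld_pending);
    }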
/linux-4.1.27/drivers/net/ethernet/octeon/

    octeon_mgmt.c
        132   struct sk_buff_head tx_list;    (member)
        260   spin_lock_irqsave(&p->tx_list.lock, flags);    in octeon_mgmt_clean_tx_buffers()
        265   spin_unlock_irqrestore(&p->tx_list.lock, flags);    in octeon_mgmt_clean_tx_buffers()
        276   skb = __skb_dequeue(&p->tx_list);    in octeon_mgmt_clean_tx_buffers()
        285   spin_unlock_irqrestore(&p->tx_list.lock, flags);    in octeon_mgmt_clean_tx_buffers()
        1258  skb_queue_purge(&p->tx_list);    in octeon_mgmt_stop()
        1288  spin_lock_irqsave(&p->tx_list.lock, flags);    in octeon_mgmt_xmit()
        1291  spin_unlock_irqrestore(&p->tx_list.lock, flags);    in octeon_mgmt_xmit()
        1293  spin_lock_irqsave(&p->tx_list.lock, flags);    in octeon_mgmt_xmit()
        1298  spin_unlock_irqrestore(&p->tx_list.lock, flags);    in octeon_mgmt_xmit()
        [all …]
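octeon_mgmt is the odd one out among the networking hits: tx_list is a struct sk_buff_head rather than a bare list_head, so the queue carries its own spinlock and the driver pairs explicit spin_lock_irqsave() on tx_list.lock with the unlocked __skb_* helpers. A sketch of that combination, with the rest of the driver state omitted:

    #include <linux/skbuff.h>
    #include <linux/spinlock.h>

    struct mgmt_priv_sketch {
        struct sk_buff_head tx_list;    /* skbs handed to the hardware */
    };

    static void mgmt_init(struct mgmt_priv_sketch *p)
    {
        skb_queue_head_init(&p->tx_list);
    }

    static void mgmt_queue_tx(struct mgmt_priv_sketch *p, struct sk_buff *skb)
    {
        unsigned long flags;

        spin_lock_irqsave(&p->tx_list.lock, flags);
        __skb_queue_tail(&p->tx_list, skb);
        spin_unlock_irqrestore(&p->tx_list.lock, flags);
    }

    static struct sk_buff *mgmt_clean_one(struct mgmt_priv_sketch *p)
    {
        struct sk_buff *skb;
        unsigned long flags;

        spin_lock_irqsave(&p->tx_list.lock, flags);
        skb = __skb_dequeue(&p->tx_list);    /* NULL if the queue is empty */
        spin_unlock_irqrestore(&p->tx_list.lock, flags);
        return skb;
    }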
/linux-4.1.27/drivers/staging/lustre/lnet/klnds/o2iblnd/

    o2iblnd_cb.c
        75    kiblnd_pool_free_node(&tx->tx_pool->tpo_pool, &tx->tx_list);    in kiblnd_tx_done()
        92    tx = list_entry(txlist->next, kib_tx_t, tx_list);    in kiblnd_txlist_done()
        94    list_del(&tx->tx_list);    in kiblnd_txlist_done()
        114   tx = container_of(node, kib_tx_t, tx_list);    in kiblnd_get_idle_tx()
        218   kib_tx_t *tx = list_entry(tmp, kib_tx_t, tx_list);    in kiblnd_find_waiting_tx_locked()
        268   list_del(&tx->tx_list);    in kiblnd_handle_completion()
        395   list_del(&tx->tx_list);    in kiblnd_handle_rx()
        836   list_del(&tx->tx_list);    in kiblnd_post_tx_locked()
        871   list_add(&tx->tx_list, &conn->ibc_active_txs);    in kiblnd_post_tx_locked()
        904   list_del(&tx->tx_list);    in kiblnd_post_tx_locked()
        [all …]

    o2iblnd.h
        531   struct list_head tx_list;    /* queue on idle_txs ibc_tx_queue etc. */    (member)

    o2iblnd.c
        1279  list_add(&tx->tx_list, &pool->po_free_list);    in kiblnd_map_tx_pool()
        1942  list_del(&tx->tx_list);    in kiblnd_destroy_tx_pool()
        2067  kib_tx_t *tx = list_entry(node, kib_tx_t, tx_list);    in kiblnd_tx_init()
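The o2iblnd sites show a tx migrating between lists as it changes state: taken from a pool free list, queued on a connection, then moved to ibc_active_txs while the work request is outstanding (kiblnd_post_tx_locked()). A stripped-down sketch of that queue-to-active move; kib_tx_t and the connection carry far more state than shown here, and the required locking is omitted.

    #include <linux/list.h>

    struct kib_tx_sketch {
        struct list_head tx_list;        /* queue on idle_txs, ibc_tx_queue etc. */
    };

    struct kib_conn_sketch {
        struct list_head ibc_tx_queue;   /* txs waiting to be posted */
        struct list_head ibc_active_txs; /* txs posted, awaiting completion */
    };

    /* Post the oldest queued tx: unlink it and track it as active. */
    static struct kib_tx_sketch *post_one_tx(struct kib_conn_sketch *conn)
    {
        struct kib_tx_sketch *tx;

        if (list_empty(&conn->ibc_tx_queue))
            return NULL;

        tx = list_entry(conn->ibc_tx_queue.next,
                        struct kib_tx_sketch, tx_list);
        list_del(&tx->tx_list);
        list_add(&tx->tx_list, &conn->ibc_active_txs);
        return tx;
    }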
/linux-4.1.27/drivers/dma/dw/

    core.c
        119   list_for_each_entry(child, &desc->tx_list, desc_node)    in dwc_desc_put()
        123   list_splice_init(&desc->tx_list, &dwc->free_list);    in dwc_desc_put()
        241   dwc->tx_node_active = &first->tx_list;    in dwc_dostart()
        293   list_for_each_entry(child, &desc->tx_list, desc_node)    in dwc_descriptor_complete()
        297   list_splice_init(&desc->tx_list, &dwc->free_list);    in dwc_descriptor_complete()
        369   head = &desc->tx_list;    in dwc_scan_descriptors()
        431   list_for_each_entry(child, &desc->tx_list, desc_node) {    in dwc_scan_descriptors()
        500   list_for_each_entry(child, &bad_desc->tx_list, desc_node)    in dwc_handle_error()
        740   &first->tx_list);    in dwc_prep_dma_memcpy()
        837   &first->tx_list);    in dwc_prep_slave_sg()
        [all …]

    regs.h
        333   struct list_head tx_list;    (member)
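dw/core.c adds a twist on the child-list pattern: dwc_dostart() points a cursor (tx_node_active) at &first->tx_list and advances it one child at a time, detecting the end of the chain when the cursor wraps back to the head. A sketch of walking a child list with such a bare struct list_head * cursor; the type names are invented and the register programming is left out.

    #include <linux/list.h>

    struct dw_desc_sketch {
        struct list_head desc_node;    /* links children on tx_list */
        struct list_head tx_list;      /* children of the first descriptor */
    };

    struct dw_chan_sketch {
        struct list_head *tx_node_active;    /* next child to program */
        struct dw_desc_sketch *first;        /* transfer being executed */
    };

    /* Start a transfer: the cursor begins at the head of the child list. */
    static void chan_start(struct dw_chan_sketch *dwc,
                           struct dw_desc_sketch *first)
    {
        dwc->first = first;
        dwc->tx_node_active = &first->tx_list;
    }

    /* Advance to the next child; return NULL once the chain is exhausted. */
    static struct dw_desc_sketch *chan_next_child(struct dw_chan_sketch *dwc)
    {
        struct list_head *head = &dwc->first->tx_list;

        dwc->tx_node_active = dwc->tx_node_active->next;
        if (dwc->tx_node_active == head)
            return NULL;    /* wrapped back to the head: no more children */

        return list_entry(dwc->tx_node_active,
                          struct dw_desc_sketch, desc_node);
    }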
/linux-4.1.27/drivers/net/wireless/p54/

    p54spi.h
        102   struct list_head tx_list;    (member)

    p54spi.c
        447   struct p54s_tx_info, tx_list);    in p54spi_wq_tx()
        449   list_del_init(&entry->tx_list);    in p54spi_wq_tx()
        454   tx_list);    in p54spi_wq_tx()
        485   list_add_tail(&di->tx_list, &priv->tx_pending);    in p54spi_op_tx()
/linux-4.1.27/arch/arm/include/asm/hardware/

    iop_adma.h
        100   struct list_head tx_list;    (member)
/linux-4.1.27/drivers/dma/sh/

    shdma-base.c
        569   LIST_HEAD(tx_list);    in shdma_prep_sg()
        618   list_add_tail(&new->node, &tx_list);    in shdma_prep_sg()
        626   list_splice_tail(&tx_list, &schan->ld_free);    in shdma_prep_sg()
        633   list_for_each_entry(new, &tx_list, node)    in shdma_prep_sg()
        635   list_splice(&tx_list, &schan->ld_free);    in shdma_prep_sg()
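shdma_prep_sg() stages the descriptors of one transfer on an on-stack LIST_HEAD(tx_list) and then publishes the whole batch with a single splice: list_splice_tail() into the channel's free list on success, so they are picked up in order at submit time, and a plain list_splice() back on the error path. A sketch of that staging idiom under invented types:

    #include <linux/errno.h>
    #include <linux/list.h>

    struct shdma_desc_sketch {
        struct list_head node;
    };

    struct shdma_chan_sketch {
        struct list_head ld_free;    /* descriptor pool */
    };

    static struct shdma_desc_sketch *get_free_desc(struct shdma_chan_sketch *schan)
    {
        struct shdma_desc_sketch *d;

        if (list_empty(&schan->ld_free))
            return NULL;
        d = list_first_entry(&schan->ld_free, struct shdma_desc_sketch, node);
        list_del(&d->node);
        return d;
    }

    /* Stage 'count' descriptors locally, then publish them in one splice. */
    static int prep_batch(struct shdma_chan_sketch *schan, int count)
    {
        LIST_HEAD(tx_list);    /* on-stack staging list */
        int i;

        for (i = 0; i < count; i++) {
            struct shdma_desc_sketch *new = get_free_desc(schan);

            if (!new) {
                /* error path: return the staged descriptors to the pool */
                list_splice(&tx_list, &schan->ld_free);
                return -ENOMEM;
            }
            list_add_tail(&new->node, &tx_list);
        }

        /* success: append the batch in order so it is consumed FIFO */
        list_splice_tail(&tx_list, &schan->ld_free);
        return 0;
    }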
/linux-4.1.27/drivers/net/ethernet/ti/

    tlan.c
        863   priv->tx_list = priv->rx_list + TLAN_NUM_RX_LISTS;    in tlan_init()
        1077  tail_list = priv->tx_list + priv->tx_tail;    in tlan_start_tx()
        1116  (priv->tx_list + (TLAN_NUM_TX_LISTS - 1))->forward    in tlan_start_tx()
        1119  (priv->tx_list + (priv->tx_tail - 1))->forward    in tlan_start_tx()
        1251  tlan_print_list(priv->tx_list + i, "TX", i);    in tlan_get_stats()
        1381  head_list = priv->tx_list + priv->tx_head;    in tlan_handle_tx_eof()
        1404  head_list = priv->tx_list + priv->tx_head;    in tlan_handle_tx_eof()
        1415  head_list = priv->tx_list + priv->tx_head;    in tlan_handle_tx_eof()
        1660  head_list = priv->tx_list + priv->tx_head;    in tlan_handle_tx_eoc()
        1929  list = priv->tx_list + i;    in tlan_reset_lists()
        [all …]

    tlan.h
        186   struct tlan_list *tx_list;    (member)
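tlan's tx_list is not a linked list at all: it is a pointer to an array of hardware descriptors carved out of one DMA allocation (tx_list = rx_list + TLAN_NUM_RX_LISTS), indexed as a ring by tx_head/tx_tail, with each entry's forward field chaining to the next. A schematic sketch of the ring indexing; the real struct tlan_list has more fields and its forward pointers hold DMA addresses rather than kernel pointers.

    #include <linux/types.h>

    #define SKETCH_NUM_TX_LISTS 64    /* stand-in for TLAN_NUM_TX_LISTS */

    struct tlan_list_sketch {
        u32 forward;    /* DMA address of the next descriptor */
        u32 c_stat;     /* ownership/status word */
    };

    struct tlan_priv_sketch {
        struct tlan_list_sketch *tx_list;    /* base of the tx ring */
        u32 tx_head;
        u32 tx_tail;
    };

    /* Claim the descriptor at the tail of the ring for a new frame. */
    static struct tlan_list_sketch *tlan_next_tx(struct tlan_priv_sketch *priv)
    {
        struct tlan_list_sketch *tail_list = priv->tx_list + priv->tx_tail;

        priv->tx_tail = (priv->tx_tail + 1) % SKETCH_NUM_TX_LISTS;
        return tail_list;
    }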
/linux-4.1.27/drivers/dma/ioat/

    dma.c
        242   first = to_ioat_desc(desc->tx_list.next);    in ioat1_tx_submit()
        247   list_splice_tail_init(&desc->tx_list, &ioat->used_desc);    in ioat1_tx_submit()
        289   INIT_LIST_HEAD(&desc_sw->tx_list);    in ioat_dma_alloc_descriptor()
        548   list_splice(&chain, &desc->tx_list);    in ioat1_dma_prep_memcpy()

    dma.h
        182   struct list_head tx_list;    (member)
/linux-4.1.27/drivers/tty/

    n_gsm.c
        248   struct list_head tx_list;    /* Pending data packets */    (member)
        687   list_for_each_entry_safe(msg, nmsg, &gsm->tx_list, list) {    in gsm_data_kick()
        768   list_add_tail(&msg->list, &gsm->tx_list);    in __gsm_data_queue()
        2074  list_for_each_entry_safe(txq, ntxq, &gsm->tx_list, list)    in gsm_cleanup_mux()
        2076  INIT_LIST_HEAD(&gsm->tx_list);    in gsm_cleanup_mux()
        2183  INIT_LIST_HEAD(&gsm->tx_list);    in gsm_alloc_mux()
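n_gsm queues pending data frames on gsm->tx_list and, on teardown, flushes them in gsm_cleanup_mux() by walking the list with the _safe iterator and re-initialising the head afterwards. A sketch of that flush idiom; the message type is stripped down and plain kfree() stands in for the driver's own frame-free routine.

    #include <linux/list.h>
    #include <linux/slab.h>

    struct gsm_msg_sketch {
        struct list_head list;
        unsigned int len;            /* illustrative frame length */
    };

    struct gsm_mux_sketch {
        struct list_head tx_list;    /* pending data packets */
    };

    /* Drop every queued frame and leave the list usable again. */
    static void mux_flush_tx(struct gsm_mux_sketch *gsm)
    {
        struct gsm_msg_sketch *txq, *ntxq;

        /* the _safe iterator caches the next pointer, so freeing the
         * current entry while walking is fine */
        list_for_each_entry_safe(txq, ntxq, &gsm->tx_list, list)
            kfree(txq);
        INIT_LIST_HEAD(&gsm->tx_list);
    }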