Searched refs:td_desc (Results 1 - 1 of 1) sorted by relevance

/linux-4.4.14/drivers/dma/timb_dma.c

188 struct timb_dma_desc *td_desc; __td_start_dma() local
196 td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc, __td_start_dma()
207 iowrite32(td_desc->txd.phys, td_chan->membase + __td_start_dma()
217 iowrite32(td_desc->txd.phys, td_chan->membase + __td_start_dma()
223 if (td_desc->interrupt) __td_start_dma()
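
The hits at lines 196-223 are the channel start path: __td_start_dma() takes the
descriptor at the head of active_list, writes its mapped address (txd.phys) into
the controller's descriptor-address register, and unmasks the channel IRQ only
if the descriptor asked for an interrupt. A condensed sketch of that shape,
assuming the driver's register offsets (TIMBDMA_OFFS_RX_DLAR/TX_DLAR), its
direction field, and the __td_enable_chan_irq() helper, none of which appear in
this result list:

static void __td_start_dma(struct timb_dma_chan *td_chan)
{
	struct timb_dma_desc *td_desc;

	/* the hardware is always programmed with the head of active_list */
	td_desc = list_entry(td_chan->active_list.next,
			     struct timb_dma_desc, desc_node);

	if (td_chan->direction == DMA_DEV_TO_MEM)
		iowrite32(td_desc->txd.phys,
			  td_chan->membase + TIMBDMA_OFFS_RX_DLAR);
	else
		iowrite32(td_desc->txd.phys,
			  td_chan->membase + TIMBDMA_OFFS_TX_DLAR);

	/* only unmask the channel IRQ when this descriptor wants one */
	if (td_desc->interrupt)
		__td_enable_chan_irq(td_chan);
}
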
232 struct timb_dma_desc *td_desc; __td_finish() local
238 td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc, __td_finish()
240 txd = &td_desc->txd; __td_finish()
258 list_move(&td_desc->desc_node, &td_chan->free_list); __td_finish()
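
Lines 232-258 are the mirror image: __td_finish() completes the descriptor at
the head of active_list and recycles it onto free_list instead of freeing it. A
sketch, assuming the 4.4-era callback/callback_param fields of
dma_async_tx_descriptor and the standard dma_cookie_complete() step, which the
result list does not show:

static void __td_finish(struct timb_dma_chan *td_chan)
{
	struct dma_async_tx_descriptor *txd;
	struct timb_dma_desc *td_desc;
	dma_async_tx_callback callback;
	void *param;

	/* can be empty if the transfer was already terminated */
	if (list_empty(&td_chan->active_list))
		return;

	td_desc = list_entry(td_chan->active_list.next,
			     struct timb_dma_desc, desc_node);
	txd = &td_desc->txd;

	dma_cookie_complete(txd);
	callback = txd->callback;
	param = txd->callback_param;

	/* the descriptor is recycled, not freed */
	list_move(&td_desc->desc_node, &td_chan->free_list);

	if (callback)
		callback(param);
}
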
277 struct timb_dma_desc *td_desc = __td_ier_mask() local
280 if (td_desc->interrupt) __td_ier_mask()
290 struct timb_dma_desc *td_desc; __td_start_next() local
295 td_desc = list_entry(td_chan->queue.next, struct timb_dma_desc, __td_start_next()
299 __func__, td_desc->txd.cookie); __td_start_next()
301 list_move(&td_desc->desc_node, &td_chan->active_list); __td_start_next()
307 struct timb_dma_desc *td_desc = container_of(txd, struct timb_dma_desc, td_tx_submit() local
319 list_add_tail(&td_desc->desc_node, &td_chan->active_list); td_tx_submit()
325 list_add_tail(&td_desc->desc_node, &td_chan->queue); td_tx_submit()
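
td_tx_submit() (lines 307-325) is why the channel keeps two lists: a descriptor
submitted while the channel is idle goes straight onto active_list and the
hardware is kicked, while a busy channel parks it on queue for __td_start_next()
to pick up later. A sketch, assuming a td_chan->lock spinlock and the stock
dma_cookie_assign() helper:

static dma_cookie_t td_tx_submit(struct dma_async_tx_descriptor *txd)
{
	struct timb_dma_desc *td_desc =
		container_of(txd, struct timb_dma_desc, txd);
	struct timb_dma_chan *td_chan =
		container_of(txd->chan, struct timb_dma_chan, chan);
	dma_cookie_t cookie;

	spin_lock_bh(&td_chan->lock);
	cookie = dma_cookie_assign(txd);

	if (list_empty(&td_chan->active_list)) {
		/* idle channel: start the transfer right away */
		list_add_tail(&td_desc->desc_node, &td_chan->active_list);
		__td_start_dma(td_chan);
	} else {
		/* busy channel: queue for __td_start_next() */
		list_add_tail(&td_desc->desc_node, &td_chan->queue);
	}

	spin_unlock_bh(&td_chan->lock);
	return cookie;
}
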
336 struct timb_dma_desc *td_desc; td_alloc_init_desc() local
339 td_desc = kzalloc(sizeof(struct timb_dma_desc), GFP_KERNEL); td_alloc_init_desc()
340 if (!td_desc) { td_alloc_init_desc()
345 td_desc->desc_list_len = td_chan->desc_elems * TIMB_DMA_DESC_SIZE; td_alloc_init_desc()
347 td_desc->desc_list = kzalloc(td_desc->desc_list_len, GFP_KERNEL); td_alloc_init_desc()
348 if (!td_desc->desc_list) { td_alloc_init_desc()
353 dma_async_tx_descriptor_init(&td_desc->txd, chan); td_alloc_init_desc()
354 td_desc->txd.tx_submit = td_tx_submit; td_alloc_init_desc()
355 td_desc->txd.flags = DMA_CTRL_ACK; td_alloc_init_desc()
357 td_desc->txd.phys = dma_map_single(chan2dmadev(chan), td_alloc_init_desc()
358 td_desc->desc_list, td_desc->desc_list_len, DMA_TO_DEVICE); td_alloc_init_desc()
360 err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys); td_alloc_init_desc()
366 return td_desc; td_alloc_init_desc()
368 kfree(td_desc->desc_list); td_alloc_init_desc()
369 kfree(td_desc); td_alloc_init_desc()
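
Lines 336-369 give the whole allocation path: a kzalloc'd wrapper, a kzalloc'd
hardware descriptor list sized from desc_elems, dmaengine initialization, and a
streaming mapping that must be checked with dma_mapping_error() before use.
Reassembled below from the fragments; the goto label and the NULL returns on the
error paths are assumptions:

static struct timb_dma_desc *td_alloc_init_desc(struct timb_dma_chan *td_chan)
{
	struct dma_chan *chan = &td_chan->chan;
	struct timb_dma_desc *td_desc;
	int err;

	td_desc = kzalloc(sizeof(struct timb_dma_desc), GFP_KERNEL);
	if (!td_desc)
		return NULL;

	/* room for one hardware element per scatterlist entry */
	td_desc->desc_list_len = td_chan->desc_elems * TIMB_DMA_DESC_SIZE;
	td_desc->desc_list = kzalloc(td_desc->desc_list_len, GFP_KERNEL);
	if (!td_desc->desc_list)
		goto err;

	dma_async_tx_descriptor_init(&td_desc->txd, chan);
	td_desc->txd.tx_submit = td_tx_submit;
	td_desc->txd.flags = DMA_CTRL_ACK;

	/* map the list so the controller can fetch it */
	td_desc->txd.phys = dma_map_single(chan2dmadev(chan),
		td_desc->desc_list, td_desc->desc_list_len, DMA_TO_DEVICE);
	err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys);
	if (err)
		goto err;

	return td_desc;

err:
	kfree(td_desc->desc_list);	/* kfree(NULL) is a no-op */
	kfree(td_desc);
	return NULL;
}
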
375 static void td_free_desc(struct timb_dma_desc *td_desc) td_free_desc() argument
377 dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc); td_free_desc()
378 dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys, td_free_desc()
379 td_desc->desc_list_len, DMA_TO_DEVICE); td_free_desc()
381 kfree(td_desc->desc_list); td_free_desc()
382 kfree(td_desc); td_free_desc()
385 static void td_desc_put(struct timb_dma_chan *td_chan, td_desc_put() argument
386 struct timb_dma_desc *td_desc) td_desc_put()
388 dev_dbg(chan2dev(&td_chan->chan), "Putting desc: %p\n", td_desc); td_desc_put()
391 list_add(&td_desc->desc_node, &td_chan->free_list); td_desc_put()
397 struct timb_dma_desc *td_desc, *_td_desc; td_desc_get() local
401 list_for_each_entry_safe(td_desc, _td_desc, &td_chan->free_list, td_desc_get()
403 if (async_tx_test_ack(&td_desc->txd)) { td_desc_get()
404 list_del(&td_desc->desc_node); td_desc_get()
405 ret = td_desc; td_desc_get()
409 td_desc); td_desc_get()
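
td_desc_get() (lines 397-409) walks free_list with list_for_each_entry_safe()
and only recycles a descriptor once the client has ACKed it; async_tx_test_ack()
is what makes the DMA_CTRL_ACK flag set in td_alloc_init_desc() matter. A sketch
under the same assumed locking:

static struct timb_dma_desc *td_desc_get(struct timb_dma_chan *td_chan)
{
	struct timb_dma_desc *td_desc, *_td_desc;
	struct timb_dma_desc *ret = NULL;

	spin_lock_bh(&td_chan->lock);
	list_for_each_entry_safe(td_desc, _td_desc, &td_chan->free_list,
			desc_node) {
		/* only reuse descriptors the client has ACKed */
		if (async_tx_test_ack(&td_desc->txd)) {
			list_del(&td_desc->desc_node);
			ret = td_desc;
			break;
		}
		dev_dbg(chan2dev(&td_chan->chan), "desc %p not ACKed\n",
			td_desc);
	}
	spin_unlock_bh(&td_chan->lock);

	return ret;
}
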
426 struct timb_dma_desc *td_desc = td_alloc_init_desc(td_chan); td_alloc_chan_resources() local
427 if (!td_desc) { td_alloc_chan_resources()
437 td_desc_put(td_chan, td_desc); td_alloc_chan_resources()
451 struct timb_dma_desc *td_desc, *_td_desc; td_free_chan_resources() local
464 list_for_each_entry_safe(td_desc, _td_desc, &list, desc_node) { td_free_chan_resources()
466 td_desc); td_free_chan_resources()
467 td_free_desc(td_desc); td_free_chan_resources()
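
In td_free_chan_resources() (lines 451-467) the free list is emptied under the
lock by splicing it onto a private list, and the descriptors are then freed
outside the critical section, a common dmaengine teardown idiom. A sketch with
the same caveat about the assumed lock:

static void td_free_chan_resources(struct dma_chan *chan)
{
	struct timb_dma_chan *td_chan =
		container_of(chan, struct timb_dma_chan, chan);
	struct timb_dma_desc *td_desc, *_td_desc;
	LIST_HEAD(list);

	/* nothing may be in flight at this point */
	BUG_ON(!list_empty(&td_chan->active_list));
	BUG_ON(!list_empty(&td_chan->queue));

	/* detach the whole free list, then free outside the lock */
	spin_lock_bh(&td_chan->lock);
	list_splice_init(&td_chan->free_list, &list);
	spin_unlock_bh(&td_chan->lock);

	list_for_each_entry_safe(td_desc, _td_desc, &list, desc_node)
		td_free_desc(td_desc);
}
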
511 struct timb_dma_desc *td_desc; td_prep_slave_sg() local
528 td_desc = td_desc_get(td_chan); td_prep_slave_sg()
529 if (!td_desc) { td_prep_slave_sg()
534 td_desc->interrupt = (flags & DMA_PREP_INTERRUPT) != 0; td_prep_slave_sg()
538 if (desc_usage > td_desc->desc_list_len) { for_each_sg()
543 err = td_fill_desc(td_chan, td_desc->desc_list + desc_usage, sg, for_each_sg()
548 td_desc_put(td_chan, td_desc); for_each_sg()
554 dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys,
555 td_desc->desc_list_len, DMA_MEM_TO_DEV);
557 return &td_desc->txd;
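
td_prep_slave_sg() (lines 511-557) fills the mapped descriptor list one
scatterlist entry at a time, bounds-checking desc_usage against desc_list_len,
and has to dma_sync_single_for_device() before handing &td_desc->txd back,
because the CPU has just written memory the device will fetch. A condensed
sketch; td_fill_desc()'s arguments follow line 543, the error unwinding is an
assumption, and the sync direction is written as DMA_TO_DEVICE to match the
mapping (the listing passes the equal-valued transfer-direction constant
DMA_MEM_TO_DEV):

static struct dma_async_tx_descriptor *td_prep_slave_sg(struct dma_chan *chan,
	struct scatterlist *sgl, unsigned int sg_len,
	enum dma_transfer_direction direction, unsigned long flags,
	void *context)
{
	struct timb_dma_chan *td_chan =
		container_of(chan, struct timb_dma_chan, chan);
	struct timb_dma_desc *td_desc;
	struct scatterlist *sg;
	unsigned int i;
	unsigned int desc_usage = 0;
	int err;

	td_desc = td_desc_get(td_chan);
	if (!td_desc)
		return NULL;

	td_desc->interrupt = (flags & DMA_PREP_INTERRUPT) != 0;

	for_each_sg(sgl, sg, sg_len, i) {
		if (desc_usage > td_desc->desc_list_len) {
			td_desc_put(td_chan, td_desc);
			return NULL;
		}

		/* fill one hardware element; last-entry flag assumed */
		err = td_fill_desc(td_chan, td_desc->desc_list + desc_usage,
			sg, i == (sg_len - 1));
		if (err) {
			td_desc_put(td_chan, td_desc);
			return NULL;
		}
		desc_usage += TIMB_DMA_DESC_SIZE;
	}

	/* the CPU wrote the list; make it visible to the device */
	dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys,
		td_desc->desc_list_len, DMA_TO_DEVICE);

	return &td_desc->txd;
}
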
564 struct timb_dma_desc *td_desc, *_td_desc; td_terminate_all() local
570 list_for_each_entry_safe(td_desc, _td_desc, &td_chan->queue, td_terminate_all()
572 list_move(&td_desc->desc_node, &td_chan->free_list); td_terminate_all()
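
td_terminate_all() (lines 564-572) only needs to handle descriptors that never
reached the hardware: everything still on queue is moved back to free_list under
the lock. A minimal sketch, with the __td_finish() call for the in-flight
descriptor being an assumption:

static int td_terminate_all(struct dma_chan *chan)
{
	struct timb_dma_chan *td_chan =
		container_of(chan, struct timb_dma_chan, chan);
	struct timb_dma_desc *td_desc, *_td_desc;

	/* first the easy part: descriptors that were never started */
	spin_lock_bh(&td_chan->lock);
	list_for_each_entry_safe(td_desc, _td_desc, &td_chan->queue,
			desc_node)
		list_move(&td_desc->desc_node, &td_chan->free_list);

	/* then tear down whatever is currently running */
	__td_finish(td_chan);
	spin_unlock_bh(&td_chan->lock);

	return 0;
}
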
