td_desc 180 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc;
td_desc 188 drivers/dma/timb_dma.c td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc,
td_desc 199 drivers/dma/timb_dma.c iowrite32(td_desc->txd.phys, td_chan->membase +
td_desc 209 drivers/dma/timb_dma.c iowrite32(td_desc->txd.phys, td_chan->membase +
td_desc 215 drivers/dma/timb_dma.c if (td_desc->interrupt)
td_desc 223 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc;
td_desc 229 drivers/dma/timb_dma.c td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc,
td_desc 231 drivers/dma/timb_dma.c txd = &td_desc->txd;
td_desc 248 drivers/dma/timb_dma.c list_move(&td_desc->desc_node, &td_chan->free_list);
td_desc 266 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc =
td_desc 269 drivers/dma/timb_dma.c if (td_desc->interrupt)
td_desc 279 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc;
td_desc 284 drivers/dma/timb_dma.c td_desc = list_entry(td_chan->queue.next, struct timb_dma_desc,
td_desc 288 drivers/dma/timb_dma.c __func__, td_desc->txd.cookie);
td_desc 290 drivers/dma/timb_dma.c list_move(&td_desc->desc_node, &td_chan->active_list);
td_desc 296 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc = container_of(txd, struct timb_dma_desc,
td_desc 308 drivers/dma/timb_dma.c list_add_tail(&td_desc->desc_node, &td_chan->active_list);
td_desc 314 drivers/dma/timb_dma.c list_add_tail(&td_desc->desc_node, &td_chan->queue);
td_desc 325 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc;
td_desc 328 drivers/dma/timb_dma.c td_desc = kzalloc(sizeof(struct timb_dma_desc), GFP_KERNEL);
td_desc 329 drivers/dma/timb_dma.c if (!td_desc)
td_desc 332 drivers/dma/timb_dma.c td_desc->desc_list_len = td_chan->desc_elems * TIMB_DMA_DESC_SIZE;
td_desc 334 drivers/dma/timb_dma.c td_desc->desc_list = kzalloc(td_desc->desc_list_len, GFP_KERNEL);
td_desc 335 drivers/dma/timb_dma.c if (!td_desc->desc_list)
td_desc 338 drivers/dma/timb_dma.c dma_async_tx_descriptor_init(&td_desc->txd, chan);
td_desc 339 drivers/dma/timb_dma.c td_desc->txd.tx_submit = td_tx_submit;
td_desc 340 drivers/dma/timb_dma.c td_desc->txd.flags = DMA_CTRL_ACK;
td_desc 342 drivers/dma/timb_dma.c td_desc->txd.phys = dma_map_single(chan2dmadev(chan),
td_desc 343 drivers/dma/timb_dma.c td_desc->desc_list, td_desc->desc_list_len, DMA_TO_DEVICE);
td_desc 345 drivers/dma/timb_dma.c err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys);
td_desc 351 drivers/dma/timb_dma.c return td_desc;
td_desc 353 drivers/dma/timb_dma.c kfree(td_desc->desc_list);
td_desc 354 drivers/dma/timb_dma.c kfree(td_desc);
td_desc 360 drivers/dma/timb_dma.c static void td_free_desc(struct timb_dma_desc *td_desc)
td_desc 362 drivers/dma/timb_dma.c dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc);
td_desc 363 drivers/dma/timb_dma.c dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys,
td_desc 364 drivers/dma/timb_dma.c td_desc->desc_list_len, DMA_TO_DEVICE);
td_desc 366 drivers/dma/timb_dma.c kfree(td_desc->desc_list);
td_desc 367 drivers/dma/timb_dma.c kfree(td_desc);
td_desc 371 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc)
td_desc 373 drivers/dma/timb_dma.c dev_dbg(chan2dev(&td_chan->chan), "Putting desc: %p\n", td_desc);
td_desc 376 drivers/dma/timb_dma.c list_add(&td_desc->desc_node, &td_chan->free_list);
td_desc 382 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc, *_td_desc;
td_desc 386 drivers/dma/timb_dma.c list_for_each_entry_safe(td_desc, _td_desc, &td_chan->free_list,
td_desc 388 drivers/dma/timb_dma.c if (async_tx_test_ack(&td_desc->txd)) {
td_desc 389 drivers/dma/timb_dma.c list_del(&td_desc->desc_node);
td_desc 390 drivers/dma/timb_dma.c ret = td_desc;
td_desc 394 drivers/dma/timb_dma.c td_desc);
td_desc 411 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc = td_alloc_init_desc(td_chan);
td_desc 412 drivers/dma/timb_dma.c if (!td_desc) {
td_desc 422 drivers/dma/timb_dma.c td_desc_put(td_chan, td_desc);
td_desc 436 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc, *_td_desc;
td_desc 449 drivers/dma/timb_dma.c list_for_each_entry_safe(td_desc, _td_desc, &list, desc_node) {
td_desc 451 drivers/dma/timb_dma.c td_desc);
td_desc 452 drivers/dma/timb_dma.c td_free_desc(td_desc);
td_desc 496 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc;
td_desc 513 drivers/dma/timb_dma.c td_desc = td_desc_get(td_chan);
td_desc 514 drivers/dma/timb_dma.c if (!td_desc) {
td_desc 519 drivers/dma/timb_dma.c td_desc->interrupt = (flags & DMA_PREP_INTERRUPT) != 0;
td_desc 523 drivers/dma/timb_dma.c if (desc_usage > td_desc->desc_list_len) {
td_desc 528 drivers/dma/timb_dma.c err = td_fill_desc(td_chan, td_desc->desc_list + desc_usage, sg,
td_desc 533 drivers/dma/timb_dma.c td_desc_put(td_chan, td_desc);
td_desc 539 drivers/dma/timb_dma.c dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys,
td_desc 540 drivers/dma/timb_dma.c td_desc->desc_list_len, DMA_TO_DEVICE);
td_desc 542 drivers/dma/timb_dma.c return &td_desc->txd;
td_desc 549 drivers/dma/timb_dma.c struct timb_dma_desc *td_desc, *_td_desc;
td_desc 555 drivers/dma/timb_dma.c list_for_each_entry_safe(td_desc, _td_desc, &td_chan->queue,
td_desc 557 drivers/dma/timb_dma.c list_move(&td_desc->desc_node, &td_chan->free_list);
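Taken together, these references trace the lifetime of a timberdale DMA descriptor: td_alloc_init_desc() builds and DMA-maps it (source lines 325-354), td_tx_submit() queues it (296-314), completion moves it back to the channel's free_list (248), and td_free_desc() unmaps and frees it (360-367). The following is a minimal sketch that stitches the allocation path back into a readable function. It is reconstructed from the snippets above, not the driver's verbatim source: the struct layouts, the TIMB_DMA_DESC_SIZE value, the chan2dmadev() body, and the stubbed td_tx_submit() are all assumptions.

/*
 * Sketch of the allocation path traced by the td_desc references above
 * (drivers/dma/timb_dma.c, lines 325-354). Layouts and helpers are
 * assumptions inferred from the listing.
 */
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/list.h>
#include <linux/types.h>

#define TIMB_DMA_DESC_SIZE 8	/* assumed; the real value is defined in the driver */

struct timb_dma_chan {
	struct dma_chan chan;
	struct list_head active_list;	/* descriptors the HW is working on */
	struct list_head queue;		/* submitted, waiting for the HW */
	struct list_head free_list;	/* recycled descriptors (see line 376) */
	unsigned int desc_elems;	/* HW descriptor elements per transfer */
	/* ... remaining members trimmed ... */
};

struct timb_dma_desc {
	struct dma_async_tx_descriptor txd;	/* initialized at line 338 */
	u8 *desc_list;			/* CPU copy of the HW descriptor list */
	unsigned int desc_list_len;
	struct list_head desc_node;	/* links into the channel lists above */
	bool interrupt;			/* set from DMA_PREP_INTERRUPT (line 519) */
};

/* Driver helper: device used for DMA mapping (exact member chain assumed). */
static inline struct device *chan2dmadev(struct dma_chan *chan)
{
	return chan->device->dev->parent;
}

/* Stub: the real body assigns a cookie and moves the descriptor onto
 * active_list or queue (lines 296-314). */
static dma_cookie_t td_tx_submit(struct dma_async_tx_descriptor *txd)
{
	return 0;
}

static struct timb_dma_desc *td_alloc_init_desc(struct timb_dma_chan *td_chan)
{
	struct dma_chan *chan = &td_chan->chan;
	struct timb_dma_desc *td_desc;

	td_desc = kzalloc(sizeof(*td_desc), GFP_KERNEL);	/* line 328 */
	if (!td_desc)
		return NULL;

	td_desc->desc_list_len = td_chan->desc_elems * TIMB_DMA_DESC_SIZE;
	td_desc->desc_list = kzalloc(td_desc->desc_list_len, GFP_KERNEL);
	if (!td_desc->desc_list)
		goto err;

	dma_async_tx_descriptor_init(&td_desc->txd, chan);	/* line 338 */
	td_desc->txd.tx_submit = td_tx_submit;
	td_desc->txd.flags = DMA_CTRL_ACK;

	/* Map the list so the HW can fetch it (line 342); the physical
	 * address is later written to the channel registers (lines 199/209). */
	td_desc->txd.phys = dma_map_single(chan2dmadev(chan),
		td_desc->desc_list, td_desc->desc_list_len, DMA_TO_DEVICE);
	if (dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys))
		goto err;

	return td_desc;
err:
	kfree(td_desc->desc_list);	/* kfree(NULL) is a no-op */
	kfree(td_desc);
	return NULL;
}

td_free_desc() (lines 360-367) mirrors this path in reverse: dma_unmap_single() on txd.phys, then the two kfree() calls, which is why prep_slave_sg only needs dma_sync_single_for_device() (lines 539-540) before handing the descriptor to the hardware.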