Lines matching refs:d — references to the identifier d in the BCM2835 DMA engine driver (drivers/dma/bcm2835-dma.c). Each entry gives the source line number, the matching line, and whether d is an argument or a local in the enclosing function.
133 static inline struct bcm2835_dmadev *to_bcm2835_dma_dev(struct dma_device *d) in to_bcm2835_dma_dev() argument
135 return container_of(d, struct bcm2835_dmadev, ddev); in to_bcm2835_dma_dev()
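Lines 133/135 are the standard container_of accessor: given a pointer to the embedded struct dma_device, recover the enclosing driver-private structure. A minimal sketch of the same pattern; only the one member that matters is shown, and the comment about further members is an assumption about the real struct:

        #include <linux/dmaengine.h>    /* struct dma_device */
        #include <linux/kernel.h>       /* container_of() */

        struct bcm2835_dmadev {
                struct dma_device ddev; /* embedded dmaengine device */
                /* driver-private members follow in the real driver */
        };

        static inline struct bcm2835_dmadev *to_bcm2835_dma_dev(struct dma_device *d)
        {
                /* d points at the ddev member inside a bcm2835_dmadev;
                 * container_of() subtracts offsetof(..., ddev) to recover
                 * the enclosing structure. */
                return container_of(d, struct bcm2835_dmadev, ddev);
        }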
200 struct bcm2835_desc *d; in bcm2835_dma_start_desc() local
209 c->desc = d = to_bcm2835_dma_desc(&vd->tx); in bcm2835_dma_start_desc()
211 writel(d->cb_list[0].paddr, c->chan_base + BCM2835_DMA_ADDR); in bcm2835_dma_start_desc()
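Lines 200–211 show how a queued descriptor is started: the next virt-dma descriptor is popped and the bus address of its first control block is written to the channel's address register. A sketch reconstructed around the listed lines; the final write that sets ACTIVE in the CS register is not in the listing and is an assumption about how the channel is kicked:

        static void bcm2835_dma_start_desc(struct bcm2835_chan *c)
        {
                struct virt_dma_desc *vd = vchan_next_desc(&c->vc);
                struct bcm2835_desc *d;

                if (!vd) {
                        c->desc = NULL;  /* nothing queued */
                        return;
                }

                list_del(&vd->node);

                c->desc = d = to_bcm2835_dma_desc(&vd->tx);

                /* Point the channel at the first control block... */
                writel(d->cb_list[0].paddr, c->chan_base + BCM2835_DMA_ADDR);
                /* ...then set ACTIVE to start it (assumed; not in listing). */
                writel(BCM2835_DMA_ACTIVE, c->chan_base + BCM2835_DMA_CS);
        }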
218 struct bcm2835_desc *d; in bcm2835_dma_callback() local
226 d = c->desc; in bcm2835_dma_callback()
228 if (d) { in bcm2835_dma_callback()
230 vchan_cyclic_callback(&d->vd); in bcm2835_dma_callback()
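Lines 218–230 are the interrupt path: the handler maps the IRQ back to the in-flight descriptor and, since the driver services cyclic transfers here, reports one completed period via vchan_cyclic_callback(). A sketch; the ack and re-activate writes to CS are assumptions, as they are not in the listing:

        static irqreturn_t bcm2835_dma_callback(int irq, void *data)
        {
                struct bcm2835_chan *c = data;
                struct bcm2835_desc *d;
                unsigned long flags;

                spin_lock_irqsave(&c->vc.lock, flags);

                /* Acknowledge the interrupt (assumed INT bit in CS). */
                writel(BCM2835_DMA_INT, c->chan_base + BCM2835_DMA_CS);

                d = c->desc;
                if (d)
                        vchan_cyclic_callback(&d->vd); /* one period completed */

                /* Keep the engine running for the next period (assumed). */
                writel(BCM2835_DMA_ACTIVE, c->chan_base + BCM2835_DMA_CS);

                spin_unlock_irqrestore(&c->vc.lock, flags);

                return IRQ_HANDLED;
        }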
270 static size_t bcm2835_dma_desc_size(struct bcm2835_desc *d) in bcm2835_dma_desc_size() argument
272 return d->size; in bcm2835_dma_desc_size()
275 static size_t bcm2835_dma_desc_size_pos(struct bcm2835_desc *d, dma_addr_t addr) in bcm2835_dma_desc_size_pos() argument
280 for (size = i = 0; i < d->frames; i++) { in bcm2835_dma_desc_size_pos()
281 struct bcm2835_dma_cb *control_block = d->cb_list[i].cb; in bcm2835_dma_desc_size_pos()
285 if (d->dir == DMA_DEV_TO_MEM) in bcm2835_dma_desc_size_pos()
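Lines 270–285 compute the residue from a hardware position: frames the engine has already consumed contribute nothing, the frame containing addr contributes only its remaining tail, and every later frame counts in full. A sketch reconstructed around the listed lines; the accumulation details outside those lines are from the same loop shape and should be read as an approximation:

        static size_t bcm2835_dma_desc_size_pos(struct bcm2835_desc *d, dma_addr_t addr)
        {
                unsigned int i;
                size_t size;

                for (size = i = 0; i < d->frames; i++) {
                        struct bcm2835_dma_cb *control_block = d->cb_list[i].cb;
                        size_t this_size = control_block->length;
                        dma_addr_t dma;

                        /* The address that moves depends on direction. */
                        if (d->dir == DMA_DEV_TO_MEM)
                                dma = control_block->dst;
                        else
                                dma = control_block->src;

                        if (size)
                                size += this_size; /* frames after the match count whole */
                        else if (addr >= dma && addr < (dma + this_size))
                                size += dma + this_size - addr; /* tail of matching frame */
                }

                return size;
        }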
317 struct bcm2835_desc *d = c->desc; in bcm2835_dma_tx_status() local
320 if (d->dir == DMA_MEM_TO_DEV) in bcm2835_dma_tx_status()
322 else if (d->dir == DMA_DEV_TO_MEM) in bcm2835_dma_tx_status()
327 txstate->residue = bcm2835_dma_desc_size_pos(d, pos); in bcm2835_dma_tx_status()
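In tx_status (lines 317–327) the live position comes from whichever address register moves for the given direction: the source address for MEM_TO_DEV, the destination address for DEV_TO_MEM. A sketch of that branch, with register offset names as used in the driver:

        /* Inside bcm2835_dma_tx_status(), once c->desc is known to be
         * the in-flight descriptor (sketch): */
        struct bcm2835_desc *d = c->desc;
        dma_addr_t pos;

        if (d->dir == DMA_MEM_TO_DEV)
                pos = readl(c->chan_base + BCM2835_DMA_SOURCE_AD); /* source side advances */
        else if (d->dir == DMA_DEV_TO_MEM)
                pos = readl(c->chan_base + BCM2835_DMA_DEST_AD);   /* destination side advances */
        else
                pos = 0;

        txstate->residue = bcm2835_dma_desc_size_pos(d, pos);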
358 struct bcm2835_desc *d; in bcm2835_dma_prep_dma_cyclic() local
390 d = kzalloc(sizeof(*d), GFP_NOWAIT); in bcm2835_dma_prep_dma_cyclic()
391 if (!d) in bcm2835_dma_prep_dma_cyclic()
394 d->c = c; in bcm2835_dma_prep_dma_cyclic()
395 d->dir = direction; in bcm2835_dma_prep_dma_cyclic()
396 d->frames = buf_len / period_len; in bcm2835_dma_prep_dma_cyclic()
398 d->cb_list = kcalloc(d->frames, sizeof(*d->cb_list), GFP_KERNEL); in bcm2835_dma_prep_dma_cyclic()
399 if (!d->cb_list) { in bcm2835_dma_prep_dma_cyclic()
400 kfree(d); in bcm2835_dma_prep_dma_cyclic()
404 for (i = 0; i < d->frames; i++) { in bcm2835_dma_prep_dma_cyclic()
405 struct bcm2835_cb_entry *cb_entry = &d->cb_list[i]; in bcm2835_dma_prep_dma_cyclic()
417 for (frame = 0; frame < d->frames; frame++) { in bcm2835_dma_prep_dma_cyclic()
418 struct bcm2835_dma_cb *control_block = d->cb_list[frame].cb; in bcm2835_dma_prep_dma_cyclic()
421 if (d->dir == DMA_DEV_TO_MEM) { in bcm2835_dma_prep_dma_cyclic()
445 d->size += control_block->length; in bcm2835_dma_prep_dma_cyclic()
452 control_block->next = d->cb_list[((frame + 1) % d->frames)].paddr; in bcm2835_dma_prep_dma_cyclic()
455 return vchan_tx_prep(&c->vc, &d->vd, flags); in bcm2835_dma_prep_dma_cyclic()
459 struct bcm2835_cb_entry *cb_entry = &d->cb_list[i]; in bcm2835_dma_prep_dma_cyclic()
464 kfree(d->cb_list); in bcm2835_dma_prep_dma_cyclic()
465 kfree(d); in bcm2835_dma_prep_dma_cyclic()
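The cyclic preparation (lines 358–465) allocates one control block per period (frames = buf_len / period_len; e.g. a 64 KiB buffer with 16 KiB periods yields four frames), sums the per-frame lengths into d->size for later residue math, and chains the blocks into a hardware ring: each block's next field holds the bus address of the following block, and the modulo at line 452 wraps the last frame back to frame 0, so the engine loops until terminated. On allocation failure the already-built entries are freed and the partial descriptor is discarded (lines 459–465). The linking step, sketched:

        /* Within bcm2835_dma_prep_dma_cyclic(), after every per-frame
         * control block has been filled in (sketch): */
        unsigned int frame;

        for (frame = 0; frame < d->frames; frame++) {
                struct bcm2835_dma_cb *control_block = d->cb_list[frame].cb;

                /* Bus address of the next frame's control block; the modulo
                 * wraps the last frame back to frame 0, forming the ring. */
                control_block->next = d->cb_list[(frame + 1) % d->frames].paddr;
        }

        /* Hand the descriptor to the virt-dma core. */
        return vchan_tx_prep(&c->vc, &d->vd, flags);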
490 struct bcm2835_dmadev *d = to_bcm2835_dma_dev(c->vc.chan.device); in bcm2835_dma_terminate_all() local
498 spin_lock(&d->lock); in bcm2835_dma_terminate_all()
500 spin_unlock(&d->lock); in bcm2835_dma_terminate_all()
522 dev_err(d->ddev.dev, "DMA transfer could not be terminated\n"); in bcm2835_dma_terminate_all()
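Termination (lines 490–522) first delists the channel under the device-level d->lock so it cannot be rescheduled, then aborts the channel and polls the CS register until ACTIVE clears; if it never does, the dev_err at line 522 fires. A sketch of that sequence; the abort step is elided and the bound of 10000 iterations is illustrative:

        int timeout = 10000; /* illustrative bound */

        /* Take the channel off the device-level pending list so it
         * cannot be scheduled again while we stop it. */
        spin_lock(&d->lock);
        list_del_init(&c->node);
        spin_unlock(&d->lock);

        /* ... abort the channel in hardware, then wait for ACTIVE to drop ... */
        while (--timeout) {
                if (!(readl(c->chan_base + BCM2835_DMA_CS) & BCM2835_DMA_ACTIVE))
                        break;
                cpu_relax();
        }

        if (!timeout)
                dev_err(d->ddev.dev, "DMA transfer could not be terminated\n");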
532 static int bcm2835_dma_chan_init(struct bcm2835_dmadev *d, int chan_id, int irq) in bcm2835_dma_chan_init() argument
536 c = devm_kzalloc(d->ddev.dev, sizeof(*c), GFP_KERNEL); in bcm2835_dma_chan_init()
541 vchan_init(&c->vc, &d->ddev); in bcm2835_dma_chan_init()
544 c->chan_base = BCM2835_DMA_CHANIO(d->base, chan_id); in bcm2835_dma_chan_init()
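Channel init (lines 532–544) devm-allocates the per-channel state, registers it with the virt-dma core, and carves the channel's MMIO window out of the shared base with BCM2835_DMA_CHANIO. A sketch assuming the conventional 0x100-byte channel stride on BCM2835; the macro body below and the desc_free/irq assignments are reconstructions, not taken from the listing:

        /* Per-channel register window: channels are spaced 0x100 apart,
         * which is what BCM2835_DMA_CHANIO(base, n) is assumed to compute. */
        #define BCM2835_DMA_CHANIO(base, n)     ((base) + ((n) << 8))

        static int bcm2835_dma_chan_init(struct bcm2835_dmadev *d, int chan_id, int irq)
        {
                struct bcm2835_chan *c;

                c = devm_kzalloc(d->ddev.dev, sizeof(*c), GFP_KERNEL);
                if (!c)
                        return -ENOMEM;

                c->vc.desc_free = bcm2835_dma_desc_free; /* virt-dma free hook */
                vchan_init(&c->vc, &d->ddev);            /* register with virt-dma core */
                INIT_LIST_HEAD(&c->node);

                c->chan_base = BCM2835_DMA_CHANIO(d->base, chan_id);
                c->ch = chan_id;
                c->irq_number = irq;

                return 0;
        }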
571 struct bcm2835_dmadev *d = ofdma->of_dma_data; in bcm2835_dma_xlate() local
574 chan = dma_get_any_slave_channel(&d->ddev); in bcm2835_dma_xlate()
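The OF translation hook (lines 571–574) can hand out any free channel because BCM2835 DMA channels are interchangeable; the devicetree specifier only selects the peripheral DREQ line. A sketch; the dreq field and the spec->args[0] assignment are reconstructed, not in the listing:

        static struct dma_chan *bcm2835_dma_xlate(struct of_phandle_args *spec,
                                                  struct of_dma *ofdma)
        {
                struct bcm2835_dmadev *d = ofdma->of_dma_data;
                struct dma_chan *chan;

                chan = dma_get_any_slave_channel(&d->ddev);
                if (!chan)
                        return NULL;

                /* The one devicetree cell selects the peripheral DREQ line
                 * (field name assumed). */
                to_bcm2835_dma_chan(chan)->dreq = spec->args[0];

                return chan;
        }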