Lines matching refs:d: every line in the OMAP DMA engine driver (omap-dma.c) that references the identifier 'd'. Each entry shows the source line number, the matched code, the enclosing function, and whether 'd' is an argument or a local variable there.
161 static inline struct omap_dmadev *to_omap_dma_dev(struct dma_device *d) in to_omap_dma_dev() argument
163 return container_of(d, struct omap_dmadev, ddev); in to_omap_dma_dev()
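to_omap_dma_dev() recovers the wrapping omap_dmadev from a pointer to its embedded dma_device via container_of(). Below is a minimal standalone sketch of that pattern; the struct and macro names are stand-ins rather than the kernel's, and only the offsetof arithmetic is meant to match.

#include <stddef.h>
#include <stdio.h>

/* Stand-ins for the kernel structs; names here are illustrative only. */
struct dma_device_like { int id; };

struct omap_dmadev_like {
    struct dma_device_like ddev;   /* embedded member, as in omap_dmadev */
    int nr_channels;
};

/* Userspace equivalent of the kernel's container_of(): subtract the
 * member's offset from the member pointer to get the enclosing struct. */
#define container_of_like(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

int main(void)
{
    struct omap_dmadev_like od = { .ddev = { .id = 7 }, .nr_channels = 32 };
    struct dma_device_like *d = &od.ddev;

    /* Same shape as to_omap_dma_dev(d): from &od.ddev back to &od. */
    struct omap_dmadev_like *back =
        container_of_like(d, struct omap_dmadev_like, ddev);
    printf("channels=%d\n", back->nr_channels);   /* prints 32 */
    return 0;
}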
280 static void omap_dma_start(struct omap_chan *c, struct omap_desc *d) in omap_dma_start() argument
292 omap_dma_chan_write(c, CICR, d->cicr); in omap_dma_start()
295 omap_dma_chan_write(c, CCR, d->ccr | CCR_ENABLE); in omap_dma_start()
359 static void omap_dma_start_sg(struct omap_chan *c, struct omap_desc *d, in omap_dma_start_sg() argument
362 struct omap_sg *sg = d->sg + idx; in omap_dma_start_sg()
365 if (d->dir == DMA_DEV_TO_MEM) { in omap_dma_start_sg()
381 omap_dma_start(c, d); in omap_dma_start_sg()
387 struct omap_desc *d; in omap_dma_start_desc() local
397 c->desc = d = to_omap_dma_desc(&vd->tx); in omap_dma_start_desc()
407 omap_dma_chan_write(c, CCR, d->ccr); in omap_dma_start_desc()
409 omap_dma_chan_write(c, CCR2, d->ccr >> 16); in omap_dma_start_desc()
411 if (d->dir == DMA_DEV_TO_MEM) { in omap_dma_start_desc()
421 omap_dma_chan_write(c, cxsa, d->dev_addr); in omap_dma_start_desc()
423 omap_dma_chan_write(c, cxfi, d->fi); in omap_dma_start_desc()
424 omap_dma_chan_write(c, CSDP, d->csdp); in omap_dma_start_desc()
425 omap_dma_chan_write(c, CLNK_CTRL, d->clnk_ctrl); in omap_dma_start_desc()
427 omap_dma_start_sg(c, d, 0); in omap_dma_start_desc()
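omap_dma_start_desc() (lines 387-427 above) pops the next queued descriptor, writes the per-descriptor channel registers (CCR, the device-side address and frame index, CSDP, CLNK_CTRL) and then defers to omap_dma_start_sg() for segment 0. The following is a rough standalone model of that control flow only: the register writes are mocked as prints, the field values are invented, and the CCR2 handling is omitted.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for omap_desc / omap_chan; the register model is
 * invented for illustration. */
struct sg_seg { uint32_t addr, en, fn; };
struct desc {
    int dev_to_mem;            /* models d->dir == DMA_DEV_TO_MEM */
    uint32_t dev_addr, fi, ccr, csdp, clnk_ctrl;
    struct sg_seg sg[4];
    unsigned sglen;
};
struct chan { struct desc *desc; unsigned sgidx; };

static void chan_write(const char *reg, uint32_t val)
{
    printf("  %-9s <- 0x%08" PRIx32 "\n", reg, val);   /* mock register write */
}

static void start_sg(struct desc *d, unsigned idx)
{
    struct sg_seg *sg = &d->sg[idx];
    /* DEV_TO_MEM: the sg entry is the destination; otherwise the source. */
    chan_write(d->dev_to_mem ? "CDSA" : "CSSA", sg->addr);
    chan_write("CEN", sg->en);
    chan_write("CFN", sg->fn);
    printf("  kick transfer for segment %u\n", idx);
}

static void start_desc(struct chan *c, struct desc *d)
{
    c->desc = d;
    c->sgidx = 0;
    chan_write("CCR", d->ccr);
    /* The device-side address and frame index land on the source or
     * destination side depending on direction. */
    chan_write(d->dev_to_mem ? "CSSA" : "CDSA", d->dev_addr);
    chan_write(d->dev_to_mem ? "CSFI" : "CDFI", d->fi);
    chan_write("CSDP", d->csdp);
    chan_write("CLNK_CTRL", d->clnk_ctrl);
    start_sg(d, 0);
}

int main(void)
{
    struct desc d = { .dev_to_mem = 1, .dev_addr = 0x48060000, .fi = 0,
                      .ccr = 0x1000, .csdp = 0x2, .clnk_ctrl = 0x1f,
                      .sg = { { 0x80000000u, 64, 8 } }, .sglen = 1 };
    struct chan c = { 0 };
    start_desc(&c, &d);
    return 0;
}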
433 struct omap_desc *d; in omap_dma_callback() local
437 d = c->desc; in omap_dma_callback()
438 if (d) { in omap_dma_callback()
440 if (++c->sgidx < d->sglen) { in omap_dma_callback()
441 omap_dma_start_sg(c, d, c->sgidx); in omap_dma_callback()
444 vchan_cookie_complete(&d->vd); in omap_dma_callback()
447 vchan_cyclic_callback(&d->vd); in omap_dma_callback()
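omap_dma_callback() drives the per-segment state machine: on a non-cyclic descriptor it advances c->sgidx and either starts the next segment or completes the cookie, while a cyclic descriptor just gets its per-period callback. A minimal standalone sketch of that branch, with the start/complete/callback hooks mocked out:

#include <stdio.h>

struct desc { unsigned sglen; int cyclic; };
struct chan { struct desc *desc; unsigned sgidx; };

/* Mock stand-ins for omap_dma_start_sg()/vchan_*() just to show the flow. */
static void start_next_sg(struct chan *c, unsigned idx) { printf("start sg %u\n", idx); }
static void complete_desc(struct chan *c) { printf("descriptor complete\n"); c->desc = NULL; }
static void period_callback(struct chan *c) { printf("cyclic period elapsed\n"); }

static void dma_irq_callback(struct chan *c)
{
    struct desc *d = c->desc;
    if (!d)
        return;
    if (!d->cyclic) {
        if (++c->sgidx < d->sglen)
            start_next_sg(c, c->sgidx);   /* more segments queued */
        else
            complete_desc(c);             /* whole sg list done */
    } else {
        period_callback(c);               /* ring buffer keeps running */
    }
}

int main(void)
{
    struct desc d = { .sglen = 2, .cyclic = 0 };
    struct chan c = { .desc = &d, .sgidx = 0 };
    dma_irq_callback(&c);   /* -> start sg 1 */
    dma_irq_callback(&c);   /* -> descriptor complete */
    return 0;
}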
462 struct omap_dmadev *d = (struct omap_dmadev *)data; in omap_dma_sched() local
465 spin_lock_irq(&d->lock); in omap_dma_sched()
466 list_splice_tail_init(&d->pending, &head); in omap_dma_sched()
467 spin_unlock_irq(&d->lock); in omap_dma_sched()
599 static size_t omap_dma_desc_size(struct omap_desc *d) in omap_dma_desc_size() argument
604 for (size = i = 0; i < d->sglen; i++) in omap_dma_desc_size()
605 size += omap_dma_sg_size(&d->sg[i]); in omap_dma_desc_size()
607 return size * es_bytes[d->es]; in omap_dma_desc_size()
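omap_dma_desc_size() sums en * fn over all sg entries and scales the result by the element size looked up in es_bytes[]. A standalone version of the same arithmetic, assuming the usual 1/2/4-byte element sizes; the struct layouts are simplified stand-ins:

#include <stddef.h>
#include <stdio.h>

/* Element sizes in bytes, indexed by a data-type code (8/16/32-bit),
 * mirroring the driver's es_bytes[] lookup. */
static const unsigned es_bytes[] = { [0] = 1, [1] = 2, [2] = 4 };

struct sg_seg { unsigned en, fn; };            /* elements per frame, frames */
struct desc { unsigned es; struct sg_seg sg[3]; unsigned sglen; };

static size_t sg_size(const struct sg_seg *sg)
{
    return (size_t)sg->en * sg->fn;            /* elements in this segment */
}

static size_t desc_size(const struct desc *d)
{
    size_t size = 0;
    for (unsigned i = 0; i < d->sglen; i++)
        size += sg_size(&d->sg[i]);
    return size * es_bytes[d->es];             /* total bytes */
}

int main(void)
{
    /* Two segments of 64 elements x 8 frames, 32-bit elements: 4096 bytes. */
    struct desc d = { .es = 2, .sg = { {64, 8}, {64, 8} }, .sglen = 2 };
    printf("%zu bytes\n", desc_size(&d));
    return 0;
}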
610 static size_t omap_dma_desc_size_pos(struct omap_desc *d, dma_addr_t addr) in omap_dma_desc_size_pos() argument
613 size_t size, es_size = es_bytes[d->es]; in omap_dma_desc_size_pos()
615 for (size = i = 0; i < d->sglen; i++) { in omap_dma_desc_size_pos()
616 size_t this_size = omap_dma_sg_size(&d->sg[i]) * es_size; in omap_dma_desc_size_pos()
620 else if (addr >= d->sg[i].addr && in omap_dma_desc_size_pos()
621 addr < d->sg[i].addr + this_size) in omap_dma_desc_size_pos()
622 size += d->sg[i].addr + this_size - addr; in omap_dma_desc_size_pos()
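omap_dma_desc_size_pos() turns a hardware position into a residue: segments wholly before the position contribute nothing, the segment containing it contributes only its remaining bytes, and every later segment contributes its full size. A standalone sketch of that walk, with simplified structs and invented addresses:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct sg_seg { uint32_t addr; unsigned en, fn; };
struct desc { unsigned es_size; struct sg_seg sg[3]; unsigned sglen; };

static size_t desc_size_pos(const struct desc *d, uint32_t pos)
{
    size_t size = 0;

    for (unsigned i = 0; i < d->sglen; i++) {
        size_t this_size = (size_t)d->sg[i].en * d->sg[i].fn * d->es_size;

        if (size)
            size += this_size;          /* segment after the position: count all of it */
        else if (pos >= d->sg[i].addr && pos < d->sg[i].addr + this_size)
            size += d->sg[i].addr + this_size - pos;  /* remainder of current segment */
        /* segments wholly before the position contribute nothing */
    }
    return size;
}

int main(void)
{
    struct desc d = { .es_size = 4,
                      .sg = { { 0x1000, 64, 4 }, { 0x2000, 64, 4 } },
                      .sglen = 2 };
    /* Position halfway through the first 1024-byte segment:
     * 512 bytes left there + 1024 bytes in the second segment = 1536. */
    printf("residue = %zu\n", desc_size_pos(&d, 0x1000 + 512));
    return 0;
}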
712 struct omap_desc *d = c->desc; in omap_dma_tx_status() local
715 if (d->dir == DMA_MEM_TO_DEV) in omap_dma_tx_status()
717 else if (d->dir == DMA_DEV_TO_MEM) in omap_dma_tx_status()
722 txstate->residue = omap_dma_desc_size_pos(d, pos); in omap_dma_tx_status()
743 struct omap_dmadev *d = to_omap_dma_dev(chan->device); in omap_dma_issue_pending() local
744 spin_lock(&d->lock); in omap_dma_issue_pending()
746 list_add_tail(&c->node, &d->pending); in omap_dma_issue_pending()
747 spin_unlock(&d->lock); in omap_dma_issue_pending()
748 tasklet_schedule(&d->task); in omap_dma_issue_pending()
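omap_dma_issue_pending() appends the channel to the device-wide pending list under d->lock and schedules the tasklet; omap_dma_sched() (lines 462-467 above) later splices the whole list onto a private head under the same lock and starts the channels with the lock dropped. A userspace sketch of that splice-then-process pattern using a pthread mutex and a toy single-linked queue (build with -pthread); the kernel itself uses list_head and list_splice_tail_init():

#include <pthread.h>
#include <stdio.h>

/* Toy model of the pending-channel queue. */
struct chan { const char *name; struct chan *next; };

static struct chan *pending_head;
static struct chan **pending_tail = &pending_head;
static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

/* issue_pending side: queue the channel, then (conceptually) kick the tasklet. */
static void issue_pending(struct chan *c)
{
    pthread_mutex_lock(&lock);
    c->next = NULL;
    *pending_tail = c;
    pending_tail = &c->next;
    pthread_mutex_unlock(&lock);
    /* tasklet_schedule(&d->task) would go here in the driver */
}

/* tasklet side: splice the whole list out under the lock, then start
 * each channel without holding it, so new submitters are not blocked. */
static void sched_work(void)
{
    pthread_mutex_lock(&lock);
    struct chan *head = pending_head;
    pending_head = NULL;
    pending_tail = &pending_head;
    pthread_mutex_unlock(&lock);

    for (struct chan *c = head; c; c = c->next)
        printf("start descriptor on %s\n", c->name);
}

int main(void)
{
    struct chan a = { "dma0" }, b = { "dma1" };
    issue_pending(&a);
    issue_pending(&b);
    sched_work();
    return 0;
}

Splicing keeps the critical section to a pointer swap, so callers of issue_pending() never wait behind the work of actually starting transfers.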
764 struct omap_desc *d; in omap_dma_prep_slave_sg() local
798 d = kzalloc(sizeof(*d) + sglen * sizeof(d->sg[0]), GFP_ATOMIC); in omap_dma_prep_slave_sg()
799 if (!d) in omap_dma_prep_slave_sg()
802 d->dir = dir; in omap_dma_prep_slave_sg()
803 d->dev_addr = dev_addr; in omap_dma_prep_slave_sg()
804 d->es = es; in omap_dma_prep_slave_sg()
806 d->ccr = c->ccr | CCR_SYNC_FRAME; in omap_dma_prep_slave_sg()
808 d->ccr |= CCR_DST_AMODE_POSTINC | CCR_SRC_AMODE_CONSTANT; in omap_dma_prep_slave_sg()
810 d->ccr |= CCR_DST_AMODE_CONSTANT | CCR_SRC_AMODE_POSTINC; in omap_dma_prep_slave_sg()
812 d->cicr = CICR_DROP_IE | CICR_BLOCK_IE; in omap_dma_prep_slave_sg()
813 d->csdp = es; in omap_dma_prep_slave_sg()
816 d->cicr |= CICR_TOUT_IE; in omap_dma_prep_slave_sg()
819 d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_TIPB; in omap_dma_prep_slave_sg()
821 d->csdp |= CSDP_DST_PORT_TIPB | CSDP_SRC_PORT_EMIFF; in omap_dma_prep_slave_sg()
824 d->ccr |= CCR_TRIGGER_SRC; in omap_dma_prep_slave_sg()
826 d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE; in omap_dma_prep_slave_sg()
829 d->clnk_ctrl = c->dma_ch; in omap_dma_prep_slave_sg()
843 d->sg[j].addr = sg_dma_address(sgent); in omap_dma_prep_slave_sg()
844 d->sg[j].en = en; in omap_dma_prep_slave_sg()
845 d->sg[j].fn = sg_dma_len(sgent) / frame_bytes; in omap_dma_prep_slave_sg()
849 d->sglen = j; in omap_dma_prep_slave_sg()
851 return vchan_tx_prep(&c->vc, &d->vd, tx_flags); in omap_dma_prep_slave_sg()
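In omap_dma_prep_slave_sg() each scatterlist entry becomes one sDMA segment: en elements per frame (taken from the slave burst size, or 1) and fn = sg_dma_len / (element bytes * en) frames. A standalone sketch of that byte-to-frame arithmetic; the burst and element-size values below are assumptions for illustration:

#include <stdint.h>
#include <stdio.h>

struct seg { uint32_t addr; unsigned en, fn; };

/* Convert one scatterlist entry (address + byte length) into a segment
 * description. es_bytes is the element size (1/2/4), burst the
 * slave-configured burst in elements. */
static int sg_to_segment(uint32_t dma_addr, unsigned len_bytes,
                         unsigned es_bytes, unsigned burst, struct seg *out)
{
    unsigned en = burst ? burst : 1;
    unsigned frame_bytes = es_bytes * en;

    if (len_bytes % frame_bytes)       /* the length must be frame-aligned */
        return -1;

    out->addr = dma_addr;
    out->en = en;                      /* elements per frame */
    out->fn = len_bytes / frame_bytes; /* number of frames */
    return 0;
}

int main(void)
{
    struct seg s;
    /* 4 KiB buffer, 32-bit elements, burst of 16 elements:
     * frame = 64 bytes, so 64 frames of 16 elements each. */
    if (sg_to_segment(0x80000000u, 4096, 4, 16, &s) == 0)
        printf("en=%u fn=%u\n", s.en, s.fn);
    return 0;
}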
861 struct omap_desc *d; in omap_dma_prep_dma_cyclic() local
895 d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC); in omap_dma_prep_dma_cyclic()
896 if (!d) in omap_dma_prep_dma_cyclic()
899 d->dir = dir; in omap_dma_prep_dma_cyclic()
900 d->dev_addr = dev_addr; in omap_dma_prep_dma_cyclic()
901 d->fi = burst; in omap_dma_prep_dma_cyclic()
902 d->es = es; in omap_dma_prep_dma_cyclic()
903 d->sg[0].addr = buf_addr; in omap_dma_prep_dma_cyclic()
904 d->sg[0].en = period_len / es_bytes[es]; in omap_dma_prep_dma_cyclic()
905 d->sg[0].fn = buf_len / period_len; in omap_dma_prep_dma_cyclic()
906 d->sglen = 1; in omap_dma_prep_dma_cyclic()
908 d->ccr = c->ccr; in omap_dma_prep_dma_cyclic()
910 d->ccr |= CCR_DST_AMODE_POSTINC | CCR_SRC_AMODE_CONSTANT; in omap_dma_prep_dma_cyclic()
912 d->ccr |= CCR_DST_AMODE_CONSTANT | CCR_SRC_AMODE_POSTINC; in omap_dma_prep_dma_cyclic()
914 d->cicr = CICR_DROP_IE; in omap_dma_prep_dma_cyclic()
916 d->cicr |= CICR_FRAME_IE; in omap_dma_prep_dma_cyclic()
918 d->csdp = es; in omap_dma_prep_dma_cyclic()
921 d->cicr |= CICR_TOUT_IE; in omap_dma_prep_dma_cyclic()
924 d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_MPUI; in omap_dma_prep_dma_cyclic()
926 d->csdp |= CSDP_DST_PORT_MPUI | CSDP_SRC_PORT_EMIFF; in omap_dma_prep_dma_cyclic()
929 d->ccr |= CCR_SYNC_PACKET; in omap_dma_prep_dma_cyclic()
931 d->ccr |= CCR_SYNC_ELEMENT; in omap_dma_prep_dma_cyclic()
934 d->ccr |= CCR_TRIGGER_SRC; in omap_dma_prep_dma_cyclic()
936 d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE; in omap_dma_prep_dma_cyclic()
938 d->csdp |= CSDP_DST_BURST_64 | CSDP_SRC_BURST_64; in omap_dma_prep_dma_cyclic()
942 d->ccr |= CCR_AUTO_INIT | CCR_REPEAT; in omap_dma_prep_dma_cyclic()
944 d->clnk_ctrl = c->dma_ch | CLNK_CTRL_ENABLE_LNK; in omap_dma_prep_dma_cyclic()
948 return vchan_tx_prep(&c->vc, &d->vd, flags); in omap_dma_prep_dma_cyclic()
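omap_dma_prep_dma_cyclic() describes the whole ring with a single segment, en = period_len / element size and fn = buf_len / period_len, then sets CCR_AUTO_INIT | CCR_REPEAT plus a self-link in CLNK_CTRL so the controller wraps back to the start of the buffer, with CICR_FRAME_IE raising one interrupt per frame, i.e. per period. A small standalone sketch of that split, with audio-style numbers picked purely for illustration:

#include <stdio.h>

/* Split a cyclic buffer into the single en/fn segment the driver programs:
 * one frame per period, en elements in each frame. */
static int cyclic_split(unsigned buf_len, unsigned period_len,
                        unsigned es_bytes, unsigned *en, unsigned *fn)
{
    if (period_len % es_bytes || buf_len % period_len)
        return -1;                     /* periods must tile the buffer exactly */
    *en = period_len / es_bytes;       /* elements per period (frame) */
    *fn = buf_len / period_len;        /* periods in the ring */
    return 0;
}

int main(void)
{
    unsigned en, fn;
    /* 16 KiB ring, 4 KiB periods, 16-bit samples: 2048 elements x 4 frames,
     * so the hardware interrupts once per 4 KiB period. */
    if (cyclic_split(16384, 4096, 2, &en, &fn) == 0)
        printf("en=%u fn=%u\n", en, fn);
    return 0;
}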
967 struct omap_dmadev *d = to_omap_dma_dev(c->vc.chan.device); in omap_dma_terminate_all() local
974 spin_lock(&d->lock); in omap_dma_terminate_all()
976 spin_unlock(&d->lock); in omap_dma_terminate_all()