Lines Matching refs:pd in drivers/dma/pch_dma.c (Topcliff PCH DMA controller driver)
144 #define dma_readl(pd, name) \ argument
145 readl((pd)->membase + PCH_DMA_##name)
146 #define dma_writel(pd, name, val) \ argument
147 writel((val), (pd)->membase + PCH_DMA_##name)
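
These two accessors (144-147) make every controller register access a plain
readl()/writel() against the BAR mapping held in pd->membase, with the
register name token-pasted onto a PCH_DMA_ prefix. A context sketch; the
offsets and struct layout below are illustrative assumptions, not the
driver's exact definitions:

#include <linux/io.h>
#include <linux/types.h>

#define PCH_DMA_CTL0_SKETCH	0x00	/* offsets assumed for illustration */
#define PCH_DMA_CTL2_SKETCH	0x08
#define PCH_DMA_STS0_SKETCH	0x10

struct pch_dma_sketch {
	void __iomem *membase;		/* BAR 1 mapping from pci_iomap() */
};

/* dma_readl(pd, CTL0) expands to readl((pd)->membase + PCH_DMA_CTL0),
 * so callers name registers symbolically and never touch raw offsets. */
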
191 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq() local
200 val = dma_readl(pd, CTL2); in pdc_enable_irq()
207 dma_writel(pd, CTL2, val); in pdc_enable_irq()
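
pdc_enable_irq() (191-207) is a read-modify-write of CTL2: fetch the
register, flip this channel's interrupt-enable bit, write it back. A
minimal sketch; the one-enable-bit-per-channel layout is an assumption:

static void sketch_enable_irq(struct pch_dma *pd, int chan_id, bool enable)
{
	u32 val = dma_readl(pd, CTL2);

	if (enable)
		val |= 0x1U << chan_id;		/* assumed bit position */
	else
		val &= ~(0x1U << chan_id);

	dma_writel(pd, CTL2, val);
}
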
216 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir() local
222 val = dma_readl(pd, CTL0); in pdc_set_dir()
237 dma_writel(pd, CTL0, val); in pdc_set_dir()
240 val = dma_readl(pd, CTL3); in pdc_set_dir()
254 dma_writel(pd, CTL3, val); in pdc_set_dir()
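
pdc_set_dir() (216-254) updates a per-channel direction bit, in CTL0 for
the low-numbered channels and in CTL3 for the rest. A sketch of the CTL0
branch; the 4-bits-per-channel field width and the direction bit position
within it are assumptions for illustration:

#define SKETCH_BITS_PER_CH	4	/* assumed per-channel field width */
#define SKETCH_DIR_BIT		2	/* assumed direction bit in the field */

static void sketch_set_dir(struct pch_dma *pd, int chan_id, bool mem_to_dev)
{
	int shift = chan_id * SKETCH_BITS_PER_CH + SKETCH_DIR_BIT;
	u32 val = dma_readl(pd, CTL0);	/* high channels use CTL3 instead */

	if (mem_to_dev)
		val |= 0x1U << shift;
	else
		val &= ~(0x1U << shift);

	dma_writel(pd, CTL0, val);
}
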
263 struct pch_dma *pd = to_pd(chan->device); in pdc_set_mode() local
273 val = dma_readl(pd, CTL0); in pdc_set_mode()
277 dma_writel(pd, CTL0, val); in pdc_set_mode()
284 val = dma_readl(pd, CTL3); in pdc_set_mode()
288 dma_writel(pd, CTL3, val); in pdc_set_mode()
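
pdc_set_mode() (263-288) writes the mode field in the same per-channel
nibble, again split across CTL0 and CTL3. Sketch of the CTL0 case; the
2-bit mode field and its position are assumptions:

#define SKETCH_MODE_MASK	0x3	/* assumed 2-bit mode field */

static void sketch_set_mode(struct pch_dma *pd, int chan_id, u32 mode)
{
	int shift = chan_id * SKETCH_BITS_PER_CH;
	u32 val = dma_readl(pd, CTL0);	/* CTL3 branch for high channels elided */

	val &= ~((u32)SKETCH_MODE_MASK << shift);
	val |= mode << shift;

	dma_writel(pd, CTL0, val);
}
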
297 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status0() local
300 val = dma_readl(pd, STS0); in pdc_get_status0()
307 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status2() local
310 val = dma_readl(pd, STS2); in pdc_get_status2()
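
The two status getters (297-310) extract one channel's state field from
STS0 or STS2. A sketch for STS0; the 2-bits-per-channel width and the base
shift are assumptions:

#define SKETCH_STS_SHIFT	16	/* assumed base of the state fields */
#define SKETCH_STS_MASK		0x3	/* assumed state bits per channel */

static u32 sketch_get_status0(struct pch_dma *pd, int chan_id)
{
	u32 val = dma_readl(pd, STS0);

	return (val >> (SKETCH_STS_SHIFT + 2 * chan_id)) & SKETCH_STS_MASK;
}
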
444 struct pch_dma *pd = to_pd(chan->device); in pdc_alloc_desc() local
447 desc = pci_pool_alloc(pd->pool, flags, &addr); in pdc_alloc_desc()
546 struct pch_dma *pd = to_pd(chan->device); in pd_free_chan_resources() local
560 pci_pool_free(pd->pool, desc, desc->txd.phys); in pd_free_chan_resources()
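
pci_pool_alloc() at 447 and pci_pool_free() at 560 are the pre-v4.12
spellings of the dma_pool API (the pci_pool names were later removed as
thin wrappers). A minimal sketch of the same alloc/free pairing using the
modern names, with an assumed descriptor type:

#include <linux/dmaengine.h>
#include <linux/dmapool.h>

struct sketch_desc {
	struct dma_async_tx_descriptor txd;	/* other fields elided */
};

static struct sketch_desc *sketch_alloc_desc(struct dma_pool *pool,
					     gfp_t flags)
{
	dma_addr_t addr;
	struct sketch_desc *desc;

	desc = dma_pool_alloc(pool, flags, &addr);	/* was pci_pool_alloc */
	if (desc)
		desc->txd.phys = addr;	/* hardware sees the desc by DMA handle */

	return desc;
}

/* Release mirrors pd_free_chan_resources():
 *	dma_pool_free(pool, desc, desc->txd.phys);	(was pci_pool_free)
 */
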
710 struct pch_dma *pd = (struct pch_dma *)devid; in pd_irq() local
718 sts0 = dma_readl(pd, STS0); in pd_irq()
719 sts2 = dma_readl(pd, STS2); in pd_irq()
721 dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0); in pd_irq()
723 for (i = 0; i < pd->dma.chancnt; i++) { in pd_irq()
724 pd_chan = &pd->channels[i]; in pd_irq()
747 dma_writel(pd, STS0, sts0); in pd_irq()
749 dma_writel(pd, STS2, sts2); in pd_irq()
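
pd_irq() (710-749) follows the standard shared-interrupt shape: read both
status registers, return IRQ_NONE when nothing is pending (the line is
registered IRQF_SHARED at 885), dispatch per channel, then acknowledge by
writing the status bits back. A sketch; the write-1-to-clear semantics are
an assumption:

#include <linux/interrupt.h>

static irqreturn_t sketch_irq(int irq, void *devid)
{
	struct pch_dma *pd = devid;
	u32 sts0 = dma_readl(pd, STS0);
	u32 sts2 = dma_readl(pd, STS2);
	unsigned int i;

	if (!sts0 && !sts2)
		return IRQ_NONE;	/* not ours: the line is shared */

	for (i = 0; i < pd->dma.chancnt; i++) {
		/* check this channel's IRQ/error bits in sts0/sts2 and
		 * schedule its tasklet (per-channel details elided) */
	}

	if (sts0)
		dma_writel(pd, STS0, sts0);	/* ack: write-1-to-clear (assumed) */
	if (sts2)
		dma_writel(pd, STS2, sts2);

	return IRQ_HANDLED;
}
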
755 static void pch_dma_save_regs(struct pch_dma *pd) in pch_dma_save_regs() argument
761 pd->regs.dma_ctl0 = dma_readl(pd, CTL0); in pch_dma_save_regs()
762 pd->regs.dma_ctl1 = dma_readl(pd, CTL1); in pch_dma_save_regs()
763 pd->regs.dma_ctl2 = dma_readl(pd, CTL2); in pch_dma_save_regs()
764 pd->regs.dma_ctl3 = dma_readl(pd, CTL3); in pch_dma_save_regs()
766 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_save_regs()
769 pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR); in pch_dma_save_regs()
770 pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR); in pch_dma_save_regs()
771 pd->ch_regs[i].size = channel_readl(pd_chan, SIZE); in pch_dma_save_regs()
772 pd->ch_regs[i].next = channel_readl(pd_chan, NEXT); in pch_dma_save_regs()
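
channel_readl()/channel_writel() used by this snapshot loop do not appear
in a refs:pd listing, since they take the channel rather than pd.
Presumably they are the per-channel analogues of dma_readl()/dma_writel(),
reading from a per-channel register window; a plausible shape, with
assumed offsets:

struct sketch_chan {
	void __iomem *membase;	/* this channel's register window */
};

#define SKETCH_DEV_ADDR	0x00	/* offsets assumed for illustration */
#define SKETCH_MEM_ADDR	0x04
#define SKETCH_SIZE	0x08
#define SKETCH_NEXT	0x0c

#define sketch_channel_readl(pdc, name) \
	readl((pdc)->membase + SKETCH_##name)
#define sketch_channel_writel(pdc, name, val) \
	writel((val), (pdc)->membase + SKETCH_##name)
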
778 static void pch_dma_restore_regs(struct pch_dma *pd) in pch_dma_restore_regs() argument
784 dma_writel(pd, CTL0, pd->regs.dma_ctl0); in pch_dma_restore_regs()
785 dma_writel(pd, CTL1, pd->regs.dma_ctl1); in pch_dma_restore_regs()
786 dma_writel(pd, CTL2, pd->regs.dma_ctl2); in pch_dma_restore_regs()
787 dma_writel(pd, CTL3, pd->regs.dma_ctl3); in pch_dma_restore_regs()
789 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_restore_regs()
792 channel_writel(pd_chan, DEV_ADDR, pd->ch_regs[i].dev_addr); in pch_dma_restore_regs()
793 channel_writel(pd_chan, MEM_ADDR, pd->ch_regs[i].mem_addr); in pch_dma_restore_regs()
794 channel_writel(pd_chan, SIZE, pd->ch_regs[i].size); in pch_dma_restore_regs()
795 channel_writel(pd_chan, NEXT, pd->ch_regs[i].next); in pch_dma_restore_regs()
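
The restore loop (789-795) is the save loop (766-772) with the data
direction reversed; together they imply this snapshot layout inside
struct pch_dma (field names taken from the listing, types assumed):

struct sketch_pch_dma_regs {
	u32 dma_ctl0;
	u32 dma_ctl1;
	u32 dma_ctl2;
	u32 dma_ctl3;
};

struct sketch_pch_dma_ch_regs {	/* one per channel, in pd->ch_regs[] */
	u32 dev_addr;
	u32 mem_addr;
	u32 size;
	u32 next;
};
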
803 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_suspend() local
805 if (pd) in pch_dma_suspend()
806 pch_dma_save_regs(pd); in pch_dma_suspend()
817 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_resume() local
829 if (pd) in pch_dma_resume()
830 pch_dma_restore_regs(pd); in pch_dma_resume()
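
The snapshot pair hooks into the legacy PCI PM callbacks this driver
generation used. A sketch of the suspend side; the pci_* power plumbing
shown is the conventional pattern, elided from the refs:pd hits (resume
mirrors it before calling pch_dma_restore_regs()):

static int sketch_suspend(struct pci_dev *pdev, pm_message_t state)
{
	struct pch_dma *pd = pci_get_drvdata(pdev);

	if (pd)
		pch_dma_save_regs(pd);	/* CTL0..CTL3 plus per-channel regs */

	pci_save_state(pdev);
	pci_disable_device(pdev);
	pci_set_power_state(pdev, pci_choose_state(pdev, state));

	return 0;
}
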
839 struct pch_dma *pd; in pch_dma_probe() local
846 pd = kzalloc(sizeof(*pd), GFP_KERNEL); in pch_dma_probe()
847 if (!pd) in pch_dma_probe()
850 pci_set_drvdata(pdev, pd); in pch_dma_probe()
876 regs = pd->membase = pci_iomap(pdev, 1, 0); in pch_dma_probe()
877 if (!pd->membase) { in pch_dma_probe()
885 err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd); in pch_dma_probe()
891 pd->pool = pci_pool_create("pch_dma_desc_pool", pdev, in pch_dma_probe()
893 if (!pd->pool) { in pch_dma_probe()
899 pd->dma.dev = &pdev->dev; in pch_dma_probe()
901 INIT_LIST_HEAD(&pd->dma.channels); in pch_dma_probe()
904 struct pch_dma_chan *pd_chan = &pd->channels[i]; in pch_dma_probe()
906 pd_chan->chan.device = &pd->dma; in pch_dma_probe()
919 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); in pch_dma_probe()
922 dma_cap_zero(pd->dma.cap_mask); in pch_dma_probe()
923 dma_cap_set(DMA_PRIVATE, pd->dma.cap_mask); in pch_dma_probe()
924 dma_cap_set(DMA_SLAVE, pd->dma.cap_mask); in pch_dma_probe()
926 pd->dma.device_alloc_chan_resources = pd_alloc_chan_resources; in pch_dma_probe()
927 pd->dma.device_free_chan_resources = pd_free_chan_resources; in pch_dma_probe()
928 pd->dma.device_tx_status = pd_tx_status; in pch_dma_probe()
929 pd->dma.device_issue_pending = pd_issue_pending; in pch_dma_probe()
930 pd->dma.device_prep_slave_sg = pd_prep_slave_sg; in pch_dma_probe()
931 pd->dma.device_terminate_all = pd_device_terminate_all; in pch_dma_probe()
933 err = dma_async_device_register(&pd->dma); in pch_dma_probe()
942 pci_pool_destroy(pd->pool); in pch_dma_probe()
944 free_irq(pdev->irq, pd); in pch_dma_probe()
946 pci_iounmap(pdev, pd->membase); in pch_dma_probe()
952 kfree(pd); in pch_dma_probe()
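
The probe hits (839-952) trace the usual acquire-in-order,
release-in-reverse shape: allocate pd, map BAR 1, grab the shared IRQ,
create the descriptor pool, wire up the dma_device callbacks, register.
A condensed sketch making the unwind order explicit; label names and the
elided PCI enable/BAR-request steps are assumptions:

static int sketch_probe(struct pci_dev *pdev, const struct pci_device_id *id)
{
	struct pch_dma *pd;
	int err;

	pd = kzalloc(sizeof(*pd), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;
	pci_set_drvdata(pdev, pd);

	pd->membase = pci_iomap(pdev, 1, 0);	/* registers live in BAR 1 */
	if (!pd->membase) {
		err = -ENOMEM;
		goto err_free_mem;
	}

	/* pd_irq is the handler shown at 710-749 */
	err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, "pch_dma", pd);
	if (err)
		goto err_iounmap;

	/* descriptor pool, channel init, dma_device callbacks ... */

	err = dma_async_device_register(&pd->dma);
	if (err)
		goto err_free_irq;

	return 0;

err_free_irq:
	free_irq(pdev->irq, pd);
err_iounmap:
	pci_iounmap(pdev, pd->membase);
err_free_mem:
	kfree(pd);
	return err;
}
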
958 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_remove() local
962 if (pd) { in pch_dma_remove()
963 dma_async_device_unregister(&pd->dma); in pch_dma_remove()
965 free_irq(pdev->irq, pd); in pch_dma_remove()
967 list_for_each_entry_safe(chan, _c, &pd->dma.channels, in pch_dma_remove()
974 pci_pool_destroy(pd->pool); in pch_dma_remove()
975 pci_iounmap(pdev, pd->membase); in pch_dma_remove()
978 kfree(pd); in pch_dma_remove()
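
Teardown (958-978) runs in reverse of probe: unregister from dmaengine
first so no new clients arrive, silence the IRQ, quiesce each channel,
then release pool, mapping, and memory. A sketch; the per-channel cleanup
hinted at by the channel walk at 967 is summarized in a comment:

static void sketch_remove(struct pci_dev *pdev)
{
	struct pch_dma *pd = pci_get_drvdata(pdev);

	if (!pd)
		return;

	dma_async_device_unregister(&pd->dma);	/* stop new clients first */
	free_irq(pdev->irq, pd);		/* no pd_irq() after this point */

	/* per channel: tasklet_kill() and free the descriptor lists */

	pci_pool_destroy(pd->pool);
	pci_iounmap(pdev, pd->membase);
	kfree(pd);
}
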