Lines matching refs:pd (uses of the struct pch_dma context pointer throughout the PCH DMA driver, pch_dma.c)

140 #define dma_readl(pd, name) \ argument
141 readl((pd)->membase + PCH_DMA_##name)
142 #define dma_writel(pd, name, val) \ argument
143 writel((val), (pd)->membase + PCH_DMA_##name)
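
Taken together, the two accessors hide the MMIO arithmetic behind token pasting: ##name glues the register name onto the PCH_DMA_ prefix, so every named register is addressed as an offset from pd->membase. A minimal sketch of the expansion, assuming PCH_DMA_STS0 is the offset constant defined elsewhere in the driver:

    /* dma_readl(pd, STS0) expands, via token pasting, to: */
    u32 sts0 = readl((pd)->membase + PCH_DMA_STS0);

    /* ...and dma_writel(pd, STS0, sts0) expands to: */
    writel((sts0), (pd)->membase + PCH_DMA_STS0);
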
187 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq() local
196 val = dma_readl(pd, CTL2); in pdc_enable_irq()
203 dma_writel(pd, CTL2, val); in pdc_enable_irq()
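
The pdc_enable_irq() references show the driver's usual read-modify-write pattern on a shared control register. A minimal sketch of that pattern, assuming one enable bit per channel at position chan_id; the real CTL2 bit layout is not visible in this listing:

    /* Hedged sketch of the CTL2 read-modify-write seen above; the
     * per-channel bit position is an assumption. */
    static void pdc_enable_irq_sketch(struct dma_chan *chan, int enable)
    {
            struct pch_dma *pd = to_pd(chan->device);
            u32 val = dma_readl(pd, CTL2);

            if (enable)
                    val |= BIT(chan->chan_id);
            else
                    val &= ~BIT(chan->chan_id);

            dma_writel(pd, CTL2, val);
    }
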
212 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir() local
218 val = dma_readl(pd, CTL0); in pdc_set_dir()
233 dma_writel(pd, CTL0, val); in pdc_set_dir()
236 val = dma_readl(pd, CTL3); in pdc_set_dir()
250 dma_writel(pd, CTL3, val); in pdc_set_dir()
259 struct pch_dma *pd = to_pd(chan->device); in pdc_set_mode() local
269 val = dma_readl(pd, CTL0); in pdc_set_mode()
273 dma_writel(pd, CTL0, val); in pdc_set_mode()
280 val = dma_readl(pd, CTL3); in pdc_set_mode()
284 dma_writel(pd, CTL3, val); in pdc_set_mode()
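
pdc_set_dir() and pdc_set_mode() both read and write CTL0 as well as CTL3, which suggests the control bits are banked: one register for the lower channels, another for the upper ones. A sketch of that split, assuming an eight-channel cutoff and a 4-bit field per channel (both are assumptions, not confirmed by the listing):

    /* Sketch of the CTL0/CTL3 banking implied by pdc_set_dir() and
     * pdc_set_mode(); cutoff and field width are assumed. */
    static void pdc_set_ctl_sketch(struct dma_chan *chan, u32 bits)
    {
            struct pch_dma *pd = to_pd(chan->device);
            u32 val;

            if (chan->chan_id < 8) {
                    val = dma_readl(pd, CTL0);
                    val |= bits << (chan->chan_id * 4);
                    dma_writel(pd, CTL0, val);
            } else {
                    val = dma_readl(pd, CTL3);
                    val |= bits << ((chan->chan_id - 8) * 4);
                    dma_writel(pd, CTL3, val);
            }
    }
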
293 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status0() local
296 val = dma_readl(pd, STS0); in pdc_get_status0()
303 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status2() local
306 val = dma_readl(pd, STS2); in pdc_get_status2()
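
pdc_get_status0() and pdc_get_status2() mirror the same banking on the status side: STS0 for one group of channels, STS2 for the other. A sketch of pulling one channel's status field out of the snapshot, assuming a 2-bit field per channel:

    /* Sketch only: the 2-bit status field and the modulo-8 banking
     * are assumptions about the STS0/STS2 layout. */
    static u32 pdc_get_status_sketch(struct pch_dma_chan *pd_chan)
    {
            struct pch_dma *pd = to_pd(pd_chan->chan.device);
            int id = pd_chan->chan.chan_id;
            u32 val = (id < 8) ? dma_readl(pd, STS0) : dma_readl(pd, STS2);

            return (val >> ((id % 8) * 2)) & 0x3;
    }
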
440 struct pch_dma *pd = to_pd(chan->device); in pdc_alloc_desc() local
443 desc = pci_pool_alloc(pd->pool, flags, &addr); in pdc_alloc_desc()
542 struct pch_dma *pd = to_pd(chan->device); in pd_free_chan_resources() local
556 pci_pool_free(pd->pool, desc, desc->txd.phys); in pd_free_chan_resources()
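
The descriptor lifecycle runs through a PCI pool: pdc_alloc_desc() draws DMA-coherent descriptors from pd->pool, and pd_free_chan_resources() returns them using the bus address recorded in txd.phys. A minimal sketch of that pairing; the zeroing step is an assumption, and pci_pool_alloc()/pci_pool_free() are the legacy wrappers over the dma_pool API used by kernels of this vintage:

    /* Sketch of the pool-backed alloc/free pairing shown above. */
    static struct pch_dma_desc *pdc_alloc_desc_sketch(struct pch_dma *pd,
                                                      gfp_t flags)
    {
            dma_addr_t addr;
            struct pch_dma_desc *desc;

            desc = pci_pool_alloc(pd->pool, flags, &addr);
            if (desc) {
                    memset(desc, 0, sizeof(*desc));   /* assumed init */
                    desc->txd.phys = addr;  /* bus address for the hardware */
            }
            return desc;
    }

    /* The matching release, as in pd_free_chan_resources():
     *     pci_pool_free(pd->pool, desc, desc->txd.phys);
     */
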
706 struct pch_dma *pd = (struct pch_dma *)devid; in pd_irq() local
714 sts0 = dma_readl(pd, STS0); in pd_irq()
715 sts2 = dma_readl(pd, STS2); in pd_irq()
717 dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0); in pd_irq()
719 for (i = 0; i < pd->dma.chancnt; i++) { in pd_irq()
720 pd_chan = &pd->channels[i]; in pd_irq()
743 dma_writel(pd, STS0, sts0); in pd_irq()
745 dma_writel(pd, STS2, sts2); in pd_irq()
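
Read in order, the pd_irq() references outline the whole handler: snapshot both status registers, walk every channel to find the ones that fired, then acknowledge by writing the snapshots back. A condensed sketch of that flow; the per-channel bit test and the tasklet hand-off are assumptions about details the listing elides:

    /* Condensed pd_irq() flow; per-channel bit layout and the tasklet
     * deferral are assumptions. Writing the status back is the ack. */
    static irqreturn_t pd_irq_sketch(int irq, void *devid)
    {
            struct pch_dma *pd = (struct pch_dma *)devid;
            u32 sts0 = dma_readl(pd, STS0);
            u32 sts2 = dma_readl(pd, STS2);
            int i, handled = 0;

            for (i = 0; i < pd->dma.chancnt; i++) {
                    struct pch_dma_chan *pd_chan = &pd->channels[i];

                    if (sts0 & BIT(i)) {            /* assumed layout */
                            tasklet_schedule(&pd_chan->tasklet);
                            handled = 1;
                    }
            }

            if (sts0)
                    dma_writel(pd, STS0, sts0);
            if (sts2)
                    dma_writel(pd, STS2, sts2);

            return IRQ_RETVAL(handled);
    }
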
751 static void pch_dma_save_regs(struct pch_dma *pd) in pch_dma_save_regs() argument
757 pd->regs.dma_ctl0 = dma_readl(pd, CTL0); in pch_dma_save_regs()
758 pd->regs.dma_ctl1 = dma_readl(pd, CTL1); in pch_dma_save_regs()
759 pd->regs.dma_ctl2 = dma_readl(pd, CTL2); in pch_dma_save_regs()
760 pd->regs.dma_ctl3 = dma_readl(pd, CTL3); in pch_dma_save_regs()
762 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_save_regs()
765 pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR); in pch_dma_save_regs()
766 pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR); in pch_dma_save_regs()
767 pd->ch_regs[i].size = channel_readl(pd_chan, SIZE); in pch_dma_save_regs()
768 pd->ch_regs[i].next = channel_readl(pd_chan, NEXT); in pch_dma_save_regs()
774 static void pch_dma_restore_regs(struct pch_dma *pd) in pch_dma_restore_regs() argument
780 dma_writel(pd, CTL0, pd->regs.dma_ctl0); in pch_dma_restore_regs()
781 dma_writel(pd, CTL1, pd->regs.dma_ctl1); in pch_dma_restore_regs()
782 dma_writel(pd, CTL2, pd->regs.dma_ctl2); in pch_dma_restore_regs()
783 dma_writel(pd, CTL3, pd->regs.dma_ctl3); in pch_dma_restore_regs()
785 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_restore_regs()
788 channel_writel(pd_chan, DEV_ADDR, pd->ch_regs[i].dev_addr); in pch_dma_restore_regs()
789 channel_writel(pd_chan, MEM_ADDR, pd->ch_regs[i].mem_addr); in pch_dma_restore_regs()
790 channel_writel(pd_chan, SIZE, pd->ch_regs[i].size); in pch_dma_restore_regs()
791 channel_writel(pd_chan, NEXT, pd->ch_regs[i].next); in pch_dma_restore_regs()
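
pch_dma_save_regs() and pch_dma_restore_regs() are exact mirrors: the four global control registers, then the four per-channel registers (DEV_ADDR, MEM_ADDR, SIZE, NEXT) for every channel on the device list. Reconstructing the save loop from the lines above, with to_pd_chan() assumed to be the container_of helper analogous to to_pd():

    /* Per-channel half of pch_dma_save_regs(); the restore loop walks
     * the same list with channel_writel() in place of channel_readl(). */
    struct dma_chan *chan, *_c;
    int i = 0;

    list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) {
            struct pch_dma_chan *pd_chan = to_pd_chan(chan);  /* assumed helper */

            pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR);
            pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR);
            pd->ch_regs[i].size     = channel_readl(pd_chan, SIZE);
            pd->ch_regs[i].next     = channel_readl(pd_chan, NEXT);
            i++;
    }
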
799 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_suspend() local
801 if (pd) in pch_dma_suspend()
802 pch_dma_save_regs(pd); in pch_dma_suspend()
813 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_resume() local
825 if (pd) in pch_dma_resume()
826 pch_dma_restore_regs(pd); in pch_dma_resume()
835 struct pch_dma *pd; in pch_dma_probe() local
842 pd = kzalloc(sizeof(*pd), GFP_KERNEL); in pch_dma_probe()
843 if (!pd) in pch_dma_probe()
846 pci_set_drvdata(pdev, pd); in pch_dma_probe()
872 regs = pd->membase = pci_iomap(pdev, 1, 0); in pch_dma_probe()
873 if (!pd->membase) { in pch_dma_probe()
881 err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd); in pch_dma_probe()
887 pd->pool = pci_pool_create("pch_dma_desc_pool", pdev, in pch_dma_probe()
889 if (!pd->pool) { in pch_dma_probe()
895 pd->dma.dev = &pdev->dev; in pch_dma_probe()
897 INIT_LIST_HEAD(&pd->dma.channels); in pch_dma_probe()
900 struct pch_dma_chan *pd_chan = &pd->channels[i]; in pch_dma_probe()
902 pd_chan->chan.device = &pd->dma; in pch_dma_probe()
915 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); in pch_dma_probe()
918 dma_cap_zero(pd->dma.cap_mask); in pch_dma_probe()
919 dma_cap_set(DMA_PRIVATE, pd->dma.cap_mask); in pch_dma_probe()
920 dma_cap_set(DMA_SLAVE, pd->dma.cap_mask); in pch_dma_probe()
922 pd->dma.device_alloc_chan_resources = pd_alloc_chan_resources; in pch_dma_probe()
923 pd->dma.device_free_chan_resources = pd_free_chan_resources; in pch_dma_probe()
924 pd->dma.device_tx_status = pd_tx_status; in pch_dma_probe()
925 pd->dma.device_issue_pending = pd_issue_pending; in pch_dma_probe()
926 pd->dma.device_prep_slave_sg = pd_prep_slave_sg; in pch_dma_probe()
927 pd->dma.device_terminate_all = pd_device_terminate_all; in pch_dma_probe()
929 err = dma_async_device_register(&pd->dma); in pch_dma_probe()
938 pci_pool_destroy(pd->pool); in pch_dma_probe()
940 free_irq(pdev->irq, pd); in pch_dma_probe()
942 pci_iounmap(pdev, pd->membase); in pch_dma_probe()
948 kfree(pd); in pch_dma_probe()
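
Read top to bottom, the probe references give the bring-up order: allocate the pch_dma context, stash it as drvdata, map BAR 1, install a shared IRQ handler, create the descriptor pool, initialize and list each channel, wire up the dmaengine callbacks, and register. A condensed skeleton of that order; the error-label names are illustrative, the pool size/alignment arguments are elided in the listing and assumed here, and the PCI enable/region steps are skipped just as the listing skips them:

    static int pch_dma_probe_sketch(struct pci_dev *pdev,
                                    const struct pci_device_id *id)
    {
            struct pch_dma *pd;
            int err;

            pd = kzalloc(sizeof(*pd), GFP_KERNEL);
            if (!pd)
                    return -ENOMEM;
            pci_set_drvdata(pdev, pd);

            /* ...PCI enable / request regions elided... */

            pd->membase = pci_iomap(pdev, 1, 0);    /* registers in BAR 1 */
            if (!pd->membase) {
                    err = -ENOMEM;
                    goto err_free_mem;
            }

            err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd);
            if (err)
                    goto err_iounmap;

            pd->pool = pci_pool_create("pch_dma_desc_pool", pdev,
                                       sizeof(struct pch_dma_desc),
                                       4 /* assumed align */, 0);
            if (!pd->pool) {
                    err = -ENOMEM;
                    goto err_free_irq;
            }

            /* ...channel init and dmaengine callback wiring as listed... */

            err = dma_async_device_register(&pd->dma);
            if (err)
                    goto err_free_pool;

            return 0;

    err_free_pool:
            pci_pool_destroy(pd->pool);
    err_free_irq:
            free_irq(pdev->irq, pd);
    err_iounmap:
            pci_iounmap(pdev, pd->membase);
    err_free_mem:
            kfree(pd);
            return err;
    }
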
954 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_remove() local
958 if (pd) { in pch_dma_remove()
959 dma_async_device_unregister(&pd->dma); in pch_dma_remove()
961 free_irq(pdev->irq, pd); in pch_dma_remove()
963 list_for_each_entry_safe(chan, _c, &pd->dma.channels, in pch_dma_remove()
970 pci_pool_destroy(pd->pool); in pch_dma_remove()
971 pci_iounmap(pdev, pd->membase); in pch_dma_remove()
974 kfree(pd); in pch_dma_remove()
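
Teardown releases the same resources in reverse. A condensed sketch of pch_dma_remove() as the references lay it out; the body of the channel walk (quiescing per-channel work before the pool goes away) is an assumption:

    static void pch_dma_remove_sketch(struct pci_dev *pdev)
    {
            struct pch_dma *pd = pci_get_drvdata(pdev);
            struct dma_chan *chan, *_c;

            if (pd) {
                    dma_async_device_unregister(&pd->dma);
                    free_irq(pdev->irq, pd);

                    list_for_each_entry_safe(chan, _c, &pd->dma.channels,
                                             device_node) {
                            /* per-channel quiesce, e.g. tasklet_kill()
                             * (assumed) */
                    }

                    pci_pool_destroy(pd->pool);
                    pci_iounmap(pdev, pd->membase);
                    kfree(pd);
            }
            /* ...PCI release/disable elided, as in the listing... */
    }
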