acdev 226 drivers/ata/pata_arasan_cf.c static void cf_dumpregs(struct arasan_cf_dev *acdev)
acdev 228 drivers/ata/pata_arasan_cf.c struct device *dev = acdev->host->dev;
acdev 231 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": CFI_STS: %x", readl(acdev->vbase + CFI_STS));
acdev 232 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": IRQ_STS: %x", readl(acdev->vbase + IRQ_STS));
acdev 233 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": IRQ_EN: %x", readl(acdev->vbase + IRQ_EN));
acdev 234 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": OP_MODE: %x", readl(acdev->vbase + OP_MODE));
acdev 235 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": CLK_CFG: %x", readl(acdev->vbase + CLK_CFG));
acdev 236 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": TM_CFG: %x", readl(acdev->vbase + TM_CFG));
acdev 237 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": XFER_CTR: %x", readl(acdev->vbase + XFER_CTR));
acdev 238 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": GIRQ_STS: %x", readl(acdev->vbase + GIRQ_STS));
acdev 239 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": GIRQ_STS_EN: %x", readl(acdev->vbase + GIRQ_STS_EN));
acdev 240 drivers/ata/pata_arasan_cf.c dev_dbg(dev, ": GIRQ_SGN_EN: %x", readl(acdev->vbase + GIRQ_SGN_EN));
acdev 245 drivers/ata/pata_arasan_cf.c static void cf_ginterrupt_enable(struct arasan_cf_dev *acdev, bool enable)
acdev 248 drivers/ata/pata_arasan_cf.c writel(enable, acdev->vbase + GIRQ_STS_EN);
acdev 249 drivers/ata/pata_arasan_cf.c writel(enable, acdev->vbase + GIRQ_SGN_EN);
acdev 254 drivers/ata/pata_arasan_cf.c cf_interrupt_enable(struct arasan_cf_dev *acdev, u32 mask, bool enable)
acdev 256 drivers/ata/pata_arasan_cf.c u32 val = readl(acdev->vbase + IRQ_EN);
acdev 259 drivers/ata/pata_arasan_cf.c writel(mask, acdev->vbase + IRQ_STS);
acdev 260 drivers/ata/pata_arasan_cf.c writel(val | mask, acdev->vbase + IRQ_EN);
acdev 262 drivers/ata/pata_arasan_cf.c writel(val & ~mask, acdev->vbase + IRQ_EN);
acdev 265 drivers/ata/pata_arasan_cf.c static inline void cf_card_reset(struct arasan_cf_dev *acdev)
acdev 267 drivers/ata/pata_arasan_cf.c u32 val = readl(acdev->vbase + OP_MODE);
acdev 269 drivers/ata/pata_arasan_cf.c writel(val | CARD_RESET, acdev->vbase + OP_MODE);
acdev 271 drivers/ata/pata_arasan_cf.c writel(val & ~CARD_RESET, acdev->vbase + OP_MODE);
acdev 274 drivers/ata/pata_arasan_cf.c static inline void cf_ctrl_reset(struct arasan_cf_dev *acdev)
acdev 276 drivers/ata/pata_arasan_cf.c writel(readl(acdev->vbase + OP_MODE) & ~CFHOST_ENB,
acdev 277 drivers/ata/pata_arasan_cf.c acdev->vbase + OP_MODE);
acdev 278 drivers/ata/pata_arasan_cf.c writel(readl(acdev->vbase + OP_MODE) | CFHOST_ENB,
acdev 279 drivers/ata/pata_arasan_cf.c acdev->vbase + OP_MODE);
acdev 282 drivers/ata/pata_arasan_cf.c static void cf_card_detect(struct arasan_cf_dev *acdev, bool hotplugged)
acdev 284 drivers/ata/pata_arasan_cf.c struct ata_port *ap = acdev->host->ports[0];
acdev 286 drivers/ata/pata_arasan_cf.c u32 val = readl(acdev->vbase + CFI_STS);
acdev 290 drivers/ata/pata_arasan_cf.c if (acdev->card_present)
acdev 292 drivers/ata/pata_arasan_cf.c acdev->card_present = 1;
acdev 293 drivers/ata/pata_arasan_cf.c cf_card_reset(acdev);
acdev 295 drivers/ata/pata_arasan_cf.c if (!acdev->card_present)
acdev 297 drivers/ata/pata_arasan_cf.c acdev->card_present = 0;
acdev 306 drivers/ata/pata_arasan_cf.c static int cf_init(struct arasan_cf_dev *acdev)
acdev 308 drivers/ata/pata_arasan_cf.c struct arasan_cf_pdata *pdata = dev_get_platdata(acdev->host->dev);
acdev 313 drivers/ata/pata_arasan_cf.c ret = clk_prepare_enable(acdev->clk);
acdev 315 drivers/ata/pata_arasan_cf.c dev_dbg(acdev->host->dev, "clock enable failed");
acdev 319 drivers/ata/pata_arasan_cf.c ret = clk_set_rate(acdev->clk, 166000000);
acdev 321 drivers/ata/pata_arasan_cf.c dev_warn(acdev->host->dev, "clock set rate failed");
acdev 322 drivers/ata/pata_arasan_cf.c clk_disable_unprepare(acdev->clk);
acdev 326 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 333 drivers/ata/pata_arasan_cf.c writel(if_clk, acdev->vbase + CLK_CFG);
acdev 335 drivers/ata/pata_arasan_cf.c writel(TRUE_IDE_MODE | CFHOST_ENB, acdev->vbase + OP_MODE);
acdev 336 drivers/ata/pata_arasan_cf.c cf_interrupt_enable(acdev, CARD_DETECT_IRQ, 1);
acdev 337 drivers/ata/pata_arasan_cf.c cf_ginterrupt_enable(acdev, 1);
acdev 338 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 343 drivers/ata/pata_arasan_cf.c static void cf_exit(struct arasan_cf_dev *acdev)
acdev 347 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 348 drivers/ata/pata_arasan_cf.c cf_ginterrupt_enable(acdev, 0);
acdev 349 drivers/ata/pata_arasan_cf.c cf_interrupt_enable(acdev, TRUE_IDE_IRQS, 0);
acdev 350 drivers/ata/pata_arasan_cf.c cf_card_reset(acdev);
acdev 351 drivers/ata/pata_arasan_cf.c writel(readl(acdev->vbase + OP_MODE) & ~CFHOST_ENB,
acdev 352 drivers/ata/pata_arasan_cf.c acdev->vbase + OP_MODE);
acdev 353 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 354 drivers/ata/pata_arasan_cf.c clk_disable_unprepare(acdev->clk);
acdev 359 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = dev;
acdev 361 drivers/ata/pata_arasan_cf.c complete(&acdev->dma_completion);
acdev 364 drivers/ata/pata_arasan_cf.c static inline void dma_complete(struct arasan_cf_dev *acdev)
acdev 366 drivers/ata/pata_arasan_cf.c struct ata_queued_cmd *qc = acdev->qc;
acdev 369 drivers/ata/pata_arasan_cf.c acdev->qc = NULL;
acdev 370 drivers/ata/pata_arasan_cf.c ata_sff_interrupt(acdev->irq, acdev->host);
acdev 372 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 375 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 378 drivers/ata/pata_arasan_cf.c static inline int wait4buf(struct arasan_cf_dev *acdev)
acdev 380 drivers/ata/pata_arasan_cf.c if (!wait_for_completion_timeout(&acdev->cf_completion, TIMEOUT)) {
acdev 381 drivers/ata/pata_arasan_cf.c u32 rw = acdev->qc->tf.flags & ATA_TFLAG_WRITE;
acdev 383 drivers/ata/pata_arasan_cf.c dev_err(acdev->host->dev, "%s TimeOut", rw ? "write" : "read");
acdev 388 drivers/ata/pata_arasan_cf.c if (acdev->dma_status & ATA_DMA_ERR)
acdev 395 drivers/ata/pata_arasan_cf.c dma_xfer(struct arasan_cf_dev *acdev, dma_addr_t src, dma_addr_t dest, u32 len)
acdev 398 drivers/ata/pata_arasan_cf.c struct dma_chan *chan = acdev->dma_chan;
acdev 405 drivers/ata/pata_arasan_cf.c dev_err(acdev->host->dev, "device_prep_dma_memcpy failed\n");
acdev 410 drivers/ata/pata_arasan_cf.c tx->callback_param = acdev;
acdev 415 drivers/ata/pata_arasan_cf.c dev_err(acdev->host->dev, "dma_submit_error\n");
acdev 422 drivers/ata/pata_arasan_cf.c if (!wait_for_completion_timeout(&acdev->dma_completion, TIMEOUT)) {
acdev 424 drivers/ata/pata_arasan_cf.c dev_err(acdev->host->dev, "wait_for_completion_timeout\n");
acdev 431 drivers/ata/pata_arasan_cf.c static int sg_xfer(struct arasan_cf_dev *acdev, struct scatterlist *sg)
acdev 435 drivers/ata/pata_arasan_cf.c u32 write = acdev->qc->tf.flags & ATA_TFLAG_WRITE;
acdev 442 drivers/ata/pata_arasan_cf.c dest = acdev->pbase + EXT_WRITE_PORT;
acdev 445 drivers/ata/pata_arasan_cf.c src = acdev->pbase + EXT_READ_PORT;
acdev 457 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 458 drivers/ata/pata_arasan_cf.c xfer_ctr = readl(acdev->vbase + XFER_CTR) &
acdev 461 drivers/ata/pata_arasan_cf.c acdev->vbase + XFER_CTR);
acdev 462 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 468 drivers/ata/pata_arasan_cf.c ret = wait4buf(acdev);
acdev 475 drivers/ata/pata_arasan_cf.c ret = dma_xfer(acdev, src, dest, dma_len);
acdev 477 drivers/ata/pata_arasan_cf.c dev_err(acdev->host->dev, "dma failed");
acdev 491 drivers/ata/pata_arasan_cf.c ret = wait4buf(acdev);
acdev 499 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 500 drivers/ata/pata_arasan_cf.c writel(readl(acdev->vbase + XFER_CTR) & ~XFER_START,
acdev 501 drivers/ata/pata_arasan_cf.c acdev->vbase + XFER_CTR);
acdev 502 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 520 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = container_of(work, struct arasan_cf_dev,
acdev 522 drivers/ata/pata_arasan_cf.c struct ata_queued_cmd *qc = acdev->qc;
acdev 530 drivers/ata/pata_arasan_cf.c acdev->dma_chan = dma_request_slave_channel(acdev->host->dev, "data");
acdev 531 drivers/ata/pata_arasan_cf.c if (!acdev->dma_chan) {
acdev 532 drivers/ata/pata_arasan_cf.c dev_err(acdev->host->dev, "Unable to get dma_chan\n");
acdev 537 drivers/ata/pata_arasan_cf.c ret = sg_xfer(acdev, sg);
acdev 542 drivers/ata/pata_arasan_cf.c dma_release_channel(acdev->dma_chan);
acdev 548 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 550 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 552 drivers/ata/pata_arasan_cf.c ata_sff_queue_delayed_work(&acdev->dwork, 1);
acdev 559 drivers/ata/pata_arasan_cf.c cf_dumpregs(acdev);
acdev 562 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 567 drivers/ata/pata_arasan_cf.c cf_ctrl_reset(acdev);
acdev 568 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 570 drivers/ata/pata_arasan_cf.c dma_complete(acdev);
acdev 575 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = container_of(work, struct arasan_cf_dev,
acdev 577 drivers/ata/pata_arasan_cf.c struct ata_queued_cmd *qc = acdev->qc;
acdev 581 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 583 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 586 drivers/ata/pata_arasan_cf.c ata_sff_queue_delayed_work(&acdev->dwork, 1);
acdev 588 drivers/ata/pata_arasan_cf.c dma_complete(acdev);
acdev 593 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = ((struct ata_host *)dev)->private_data;
acdev 597 drivers/ata/pata_arasan_cf.c irqsts = readl(acdev->vbase + GIRQ_STS);
acdev 601 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 602 drivers/ata/pata_arasan_cf.c irqsts = readl(acdev->vbase + IRQ_STS);
acdev 603 drivers/ata/pata_arasan_cf.c writel(irqsts, acdev->vbase + IRQ_STS); /* clear irqs */
acdev 604 drivers/ata/pata_arasan_cf.c writel(GIRQ_CF, acdev->vbase + GIRQ_STS); /* clear girqs */
acdev 610 drivers/ata/pata_arasan_cf.c cf_card_detect(acdev, 1);
acdev 611 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 616 drivers/ata/pata_arasan_cf.c acdev->dma_status = ATA_DMA_ERR;
acdev 617 drivers/ata/pata_arasan_cf.c writel(readl(acdev->vbase + XFER_CTR) & ~XFER_START,
acdev 618 drivers/ata/pata_arasan_cf.c acdev->vbase + XFER_CTR);
acdev 619 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 620 drivers/ata/pata_arasan_cf.c complete(&acdev->cf_completion);
acdev 621 drivers/ata/pata_arasan_cf.c dev_err(acdev->host->dev, "pio xfer err irq\n");
acdev 625 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 628 drivers/ata/pata_arasan_cf.c complete(&acdev->cf_completion);
acdev 633 drivers/ata/pata_arasan_cf.c struct ata_queued_cmd *qc = acdev->qc;
acdev 637 drivers/ata/pata_arasan_cf.c complete(&acdev->cf_completion);
acdev 645 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = ap->host->private_data;
acdev 648 drivers/ata/pata_arasan_cf.c writel(readl(acdev->vbase + XFER_CTR) & ~XFER_START,
acdev 649 drivers/ata/pata_arasan_cf.c acdev->vbase + XFER_CTR);
acdev 650 drivers/ata/pata_arasan_cf.c cf_ctrl_reset(acdev);
acdev 651 drivers/ata/pata_arasan_cf.c acdev->dma_status = ATA_DMA_ERR;
acdev 659 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = ap->host->private_data;
acdev 666 drivers/ata/pata_arasan_cf.c cancel_work_sync(&acdev->work);
acdev 667 drivers/ata/pata_arasan_cf.c cancel_delayed_work_sync(&acdev->dwork);
acdev 671 drivers/ata/pata_arasan_cf.c static void arasan_cf_dma_start(struct arasan_cf_dev *acdev)
acdev 673 drivers/ata/pata_arasan_cf.c struct ata_queued_cmd *qc = acdev->qc;
acdev 676 drivers/ata/pata_arasan_cf.c u32 xfer_ctr = readl(acdev->vbase + XFER_CTR) & ~XFER_DIR_MASK;
acdev 680 drivers/ata/pata_arasan_cf.c writel(xfer_ctr, acdev->vbase + XFER_CTR);
acdev 683 drivers/ata/pata_arasan_cf.c ata_sff_queue_work(&acdev->work);
acdev 689 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = ap->host->private_data;
acdev 706 drivers/ata/pata_arasan_cf.c acdev->dma_status = 0;
acdev 707 drivers/ata/pata_arasan_cf.c acdev->qc = qc;
acdev 708 drivers/ata/pata_arasan_cf.c arasan_cf_dma_start(acdev);
acdev 722 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = ap->host->private_data;
acdev 733 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 734 drivers/ata/pata_arasan_cf.c val = readl(acdev->vbase + OP_MODE) &
acdev 736 drivers/ata/pata_arasan_cf.c writel(val, acdev->vbase + OP_MODE);
acdev 737 drivers/ata/pata_arasan_cf.c val = readl(acdev->vbase + TM_CFG) & ~TRUEIDE_PIO_TIMING_MASK;
acdev 739 drivers/ata/pata_arasan_cf.c writel(val, acdev->vbase + TM_CFG);
acdev 741 drivers/ata/pata_arasan_cf.c cf_interrupt_enable(acdev, BUF_AVAIL_IRQ | XFER_DONE_IRQ, 0);
acdev 742 drivers/ata/pata_arasan_cf.c cf_interrupt_enable(acdev, PIO_XFER_ERR_IRQ, 1);
acdev 743 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 748 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = ap->host->private_data;
acdev 752 drivers/ata/pata_arasan_cf.c spin_lock_irqsave(&acdev->host->lock, flags);
acdev 753 drivers/ata/pata_arasan_cf.c opmode = readl(acdev->vbase + OP_MODE) &
acdev 755 drivers/ata/pata_arasan_cf.c tmcfg = readl(acdev->vbase + TM_CFG);
acdev 768 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 772 drivers/ata/pata_arasan_cf.c writel(opmode, acdev->vbase + OP_MODE);
acdev 773 drivers/ata/pata_arasan_cf.c writel(tmcfg, acdev->vbase + TM_CFG);
acdev 774 drivers/ata/pata_arasan_cf.c writel(DMA_XFER_MODE, acdev->vbase + XFER_CTR);
acdev 776 drivers/ata/pata_arasan_cf.c cf_interrupt_enable(acdev, PIO_XFER_ERR_IRQ, 0);
acdev 777 drivers/ata/pata_arasan_cf.c cf_interrupt_enable(acdev, BUF_AVAIL_IRQ | XFER_DONE_IRQ, 1);
acdev 778 drivers/ata/pata_arasan_cf.c spin_unlock_irqrestore(&acdev->host->lock, flags);
acdev 792 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev;
acdev 811 drivers/ata/pata_arasan_cf.c acdev = devm_kzalloc(&pdev->dev, sizeof(*acdev), GFP_KERNEL);
acdev 812 drivers/ata/pata_arasan_cf.c if (!acdev)
acdev 821 drivers/ata/pata_arasan_cf.c acdev->irq = platform_get_irq(pdev, 0);
acdev 822 drivers/ata/pata_arasan_cf.c if (acdev->irq)
acdev 827 drivers/ata/pata_arasan_cf.c acdev->pbase = res->start;
acdev 828 drivers/ata/pata_arasan_cf.c acdev->vbase = devm_ioremap_nocache(&pdev->dev, res->start,
acdev 830 drivers/ata/pata_arasan_cf.c if (!acdev->vbase) {
acdev 835 drivers/ata/pata_arasan_cf.c acdev->clk = devm_clk_get(&pdev->dev, NULL);
acdev 836 drivers/ata/pata_arasan_cf.c if (IS_ERR(acdev->clk)) {
acdev 838 drivers/ata/pata_arasan_cf.c return PTR_ERR(acdev->clk);
acdev 849 drivers/ata/pata_arasan_cf.c host->private_data = acdev;
acdev 850 drivers/ata/pata_arasan_cf.c acdev->host = host;
acdev 856 drivers/ata/pata_arasan_cf.c init_completion(&acdev->cf_completion);
acdev 857 drivers/ata/pata_arasan_cf.c init_completion(&acdev->dma_completion);
acdev 858 drivers/ata/pata_arasan_cf.c INIT_WORK(&acdev->work, data_xfer);
acdev 859 drivers/ata/pata_arasan_cf.c INIT_DELAYED_WORK(&acdev->dwork, delayed_finish);
acdev 860 drivers/ata/pata_arasan_cf.c dma_cap_set(DMA_MEMCPY, acdev->mask);
acdev 875 drivers/ata/pata_arasan_cf.c ap->ioaddr.cmd_addr = acdev->vbase + ATA_DATA_PORT;
acdev 876 drivers/ata/pata_arasan_cf.c ap->ioaddr.data_addr = acdev->vbase + ATA_DATA_PORT;
acdev 877 drivers/ata/pata_arasan_cf.c ap->ioaddr.error_addr = acdev->vbase + ATA_ERR_FTR;
acdev 878 drivers/ata/pata_arasan_cf.c ap->ioaddr.feature_addr = acdev->vbase + ATA_ERR_FTR;
acdev 879 drivers/ata/pata_arasan_cf.c ap->ioaddr.nsect_addr = acdev->vbase + ATA_SC;
acdev 880 drivers/ata/pata_arasan_cf.c ap->ioaddr.lbal_addr = acdev->vbase + ATA_SN;
acdev 881 drivers/ata/pata_arasan_cf.c ap->ioaddr.lbam_addr = acdev->vbase + ATA_CL;
acdev 882 drivers/ata/pata_arasan_cf.c ap->ioaddr.lbah_addr = acdev->vbase + ATA_CH;
acdev 883 drivers/ata/pata_arasan_cf.c ap->ioaddr.device_addr = acdev->vbase + ATA_SH;
acdev 884 drivers/ata/pata_arasan_cf.c ap->ioaddr.status_addr = acdev->vbase + ATA_STS_CMD;
acdev 885 drivers/ata/pata_arasan_cf.c ap->ioaddr.command_addr = acdev->vbase + ATA_STS_CMD;
acdev 886 drivers/ata/pata_arasan_cf.c ap->ioaddr.altstatus_addr = acdev->vbase + ATA_ASTS_DCTR;
acdev 887 drivers/ata/pata_arasan_cf.c ap->ioaddr.ctl_addr = acdev->vbase + ATA_ASTS_DCTR;
acdev 890 drivers/ata/pata_arasan_cf.c (unsigned long long) res->start, acdev->vbase);
acdev 892 drivers/ata/pata_arasan_cf.c ret = cf_init(acdev);
acdev 896 drivers/ata/pata_arasan_cf.c cf_card_detect(acdev, 0);
acdev 898 drivers/ata/pata_arasan_cf.c ret = ata_host_activate(host, acdev->irq, irq_handler, 0,
acdev 903 drivers/ata/pata_arasan_cf.c cf_exit(acdev);
acdev 911 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = host->ports[0]->private_data;
acdev 914 drivers/ata/pata_arasan_cf.c cf_exit(acdev);
acdev 923 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = host->ports[0]->private_data;
acdev 925 drivers/ata/pata_arasan_cf.c if (acdev->dma_chan)
acdev 926 drivers/ata/pata_arasan_cf.c dmaengine_terminate_all(acdev->dma_chan);
acdev 928 drivers/ata/pata_arasan_cf.c cf_exit(acdev);
acdev 935 drivers/ata/pata_arasan_cf.c struct arasan_cf_dev *acdev = host->ports[0]->private_data;
acdev 937 drivers/ata/pata_arasan_cf.c cf_init(acdev);
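
Read together, the references above map out the per-controller state that pata_arasan_cf.c keeps behind the acdev pointer. Below is a hedged sketch of struct arasan_cf_dev as implied by the listing: the field names all appear in the entries above, but the types, ordering, and comments are inferred from how each field is used and may not match the driver exactly.

/* Sketch of struct arasan_cf_dev, reconstructed only from the acdev
 * references above; types are educated guesses based on usage. */
struct arasan_cf_dev {
        struct ata_host *host;          /* acdev->host->dev, ->ports[0], ->lock */
        struct clk *clk;                /* clk_prepare_enable()/clk_set_rate() */

        dma_addr_t pbase;               /* bus address: acdev->pbase + EXT_*_PORT */
        void __iomem *vbase;            /* register base for readl()/writel() */
        int irq;                        /* platform_get_irq(), ata_sff_interrupt() */

        u8 dma_status;                  /* ATA_DMA_ERR reported back to libata */
        u8 card_present;                /* set/cleared in cf_card_detect() */

        struct completion cf_completion;        /* buffer-available / xfer-done IRQs */
        struct completion dma_completion;       /* dmaengine transfer callback */
        struct dma_chan *dma_chan;              /* memcpy channel used by data_xfer() */
        dma_cap_mask_t mask;                    /* dma_cap_set(DMA_MEMCPY, ...) */
        struct work_struct work;                /* data_xfer() */
        struct delayed_work dwork;              /* delayed_finish() */
        struct ata_queued_cmd *qc;              /* command currently being transferred */
};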
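
The entries at lines 254 through 262 show how the controller's per-interrupt mask is managed: pending bits are acked in IRQ_STS before the corresponding bits are set in IRQ_EN, and disabling is a plain read-modify-write. A plausible reconstruction of cf_interrupt_enable from just those snippets follows; the if/else framing is inferred from the pattern rather than quoted from the driver.

/* Reconstructed from the snippets at lines 254-262; the branch structure
 * is inferred, not copied. */
static void
cf_interrupt_enable(struct arasan_cf_dev *acdev, u32 mask, bool enable)
{
        u32 val = readl(acdev->vbase + IRQ_EN);

        if (enable) {
                /* ack any stale status bits before unmasking them */
                writel(mask, acdev->vbase + IRQ_STS);
                writel(val | mask, acdev->vbase + IRQ_EN);
        } else {
                writel(val & ~mask, acdev->vbase + IRQ_EN);
        }
}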
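
The dma_xfer references (lines 395 through 424) follow the usual dmaengine memcpy sequence: prepare a descriptor on acdev->dma_chan, attach a callback that completes acdev->dma_completion (the snippet at line 361), submit it, kick the channel, and wait with a timeout. A minimal sketch of that flow is below; the callback name, prep flags, TIMEOUT value, error codes, and timeout cleanup are assumptions, not quotes from the driver.

/* Minimal dmaengine memcpy flow matching the dma_xfer() references above.
 * dma_callback_sketch, the flags, and the error paths are assumptions. */
static void dma_callback_sketch(void *dev)
{
        struct arasan_cf_dev *acdev = dev;

        complete(&acdev->dma_completion);
}

static int dma_xfer_sketch(struct arasan_cf_dev *acdev, dma_addr_t src,
                           dma_addr_t dest, u32 len)
{
        struct dma_chan *chan = acdev->dma_chan;
        struct dma_async_tx_descriptor *tx;
        dma_cookie_t cookie;

        tx = chan->device->device_prep_dma_memcpy(chan, dest, src, len,
                                                  DMA_PREP_INTERRUPT);
        if (!tx) {
                dev_err(acdev->host->dev, "device_prep_dma_memcpy failed\n");
                return -EAGAIN;
        }

        tx->callback = dma_callback_sketch;
        tx->callback_param = acdev;
        cookie = tx->tx_submit(tx);
        if (dma_submit_error(cookie)) {
                dev_err(acdev->host->dev, "dma_submit_error\n");
                return -EAGAIN;
        }

        dma_async_issue_pending(chan);

        /* TIMEOUT is a driver-defined jiffies value; terminate on expiry. */
        if (!wait_for_completion_timeout(&acdev->dma_completion, TIMEOUT)) {
                dmaengine_terminate_all(chan);
                dev_err(acdev->host->dev, "wait_for_completion_timeout\n");
                return -ETIMEDOUT;
        }

        return 0;
}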