Lines matching refs:sdd — every line that references the s3c64xx_spi_driver_data pointer sdd, listed as the source line number, the code on that line, and the enclosing function ("argument" / "local" marks how sdd is bound there).

201 static void flush_fifo(struct s3c64xx_spi_driver_data *sdd)  in flush_fifo()  argument
203 void __iomem *regs = sdd->regs; in flush_fifo()
222 } while (TX_FIFO_LVL(val, sdd) && loops--); in flush_fifo()
225 dev_warn(&sdd->pdev->dev, "Timed out flushing TX FIFO\n"); in flush_fifo()
231 if (RX_FIFO_LVL(val, sdd)) in flush_fifo()
238 dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n"); in flush_fifo()
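
The flush_fifo() lines above (201-238) show both FIFOs being drained in bounded polling loops, with a warning if either refuses to empty. A minimal sketch of the RX half of that loop; the spin bound is an arbitrary placeholder for the driver's real millisecond-based timeout, and the discard read is an inference from the RX_FIFO_LVL check on line 231:

static void flush_rx_fifo_sketch(struct s3c64xx_spi_driver_data *sdd)
{
        void __iomem *regs = sdd->regs;
        unsigned long loops = 1000;             /* placeholder spin bound */
        u32 val;

        do {
                val = readl(regs + S3C64XX_SPI_STATUS);
                if (!RX_FIFO_LVL(val, sdd))
                        return;                 /* FIFO is empty, done */
                /* Discard one stale word from the RX data register. */
                readl(regs + S3C64XX_SPI_RX_DATA);
        } while (--loops);

        dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n");
}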
251 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_dmacb() local
256 sdd = container_of(data, in s3c64xx_spi_dmacb()
259 sdd = container_of(data, in s3c64xx_spi_dmacb()
262 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_dmacb()
265 sdd->state &= ~RXBUSY; in s3c64xx_spi_dmacb()
266 if (!(sdd->state & TXBUSY)) in s3c64xx_spi_dmacb()
267 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
269 sdd->state &= ~TXBUSY; in s3c64xx_spi_dmacb()
270 if (!(sdd->state & RXBUSY)) in s3c64xx_spi_dmacb()
271 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
274 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_dmacb()
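
The DMA callback lines above (251-274) show the shared-completion handshake: each direction clears only its own busy bit under sdd->lock and signals xfer_completion only once the other direction has also finished, so the waiter in transfer_one() wakes exactly once. A minimal sketch of that pattern; the rx_done flag stands in for the container_of()-based direction detection the real callback performs on its data pointer:

static void dma_callback_sketch(struct s3c64xx_spi_driver_data *sdd, bool rx_done)
{
        unsigned long flags;

        spin_lock_irqsave(&sdd->lock, flags);

        if (rx_done) {
                sdd->state &= ~RXBUSY;
                if (!(sdd->state & TXBUSY))     /* TX already finished? */
                        complete(&sdd->xfer_completion);
        } else {
                sdd->state &= ~TXBUSY;
                if (!(sdd->state & RXBUSY))     /* RX already finished? */
                        complete(&sdd->xfer_completion);
        }

        spin_unlock_irqrestore(&sdd->lock, flags);
}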
280 struct s3c64xx_spi_driver_data *sdd; in prepare_dma() local
287 sdd = container_of((void *)dma, in prepare_dma()
290 config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA; in prepare_dma()
291 config.src_addr_width = sdd->cur_bpw / 8; in prepare_dma()
295 sdd = container_of((void *)dma, in prepare_dma()
298 config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA; in prepare_dma()
299 config.dst_addr_width = sdd->cur_bpw / 8; in prepare_dma()
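
The prepare_dma() lines above (280-299) recover sdd via container_of and point a struct dma_slave_config at the SPI data register for the channel's direction, with the bus width tracking the current bits-per-word. A sketch of that slave-config step; the maxburst value is an assumption, not taken from the listing:

static void prepare_dma_sketch(struct s3c64xx_spi_driver_data *sdd,
                               struct s3c64xx_spi_dma_data *dma)
{
        struct dma_slave_config config = { };

        if (dma->direction == DMA_DEV_TO_MEM) {         /* RX: device -> memory */
                config.direction = DMA_DEV_TO_MEM;
                config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA;
                config.src_addr_width = sdd->cur_bpw / 8;
                config.src_maxburst = 1;                /* assumed burst size */
        } else {                                        /* TX: memory -> device */
                config.direction = DMA_MEM_TO_DEV;
                config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA;
                config.dst_addr_width = sdd->cur_bpw / 8;
                config.dst_maxburst = 1;                /* assumed burst size */
        }

        dmaengine_slave_config(dma->ch, &config);
}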
316 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi); in s3c64xx_spi_prepare_transfer() local
317 dma_filter_fn filter = sdd->cntrlr_info->filter; in s3c64xx_spi_prepare_transfer()
318 struct device *dev = &sdd->pdev->dev; in s3c64xx_spi_prepare_transfer()
322 if (!is_polling(sdd)) { in s3c64xx_spi_prepare_transfer()
327 sdd->rx_dma.ch = dma_request_slave_channel_compat(mask, filter, in s3c64xx_spi_prepare_transfer()
328 (void *)(long)sdd->rx_dma.dmach, dev, "rx"); in s3c64xx_spi_prepare_transfer()
329 if (!sdd->rx_dma.ch) { in s3c64xx_spi_prepare_transfer()
334 spi->dma_rx = sdd->rx_dma.ch; in s3c64xx_spi_prepare_transfer()
336 sdd->tx_dma.ch = dma_request_slave_channel_compat(mask, filter, in s3c64xx_spi_prepare_transfer()
337 (void *)(long)sdd->tx_dma.dmach, dev, "tx"); in s3c64xx_spi_prepare_transfer()
338 if (!sdd->tx_dma.ch) { in s3c64xx_spi_prepare_transfer()
343 spi->dma_tx = sdd->tx_dma.ch; in s3c64xx_spi_prepare_transfer()
349 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_prepare_transfer()
356 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi); in s3c64xx_spi_unprepare_transfer() local
359 if (!is_polling(sdd)) { in s3c64xx_spi_unprepare_transfer()
360 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_unprepare_transfer()
361 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_unprepare_transfer()
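
Lines 316-361 pair prepare_transfer() with unprepare_transfer(): unless the port is polling-only, an RX and a TX slave channel are requested (releasing RX again if TX fails), and both are released on unprepare. A condensed sketch of the request side with its rollback; the DMA_SLAVE capability setup and the -EBUSY error code are assumptions, not taken from the listing:

static int prepare_transfer_sketch(struct s3c64xx_spi_driver_data *sdd,
                                   dma_filter_fn filter, struct device *dev)
{
        dma_cap_mask_t mask;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);

        sdd->rx_dma.ch = dma_request_slave_channel_compat(mask, filter,
                        (void *)(long)sdd->rx_dma.dmach, dev, "rx");
        if (!sdd->rx_dma.ch)
                return -EBUSY;                          /* assumed error code */

        sdd->tx_dma.ch = dma_request_slave_channel_compat(mask, filter,
                        (void *)(long)sdd->tx_dma.dmach, dev, "tx");
        if (!sdd->tx_dma.ch) {
                dma_release_channel(sdd->rx_dma.ch);    /* roll RX back */
                return -EBUSY;                          /* assumed error code */
        }

        return 0;
}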
371 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_can_dma() local
373 return xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_can_dma()
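
Line 373 decides DMA versus PIO purely on size: DMA is only worth it when the transfer cannot fit in the FIFO, whose depth is (FIFO_LVL_MASK(sdd) >> 1) + 1. A sketch with a worked example; the 0x1ff mask value in the comment is hypothetical:

static bool can_dma_sketch(struct s3c64xx_spi_driver_data *sdd,
                           struct spi_transfer *xfer)
{
        unsigned int fifo_len = (FIFO_LVL_MASK(sdd) >> 1) + 1;

        /* e.g. mask 0x1ff -> fifo_len = 256: a 64-byte transfer stays PIO,
         * a 512-byte transfer overflows the FIFO and goes to DMA. */
        return xfer->len > fifo_len;
}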
376 static void enable_datapath(struct s3c64xx_spi_driver_data *sdd, in enable_datapath() argument
380 void __iomem *regs = sdd->regs; in enable_datapath()
397 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in enable_datapath()
403 sdd->state |= TXBUSY; in enable_datapath()
407 prepare_dma(&sdd->tx_dma, &xfer->tx_sg); in enable_datapath()
409 switch (sdd->cur_bpw) { in enable_datapath()
427 sdd->state |= RXBUSY; in enable_datapath()
429 if (sdd->port_conf->high_speed && sdd->cur_speed >= 30000000UL in enable_datapath()
430 && !(sdd->cur_mode & SPI_CPHA)) in enable_datapath()
436 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in enable_datapath()
439 prepare_dma(&sdd->rx_dma, &xfer->rx_sg); in enable_datapath()
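
enable_datapath() (lines 376-439) programs the hardware word counter from the transfer length before kicking off either DMA direction. A sketch of just that calculation; the register the value is written to is truncated in the listing above, so it is left out here:

static u32 packet_count_sketch(struct s3c64xx_spi_driver_data *sdd,
                               struct spi_transfer *xfer)
{
        /* One "packet" is one word of cur_bpw bits, and the count field is
         * 16 bits wide, e.g. 64 bytes at 32 bits/word -> 16 packets. */
        return (xfer->len * 8 / sdd->cur_bpw) & 0xffff;
}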
447 static u32 s3c64xx_spi_wait_for_timeout(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_spi_wait_for_timeout() argument
450 void __iomem *regs = sdd->regs; in s3c64xx_spi_wait_for_timeout()
455 u32 max_fifo = (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_wait_for_timeout()
462 } while (RX_FIFO_LVL(status, sdd) < max_fifo && --val); in s3c64xx_spi_wait_for_timeout()
465 return RX_FIFO_LVL(status, sdd); in s3c64xx_spi_wait_for_timeout()
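
s3c64xx_spi_wait_for_timeout() (lines 447-465) spins until the RX FIFO holds a full FIFO's worth of data or a bounded count runs out, then reports how much actually arrived. A minimal sketch of that poll; the spin bound is passed in here rather than derived from a timeout as in the real code:

static u32 wait_rx_fill_sketch(struct s3c64xx_spi_driver_data *sdd,
                               unsigned long spin_bound)
{
        void __iomem *regs = sdd->regs;
        u32 max_fifo = (FIFO_LVL_MASK(sdd) >> 1) + 1;
        u32 status;

        do {
                status = readl(regs + S3C64XX_SPI_STATUS);
        } while (RX_FIFO_LVL(status, sdd) < max_fifo && --spin_bound);

        return RX_FIFO_LVL(status, sdd);        /* how much actually arrived */
}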
468 static int wait_for_dma(struct s3c64xx_spi_driver_data *sdd, in wait_for_dma() argument
471 void __iomem *regs = sdd->regs; in wait_for_dma()
477 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in wait_for_dma()
481 val = wait_for_completion_timeout(&sdd->xfer_completion, val); in wait_for_dma()
495 while ((TX_FIFO_LVL(status, sdd) in wait_for_dma()
496 || !S3C64XX_SPI_ST_TX_DONE(status, sdd)) in wait_for_dma()
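
wait_for_dma() (lines 468-496) sizes its timeout from the transfer itself: len bytes times 8 bits at cur_speed Hz gives the expected time in milliseconds (line 477), and the code then sleeps on xfer_completion for roughly that long (line 481). A sketch of that calculation; the extra headroom and the -EIO return are assumptions:

static int wait_for_dma_sketch(struct s3c64xx_spi_driver_data *sdd,
                               struct spi_transfer *xfer)
{
        unsigned long ms, left;

        /* Expected transfer time in ms: bits / (bits per second) * 1000. */
        ms = xfer->len * 8 * 1000 / sdd->cur_speed;
        ms += 30;                               /* assumed extra headroom */

        left = wait_for_completion_timeout(&sdd->xfer_completion,
                                           msecs_to_jiffies(ms));
        return left ? 0 : -EIO;                 /* zero left == timed out */
}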
511 static int wait_for_pio(struct s3c64xx_spi_driver_data *sdd, in wait_for_pio() argument
514 void __iomem *regs = sdd->regs; in wait_for_pio()
523 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in wait_for_pio()
529 } while (RX_FIFO_LVL(status, sdd) < xfer->len && --val); in wait_for_pio()
534 sdd->state &= ~TXBUSY; in wait_for_pio()
546 loops = xfer->len / ((FIFO_LVL_MASK(sdd) >> 1) + 1); in wait_for_pio()
550 cpy_len = s3c64xx_spi_wait_for_timeout(sdd, in wait_for_pio()
553 switch (sdd->cur_bpw) { in wait_for_pio()
570 sdd->state &= ~RXBUSY; in wait_for_pio()
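
wait_for_pio() (lines 511-570) pulls the received data out in FIFO-sized chunks, switching on the current word size (line 553). A hedged sketch of one such chunk copy using the generic ioread*_rep accessors; the real driver's copy loop may differ in detail:

static void drain_rx_chunk_sketch(struct s3c64xx_spi_driver_data *sdd,
                                  void __iomem *regs, u8 *buf, int cpy_len)
{
        switch (sdd->cur_bpw) {
        case 32:
                ioread32_rep(regs + S3C64XX_SPI_RX_DATA, buf, cpy_len / 4);
                break;
        case 16:
                ioread16_rep(regs + S3C64XX_SPI_RX_DATA, buf, cpy_len / 2);
                break;
        default:
                ioread8_rep(regs + S3C64XX_SPI_RX_DATA, buf, cpy_len);
                break;
        }
}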
575 static void s3c64xx_spi_config(struct s3c64xx_spi_driver_data *sdd) in s3c64xx_spi_config() argument
577 void __iomem *regs = sdd->regs; in s3c64xx_spi_config()
581 if (sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
582 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_config()
595 if (sdd->cur_mode & SPI_CPOL) in s3c64xx_spi_config()
598 if (sdd->cur_mode & SPI_CPHA) in s3c64xx_spi_config()
608 switch (sdd->cur_bpw) { in s3c64xx_spi_config()
625 if (sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
628 clk_set_rate(sdd->src_clk, sdd->cur_speed * 2); in s3c64xx_spi_config()
630 clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_config()
635 val |= ((clk_get_rate(sdd->src_clk) / sdd->cur_speed / 2 - 1) in s3c64xx_spi_config()
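
s3c64xx_spi_config() (lines 575-635) supports two clocking schemes: ports fed from the CMU retune the source clock itself, everything else programs an internal prescaler. A condensed sketch of just the rate selection, assuming the caller writes the returned prescaler into the clock-configuration register afterwards:

static u32 spi_clock_sketch(struct s3c64xx_spi_driver_data *sdd)
{
        if (sdd->port_conf->clk_from_cmu) {
                /* The IP divides by two internally, so ask the CMU for
                 * twice the wanted SPI rate (lines 582, 628, 630). */
                clk_disable_unprepare(sdd->src_clk);
                clk_set_rate(sdd->src_clk, sdd->cur_speed * 2);
                clk_prepare_enable(sdd->src_clk);
                return 0;
        }

        /* Otherwise SPI clock = src_clk / 2 / (PSR + 1), so the prescaler
         * assembled on line 635 is src_clk / speed / 2 - 1. */
        return clk_get_rate(sdd->src_clk) / sdd->cur_speed / 2 - 1;
}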
651 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_prepare_message() local
656 if (sdd->cur_speed != spi->max_speed_hz in s3c64xx_spi_prepare_message()
657 || sdd->cur_mode != spi->mode in s3c64xx_spi_prepare_message()
658 || sdd->cur_bpw != spi->bits_per_word) { in s3c64xx_spi_prepare_message()
659 sdd->cur_bpw = spi->bits_per_word; in s3c64xx_spi_prepare_message()
660 sdd->cur_speed = spi->max_speed_hz; in s3c64xx_spi_prepare_message()
661 sdd->cur_mode = spi->mode; in s3c64xx_spi_prepare_message()
662 s3c64xx_spi_config(sdd); in s3c64xx_spi_prepare_message()
666 writel(cs->fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
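
s3c64xx_spi_prepare_message() (lines 651-666) reprograms the controller only when the device's speed, mode, or word size differ from what is already set, then writes the per-device feedback-clock delay. A sketch of that flow; the fb_delay argument stands in for the value the real code pulls out of the device's controller data:

static void prepare_message_sketch(struct s3c64xx_spi_driver_data *sdd,
                                   struct spi_device *spi, u32 fb_delay)
{
        /* Skip the reconfiguration when this device's settings match
         * whatever is already programmed. */
        if (sdd->cur_speed != spi->max_speed_hz ||
            sdd->cur_mode != spi->mode ||
            sdd->cur_bpw != spi->bits_per_word) {
                sdd->cur_bpw = spi->bits_per_word;
                sdd->cur_speed = spi->max_speed_hz;
                sdd->cur_mode = spi->mode;
                s3c64xx_spi_config(sdd);
        }

        /* Per-device feedback-clock delay, a 2-bit field (line 666). */
        writel(fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK);
}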
675 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_transfer_one() local
682 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
688 if (bpw != sdd->cur_bpw || speed != sdd->cur_speed) { in s3c64xx_spi_transfer_one()
689 sdd->cur_bpw = bpw; in s3c64xx_spi_transfer_one()
690 sdd->cur_speed = speed; in s3c64xx_spi_transfer_one()
691 s3c64xx_spi_config(sdd); in s3c64xx_spi_transfer_one()
696 if (!is_polling(sdd) && in s3c64xx_spi_transfer_one()
697 (sdd->rx_dma.ch && sdd->tx_dma.ch && in s3c64xx_spi_transfer_one()
698 (xfer->len > ((FIFO_LVL_MASK(sdd) >> 1) + 1)))) in s3c64xx_spi_transfer_one()
701 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
704 sdd->state &= ~RXBUSY; in s3c64xx_spi_transfer_one()
705 sdd->state &= ~TXBUSY; in s3c64xx_spi_transfer_one()
707 enable_datapath(sdd, spi, xfer, use_dma); in s3c64xx_spi_transfer_one()
710 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_transfer_one()
711 writel(0, sdd->regs + S3C64XX_SPI_SLAVE_SEL); in s3c64xx_spi_transfer_one()
713 writel(readl(sdd->regs + S3C64XX_SPI_SLAVE_SEL) in s3c64xx_spi_transfer_one()
715 sdd->regs + S3C64XX_SPI_SLAVE_SEL); in s3c64xx_spi_transfer_one()
717 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
720 status = wait_for_dma(sdd, xfer); in s3c64xx_spi_transfer_one()
722 status = wait_for_pio(sdd, xfer); in s3c64xx_spi_transfer_one()
727 (sdd->state & RXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
728 (sdd->state & TXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
733 && (sdd->state & TXBUSY)) in s3c64xx_spi_transfer_one()
734 dmaengine_terminate_all(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
736 && (sdd->state & RXBUSY)) in s3c64xx_spi_transfer_one()
737 dmaengine_terminate_all(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
740 flush_fifo(sdd); in s3c64xx_spi_transfer_one()
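
Taken together, the transfer_one() lines above (675-740) follow one sequence: rearm the completion, reconfigure if the transfer changes speed or word size, pick DMA or PIO, start the datapath and assert chip-select under the lock, wait, tear down any still-busy DMA on error, and flush the FIFOs. A condensed, non-verbatim sketch of that flow under those assumptions:

static int transfer_one_sketch(struct spi_master *master,
                               struct spi_device *spi,
                               struct spi_transfer *xfer)
{
        struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master);
        unsigned long flags;
        int use_dma, status;

        reinit_completion(&sdd->xfer_completion);

        /* Reconfigure only if this transfer changes word size or speed. */
        if (xfer->bits_per_word != sdd->cur_bpw ||
            xfer->speed_hz != sdd->cur_speed) {
                sdd->cur_bpw = xfer->bits_per_word;
                sdd->cur_speed = xfer->speed_hz;
                s3c64xx_spi_config(sdd);
        }

        /* DMA only when channels exist and the data overflows the FIFO. */
        use_dma = !is_polling(sdd) && sdd->rx_dma.ch && sdd->tx_dma.ch &&
                  xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1;

        spin_lock_irqsave(&sdd->lock, flags);
        sdd->state &= ~(RXBUSY | TXBUSY);
        enable_datapath(sdd, spi, xfer, use_dma);
        /* Assert chip-select by hand unless the controller does it itself. */
        if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO))
                writel(0, sdd->regs + S3C64XX_SPI_SLAVE_SEL);
        spin_unlock_irqrestore(&sdd->lock, flags);

        status = use_dma ? wait_for_dma(sdd, xfer) : wait_for_pio(sdd, xfer);
        if (status) {
                /* Kill whichever DMA direction is still marked busy. */
                if (use_dma && (sdd->state & TXBUSY))
                        dmaengine_terminate_all(sdd->tx_dma.ch);
                if (use_dma && (sdd->state & RXBUSY))
                        dmaengine_terminate_all(sdd->rx_dma.ch);
        }

        flush_fifo(sdd);
        return status;
}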
786 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_setup() local
790 sdd = spi_master_get_devdata(spi->master); in s3c64xx_spi_setup()
823 sci = sdd->cntrlr_info; in s3c64xx_spi_setup()
825 pm_runtime_get_sync(&sdd->pdev->dev); in s3c64xx_spi_setup()
828 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_setup()
832 speed = clk_get_rate(sdd->src_clk) / 2 / (0 + 1); in s3c64xx_spi_setup()
837 psr = clk_get_rate(sdd->src_clk) / 2 / spi->max_speed_hz - 1; in s3c64xx_spi_setup()
842 speed = clk_get_rate(sdd->src_clk) / 2 / (psr + 1); in s3c64xx_spi_setup()
852 speed = clk_get_rate(sdd->src_clk) / 2 / (psr + 1); in s3c64xx_spi_setup()
863 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
864 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
865 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_setup()
866 writel(S3C64XX_SPI_SLAVE_SIG_INACT, sdd->regs + S3C64XX_SPI_SLAVE_SEL); in s3c64xx_spi_setup()
870 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
871 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
873 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_setup()
874 writel(S3C64XX_SPI_SLAVE_SIG_INACT, sdd->regs + S3C64XX_SPI_SLAVE_SEL); in s3c64xx_spi_setup()
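
s3c64xx_spi_setup() (lines 823-852) picks the prescaler that gets closest to the device's max_speed_hz without exceeding it. A sketch with a worked example; the 80 MHz source clock in the comment is hypothetical:

static u32 pick_prescaler_sketch(struct s3c64xx_spi_driver_data *sdd,
                                 struct spi_device *spi)
{
        u32 psr, speed;

        /* Worked example with a hypothetical 80 MHz source clock and a
         * 10 MHz device:
         *   psr   = 80000000 / 2 / 10000000 - 1 = 3
         *   speed = 80000000 / 2 / (3 + 1)      = 10000000 Hz
         */
        psr = clk_get_rate(sdd->src_clk) / 2 / spi->max_speed_hz - 1;
        speed = clk_get_rate(sdd->src_clk) / 2 / (psr + 1);

        /* Integer truncation can leave the resulting rate above the device
         * limit; bumping PSR by one can only slow the clock down. */
        if (speed > spi->max_speed_hz)
                psr++;

        return psr;
}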
910 struct s3c64xx_spi_driver_data *sdd = data; in s3c64xx_spi_irq() local
911 struct spi_master *spi = sdd->master; in s3c64xx_spi_irq()
914 val = readl(sdd->regs + S3C64XX_SPI_STATUS); in s3c64xx_spi_irq()
934 writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
935 writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
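
The interrupt handler (lines 910-935) decodes error bits from S3C64XX_SPI_STATUS, logs them, and acknowledges them with a set-then-clear write to the pending-clear register. A rough sketch of that shape; RX_OVERRUN_BIT and RX_OVERRUN_CLR_BIT are placeholders rather than the driver's real macro names, and only one of the several error sources is shown:

static irqreturn_t spi_irq_sketch(int irq, void *data)
{
        struct s3c64xx_spi_driver_data *sdd = data;
        struct spi_master *spi = sdd->master;
        u32 val, clr = 0;

        val = readl(sdd->regs + S3C64XX_SPI_STATUS);

        if (val & RX_OVERRUN_BIT) {             /* placeholder status bit */
                clr |= RX_OVERRUN_CLR_BIT;      /* placeholder clear bit  */
                dev_err(&spi->dev, "RX overrun\n");
        }

        /* Acknowledge: write the clear bits, then drop them again. */
        writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR);
        writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR);

        return IRQ_HANDLED;
}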
940 static void s3c64xx_spi_hwinit(struct s3c64xx_spi_driver_data *sdd, int channel) in s3c64xx_spi_hwinit() argument
942 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_hwinit()
943 void __iomem *regs = sdd->regs; in s3c64xx_spi_hwinit()
946 sdd->cur_speed = 0; in s3c64xx_spi_hwinit()
948 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_hwinit()
949 writel(S3C64XX_SPI_SLAVE_SIG_INACT, sdd->regs + S3C64XX_SPI_SLAVE_SEL); in s3c64xx_spi_hwinit()
954 if (!sdd->port_conf->clk_from_cmu) in s3c64xx_spi_hwinit()
976 flush_fifo(sdd); in s3c64xx_spi_hwinit()
1032 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_probe() local
1070 sdd = spi_master_get_devdata(master); in s3c64xx_spi_probe()
1071 sdd->port_conf = s3c64xx_spi_get_port_config(pdev); in s3c64xx_spi_probe()
1072 sdd->master = master; in s3c64xx_spi_probe()
1073 sdd->cntrlr_info = sci; in s3c64xx_spi_probe()
1074 sdd->pdev = pdev; in s3c64xx_spi_probe()
1075 sdd->sfr_start = mem_res->start; in s3c64xx_spi_probe()
1083 sdd->port_id = ret; in s3c64xx_spi_probe()
1085 sdd->port_id = pdev->id; in s3c64xx_spi_probe()
1088 sdd->cur_bpw = 8; in s3c64xx_spi_probe()
1090 if (!sdd->pdev->dev.of_node) { in s3c64xx_spi_probe()
1094 sdd->port_conf->quirks = S3C64XX_SPI_QUIRK_POLL; in s3c64xx_spi_probe()
1096 sdd->tx_dma.dmach = res->start; in s3c64xx_spi_probe()
1101 sdd->port_conf->quirks = S3C64XX_SPI_QUIRK_POLL; in s3c64xx_spi_probe()
1103 sdd->rx_dma.dmach = res->start; in s3c64xx_spi_probe()
1106 sdd->tx_dma.direction = DMA_MEM_TO_DEV; in s3c64xx_spi_probe()
1107 sdd->rx_dma.direction = DMA_DEV_TO_MEM; in s3c64xx_spi_probe()
1110 master->bus_num = sdd->port_id; in s3c64xx_spi_probe()
1124 if (!is_polling(sdd)) in s3c64xx_spi_probe()
1127 sdd->regs = devm_ioremap_resource(&pdev->dev, mem_res); in s3c64xx_spi_probe()
1128 if (IS_ERR(sdd->regs)) { in s3c64xx_spi_probe()
1129 ret = PTR_ERR(sdd->regs); in s3c64xx_spi_probe()
1140 sdd->clk = devm_clk_get(&pdev->dev, "spi"); in s3c64xx_spi_probe()
1141 if (IS_ERR(sdd->clk)) { in s3c64xx_spi_probe()
1143 ret = PTR_ERR(sdd->clk); in s3c64xx_spi_probe()
1147 if (clk_prepare_enable(sdd->clk)) { in s3c64xx_spi_probe()
1154 sdd->src_clk = devm_clk_get(&pdev->dev, clk_name); in s3c64xx_spi_probe()
1155 if (IS_ERR(sdd->src_clk)) { in s3c64xx_spi_probe()
1158 ret = PTR_ERR(sdd->src_clk); in s3c64xx_spi_probe()
1162 if (clk_prepare_enable(sdd->src_clk)) { in s3c64xx_spi_probe()
1175 s3c64xx_spi_hwinit(sdd, sdd->port_id); in s3c64xx_spi_probe()
1177 spin_lock_init(&sdd->lock); in s3c64xx_spi_probe()
1178 init_completion(&sdd->xfer_completion); in s3c64xx_spi_probe()
1181 "spi-s3c64xx", sdd); in s3c64xx_spi_probe()
1190 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_probe()
1199 sdd->port_id, master->num_chipselect); in s3c64xx_spi_probe()
1201 mem_res, (FIFO_LVL_MASK(sdd) >> 1) + 1, in s3c64xx_spi_probe()
1202 sdd->rx_dma.dmach, sdd->tx_dma.dmach); in s3c64xx_spi_probe()
1214 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_probe()
1216 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_probe()
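
Within probe (lines 1032-1216), the clock handling follows the usual get/enable/unwind pattern: the gate clock named "spi" first, then the source clock, with the error path disabling whatever was already enabled (lines 1214-1216). A condensed sketch of just that part, assuming clk_name is the source-clock name the real probe builds elsewhere:

static int probe_clocks_sketch(struct platform_device *pdev,
                               struct s3c64xx_spi_driver_data *sdd,
                               const char *clk_name)
{
        int ret;

        /* Gate clock for the SPI block itself (line 1140). */
        sdd->clk = devm_clk_get(&pdev->dev, "spi");
        if (IS_ERR(sdd->clk))
                return PTR_ERR(sdd->clk);

        ret = clk_prepare_enable(sdd->clk);
        if (ret)
                return ret;

        /* Source clock that the SPI bit clock is derived from (line 1154). */
        sdd->src_clk = devm_clk_get(&pdev->dev, clk_name);
        if (IS_ERR(sdd->src_clk)) {
                ret = PTR_ERR(sdd->src_clk);
                goto err_disable_clk;
        }

        ret = clk_prepare_enable(sdd->src_clk);
        if (ret)
                goto err_disable_clk;

        return 0;

err_disable_clk:
        clk_disable_unprepare(sdd->clk);        /* unwind the gate clock */
        return ret;
}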
1226 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_remove() local
1230 writel(0, sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_remove()
1232 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_remove()
1234 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_remove()
1247 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_suspend() local
1257 sdd->cur_speed = 0; /* Output Clock is stopped */ in s3c64xx_spi_suspend()
1265 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_resume() local
1266 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_resume()
1276 s3c64xx_spi_hwinit(sdd, sdd->port_id); in s3c64xx_spi_resume()
1286 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_runtime_suspend() local
1288 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_runtime_suspend()
1289 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_suspend()
1297 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_runtime_resume() local
1300 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1304 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_runtime_resume()
1306 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_resume()
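
Runtime resume (lines 1297-1306) re-enables the two clocks and rolls the first one back if the second fails; runtime suspend (lines 1286-1289) is the mirror image. A minimal sketch of the resume side:

static int runtime_resume_sketch(struct s3c64xx_spi_driver_data *sdd)
{
        int ret;

        ret = clk_prepare_enable(sdd->src_clk);
        if (ret)
                return ret;

        ret = clk_prepare_enable(sdd->clk);
        if (ret) {
                clk_disable_unprepare(sdd->src_clk);    /* roll back */
                return ret;
        }

        return 0;
}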