drivers/spi/spi-mt65xx.c


DEFINITIONS

This source file includes the following definitions:
  1. mtk_spi_reset
  2. mtk_spi_prepare_message
  3. mtk_spi_set_cs
  4. mtk_spi_prepare_transfer
  5. mtk_spi_setup_packet
  6. mtk_spi_enable_transfer
  7. mtk_spi_get_mult_delta
  8. mtk_spi_update_mdata_len
  9. mtk_spi_setup_dma_addr
  10. mtk_spi_fifo_transfer
  11. mtk_spi_dma_transfer
  12. mtk_spi_transfer_one
  13. mtk_spi_can_dma
  14. mtk_spi_setup
  15. mtk_spi_interrupt
  16. mtk_spi_probe
  17. mtk_spi_remove
  18. mtk_spi_suspend
  19. mtk_spi_resume
  20. mtk_spi_runtime_suspend
  21. mtk_spi_runtime_resume

   1 // SPDX-License-Identifier: GPL-2.0-only
   2 /*
   3  * Copyright (c) 2015 MediaTek Inc.
   4  * Author: Leilk Liu <leilk.liu@mediatek.com>
   5  */
   6 
   7 #include <linux/clk.h>
   8 #include <linux/device.h>
   9 #include <linux/err.h>
  10 #include <linux/interrupt.h>
  11 #include <linux/io.h>
  12 #include <linux/ioport.h>
  13 #include <linux/module.h>
  14 #include <linux/of.h>
  15 #include <linux/of_gpio.h>
  16 #include <linux/platform_device.h>
  17 #include <linux/platform_data/spi-mt65xx.h>
  18 #include <linux/pm_runtime.h>
  19 #include <linux/spi/spi.h>
  20 #include <linux/dma-mapping.h>
  21 
  22 #define SPI_CFG0_REG                      0x0000
  23 #define SPI_CFG1_REG                      0x0004
  24 #define SPI_TX_SRC_REG                    0x0008
  25 #define SPI_RX_DST_REG                    0x000c
  26 #define SPI_TX_DATA_REG                   0x0010
  27 #define SPI_RX_DATA_REG                   0x0014
  28 #define SPI_CMD_REG                       0x0018
  29 #define SPI_STATUS0_REG                   0x001c
  30 #define SPI_PAD_SEL_REG                   0x0024
  31 #define SPI_CFG2_REG                      0x0028
  32 #define SPI_TX_SRC_REG_64                 0x002c
  33 #define SPI_RX_DST_REG_64                 0x0030
  34 
  35 #define SPI_CFG0_SCK_HIGH_OFFSET          0
  36 #define SPI_CFG0_SCK_LOW_OFFSET           8
  37 #define SPI_CFG0_CS_HOLD_OFFSET           16
  38 #define SPI_CFG0_CS_SETUP_OFFSET          24
  39 #define SPI_ADJUST_CFG0_SCK_LOW_OFFSET    16
  40 #define SPI_ADJUST_CFG0_CS_HOLD_OFFSET    0
  41 #define SPI_ADJUST_CFG0_CS_SETUP_OFFSET   16
  42 
  43 #define SPI_CFG1_CS_IDLE_OFFSET           0
  44 #define SPI_CFG1_PACKET_LOOP_OFFSET       8
  45 #define SPI_CFG1_PACKET_LENGTH_OFFSET     16
  46 #define SPI_CFG1_GET_TICK_DLY_OFFSET      30
  47 
  48 #define SPI_CFG1_CS_IDLE_MASK             0xff
  49 #define SPI_CFG1_PACKET_LOOP_MASK         0xff00
  50 #define SPI_CFG1_PACKET_LENGTH_MASK       0x3ff0000
  51 
  52 #define SPI_CMD_ACT                  BIT(0)
  53 #define SPI_CMD_RESUME               BIT(1)
  54 #define SPI_CMD_RST                  BIT(2)
  55 #define SPI_CMD_PAUSE_EN             BIT(4)
  56 #define SPI_CMD_DEASSERT             BIT(5)
  57 #define SPI_CMD_SAMPLE_SEL           BIT(6)
  58 #define SPI_CMD_CS_POL               BIT(7)
  59 #define SPI_CMD_CPHA                 BIT(8)
  60 #define SPI_CMD_CPOL                 BIT(9)
  61 #define SPI_CMD_RX_DMA               BIT(10)
  62 #define SPI_CMD_TX_DMA               BIT(11)
  63 #define SPI_CMD_TXMSBF               BIT(12)
  64 #define SPI_CMD_RXMSBF               BIT(13)
  65 #define SPI_CMD_RX_ENDIAN            BIT(14)
  66 #define SPI_CMD_TX_ENDIAN            BIT(15)
  67 #define SPI_CMD_FINISH_IE            BIT(16)
  68 #define SPI_CMD_PAUSE_IE             BIT(17)
  69 
  70 #define MT8173_SPI_MAX_PAD_SEL 3
  71 
  72 #define MTK_SPI_PAUSE_INT_STATUS 0x2
  73 
  74 #define MTK_SPI_IDLE 0
  75 #define MTK_SPI_PAUSED 1
  76 
  77 #define MTK_SPI_MAX_FIFO_SIZE 32U
  78 #define MTK_SPI_PACKET_SIZE 1024
  79 #define MTK_SPI_32BITS_MASK  (0xffffffff)
  80 
  81 #define DMA_ADDR_EXT_BITS (36)
  82 #define DMA_ADDR_DEF_BITS (32)
  83 
  84 struct mtk_spi_compatible {
  85         bool need_pad_sel;
  86         /* Must explicitly send dummy Tx bytes to do Rx only transfer */
  87         bool must_tx;
   88         /* some IC designs adjust the cfg register to enhance timing accuracy */
  89         bool enhance_timing;
   90         /* some ICs support DMA addr extension */
  91         bool dma_ext;
  92 };
  93 
  94 struct mtk_spi {
  95         void __iomem *base;
  96         u32 state;
  97         int pad_num;
  98         u32 *pad_sel;
  99         struct clk *parent_clk, *sel_clk, *spi_clk;
 100         struct spi_transfer *cur_transfer;
 101         u32 xfer_len;
 102         u32 num_xfered;
 103         struct scatterlist *tx_sgl, *rx_sgl;
 104         u32 tx_sgl_len, rx_sgl_len;
 105         const struct mtk_spi_compatible *dev_comp;
 106 };
 107 
 108 static const struct mtk_spi_compatible mtk_common_compat;
 109 
 110 static const struct mtk_spi_compatible mt2712_compat = {
 111         .must_tx = true,
 112 };
 113 
 114 static const struct mtk_spi_compatible mt6765_compat = {
 115         .need_pad_sel = true,
 116         .must_tx = true,
 117         .enhance_timing = true,
 118         .dma_ext = true,
 119 };
 120 
 121 static const struct mtk_spi_compatible mt7622_compat = {
 122         .must_tx = true,
 123         .enhance_timing = true,
 124 };
 125 
 126 static const struct mtk_spi_compatible mt8173_compat = {
 127         .need_pad_sel = true,
 128         .must_tx = true,
 129 };
 130 
 131 static const struct mtk_spi_compatible mt8183_compat = {
 132         .need_pad_sel = true,
 133         .must_tx = true,
 134         .enhance_timing = true,
 135 };
 136 
  137 /*
  138  * Default chip info, used unless the platform
  139  * supplies its own.
  140  */
 141 static const struct mtk_chip_config mtk_default_chip_info = {
 142         .cs_pol = 0,
 143         .sample_sel = 0,
 144 };
 145 
 146 static const struct of_device_id mtk_spi_of_match[] = {
 147         { .compatible = "mediatek,mt2701-spi",
 148                 .data = (void *)&mtk_common_compat,
 149         },
 150         { .compatible = "mediatek,mt2712-spi",
 151                 .data = (void *)&mt2712_compat,
 152         },
 153         { .compatible = "mediatek,mt6589-spi",
 154                 .data = (void *)&mtk_common_compat,
 155         },
 156         { .compatible = "mediatek,mt6765-spi",
 157                 .data = (void *)&mt6765_compat,
 158         },
 159         { .compatible = "mediatek,mt7622-spi",
 160                 .data = (void *)&mt7622_compat,
 161         },
 162         { .compatible = "mediatek,mt7629-spi",
 163                 .data = (void *)&mt7622_compat,
 164         },
 165         { .compatible = "mediatek,mt8135-spi",
 166                 .data = (void *)&mtk_common_compat,
 167         },
 168         { .compatible = "mediatek,mt8173-spi",
 169                 .data = (void *)&mt8173_compat,
 170         },
 171         { .compatible = "mediatek,mt8183-spi",
 172                 .data = (void *)&mt8183_compat,
 173         },
 174         {}
 175 };
 176 MODULE_DEVICE_TABLE(of, mtk_spi_of_match);
 177 
 178 static void mtk_spi_reset(struct mtk_spi *mdata)
 179 {
 180         u32 reg_val;
 181 
 182         /* set the software reset bit in SPI_CMD_REG. */
 183         reg_val = readl(mdata->base + SPI_CMD_REG);
 184         reg_val |= SPI_CMD_RST;
 185         writel(reg_val, mdata->base + SPI_CMD_REG);
 186 
 187         reg_val = readl(mdata->base + SPI_CMD_REG);
 188         reg_val &= ~SPI_CMD_RST;
 189         writel(reg_val, mdata->base + SPI_CMD_REG);
 190 }
 191 
 192 static int mtk_spi_prepare_message(struct spi_master *master,
 193                                    struct spi_message *msg)
 194 {
 195         u16 cpha, cpol;
 196         u32 reg_val;
 197         struct spi_device *spi = msg->spi;
 198         struct mtk_chip_config *chip_config = spi->controller_data;
 199         struct mtk_spi *mdata = spi_master_get_devdata(master);
 200 
 201         cpha = spi->mode & SPI_CPHA ? 1 : 0;
 202         cpol = spi->mode & SPI_CPOL ? 1 : 0;
 203 
 204         reg_val = readl(mdata->base + SPI_CMD_REG);
 205         if (cpha)
 206                 reg_val |= SPI_CMD_CPHA;
 207         else
 208                 reg_val &= ~SPI_CMD_CPHA;
 209         if (cpol)
 210                 reg_val |= SPI_CMD_CPOL;
 211         else
 212                 reg_val &= ~SPI_CMD_CPOL;
 213 
  214         /* set the tx/rx bit order: MSB first unless SPI_LSB_FIRST */
 215         if (spi->mode & SPI_LSB_FIRST) {
 216                 reg_val &= ~SPI_CMD_TXMSBF;
 217                 reg_val &= ~SPI_CMD_RXMSBF;
 218         } else {
 219                 reg_val |= SPI_CMD_TXMSBF;
 220                 reg_val |= SPI_CMD_RXMSBF;
 221         }
 222 
 223         /* set the tx/rx endian */
 224 #ifdef __LITTLE_ENDIAN
 225         reg_val &= ~SPI_CMD_TX_ENDIAN;
 226         reg_val &= ~SPI_CMD_RX_ENDIAN;
 227 #else
 228         reg_val |= SPI_CMD_TX_ENDIAN;
 229         reg_val |= SPI_CMD_RX_ENDIAN;
 230 #endif
 231 
 232         if (mdata->dev_comp->enhance_timing) {
 233                 if (chip_config->cs_pol)
 234                         reg_val |= SPI_CMD_CS_POL;
 235                 else
 236                         reg_val &= ~SPI_CMD_CS_POL;
 237                 if (chip_config->sample_sel)
 238                         reg_val |= SPI_CMD_SAMPLE_SEL;
 239                 else
 240                         reg_val &= ~SPI_CMD_SAMPLE_SEL;
 241         }
 242 
  243         /* always enable the finish and pause interrupts */
 244         reg_val |= SPI_CMD_FINISH_IE | SPI_CMD_PAUSE_IE;
 245 
 246         /* disable dma mode */
 247         reg_val &= ~(SPI_CMD_TX_DMA | SPI_CMD_RX_DMA);
 248 
 249         /* disable deassert mode */
 250         reg_val &= ~SPI_CMD_DEASSERT;
 251 
 252         writel(reg_val, mdata->base + SPI_CMD_REG);
 253 
 254         /* pad select */
 255         if (mdata->dev_comp->need_pad_sel)
 256                 writel(mdata->pad_sel[spi->chip_select],
 257                        mdata->base + SPI_PAD_SEL_REG);
 258 
 259         return 0;
 260 }
 261 
 262 static void mtk_spi_set_cs(struct spi_device *spi, bool enable)
 263 {
 264         u32 reg_val;
 265         struct mtk_spi *mdata = spi_master_get_devdata(spi->master);
 266 
 267         reg_val = readl(mdata->base + SPI_CMD_REG);
 268         if (!enable) {
 269                 reg_val |= SPI_CMD_PAUSE_EN;
 270                 writel(reg_val, mdata->base + SPI_CMD_REG);
 271         } else {
 272                 reg_val &= ~SPI_CMD_PAUSE_EN;
 273                 writel(reg_val, mdata->base + SPI_CMD_REG);
 274                 mdata->state = MTK_SPI_IDLE;
 275                 mtk_spi_reset(mdata);
 276         }
 277 }
 278 
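      /*
       * Timing derivation used below: div is the spi_clk divider needed
       * to reach the requested speed (rounded up), sck_time is half of
       * that divider (the SCK high/low time in spi_clk ticks) and the
       * CS setup/hold/idle times are derived as twice sck_time.
       * Illustrative numbers only: with a 100 MHz spi_clk and a 26 MHz
       * transfer, div = 4, sck_time = 2 and cs_time = 4; the register
       * fields are programmed with (value - 1).
       */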
 279 static void mtk_spi_prepare_transfer(struct spi_master *master,
 280                                      struct spi_transfer *xfer)
 281 {
 282         u32 spi_clk_hz, div, sck_time, cs_time, reg_val = 0;
 283         struct mtk_spi *mdata = spi_master_get_devdata(master);
 284 
 285         spi_clk_hz = clk_get_rate(mdata->spi_clk);
 286         if (xfer->speed_hz < spi_clk_hz / 2)
 287                 div = DIV_ROUND_UP(spi_clk_hz, xfer->speed_hz);
 288         else
 289                 div = 1;
 290 
 291         sck_time = (div + 1) / 2;
 292         cs_time = sck_time * 2;
 293 
 294         if (mdata->dev_comp->enhance_timing) {
 295                 reg_val |= (((sck_time - 1) & 0xffff)
 296                            << SPI_CFG0_SCK_HIGH_OFFSET);
 297                 reg_val |= (((sck_time - 1) & 0xffff)
 298                            << SPI_ADJUST_CFG0_SCK_LOW_OFFSET);
 299                 writel(reg_val, mdata->base + SPI_CFG2_REG);
 300                 reg_val |= (((cs_time - 1) & 0xffff)
 301                            << SPI_ADJUST_CFG0_CS_HOLD_OFFSET);
 302                 reg_val |= (((cs_time - 1) & 0xffff)
 303                            << SPI_ADJUST_CFG0_CS_SETUP_OFFSET);
 304                 writel(reg_val, mdata->base + SPI_CFG0_REG);
 305         } else {
 306                 reg_val |= (((sck_time - 1) & 0xff)
 307                            << SPI_CFG0_SCK_HIGH_OFFSET);
 308                 reg_val |= (((sck_time - 1) & 0xff) << SPI_CFG0_SCK_LOW_OFFSET);
 309                 reg_val |= (((cs_time - 1) & 0xff) << SPI_CFG0_CS_HOLD_OFFSET);
 310                 reg_val |= (((cs_time - 1) & 0xff) << SPI_CFG0_CS_SETUP_OFFSET);
 311                 writel(reg_val, mdata->base + SPI_CFG0_REG);
 312         }
 313 
 314         reg_val = readl(mdata->base + SPI_CFG1_REG);
 315         reg_val &= ~SPI_CFG1_CS_IDLE_MASK;
 316         reg_val |= (((cs_time - 1) & 0xff) << SPI_CFG1_CS_IDLE_OFFSET);
 317         writel(reg_val, mdata->base + SPI_CFG1_REG);
 318 }
 319 
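      /*
       * The controller sends packet_loop packets of packet_size bytes;
       * both fields are written as (value - 1).  FIFO transfers are at
       * most 32 bytes and always fit one packet.  For DMA,
       * mtk_spi_update_mdata_len() has already trimmed xfer_len so that
       * lengths above 1024 bytes are an exact multiple of
       * MTK_SPI_PACKET_SIZE, e.g. a 4096 byte chunk is programmed as
       * packet_size = 1024, packet_loop = 4.
       */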
 320 static void mtk_spi_setup_packet(struct spi_master *master)
 321 {
 322         u32 packet_size, packet_loop, reg_val;
 323         struct mtk_spi *mdata = spi_master_get_devdata(master);
 324 
 325         packet_size = min_t(u32, mdata->xfer_len, MTK_SPI_PACKET_SIZE);
 326         packet_loop = mdata->xfer_len / packet_size;
 327 
 328         reg_val = readl(mdata->base + SPI_CFG1_REG);
 329         reg_val &= ~(SPI_CFG1_PACKET_LENGTH_MASK | SPI_CFG1_PACKET_LOOP_MASK);
 330         reg_val |= (packet_size - 1) << SPI_CFG1_PACKET_LENGTH_OFFSET;
 331         reg_val |= (packet_loop - 1) << SPI_CFG1_PACKET_LOOP_OFFSET;
 332         writel(reg_val, mdata->base + SPI_CFG1_REG);
 333 }
 334 
 335 static void mtk_spi_enable_transfer(struct spi_master *master)
 336 {
 337         u32 cmd;
 338         struct mtk_spi *mdata = spi_master_get_devdata(master);
 339 
 340         cmd = readl(mdata->base + SPI_CMD_REG);
 341         if (mdata->state == MTK_SPI_IDLE)
 342                 cmd |= SPI_CMD_ACT;
 343         else
 344                 cmd |= SPI_CMD_RESUME;
 345         writel(cmd, mdata->base + SPI_CMD_REG);
 346 }
 347 
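      /*
       * The packet length field in SPI_CFG1_REG is only 10 bits wide,
       * so one hardware round covers at most whole multiples of 1024
       * bytes once the length exceeds MTK_SPI_PACKET_SIZE.  mult_delta
       * is the tail (len % 1024) that mtk_spi_update_mdata_len() holds
       * back; it is transferred in a later round from the interrupt
       * handler.
       */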
 348 static int mtk_spi_get_mult_delta(u32 xfer_len)
 349 {
 350         u32 mult_delta;
 351 
 352         if (xfer_len > MTK_SPI_PACKET_SIZE)
 353                 mult_delta = xfer_len % MTK_SPI_PACKET_SIZE;
 354         else
 355                 mult_delta = 0;
 356 
 357         return mult_delta;
 358 }
 359 
 360 static void mtk_spi_update_mdata_len(struct spi_master *master)
 361 {
 362         int mult_delta;
 363         struct mtk_spi *mdata = spi_master_get_devdata(master);
 364 
 365         if (mdata->tx_sgl_len && mdata->rx_sgl_len) {
 366                 if (mdata->tx_sgl_len > mdata->rx_sgl_len) {
 367                         mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len);
 368                         mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
 369                         mdata->rx_sgl_len = mult_delta;
 370                         mdata->tx_sgl_len -= mdata->xfer_len;
 371                 } else {
 372                         mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len);
 373                         mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
 374                         mdata->tx_sgl_len = mult_delta;
 375                         mdata->rx_sgl_len -= mdata->xfer_len;
 376                 }
 377         } else if (mdata->tx_sgl_len) {
 378                 mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len);
 379                 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
 380                 mdata->tx_sgl_len = mult_delta;
 381         } else if (mdata->rx_sgl_len) {
 382                 mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len);
 383                 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
 384                 mdata->rx_sgl_len = mult_delta;
 385         }
 386 }
 387 
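      /*
       * Only the low 32 bits of a DMA address fit in SPI_TX_SRC_REG /
       * SPI_RX_DST_REG.  Controllers with dev_comp->dma_ext also expose
       * SPI_TX_SRC_REG_64 / SPI_RX_DST_REG_64 for the bits above 32,
       * which is what allows the 36-bit DMA mask requested in probe().
       */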
 388 static void mtk_spi_setup_dma_addr(struct spi_master *master,
 389                                    struct spi_transfer *xfer)
 390 {
 391         struct mtk_spi *mdata = spi_master_get_devdata(master);
 392 
 393         if (mdata->tx_sgl) {
 394                 writel((u32)(xfer->tx_dma & MTK_SPI_32BITS_MASK),
 395                        mdata->base + SPI_TX_SRC_REG);
 396 #ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT
 397                 if (mdata->dev_comp->dma_ext)
 398                         writel((u32)(xfer->tx_dma >> 32),
 399                                mdata->base + SPI_TX_SRC_REG_64);
 400 #endif
 401         }
 402 
 403         if (mdata->rx_sgl) {
 404                 writel((u32)(xfer->rx_dma & MTK_SPI_32BITS_MASK),
 405                        mdata->base + SPI_RX_DST_REG);
 406 #ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT
 407                 if (mdata->dev_comp->dma_ext)
 408                         writel((u32)(xfer->rx_dma >> 32),
 409                                mdata->base + SPI_RX_DST_REG_64);
 410 #endif
 411         }
 412 }
 413 
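      /*
       * PIO path: the first chunk, capped at MTK_SPI_MAX_FIFO_SIZE (32)
       * bytes in mdata->xfer_len, goes out through SPI_TX_DATA_REG four
       * bytes per write, with any 1-3 byte remainder packed into one
       * final word.  Returning 1 tells the SPI core the transfer is
       * still in flight; completion is signaled from mtk_spi_interrupt()
       * through spi_finalize_current_transfer().
       */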
 414 static int mtk_spi_fifo_transfer(struct spi_master *master,
 415                                  struct spi_device *spi,
 416                                  struct spi_transfer *xfer)
 417 {
 418         int cnt, remainder;
 419         u32 reg_val;
 420         struct mtk_spi *mdata = spi_master_get_devdata(master);
 421 
 422         mdata->cur_transfer = xfer;
 423         mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len);
 424         mdata->num_xfered = 0;
 425         mtk_spi_prepare_transfer(master, xfer);
 426         mtk_spi_setup_packet(master);
 427 
  428         cnt = mdata->xfer_len / 4;
 429         iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);
 430 
  431         remainder = mdata->xfer_len % 4;
 432         if (remainder > 0) {
 433                 reg_val = 0;
 434                 memcpy(&reg_val, xfer->tx_buf + (cnt * 4), remainder);
 435                 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
 436         }
 437 
 438         mtk_spi_enable_transfer(master);
 439 
 440         return 1;
 441 }
 442 
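      /*
       * DMA path: program the first chunk of the tx/rx scatterlists and
       * start the controller; mtk_spi_interrupt() walks the remaining
       * scatterlist entries.  As with the FIFO path, returning 1 means
       * the transfer completes asynchronously.
       */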
 443 static int mtk_spi_dma_transfer(struct spi_master *master,
 444                                 struct spi_device *spi,
 445                                 struct spi_transfer *xfer)
 446 {
 447         int cmd;
 448         struct mtk_spi *mdata = spi_master_get_devdata(master);
 449 
 450         mdata->tx_sgl = NULL;
 451         mdata->rx_sgl = NULL;
 452         mdata->tx_sgl_len = 0;
 453         mdata->rx_sgl_len = 0;
 454         mdata->cur_transfer = xfer;
 455         mdata->num_xfered = 0;
 456 
 457         mtk_spi_prepare_transfer(master, xfer);
 458 
 459         cmd = readl(mdata->base + SPI_CMD_REG);
 460         if (xfer->tx_buf)
 461                 cmd |= SPI_CMD_TX_DMA;
 462         if (xfer->rx_buf)
 463                 cmd |= SPI_CMD_RX_DMA;
 464         writel(cmd, mdata->base + SPI_CMD_REG);
 465 
 466         if (xfer->tx_buf)
 467                 mdata->tx_sgl = xfer->tx_sg.sgl;
 468         if (xfer->rx_buf)
 469                 mdata->rx_sgl = xfer->rx_sg.sgl;
 470 
 471         if (mdata->tx_sgl) {
 472                 xfer->tx_dma = sg_dma_address(mdata->tx_sgl);
 473                 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
 474         }
 475         if (mdata->rx_sgl) {
 476                 xfer->rx_dma = sg_dma_address(mdata->rx_sgl);
 477                 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
 478         }
 479 
 480         mtk_spi_update_mdata_len(master);
 481         mtk_spi_setup_packet(master);
 482         mtk_spi_setup_dma_addr(master, xfer);
 483         mtk_spi_enable_transfer(master);
 484 
 485         return 1;
 486 }
 487 
 488 static int mtk_spi_transfer_one(struct spi_master *master,
 489                                 struct spi_device *spi,
 490                                 struct spi_transfer *xfer)
 491 {
 492         if (master->can_dma(master, spi, xfer))
 493                 return mtk_spi_dma_transfer(master, spi, xfer);
 494         else
 495                 return mtk_spi_fifo_transfer(master, spi, xfer);
 496 }
 497 
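      /*
       * Transfers that fit in the 32 byte FIFO are done in PIO mode;
       * larger ones use DMA, provided both buffers are word aligned (a
       * NULL tx_buf or rx_buf trivially passes the alignment test).
       * Unaligned transfers larger than the FIFO fall back to the PIO
       * path and are fed to the FIFO in 32 byte chunks.
       */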
 498 static bool mtk_spi_can_dma(struct spi_master *master,
 499                             struct spi_device *spi,
 500                             struct spi_transfer *xfer)
 501 {
 502         /* Buffers for DMA transactions must be 4-byte aligned */
 503         return (xfer->len > MTK_SPI_MAX_FIFO_SIZE &&
 504                 (unsigned long)xfer->tx_buf % 4 == 0 &&
 505                 (unsigned long)xfer->rx_buf % 4 == 0);
 506 }
 507 
 508 static int mtk_spi_setup(struct spi_device *spi)
 509 {
 510         struct mtk_spi *mdata = spi_master_get_devdata(spi->master);
 511 
 512         if (!spi->controller_data)
 513                 spi->controller_data = (void *)&mtk_default_chip_info;
 514 
 515         if (mdata->dev_comp->need_pad_sel && gpio_is_valid(spi->cs_gpio))
 516                 gpio_direction_output(spi->cs_gpio, !(spi->mode & SPI_CS_HIGH));
 517 
 518         return 0;
 519 }
 520 
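      /*
       * Completion handler for both modes.  In PIO mode it drains the
       * RX FIFO, refills the TX FIFO with the next chunk of at most 32
       * bytes and restarts the controller until trans->len bytes have
       * been moved.  In DMA mode it advances along the tx/rx
       * scatterlists, reprogramming the DMA addresses and packet
       * registers for each chunk, and finalizes the transfer once both
       * lists are consumed.
       */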
 521 static irqreturn_t mtk_spi_interrupt(int irq, void *dev_id)
 522 {
 523         u32 cmd, reg_val, cnt, remainder, len;
 524         struct spi_master *master = dev_id;
 525         struct mtk_spi *mdata = spi_master_get_devdata(master);
 526         struct spi_transfer *trans = mdata->cur_transfer;
 527 
 528         reg_val = readl(mdata->base + SPI_STATUS0_REG);
 529         if (reg_val & MTK_SPI_PAUSE_INT_STATUS)
 530                 mdata->state = MTK_SPI_PAUSED;
 531         else
 532                 mdata->state = MTK_SPI_IDLE;
 533 
 534         if (!master->can_dma(master, master->cur_msg->spi, trans)) {
 535                 if (trans->rx_buf) {
 536                         cnt = mdata->xfer_len / 4;
 537                         ioread32_rep(mdata->base + SPI_RX_DATA_REG,
 538                                      trans->rx_buf + mdata->num_xfered, cnt);
 539                         remainder = mdata->xfer_len % 4;
 540                         if (remainder > 0) {
 541                                 reg_val = readl(mdata->base + SPI_RX_DATA_REG);
 542                                 memcpy(trans->rx_buf +
 543                                         mdata->num_xfered +
 544                                         (cnt * 4),
 545                                         &reg_val,
 546                                         remainder);
 547                         }
 548                 }
 549 
 550                 mdata->num_xfered += mdata->xfer_len;
 551                 if (mdata->num_xfered == trans->len) {
 552                         spi_finalize_current_transfer(master);
 553                         return IRQ_HANDLED;
 554                 }
 555 
 556                 len = trans->len - mdata->num_xfered;
 557                 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, len);
 558                 mtk_spi_setup_packet(master);
 559 
 560                 cnt = mdata->xfer_len / 4;
 561                 iowrite32_rep(mdata->base + SPI_TX_DATA_REG,
 562                                 trans->tx_buf + mdata->num_xfered, cnt);
 563 
 564                 remainder = mdata->xfer_len % 4;
 565                 if (remainder > 0) {
 566                         reg_val = 0;
 567                         memcpy(&reg_val,
 568                                 trans->tx_buf + (cnt * 4) + mdata->num_xfered,
 569                                 remainder);
 570                         writel(reg_val, mdata->base + SPI_TX_DATA_REG);
 571                 }
 572 
 573                 mtk_spi_enable_transfer(master);
 574 
 575                 return IRQ_HANDLED;
 576         }
 577 
 578         if (mdata->tx_sgl)
 579                 trans->tx_dma += mdata->xfer_len;
 580         if (mdata->rx_sgl)
 581                 trans->rx_dma += mdata->xfer_len;
 582 
 583         if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) {
 584                 mdata->tx_sgl = sg_next(mdata->tx_sgl);
 585                 if (mdata->tx_sgl) {
 586                         trans->tx_dma = sg_dma_address(mdata->tx_sgl);
 587                         mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
 588                 }
 589         }
 590         if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) {
 591                 mdata->rx_sgl = sg_next(mdata->rx_sgl);
 592                 if (mdata->rx_sgl) {
 593                         trans->rx_dma = sg_dma_address(mdata->rx_sgl);
 594                         mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
 595                 }
 596         }
 597 
 598         if (!mdata->tx_sgl && !mdata->rx_sgl) {
  599                 /* disable SPI DMA mode */
 600                 cmd = readl(mdata->base + SPI_CMD_REG);
 601                 cmd &= ~SPI_CMD_TX_DMA;
 602                 cmd &= ~SPI_CMD_RX_DMA;
 603                 writel(cmd, mdata->base + SPI_CMD_REG);
 604 
 605                 spi_finalize_current_transfer(master);
 606                 return IRQ_HANDLED;
 607         }
 608 
 609         mtk_spi_update_mdata_len(master);
 610         mtk_spi_setup_packet(master);
 611         mtk_spi_setup_dma_addr(master, trans);
 612         mtk_spi_enable_transfer(master);
 613 
 614         return IRQ_HANDLED;
 615 }
 616 
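      /*
       * Clock topology as used by this driver: sel-clk is a mux that is
       * reparented to parent-clk once at probe time, and spi-clk is the
       * gate that actually feeds the controller.  Only spi-clk is
       * toggled afterwards, by the runtime PM callbacks and the system
       * sleep hooks below.
       */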
 617 static int mtk_spi_probe(struct platform_device *pdev)
 618 {
 619         struct spi_master *master;
 620         struct mtk_spi *mdata;
 621         const struct of_device_id *of_id;
 622         struct resource *res;
 623         int i, irq, ret, addr_bits;
 624 
 625         master = spi_alloc_master(&pdev->dev, sizeof(*mdata));
 626         if (!master) {
 627                 dev_err(&pdev->dev, "failed to alloc spi master\n");
 628                 return -ENOMEM;
 629         }
 630 
 631         master->auto_runtime_pm = true;
 632         master->dev.of_node = pdev->dev.of_node;
 633         master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LSB_FIRST;
 634 
 635         master->set_cs = mtk_spi_set_cs;
 636         master->prepare_message = mtk_spi_prepare_message;
 637         master->transfer_one = mtk_spi_transfer_one;
 638         master->can_dma = mtk_spi_can_dma;
 639         master->setup = mtk_spi_setup;
 640 
 641         of_id = of_match_node(mtk_spi_of_match, pdev->dev.of_node);
 642         if (!of_id) {
 643                 dev_err(&pdev->dev, "failed to probe of_node\n");
 644                 ret = -EINVAL;
 645                 goto err_put_master;
 646         }
 647 
 648         mdata = spi_master_get_devdata(master);
 649         mdata->dev_comp = of_id->data;
 650         if (mdata->dev_comp->must_tx)
 651                 master->flags = SPI_MASTER_MUST_TX;
 652 
 653         if (mdata->dev_comp->need_pad_sel) {
 654                 mdata->pad_num = of_property_count_u32_elems(
 655                         pdev->dev.of_node,
 656                         "mediatek,pad-select");
 657                 if (mdata->pad_num < 0) {
 658                         dev_err(&pdev->dev,
 659                                 "No 'mediatek,pad-select' property\n");
 660                         ret = -EINVAL;
 661                         goto err_put_master;
 662                 }
 663 
 664                 mdata->pad_sel = devm_kmalloc_array(&pdev->dev, mdata->pad_num,
 665                                                     sizeof(u32), GFP_KERNEL);
 666                 if (!mdata->pad_sel) {
 667                         ret = -ENOMEM;
 668                         goto err_put_master;
 669                 }
 670 
 671                 for (i = 0; i < mdata->pad_num; i++) {
 672                         of_property_read_u32_index(pdev->dev.of_node,
 673                                                    "mediatek,pad-select",
 674                                                    i, &mdata->pad_sel[i]);
 675                         if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL) {
 676                                 dev_err(&pdev->dev, "wrong pad-sel[%d]: %u\n",
 677                                         i, mdata->pad_sel[i]);
 678                                 ret = -EINVAL;
 679                                 goto err_put_master;
 680                         }
 681                 }
 682         }
 683 
 684         platform_set_drvdata(pdev, master);
 685 
 686         res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
 687         if (!res) {
 688                 ret = -ENODEV;
 689                 dev_err(&pdev->dev, "failed to determine base address\n");
 690                 goto err_put_master;
 691         }
 692 
 693         mdata->base = devm_ioremap_resource(&pdev->dev, res);
 694         if (IS_ERR(mdata->base)) {
 695                 ret = PTR_ERR(mdata->base);
 696                 goto err_put_master;
 697         }
 698 
 699         irq = platform_get_irq(pdev, 0);
 700         if (irq < 0) {
 701                 ret = irq;
 702                 goto err_put_master;
 703         }
 704 
 705         if (!pdev->dev.dma_mask)
 706                 pdev->dev.dma_mask = &pdev->dev.coherent_dma_mask;
 707 
 708         ret = devm_request_irq(&pdev->dev, irq, mtk_spi_interrupt,
 709                                IRQF_TRIGGER_NONE, dev_name(&pdev->dev), master);
 710         if (ret) {
 711                 dev_err(&pdev->dev, "failed to register irq (%d)\n", ret);
 712                 goto err_put_master;
 713         }
 714 
 715         mdata->parent_clk = devm_clk_get(&pdev->dev, "parent-clk");
 716         if (IS_ERR(mdata->parent_clk)) {
 717                 ret = PTR_ERR(mdata->parent_clk);
 718                 dev_err(&pdev->dev, "failed to get parent-clk: %d\n", ret);
 719                 goto err_put_master;
 720         }
 721 
 722         mdata->sel_clk = devm_clk_get(&pdev->dev, "sel-clk");
 723         if (IS_ERR(mdata->sel_clk)) {
 724                 ret = PTR_ERR(mdata->sel_clk);
 725                 dev_err(&pdev->dev, "failed to get sel-clk: %d\n", ret);
 726                 goto err_put_master;
 727         }
 728 
 729         mdata->spi_clk = devm_clk_get(&pdev->dev, "spi-clk");
 730         if (IS_ERR(mdata->spi_clk)) {
 731                 ret = PTR_ERR(mdata->spi_clk);
 732                 dev_err(&pdev->dev, "failed to get spi-clk: %d\n", ret);
 733                 goto err_put_master;
 734         }
 735 
 736         ret = clk_prepare_enable(mdata->spi_clk);
 737         if (ret < 0) {
 738                 dev_err(&pdev->dev, "failed to enable spi_clk (%d)\n", ret);
 739                 goto err_put_master;
 740         }
 741 
 742         ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk);
 743         if (ret < 0) {
 744                 dev_err(&pdev->dev, "failed to clk_set_parent (%d)\n", ret);
 745                 clk_disable_unprepare(mdata->spi_clk);
 746                 goto err_put_master;
 747         }
 748 
 749         clk_disable_unprepare(mdata->spi_clk);
 750 
 751         pm_runtime_enable(&pdev->dev);
 752 
 753         ret = devm_spi_register_master(&pdev->dev, master);
 754         if (ret) {
 755                 dev_err(&pdev->dev, "failed to register master (%d)\n", ret);
 756                 goto err_disable_runtime_pm;
 757         }
 758 
 759         if (mdata->dev_comp->need_pad_sel) {
 760                 if (mdata->pad_num != master->num_chipselect) {
 761                         dev_err(&pdev->dev,
 762                                 "pad_num does not match num_chipselect(%d != %d)\n",
 763                                 mdata->pad_num, master->num_chipselect);
 764                         ret = -EINVAL;
 765                         goto err_disable_runtime_pm;
 766                 }
 767 
 768                 if (!master->cs_gpios && master->num_chipselect > 1) {
 769                         dev_err(&pdev->dev,
 770                                 "cs_gpios not specified and num_chipselect > 1\n");
 771                         ret = -EINVAL;
 772                         goto err_disable_runtime_pm;
 773                 }
 774 
 775                 if (master->cs_gpios) {
 776                         for (i = 0; i < master->num_chipselect; i++) {
 777                                 ret = devm_gpio_request(&pdev->dev,
 778                                                         master->cs_gpios[i],
 779                                                         dev_name(&pdev->dev));
 780                                 if (ret) {
 781                                         dev_err(&pdev->dev,
 782                                                 "can't get CS GPIO %i\n", i);
 783                                         goto err_disable_runtime_pm;
 784                                 }
 785                         }
 786                 }
 787         }
 788 
 789         if (mdata->dev_comp->dma_ext)
 790                 addr_bits = DMA_ADDR_EXT_BITS;
 791         else
 792                 addr_bits = DMA_ADDR_DEF_BITS;
 793         ret = dma_set_mask(&pdev->dev, DMA_BIT_MASK(addr_bits));
 794         if (ret)
 795                 dev_notice(&pdev->dev, "SPI dma_set_mask(%d) failed, ret:%d\n",
 796                            addr_bits, ret);
 797 
 798         return 0;
 799 
 800 err_disable_runtime_pm:
 801         pm_runtime_disable(&pdev->dev);
 802 err_put_master:
 803         spi_master_put(master);
 804 
 805         return ret;
 806 }
 807 
 808 static int mtk_spi_remove(struct platform_device *pdev)
 809 {
 810         struct spi_master *master = platform_get_drvdata(pdev);
 811         struct mtk_spi *mdata = spi_master_get_devdata(master);
 812 
 813         pm_runtime_disable(&pdev->dev);
 814 
 815         mtk_spi_reset(mdata);
 816 
 817         return 0;
 818 }
 819 
 820 #ifdef CONFIG_PM_SLEEP
 821 static int mtk_spi_suspend(struct device *dev)
 822 {
 823         int ret;
 824         struct spi_master *master = dev_get_drvdata(dev);
 825         struct mtk_spi *mdata = spi_master_get_devdata(master);
 826 
 827         ret = spi_master_suspend(master);
 828         if (ret)
 829                 return ret;
 830 
 831         if (!pm_runtime_suspended(dev))
 832                 clk_disable_unprepare(mdata->spi_clk);
 833 
 834         return ret;
 835 }
 836 
 837 static int mtk_spi_resume(struct device *dev)
 838 {
 839         int ret;
 840         struct spi_master *master = dev_get_drvdata(dev);
 841         struct mtk_spi *mdata = spi_master_get_devdata(master);
 842 
 843         if (!pm_runtime_suspended(dev)) {
 844                 ret = clk_prepare_enable(mdata->spi_clk);
 845                 if (ret < 0) {
 846                         dev_err(dev, "failed to enable spi_clk (%d)\n", ret);
 847                         return ret;
 848                 }
 849         }
 850 
 851         ret = spi_master_resume(master);
 852         if (ret < 0)
 853                 clk_disable_unprepare(mdata->spi_clk);
 854 
 855         return ret;
 856 }
 857 #endif /* CONFIG_PM_SLEEP */
 858 
 859 #ifdef CONFIG_PM
 860 static int mtk_spi_runtime_suspend(struct device *dev)
 861 {
 862         struct spi_master *master = dev_get_drvdata(dev);
 863         struct mtk_spi *mdata = spi_master_get_devdata(master);
 864 
 865         clk_disable_unprepare(mdata->spi_clk);
 866 
 867         return 0;
 868 }
 869 
 870 static int mtk_spi_runtime_resume(struct device *dev)
 871 {
 872         struct spi_master *master = dev_get_drvdata(dev);
 873         struct mtk_spi *mdata = spi_master_get_devdata(master);
 874         int ret;
 875 
 876         ret = clk_prepare_enable(mdata->spi_clk);
 877         if (ret < 0) {
 878                 dev_err(dev, "failed to enable spi_clk (%d)\n", ret);
 879                 return ret;
 880         }
 881 
 882         return 0;
 883 }
 884 #endif /* CONFIG_PM */
 885 
 886 static const struct dev_pm_ops mtk_spi_pm = {
 887         SET_SYSTEM_SLEEP_PM_OPS(mtk_spi_suspend, mtk_spi_resume)
 888         SET_RUNTIME_PM_OPS(mtk_spi_runtime_suspend,
 889                            mtk_spi_runtime_resume, NULL)
 890 };
 891 
 892 static struct platform_driver mtk_spi_driver = {
 893         .driver = {
 894                 .name = "mtk-spi",
 895                 .pm     = &mtk_spi_pm,
 896                 .of_match_table = mtk_spi_of_match,
 897         },
 898         .probe = mtk_spi_probe,
 899         .remove = mtk_spi_remove,
 900 };
 901 
 902 module_platform_driver(mtk_spi_driver);
 903 
 904 MODULE_DESCRIPTION("MTK SPI Controller driver");
 905 MODULE_AUTHOR("Leilk Liu <leilk.liu@mediatek.com>");
 906 MODULE_LICENSE("GPL v2");
 907 MODULE_ALIAS("platform:mtk-spi");
