drivers/dma/sprd-dma.c


DEFINITIONS

This source file includes the following definitions:
  1. to_sprd_dma_chan
  2. to_sprd_dma_dev
  3. to_sprd_dma_desc
  4. sprd_dma_glb_update
  5. sprd_dma_chn_update
  6. sprd_dma_enable
  7. sprd_dma_disable
  8. sprd_dma_set_uid
  9. sprd_dma_unset_uid
  10. sprd_dma_clear_int
  11. sprd_dma_enable_chn
  12. sprd_dma_disable_chn
  13. sprd_dma_soft_request
  14. sprd_dma_pause_resume
  15. sprd_dma_stop_and_disable
  16. sprd_dma_get_src_addr
  17. sprd_dma_get_dst_addr
  18. sprd_dma_get_int_type
  19. sprd_dma_get_req_type
  20. sprd_dma_set_2stage_config
  21. sprd_dma_set_chn_config
  22. sprd_dma_start
  23. sprd_dma_stop
  24. sprd_dma_check_trans_done
  25. dma_irq_handle
  26. sprd_dma_alloc_chan_resources
  27. sprd_dma_free_chan_resources
  28. sprd_dma_tx_status
  29. sprd_dma_issue_pending
  30. sprd_dma_get_datawidth
  31. sprd_dma_get_step
  32. sprd_dma_fill_desc
  33. sprd_dma_fill_linklist_desc
  34. sprd_dma_prep_dma_memcpy
  35. sprd_dma_prep_slave_sg
  36. sprd_dma_slave_config
  37. sprd_dma_pause
  38. sprd_dma_resume
  39. sprd_dma_terminate_all
  40. sprd_dma_free_desc
  41. sprd_dma_filter_fn
  42. sprd_dma_probe
  43. sprd_dma_remove
  44. sprd_dma_runtime_suspend
  45. sprd_dma_runtime_resume

   1 /*
   2  * Copyright (C) 2017 Spreadtrum Communications Inc.
   3  *
   4  * SPDX-License-Identifier: GPL-2.0
   5  */
   6 
   7 #include <linux/clk.h>
   8 #include <linux/dma-mapping.h>
   9 #include <linux/dma/sprd-dma.h>
  10 #include <linux/errno.h>
  11 #include <linux/init.h>
  12 #include <linux/interrupt.h>
  13 #include <linux/io.h>
  14 #include <linux/kernel.h>
  15 #include <linux/module.h>
  16 #include <linux/of.h>
  17 #include <linux/of_dma.h>
  18 #include <linux/of_device.h>
  19 #include <linux/pm_runtime.h>
  20 #include <linux/slab.h>
  21 
  22 #include "virt-dma.h"
  23 
  24 #define SPRD_DMA_CHN_REG_OFFSET         0x1000
  25 #define SPRD_DMA_CHN_REG_LENGTH         0x40
  26 #define SPRD_DMA_MEMCPY_MIN_SIZE        64
  27 
  28 /* DMA global registers definition */
  29 #define SPRD_DMA_GLB_PAUSE              0x0
  30 #define SPRD_DMA_GLB_FRAG_WAIT          0x4
  31 #define SPRD_DMA_GLB_REQ_PEND0_EN       0x8
  32 #define SPRD_DMA_GLB_REQ_PEND1_EN       0xc
  33 #define SPRD_DMA_GLB_INT_RAW_STS        0x10
  34 #define SPRD_DMA_GLB_INT_MSK_STS        0x14
  35 #define SPRD_DMA_GLB_REQ_STS            0x18
  36 #define SPRD_DMA_GLB_CHN_EN_STS         0x1c
  37 #define SPRD_DMA_GLB_DEBUG_STS          0x20
  38 #define SPRD_DMA_GLB_ARB_SEL_STS        0x24
  39 #define SPRD_DMA_GLB_2STAGE_GRP1        0x28
  40 #define SPRD_DMA_GLB_2STAGE_GRP2        0x2c
  41 #define SPRD_DMA_GLB_REQ_UID(uid)       (0x4 * ((uid) - 1))
  42 #define SPRD_DMA_GLB_REQ_UID_OFFSET     0x2000
  43 
  44 /* DMA channel registers definition */
  45 #define SPRD_DMA_CHN_PAUSE              0x0
  46 #define SPRD_DMA_CHN_REQ                0x4
  47 #define SPRD_DMA_CHN_CFG                0x8
  48 #define SPRD_DMA_CHN_INTC               0xc
  49 #define SPRD_DMA_CHN_SRC_ADDR           0x10
  50 #define SPRD_DMA_CHN_DES_ADDR           0x14
  51 #define SPRD_DMA_CHN_FRG_LEN            0x18
  52 #define SPRD_DMA_CHN_BLK_LEN            0x1c
  53 #define SPRD_DMA_CHN_TRSC_LEN           0x20
  54 #define SPRD_DMA_CHN_TRSF_STEP          0x24
  55 #define SPRD_DMA_CHN_WARP_PTR           0x28
  56 #define SPRD_DMA_CHN_WARP_TO            0x2c
  57 #define SPRD_DMA_CHN_LLIST_PTR          0x30
  58 #define SPRD_DMA_CHN_FRAG_STEP          0x34
  59 #define SPRD_DMA_CHN_SRC_BLK_STEP       0x38
  60 #define SPRD_DMA_CHN_DES_BLK_STEP       0x3c
  61 
  62 /* SPRD_DMA_GLB_2STAGE_GRP register definition */
  63 #define SPRD_DMA_GLB_2STAGE_EN          BIT(24)
  64 #define SPRD_DMA_GLB_CHN_INT_MASK       GENMASK(23, 20)
  65 #define SPRD_DMA_GLB_DEST_INT           BIT(22)
  66 #define SPRD_DMA_GLB_SRC_INT            BIT(20)
  67 #define SPRD_DMA_GLB_LIST_DONE_TRG      BIT(19)
  68 #define SPRD_DMA_GLB_TRANS_DONE_TRG     BIT(18)
  69 #define SPRD_DMA_GLB_BLOCK_DONE_TRG     BIT(17)
  70 #define SPRD_DMA_GLB_FRAG_DONE_TRG      BIT(16)
  71 #define SPRD_DMA_GLB_TRG_OFFSET         16
  72 #define SPRD_DMA_GLB_DEST_CHN_MASK      GENMASK(13, 8)
  73 #define SPRD_DMA_GLB_DEST_CHN_OFFSET    8
  74 #define SPRD_DMA_GLB_SRC_CHN_MASK       GENMASK(5, 0)
  75 
  76 /* SPRD_DMA_CHN_INTC register definition */
  77 #define SPRD_DMA_INT_MASK               GENMASK(4, 0)
  78 #define SPRD_DMA_INT_CLR_OFFSET         24
  79 #define SPRD_DMA_FRAG_INT_EN            BIT(0)
  80 #define SPRD_DMA_BLK_INT_EN             BIT(1)
  81 #define SPRD_DMA_TRANS_INT_EN           BIT(2)
  82 #define SPRD_DMA_LIST_INT_EN            BIT(3)
  83 #define SPRD_DMA_CFG_ERR_INT_EN         BIT(4)
  84 
  85 /* SPRD_DMA_CHN_CFG register definition */
  86 #define SPRD_DMA_CHN_EN                 BIT(0)
  87 #define SPRD_DMA_LINKLIST_EN            BIT(4)
  88 #define SPRD_DMA_WAIT_BDONE_OFFSET      24
  89 #define SPRD_DMA_DONOT_WAIT_BDONE       1
  90 
  91 /* SPRD_DMA_CHN_REQ register definition */
  92 #define SPRD_DMA_REQ_EN                 BIT(0)
  93 
  94 /* SPRD_DMA_CHN_PAUSE register definition */
  95 #define SPRD_DMA_PAUSE_EN               BIT(0)
  96 #define SPRD_DMA_PAUSE_STS              BIT(2)
  97 #define SPRD_DMA_PAUSE_CNT              0x2000
  98 
   99 /* SPRD_DMA_CHN_WARP_* register definition */
 100 #define SPRD_DMA_HIGH_ADDR_MASK         GENMASK(31, 28)
 101 #define SPRD_DMA_LOW_ADDR_MASK          GENMASK(31, 0)
 102 #define SPRD_DMA_HIGH_ADDR_OFFSET       4
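      /*
       * Worked example (illustrative): for the 36-bit source address
       * 0x3_89AB_CDEF, the low 32 bits 0x89ABCDEF land in
       * SPRD_DMA_CHN_SRC_ADDR, while (addr >> SPRD_DMA_HIGH_ADDR_OFFSET) &
       * SPRD_DMA_HIGH_ADDR_MASK leaves the top nibble 0x3 in bits [31:28]
       * of SPRD_DMA_CHN_WARP_PTR. sprd_dma_get_src_addr() reverses this:
       * 0x89ABCDEF | (0x30000000 << 4) == 0x3_89AB_CDEF.
       */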
 103 
  104 /* SPRD_DMA_CHN_INTC register interrupt status definition */
 105 #define SPRD_DMA_FRAG_INT_STS           BIT(16)
 106 #define SPRD_DMA_BLK_INT_STS            BIT(17)
 107 #define SPRD_DMA_TRSC_INT_STS           BIT(18)
 108 #define SPRD_DMA_LIST_INT_STS           BIT(19)
 109 #define SPRD_DMA_CFGERR_INT_STS         BIT(20)
 110 #define SPRD_DMA_CHN_INT_STS                                    \
 111         (SPRD_DMA_FRAG_INT_STS | SPRD_DMA_BLK_INT_STS |         \
 112          SPRD_DMA_TRSC_INT_STS | SPRD_DMA_LIST_INT_STS |        \
 113          SPRD_DMA_CFGERR_INT_STS)
 114 
 115 /* SPRD_DMA_CHN_FRG_LEN register definition */
 116 #define SPRD_DMA_SRC_DATAWIDTH_OFFSET   30
 117 #define SPRD_DMA_DES_DATAWIDTH_OFFSET   28
 118 #define SPRD_DMA_SWT_MODE_OFFSET        26
 119 #define SPRD_DMA_REQ_MODE_OFFSET        24
 120 #define SPRD_DMA_REQ_MODE_MASK          GENMASK(1, 0)
 121 #define SPRD_DMA_FIX_SEL_OFFSET         21
 122 #define SPRD_DMA_FIX_EN_OFFSET          20
 123 #define SPRD_DMA_LLIST_END              BIT(19)
 124 #define SPRD_DMA_FRG_LEN_MASK           GENMASK(16, 0)
 125 
 126 /* SPRD_DMA_CHN_BLK_LEN register definition */
 127 #define SPRD_DMA_BLK_LEN_MASK           GENMASK(16, 0)
 128 
 129 /* SPRD_DMA_CHN_TRSC_LEN register definition */
 130 #define SPRD_DMA_TRSC_LEN_MASK          GENMASK(27, 0)
 131 
 132 /* SPRD_DMA_CHN_TRSF_STEP register definition */
 133 #define SPRD_DMA_DEST_TRSF_STEP_OFFSET  16
 134 #define SPRD_DMA_SRC_TRSF_STEP_OFFSET   0
 135 #define SPRD_DMA_TRSF_STEP_MASK         GENMASK(15, 0)
 136 
  137 /* SPRD_DMA_CHN_SRC_BLK_STEP register definition */
 138 #define SPRD_DMA_LLIST_HIGH_MASK        GENMASK(31, 28)
 139 #define SPRD_DMA_LLIST_HIGH_SHIFT       28
 140 
 141 /* define DMA channel mode & trigger mode mask */
 142 #define SPRD_DMA_CHN_MODE_MASK          GENMASK(7, 0)
 143 #define SPRD_DMA_TRG_MODE_MASK          GENMASK(7, 0)
 144 #define SPRD_DMA_INT_TYPE_MASK          GENMASK(7, 0)
 145 
 146 /* define the DMA transfer step type */
 147 #define SPRD_DMA_NONE_STEP              0
 148 #define SPRD_DMA_BYTE_STEP              1
 149 #define SPRD_DMA_SHORT_STEP             2
 150 #define SPRD_DMA_WORD_STEP              4
 151 #define SPRD_DMA_DWORD_STEP             8
 152 
 153 #define SPRD_DMA_SOFTWARE_UID           0
 154 
 155 /* dma data width values */
 156 enum sprd_dma_datawidth {
 157         SPRD_DMA_DATAWIDTH_1_BYTE,
 158         SPRD_DMA_DATAWIDTH_2_BYTES,
 159         SPRD_DMA_DATAWIDTH_4_BYTES,
 160         SPRD_DMA_DATAWIDTH_8_BYTES,
 161 };
 162 
 163 /* dma channel hardware configuration */
 164 struct sprd_dma_chn_hw {
 165         u32 pause;
 166         u32 req;
 167         u32 cfg;
 168         u32 intc;
 169         u32 src_addr;
 170         u32 des_addr;
 171         u32 frg_len;
 172         u32 blk_len;
 173         u32 trsc_len;
 174         u32 trsf_step;
 175         u32 wrap_ptr;
 176         u32 wrap_to;
 177         u32 llist_ptr;
 178         u32 frg_step;
 179         u32 src_blk_step;
 180         u32 des_blk_step;
 181 };
 182 
 183 /* dma request description */
 184 struct sprd_dma_desc {
 185         struct virt_dma_desc    vd;
 186         struct sprd_dma_chn_hw  chn_hw;
 187         enum dma_transfer_direction dir;
 188 };
 189 
 190 /* dma channel description */
 191 struct sprd_dma_chn {
 192         struct virt_dma_chan    vc;
 193         void __iomem            *chn_base;
 194         struct sprd_dma_linklist        linklist;
 195         struct dma_slave_config slave_cfg;
 196         u32                     chn_num;
 197         u32                     dev_id;
 198         enum sprd_dma_chn_mode  chn_mode;
 199         enum sprd_dma_trg_mode  trg_mode;
 200         enum sprd_dma_int_type  int_type;
 201         struct sprd_dma_desc    *cur_desc;
 202 };
 203 
 204 /* SPRD dma device */
 205 struct sprd_dma_dev {
 206         struct dma_device       dma_dev;
 207         void __iomem            *glb_base;
 208         struct clk              *clk;
 209         struct clk              *ashb_clk;
 210         int                     irq;
 211         u32                     total_chns;
  212         struct sprd_dma_chn     channels[];
 213 };
 214 
 215 static void sprd_dma_free_desc(struct virt_dma_desc *vd);
 216 static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param);
 217 static struct of_dma_filter_info sprd_dma_info = {
 218         .filter_fn = sprd_dma_filter_fn,
 219 };
 220 
 221 static inline struct sprd_dma_chn *to_sprd_dma_chan(struct dma_chan *c)
 222 {
 223         return container_of(c, struct sprd_dma_chn, vc.chan);
 224 }
 225 
 226 static inline struct sprd_dma_dev *to_sprd_dma_dev(struct dma_chan *c)
 227 {
 228         struct sprd_dma_chn *schan = to_sprd_dma_chan(c);
 229 
 230         return container_of(schan, struct sprd_dma_dev, channels[c->chan_id]);
 231 }
 232 
 233 static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
 234 {
 235         return container_of(vd, struct sprd_dma_desc, vd);
 236 }
 237 
 238 static void sprd_dma_glb_update(struct sprd_dma_dev *sdev, u32 reg,
 239                                 u32 mask, u32 val)
 240 {
 241         u32 orig = readl(sdev->glb_base + reg);
 242         u32 tmp;
 243 
 244         tmp = (orig & ~mask) | val;
 245         writel(tmp, sdev->glb_base + reg);
 246 }
 247 
 248 static void sprd_dma_chn_update(struct sprd_dma_chn *schan, u32 reg,
 249                                 u32 mask, u32 val)
 250 {
 251         u32 orig = readl(schan->chn_base + reg);
 252         u32 tmp;
 253 
 254         tmp = (orig & ~mask) | val;
 255         writel(tmp, schan->chn_base + reg);
 256 }
 257 
 258 static int sprd_dma_enable(struct sprd_dma_dev *sdev)
 259 {
 260         int ret;
 261 
 262         ret = clk_prepare_enable(sdev->clk);
 263         if (ret)
 264                 return ret;
 265 
 266         /*
  267          * The ashb_clk is optional and only used by the AGCP DMA
  268          * controller, so check whether it is present before enabling it.
 269          */
 270         if (!IS_ERR(sdev->ashb_clk))
 271                 ret = clk_prepare_enable(sdev->ashb_clk);
 272 
 273         return ret;
 274 }
 275 
 276 static void sprd_dma_disable(struct sprd_dma_dev *sdev)
 277 {
 278         clk_disable_unprepare(sdev->clk);
 279 
 280         /*
  281          * Only disable the optional ashb_clk for AGCP DMA if it was acquired.
 282          */
 283         if (!IS_ERR(sdev->ashb_clk))
 284                 clk_disable_unprepare(sdev->ashb_clk);
 285 }
 286 
 287 static void sprd_dma_set_uid(struct sprd_dma_chn *schan)
 288 {
 289         struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
 290         u32 dev_id = schan->dev_id;
 291 
 292         if (dev_id != SPRD_DMA_SOFTWARE_UID) {
 293                 u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
 294                                  SPRD_DMA_GLB_REQ_UID(dev_id);
 295 
 296                 writel(schan->chn_num + 1, sdev->glb_base + uid_offset);
 297         }
 298 }
 299 
 300 static void sprd_dma_unset_uid(struct sprd_dma_chn *schan)
 301 {
 302         struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
 303         u32 dev_id = schan->dev_id;
 304 
 305         if (dev_id != SPRD_DMA_SOFTWARE_UID) {
 306                 u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
 307                                  SPRD_DMA_GLB_REQ_UID(dev_id);
 308 
 309                 writel(0, sdev->glb_base + uid_offset);
 310         }
 311 }
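      /*
       * The UID map is a block of per-request-line registers at offset
       * 0x2000 from the global registers: request line (uid) N is served
       * by the channel number written at SPRD_DMA_GLB_REQ_UID_OFFSET +
       * SPRD_DMA_GLB_REQ_UID(N). Channel ids in the map are 1-based
       * (chn_num + 1 above), so writing 0 unbinds the line. E.g. binding
       * hardware channel 5 (chn_num == 4) to uid 3 writes 5 to
       * glb_base + 0x2000 + 0x8.
       */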
 312 
 313 static void sprd_dma_clear_int(struct sprd_dma_chn *schan)
 314 {
 315         sprd_dma_chn_update(schan, SPRD_DMA_CHN_INTC,
 316                             SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET,
 317                             SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET);
 318 }
 319 
 320 static void sprd_dma_enable_chn(struct sprd_dma_chn *schan)
 321 {
 322         sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN,
 323                             SPRD_DMA_CHN_EN);
 324 }
 325 
 326 static void sprd_dma_disable_chn(struct sprd_dma_chn *schan)
 327 {
 328         sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN, 0);
 329 }
 330 
 331 static void sprd_dma_soft_request(struct sprd_dma_chn *schan)
 332 {
 333         sprd_dma_chn_update(schan, SPRD_DMA_CHN_REQ, SPRD_DMA_REQ_EN,
 334                             SPRD_DMA_REQ_EN);
 335 }
 336 
 337 static void sprd_dma_pause_resume(struct sprd_dma_chn *schan, bool enable)
 338 {
 339         struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
 340         u32 pause, timeout = SPRD_DMA_PAUSE_CNT;
 341 
 342         if (enable) {
 343                 sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
 344                                     SPRD_DMA_PAUSE_EN, SPRD_DMA_PAUSE_EN);
 345 
 346                 do {
 347                         pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE);
 348                         if (pause & SPRD_DMA_PAUSE_STS)
 349                                 break;
 350 
 351                         cpu_relax();
 352                 } while (--timeout > 0);
 353 
 354                 if (!timeout)
 355                         dev_warn(sdev->dma_dev.dev,
 356                                  "pause dma controller timeout\n");
 357         } else {
 358                 sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
 359                                     SPRD_DMA_PAUSE_EN, 0);
 360         }
 361 }
 362 
 363 static void sprd_dma_stop_and_disable(struct sprd_dma_chn *schan)
 364 {
 365         u32 cfg = readl(schan->chn_base + SPRD_DMA_CHN_CFG);
 366 
 367         if (!(cfg & SPRD_DMA_CHN_EN))
 368                 return;
 369 
 370         sprd_dma_pause_resume(schan, true);
 371         sprd_dma_disable_chn(schan);
 372 }
 373 
 374 static unsigned long sprd_dma_get_src_addr(struct sprd_dma_chn *schan)
 375 {
 376         unsigned long addr, addr_high;
 377 
 378         addr = readl(schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
 379         addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_PTR) &
 380                     SPRD_DMA_HIGH_ADDR_MASK;
 381 
 382         return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
 383 }
 384 
 385 static unsigned long sprd_dma_get_dst_addr(struct sprd_dma_chn *schan)
 386 {
 387         unsigned long addr, addr_high;
 388 
 389         addr = readl(schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
 390         addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_TO) &
 391                     SPRD_DMA_HIGH_ADDR_MASK;
 392 
 393         return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
 394 }
 395 
 396 static enum sprd_dma_int_type sprd_dma_get_int_type(struct sprd_dma_chn *schan)
 397 {
 398         struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
 399         u32 intc_sts = readl(schan->chn_base + SPRD_DMA_CHN_INTC) &
 400                        SPRD_DMA_CHN_INT_STS;
 401 
 402         switch (intc_sts) {
 403         case SPRD_DMA_CFGERR_INT_STS:
 404                 return SPRD_DMA_CFGERR_INT;
 405 
 406         case SPRD_DMA_LIST_INT_STS:
 407                 return SPRD_DMA_LIST_INT;
 408 
 409         case SPRD_DMA_TRSC_INT_STS:
 410                 return SPRD_DMA_TRANS_INT;
 411 
 412         case SPRD_DMA_BLK_INT_STS:
 413                 return SPRD_DMA_BLK_INT;
 414 
 415         case SPRD_DMA_FRAG_INT_STS:
 416                 return SPRD_DMA_FRAG_INT;
 417 
 418         default:
 419                 dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n");
 420                 return SPRD_DMA_NO_INT;
 421         }
 422 }
 423 
 424 static enum sprd_dma_req_mode sprd_dma_get_req_type(struct sprd_dma_chn *schan)
 425 {
 426         u32 frag_reg = readl(schan->chn_base + SPRD_DMA_CHN_FRG_LEN);
 427 
 428         return (frag_reg >> SPRD_DMA_REQ_MODE_OFFSET) & SPRD_DMA_REQ_MODE_MASK;
 429 }
 430 
 431 static int sprd_dma_set_2stage_config(struct sprd_dma_chn *schan)
 432 {
 433         struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
 434         u32 val, chn = schan->chn_num + 1;
 435 
 436         switch (schan->chn_mode) {
 437         case SPRD_DMA_SRC_CHN0:
 438                 val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
 439                 val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
 440                 val |= SPRD_DMA_GLB_2STAGE_EN;
 441                 if (schan->int_type != SPRD_DMA_NO_INT)
 442                         val |= SPRD_DMA_GLB_SRC_INT;
 443 
 444                 sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
 445                 break;
 446 
 447         case SPRD_DMA_SRC_CHN1:
 448                 val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
 449                 val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
 450                 val |= SPRD_DMA_GLB_2STAGE_EN;
 451                 if (schan->int_type != SPRD_DMA_NO_INT)
 452                         val |= SPRD_DMA_GLB_SRC_INT;
 453 
 454                 sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
 455                 break;
 456 
 457         case SPRD_DMA_DST_CHN0:
 458                 val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
 459                         SPRD_DMA_GLB_DEST_CHN_MASK;
 460                 val |= SPRD_DMA_GLB_2STAGE_EN;
 461                 if (schan->int_type != SPRD_DMA_NO_INT)
 462                         val |= SPRD_DMA_GLB_DEST_INT;
 463 
 464                 sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
 465                 break;
 466 
 467         case SPRD_DMA_DST_CHN1:
 468                 val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
 469                         SPRD_DMA_GLB_DEST_CHN_MASK;
 470                 val |= SPRD_DMA_GLB_2STAGE_EN;
 471                 if (schan->int_type != SPRD_DMA_NO_INT)
 472                         val |= SPRD_DMA_GLB_DEST_INT;
 473 
 474                 sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
 475                 break;
 476 
 477         default:
 478                 dev_err(sdev->dma_dev.dev, "invalid channel mode setting %d\n",
 479                         schan->chn_mode);
 480                 return -EINVAL;
 481         }
 482 
 483         return 0;
 484 }
 485 
 486 static void sprd_dma_set_chn_config(struct sprd_dma_chn *schan,
 487                                     struct sprd_dma_desc *sdesc)
 488 {
 489         struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw;
 490 
 491         writel(cfg->pause, schan->chn_base + SPRD_DMA_CHN_PAUSE);
 492         writel(cfg->cfg, schan->chn_base + SPRD_DMA_CHN_CFG);
 493         writel(cfg->intc, schan->chn_base + SPRD_DMA_CHN_INTC);
 494         writel(cfg->src_addr, schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
 495         writel(cfg->des_addr, schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
 496         writel(cfg->frg_len, schan->chn_base + SPRD_DMA_CHN_FRG_LEN);
 497         writel(cfg->blk_len, schan->chn_base + SPRD_DMA_CHN_BLK_LEN);
 498         writel(cfg->trsc_len, schan->chn_base + SPRD_DMA_CHN_TRSC_LEN);
 499         writel(cfg->trsf_step, schan->chn_base + SPRD_DMA_CHN_TRSF_STEP);
 500         writel(cfg->wrap_ptr, schan->chn_base + SPRD_DMA_CHN_WARP_PTR);
 501         writel(cfg->wrap_to, schan->chn_base + SPRD_DMA_CHN_WARP_TO);
 502         writel(cfg->llist_ptr, schan->chn_base + SPRD_DMA_CHN_LLIST_PTR);
 503         writel(cfg->frg_step, schan->chn_base + SPRD_DMA_CHN_FRAG_STEP);
 504         writel(cfg->src_blk_step, schan->chn_base + SPRD_DMA_CHN_SRC_BLK_STEP);
 505         writel(cfg->des_blk_step, schan->chn_base + SPRD_DMA_CHN_DES_BLK_STEP);
 506         writel(cfg->req, schan->chn_base + SPRD_DMA_CHN_REQ);
 507 }
 508 
 509 static void sprd_dma_start(struct sprd_dma_chn *schan)
 510 {
 511         struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);
 512 
 513         if (!vd)
 514                 return;
 515 
 516         list_del(&vd->node);
 517         schan->cur_desc = to_sprd_dma_desc(vd);
 518 
 519         /*
  520          * Set the 2-stage configuration if the channel is starting a
  521          * 2-stage transfer.
 522          */
 523         if (schan->chn_mode && sprd_dma_set_2stage_config(schan))
 524                 return;
 525 
 526         /*
 527          * Copy the DMA configuration from DMA descriptor to this hardware
 528          * channel.
 529          */
 530         sprd_dma_set_chn_config(schan, schan->cur_desc);
 531         sprd_dma_set_uid(schan);
 532         sprd_dma_enable_chn(schan);
 533 
 534         if (schan->dev_id == SPRD_DMA_SOFTWARE_UID &&
 535             schan->chn_mode != SPRD_DMA_DST_CHN0 &&
 536             schan->chn_mode != SPRD_DMA_DST_CHN1)
 537                 sprd_dma_soft_request(schan);
 538 }
 539 
 540 static void sprd_dma_stop(struct sprd_dma_chn *schan)
 541 {
 542         sprd_dma_stop_and_disable(schan);
 543         sprd_dma_unset_uid(schan);
 544         sprd_dma_clear_int(schan);
 545         schan->cur_desc = NULL;
 546 }
 547 
 548 static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,
 549                                       enum sprd_dma_int_type int_type,
 550                                       enum sprd_dma_req_mode req_mode)
 551 {
 552         if (int_type == SPRD_DMA_NO_INT)
 553                 return false;
 554 
 555         if (int_type >= req_mode + 1)
 556                 return true;
 557         else
 558                 return false;
 559 }
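      /*
       * The comparison above relies on the ordering of enum
       * sprd_dma_int_type and enum sprd_dma_req_mode in
       * <linux/dma/sprd-dma.h>: the interrupt reported for a completed
       * request level sits numerically above the request mode itself.
       * E.g. with req_mode == SPRD_DMA_BLK_REQ, a SPRD_DMA_FRAG_INT leaves
       * the descriptor in flight, while SPRD_DMA_BLK_INT or any higher
       * completion interrupt finishes it.
       */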
 560 
 561 static irqreturn_t dma_irq_handle(int irq, void *dev_id)
 562 {
 563         struct sprd_dma_dev *sdev = (struct sprd_dma_dev *)dev_id;
 564         u32 irq_status = readl(sdev->glb_base + SPRD_DMA_GLB_INT_MSK_STS);
 565         struct sprd_dma_chn *schan;
 566         struct sprd_dma_desc *sdesc;
 567         enum sprd_dma_req_mode req_type;
 568         enum sprd_dma_int_type int_type;
 569         bool trans_done = false, cyclic = false;
 570         u32 i;
 571 
 572         while (irq_status) {
 573                 i = __ffs(irq_status);
 574                 irq_status &= (irq_status - 1);
 575                 schan = &sdev->channels[i];
 576 
 577                 spin_lock(&schan->vc.lock);
 578 
 579                 sdesc = schan->cur_desc;
 580                 if (!sdesc) {
 581                         spin_unlock(&schan->vc.lock);
 582                         return IRQ_HANDLED;
 583                 }
 584 
 585                 int_type = sprd_dma_get_int_type(schan);
 586                 req_type = sprd_dma_get_req_type(schan);
 587                 sprd_dma_clear_int(schan);
 588 
  589                 /* Schedule the callback for cyclic (link-list) transfers. */
  590                 cyclic = !!schan->linklist.phy_addr;
  591                 if (cyclic) {
 592                         vchan_cyclic_callback(&sdesc->vd);
 593                 } else {
 594                         /* Check if the dma request descriptor is done. */
 595                         trans_done = sprd_dma_check_trans_done(sdesc, int_type,
 596                                                                req_type);
  597                         if (trans_done) {
 598                                 vchan_cookie_complete(&sdesc->vd);
 599                                 schan->cur_desc = NULL;
 600                                 sprd_dma_start(schan);
 601                         }
 602                 }
 603                 spin_unlock(&schan->vc.lock);
 604         }
 605 
 606         return IRQ_HANDLED;
 607 }
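      /*
       * The loop above walks the set bits of the masked interrupt status
       * with the lowest-set-bit idiom: __ffs() yields the lowest pending
       * channel and irq_status &= (irq_status - 1) clears that bit. E.g.
       * irq_status == 0x0a (channels 1 and 3 pending) is handled as
       * i == 1, then 0x0a & 0x09 == 0x08, then i == 3.
       */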
 608 
 609 static int sprd_dma_alloc_chan_resources(struct dma_chan *chan)
 610 {
 611         return pm_runtime_get_sync(chan->device->dev);
 612 }
 613 
 614 static void sprd_dma_free_chan_resources(struct dma_chan *chan)
 615 {
 616         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
 617         struct virt_dma_desc *cur_vd = NULL;
 618         unsigned long flags;
 619 
 620         spin_lock_irqsave(&schan->vc.lock, flags);
 621         if (schan->cur_desc)
 622                 cur_vd = &schan->cur_desc->vd;
 623 
 624         sprd_dma_stop(schan);
 625         spin_unlock_irqrestore(&schan->vc.lock, flags);
 626 
 627         if (cur_vd)
 628                 sprd_dma_free_desc(cur_vd);
 629 
 630         vchan_free_chan_resources(&schan->vc);
 631         pm_runtime_put(chan->device->dev);
 632 }
 633 
 634 static enum dma_status sprd_dma_tx_status(struct dma_chan *chan,
 635                                           dma_cookie_t cookie,
 636                                           struct dma_tx_state *txstate)
 637 {
 638         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
 639         struct virt_dma_desc *vd;
 640         unsigned long flags;
 641         enum dma_status ret;
 642         u32 pos;
 643 
 644         ret = dma_cookie_status(chan, cookie, txstate);
 645         if (ret == DMA_COMPLETE || !txstate)
 646                 return ret;
 647 
 648         spin_lock_irqsave(&schan->vc.lock, flags);
 649         vd = vchan_find_desc(&schan->vc, cookie);
 650         if (vd) {
 651                 struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
 652                 struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;
 653 
 654                 if (hw->trsc_len > 0)
 655                         pos = hw->trsc_len;
 656                 else if (hw->blk_len > 0)
 657                         pos = hw->blk_len;
 658                 else if (hw->frg_len > 0)
 659                         pos = hw->frg_len;
 660                 else
 661                         pos = 0;
 662         } else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
 663                 struct sprd_dma_desc *sdesc = schan->cur_desc;
 664 
 665                 if (sdesc->dir == DMA_DEV_TO_MEM)
 666                         pos = sprd_dma_get_dst_addr(schan);
 667                 else
 668                         pos = sprd_dma_get_src_addr(schan);
 669         } else {
 670                 pos = 0;
 671         }
 672         spin_unlock_irqrestore(&schan->vc.lock, flags);
 673 
 674         dma_set_residue(txstate, pos);
 675         return ret;
 676 }
 677 
 678 static void sprd_dma_issue_pending(struct dma_chan *chan)
 679 {
 680         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
 681         unsigned long flags;
 682 
 683         spin_lock_irqsave(&schan->vc.lock, flags);
 684         if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
 685                 sprd_dma_start(schan);
 686         spin_unlock_irqrestore(&schan->vc.lock, flags);
 687 }
 688 
 689 static int sprd_dma_get_datawidth(enum dma_slave_buswidth buswidth)
 690 {
 691         switch (buswidth) {
 692         case DMA_SLAVE_BUSWIDTH_1_BYTE:
 693         case DMA_SLAVE_BUSWIDTH_2_BYTES:
 694         case DMA_SLAVE_BUSWIDTH_4_BYTES:
 695         case DMA_SLAVE_BUSWIDTH_8_BYTES:
 696                 return ffs(buswidth) - 1;
 697 
 698         default:
 699                 return -EINVAL;
 700         }
 701 }
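      /*
       * ffs(buswidth) - 1 maps the byte widths onto enum
       * sprd_dma_datawidth: 1 byte -> 0, 2 bytes -> 1, 4 bytes -> 2 and
       * 8 bytes -> 3, i.e. log2 of the width, which is the encoding
       * programmed into the FRG_LEN datawidth fields below.
       */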
 702 
 703 static int sprd_dma_get_step(enum dma_slave_buswidth buswidth)
 704 {
 705         switch (buswidth) {
 706         case DMA_SLAVE_BUSWIDTH_1_BYTE:
 707         case DMA_SLAVE_BUSWIDTH_2_BYTES:
 708         case DMA_SLAVE_BUSWIDTH_4_BYTES:
 709         case DMA_SLAVE_BUSWIDTH_8_BYTES:
 710                 return buswidth;
 711 
 712         default:
 713                 return -EINVAL;
 714         }
 715 }
 716 
 717 static int sprd_dma_fill_desc(struct dma_chan *chan,
 718                               struct sprd_dma_chn_hw *hw,
 719                               unsigned int sglen, int sg_index,
 720                               dma_addr_t src, dma_addr_t dst, u32 len,
 721                               enum dma_transfer_direction dir,
 722                               unsigned long flags,
 723                               struct dma_slave_config *slave_cfg)
 724 {
 725         struct sprd_dma_dev *sdev = to_sprd_dma_dev(chan);
 726         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
 727         enum sprd_dma_chn_mode chn_mode = schan->chn_mode;
 728         u32 req_mode = (flags >> SPRD_DMA_REQ_SHIFT) & SPRD_DMA_REQ_MODE_MASK;
 729         u32 int_mode = flags & SPRD_DMA_INT_MASK;
 730         int src_datawidth, dst_datawidth, src_step, dst_step;
 731         u32 temp, fix_mode = 0, fix_en = 0;
 732         phys_addr_t llist_ptr;
 733 
 734         if (dir == DMA_MEM_TO_DEV) {
 735                 src_step = sprd_dma_get_step(slave_cfg->src_addr_width);
 736                 if (src_step < 0) {
 737                         dev_err(sdev->dma_dev.dev, "invalid source step\n");
 738                         return src_step;
 739                 }
 740 
 741                 /*
  742                  * For a 2-stage transfer, the destination channel step cannot
  743                  * be 0, since the destination device is the AON IRAM.
 744                  */
 745                 if (chn_mode == SPRD_DMA_DST_CHN0 ||
 746                     chn_mode == SPRD_DMA_DST_CHN1)
 747                         dst_step = src_step;
 748                 else
 749                         dst_step = SPRD_DMA_NONE_STEP;
 750         } else {
 751                 dst_step = sprd_dma_get_step(slave_cfg->dst_addr_width);
 752                 if (dst_step < 0) {
 753                         dev_err(sdev->dma_dev.dev, "invalid destination step\n");
 754                         return dst_step;
 755                 }
 756                 src_step = SPRD_DMA_NONE_STEP;
 757         }
 758 
 759         src_datawidth = sprd_dma_get_datawidth(slave_cfg->src_addr_width);
 760         if (src_datawidth < 0) {
 761                 dev_err(sdev->dma_dev.dev, "invalid source datawidth\n");
 762                 return src_datawidth;
 763         }
 764 
 765         dst_datawidth = sprd_dma_get_datawidth(slave_cfg->dst_addr_width);
 766         if (dst_datawidth < 0) {
 767                 dev_err(sdev->dma_dev.dev, "invalid destination datawidth\n");
 768                 return dst_datawidth;
 769         }
 770 
 771         if (slave_cfg->slave_id)
 772                 schan->dev_id = slave_cfg->slave_id;
 773 
 774         hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;
 775 
 776         /*
  777          * wrap_ptr and wrap_to hold the high 4 bits of the source and
  778          * destination addresses.
 779          */
 780         hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
 781         hw->wrap_to = (dst >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
 782         hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
 783         hw->des_addr = dst & SPRD_DMA_LOW_ADDR_MASK;
 784 
 785         /*
  786          * Fix mode can only be enabled when exactly one of the source and
  787          * destination steps is zero: if both are zero or both are non-zero,
  788          * fix mode stays disabled.
 789          */
 790         if ((src_step != 0 && dst_step != 0) || (src_step | dst_step) == 0) {
 791                 fix_en = 0;
 792         } else {
 793                 fix_en = 1;
 794                 if (src_step)
 795                         fix_mode = 1;
 796                 else
 797                         fix_mode = 0;
 798         }
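              /*
               * E.g. a DMA_MEM_TO_DEV transfer with a 4-byte bus width gives
               * src_step == 4 and dst_step == SPRD_DMA_NONE_STEP, so
               * fix_en == 1 and fix_mode == 1: the source address steps
               * while the destination (device FIFO) address stays fixed.
               */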
 799 
 800         hw->intc = int_mode | SPRD_DMA_CFG_ERR_INT_EN;
 801 
 802         temp = src_datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
 803         temp |= dst_datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
 804         temp |= req_mode << SPRD_DMA_REQ_MODE_OFFSET;
 805         temp |= fix_mode << SPRD_DMA_FIX_SEL_OFFSET;
 806         temp |= fix_en << SPRD_DMA_FIX_EN_OFFSET;
 807         temp |= slave_cfg->src_maxburst & SPRD_DMA_FRG_LEN_MASK;
 808         hw->frg_len = temp;
 809 
 810         hw->blk_len = slave_cfg->src_maxburst & SPRD_DMA_BLK_LEN_MASK;
 811         hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;
 812 
 813         temp = (dst_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
 814         temp |= (src_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
 815         hw->trsf_step = temp;
 816 
 817         /* link-list configuration */
 818         if (schan->linklist.phy_addr) {
 819                 hw->cfg |= SPRD_DMA_LINKLIST_EN;
 820 
 821                 /* link-list index */
 822                 temp = sglen ? (sg_index + 1) % sglen : 0;
 823 
 824                 /* Next link-list configuration's physical address offset */
 825                 temp = temp * sizeof(*hw) + SPRD_DMA_CHN_SRC_ADDR;
 826                 /*
  827                  * Point the link-list pointer at the next link-list
  828                  * configuration's physical address.
 829                  */
 830                 llist_ptr = schan->linklist.phy_addr + temp;
 831                 hw->llist_ptr = lower_32_bits(llist_ptr);
 832                 hw->src_blk_step = (upper_32_bits(llist_ptr) << SPRD_DMA_LLIST_HIGH_SHIFT) &
 833                         SPRD_DMA_LLIST_HIGH_MASK;
 834         } else {
 835                 hw->llist_ptr = 0;
 836                 hw->src_blk_step = 0;
 837         }
 838 
 839         hw->frg_step = 0;
 840         hw->des_blk_step = 0;
 841         return 0;
 842 }
 843 
 844 static int sprd_dma_fill_linklist_desc(struct dma_chan *chan,
 845                                        unsigned int sglen, int sg_index,
 846                                        dma_addr_t src, dma_addr_t dst, u32 len,
 847                                        enum dma_transfer_direction dir,
 848                                        unsigned long flags,
 849                                        struct dma_slave_config *slave_cfg)
 850 {
 851         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
 852         struct sprd_dma_chn_hw *hw;
 853 
 854         if (!schan->linklist.virt_addr)
 855                 return -EINVAL;
 856 
 857         hw = (struct sprd_dma_chn_hw *)(schan->linklist.virt_addr +
 858                                         sg_index * sizeof(*hw));
 859 
 860         return sprd_dma_fill_desc(chan, hw, sglen, sg_index, src, dst, len,
 861                                   dir, flags, slave_cfg);
 862 }
 863 
 864 static struct dma_async_tx_descriptor *
 865 sprd_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
 866                          size_t len, unsigned long flags)
 867 {
 868         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
 869         struct sprd_dma_desc *sdesc;
 870         struct sprd_dma_chn_hw *hw;
 871         enum sprd_dma_datawidth datawidth;
 872         u32 step, temp;
 873 
 874         sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
 875         if (!sdesc)
 876                 return NULL;
 877 
 878         hw = &sdesc->chn_hw;
 879 
 880         hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;
 881         hw->intc = SPRD_DMA_TRANS_INT | SPRD_DMA_CFG_ERR_INT_EN;
 882         hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
 883         hw->des_addr = dest & SPRD_DMA_LOW_ADDR_MASK;
 884         hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) &
 885                 SPRD_DMA_HIGH_ADDR_MASK;
 886         hw->wrap_to = (dest >> SPRD_DMA_HIGH_ADDR_OFFSET) &
 887                 SPRD_DMA_HIGH_ADDR_MASK;
 888 
 889         if (IS_ALIGNED(len, 8)) {
 890                 datawidth = SPRD_DMA_DATAWIDTH_8_BYTES;
 891                 step = SPRD_DMA_DWORD_STEP;
 892         } else if (IS_ALIGNED(len, 4)) {
 893                 datawidth = SPRD_DMA_DATAWIDTH_4_BYTES;
 894                 step = SPRD_DMA_WORD_STEP;
 895         } else if (IS_ALIGNED(len, 2)) {
 896                 datawidth = SPRD_DMA_DATAWIDTH_2_BYTES;
 897                 step = SPRD_DMA_SHORT_STEP;
 898         } else {
 899                 datawidth = SPRD_DMA_DATAWIDTH_1_BYTE;
 900                 step = SPRD_DMA_BYTE_STEP;
 901         }
 902 
 903         temp = datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
 904         temp |= datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
 905         temp |= SPRD_DMA_TRANS_REQ << SPRD_DMA_REQ_MODE_OFFSET;
 906         temp |= len & SPRD_DMA_FRG_LEN_MASK;
 907         hw->frg_len = temp;
 908 
 909         hw->blk_len = len & SPRD_DMA_BLK_LEN_MASK;
 910         hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;
 911 
 912         temp = (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
 913         temp |= (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
 914         hw->trsf_step = temp;
 915 
 916         return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
 917 }
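      /*
       * Illustrative dmaengine client usage, not part of this driver; a
       * minimal sketch assuming the generic client API, with hypothetical
       * dst_dma, src_dma and len values:
       *
       *	dma_cap_mask_t mask;
       *	struct dma_chan *chan;
       *	struct dma_async_tx_descriptor *tx;
       *
       *	dma_cap_zero(mask);
       *	dma_cap_set(DMA_MEMCPY, mask);
       *	chan = dma_request_channel(mask, NULL, NULL);
       *	tx = dmaengine_prep_dma_memcpy(chan, dst_dma, src_dma, len,
       *				       DMA_PREP_INTERRUPT);
       *	dmaengine_submit(tx);
       *	dma_async_issue_pending(chan);
       *
       * Note that the alignment probe above keys off len alone: the widest
       * datawidth/step pair whose size divides len is selected.
       */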
 918 
 919 static struct dma_async_tx_descriptor *
 920 sprd_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 921                        unsigned int sglen, enum dma_transfer_direction dir,
 922                        unsigned long flags, void *context)
 923 {
 924         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
 925         struct dma_slave_config *slave_cfg = &schan->slave_cfg;
 926         dma_addr_t src = 0, dst = 0;
 927         dma_addr_t start_src = 0, start_dst = 0;
 928         struct sprd_dma_desc *sdesc;
 929         struct scatterlist *sg;
 930         u32 len = 0;
 931         int ret, i;
 932 
 933         if (!is_slave_direction(dir))
 934                 return NULL;
 935 
 936         if (context) {
 937                 struct sprd_dma_linklist *ll_cfg =
 938                         (struct sprd_dma_linklist *)context;
 939 
 940                 schan->linklist.phy_addr = ll_cfg->phy_addr;
 941                 schan->linklist.virt_addr = ll_cfg->virt_addr;
 942         } else {
 943                 schan->linklist.phy_addr = 0;
 944                 schan->linklist.virt_addr = 0;
 945         }
 946 
 947         /*
 948          * Set channel mode, interrupt mode and trigger mode for 2-stage
 949          * transfer.
 950          */
 951         schan->chn_mode =
 952                 (flags >> SPRD_DMA_CHN_MODE_SHIFT) & SPRD_DMA_CHN_MODE_MASK;
 953         schan->trg_mode =
 954                 (flags >> SPRD_DMA_TRG_MODE_SHIFT) & SPRD_DMA_TRG_MODE_MASK;
 955         schan->int_type = flags & SPRD_DMA_INT_TYPE_MASK;
 956 
 957         sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
 958         if (!sdesc)
 959                 return NULL;
 960 
 961         sdesc->dir = dir;
 962 
 963         for_each_sg(sgl, sg, sglen, i) {
 964                 len = sg_dma_len(sg);
 965 
 966                 if (dir == DMA_MEM_TO_DEV) {
 967                         src = sg_dma_address(sg);
 968                         dst = slave_cfg->dst_addr;
 969                 } else {
 970                         src = slave_cfg->src_addr;
 971                         dst = sg_dma_address(sg);
 972                 }
 973 
 974                 if (!i) {
 975                         start_src = src;
 976                         start_dst = dst;
 977                 }
 978 
 979                 /*
  980                  * Link-list mode needs at least two link-list
  981                  * configurations, so with a single sg entry there is
  982                  * no link-list configuration to fill in.
 983                  */
 984                 if (sglen < 2)
 985                         break;
 986 
 987                 ret = sprd_dma_fill_linklist_desc(chan, sglen, i, src, dst, len,
 988                                                   dir, flags, slave_cfg);
 989                 if (ret) {
 990                         kfree(sdesc);
 991                         return NULL;
 992                 }
 993         }
 994 
 995         ret = sprd_dma_fill_desc(chan, &sdesc->chn_hw, 0, 0, start_src,
 996                                  start_dst, len, dir, flags, slave_cfg);
 997         if (ret) {
 998                 kfree(sdesc);
 999                 return NULL;
1000         }
1001 
1002         return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
1003 }
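      /*
       * Client-side sketch: cyclic/link-list operation is requested by
       * passing a pre-allocated struct sprd_dma_linklist through the
       * context argument of the device_prep_slave_sg op (the generic
       * dmaengine_prep_slave_sg() helper passes a NULL context), while the
       * channel mode, trigger mode, request mode and interrupt type are
       * packed into flags using the shifts decoded above, e.g. via the
       * SPRD_DMA_FLAGS() helper from <linux/dma/sprd-dma.h>:
       *
       *	flags = SPRD_DMA_FLAGS(SPRD_DMA_CHN_MODE_NONE, SPRD_DMA_NO_TRG,
       *			       SPRD_DMA_FRAG_REQ, SPRD_DMA_TRANS_INT);
       */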
1004 
1005 static int sprd_dma_slave_config(struct dma_chan *chan,
1006                                  struct dma_slave_config *config)
1007 {
1008         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
1009         struct dma_slave_config *slave_cfg = &schan->slave_cfg;
1010 
1011         memcpy(slave_cfg, config, sizeof(*config));
1012         return 0;
1013 }
1014 
1015 static int sprd_dma_pause(struct dma_chan *chan)
1016 {
1017         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
1018         unsigned long flags;
1019 
1020         spin_lock_irqsave(&schan->vc.lock, flags);
1021         sprd_dma_pause_resume(schan, true);
1022         spin_unlock_irqrestore(&schan->vc.lock, flags);
1023 
1024         return 0;
1025 }
1026 
1027 static int sprd_dma_resume(struct dma_chan *chan)
1028 {
1029         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
1030         unsigned long flags;
1031 
1032         spin_lock_irqsave(&schan->vc.lock, flags);
1033         sprd_dma_pause_resume(schan, false);
1034         spin_unlock_irqrestore(&schan->vc.lock, flags);
1035 
1036         return 0;
1037 }
1038 
1039 static int sprd_dma_terminate_all(struct dma_chan *chan)
1040 {
1041         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
1042         struct virt_dma_desc *cur_vd = NULL;
1043         unsigned long flags;
1044         LIST_HEAD(head);
1045 
1046         spin_lock_irqsave(&schan->vc.lock, flags);
1047         if (schan->cur_desc)
1048                 cur_vd = &schan->cur_desc->vd;
1049 
1050         sprd_dma_stop(schan);
1051 
1052         vchan_get_all_descriptors(&schan->vc, &head);
1053         spin_unlock_irqrestore(&schan->vc.lock, flags);
1054 
1055         if (cur_vd)
1056                 sprd_dma_free_desc(cur_vd);
1057 
1058         vchan_dma_desc_free_list(&schan->vc, &head);
1059         return 0;
1060 }
1061 
1062 static void sprd_dma_free_desc(struct virt_dma_desc *vd)
1063 {
1064         struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
1065 
1066         kfree(sdesc);
1067 }
1068 
1069 static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param)
1070 {
1071         struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
1072         u32 slave_id = *(u32 *)param;
1073 
1074         schan->dev_id = slave_id;
1075         return true;
1076 }
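      /*
       * Minimal request sketch (hypothetical slave_id value); this filter
       * runs both for direct dma_request_channel() users and for DT
       * lookups via of_dma_simple_xlate()/sprd_dma_info registered in
       * probe. The mask must match the capabilities set there (DMA_MEMCPY):
       *
       *	dma_cap_mask_t mask;
       *	struct dma_chan *chan;
       *	u32 slave_id = 3;
       *
       *	dma_cap_zero(mask);
       *	dma_cap_set(DMA_MEMCPY, mask);
       *	chan = dma_request_channel(mask, sprd_dma_filter_fn, &slave_id);
       */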
1077 
1078 static int sprd_dma_probe(struct platform_device *pdev)
1079 {
1080         struct device_node *np = pdev->dev.of_node;
1081         struct sprd_dma_dev *sdev;
1082         struct sprd_dma_chn *dma_chn;
1083         struct resource *res;
1084         u32 chn_count;
1085         int ret, i;
1086 
1087         ret = device_property_read_u32(&pdev->dev, "#dma-channels", &chn_count);
1088         if (ret) {
1089                 dev_err(&pdev->dev, "get dma channels count failed\n");
1090                 return ret;
1091         }
1092 
1093         sdev = devm_kzalloc(&pdev->dev,
1094                             struct_size(sdev, channels, chn_count),
1095                             GFP_KERNEL);
1096         if (!sdev)
1097                 return -ENOMEM;
1098 
1099         sdev->clk = devm_clk_get(&pdev->dev, "enable");
1100         if (IS_ERR(sdev->clk)) {
1101                 dev_err(&pdev->dev, "get enable clock failed\n");
1102                 return PTR_ERR(sdev->clk);
1103         }
1104 
1105         /* ashb clock is optional for AGCP DMA */
1106         sdev->ashb_clk = devm_clk_get(&pdev->dev, "ashb_eb");
1107         if (IS_ERR(sdev->ashb_clk))
1108                 dev_warn(&pdev->dev, "no optional ashb eb clock\n");
1109 
1110         /*
 1111          * We have three DMA controllers: AP DMA, AON DMA and AGCP DMA. The
 1112          * AGCP DMA controller may run without an irq line: not requesting
 1113          * the irq saves system power, since DMA interrupts then cannot
 1114          * resume the system. Thus the DMA "interrupts" property is
 1115          * optional.
1116          */
1117         sdev->irq = platform_get_irq(pdev, 0);
1118         if (sdev->irq > 0) {
1119                 ret = devm_request_irq(&pdev->dev, sdev->irq, dma_irq_handle,
1120                                        0, "sprd_dma", (void *)sdev);
1121                 if (ret < 0) {
1122                         dev_err(&pdev->dev, "request dma irq failed\n");
1123                         return ret;
1124                 }
1125         } else {
1126                 dev_warn(&pdev->dev, "no interrupts for the dma controller\n");
1127         }
1128 
1129         res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1130         sdev->glb_base = devm_ioremap_resource(&pdev->dev, res);
1131         if (IS_ERR(sdev->glb_base))
1132                 return PTR_ERR(sdev->glb_base);
1133 
1134         dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask);
1135         sdev->total_chns = chn_count;
1136         sdev->dma_dev.chancnt = chn_count;
1137         INIT_LIST_HEAD(&sdev->dma_dev.channels);
1138         INIT_LIST_HEAD(&sdev->dma_dev.global_node);
1139         sdev->dma_dev.dev = &pdev->dev;
1140         sdev->dma_dev.device_alloc_chan_resources = sprd_dma_alloc_chan_resources;
1141         sdev->dma_dev.device_free_chan_resources = sprd_dma_free_chan_resources;
1142         sdev->dma_dev.device_tx_status = sprd_dma_tx_status;
1143         sdev->dma_dev.device_issue_pending = sprd_dma_issue_pending;
1144         sdev->dma_dev.device_prep_dma_memcpy = sprd_dma_prep_dma_memcpy;
1145         sdev->dma_dev.device_prep_slave_sg = sprd_dma_prep_slave_sg;
1146         sdev->dma_dev.device_config = sprd_dma_slave_config;
1147         sdev->dma_dev.device_pause = sprd_dma_pause;
1148         sdev->dma_dev.device_resume = sprd_dma_resume;
1149         sdev->dma_dev.device_terminate_all = sprd_dma_terminate_all;
1150 
1151         for (i = 0; i < chn_count; i++) {
1152                 dma_chn = &sdev->channels[i];
1153                 dma_chn->chn_num = i;
1154                 dma_chn->cur_desc = NULL;
1155                 /* get each channel's registers base address. */
1156                 dma_chn->chn_base = sdev->glb_base + SPRD_DMA_CHN_REG_OFFSET +
1157                                     SPRD_DMA_CHN_REG_LENGTH * i;
1158 
1159                 dma_chn->vc.desc_free = sprd_dma_free_desc;
1160                 vchan_init(&dma_chn->vc, &sdev->dma_dev);
1161         }
1162 
1163         platform_set_drvdata(pdev, sdev);
1164         ret = sprd_dma_enable(sdev);
1165         if (ret)
1166                 return ret;
1167 
1168         pm_runtime_set_active(&pdev->dev);
1169         pm_runtime_enable(&pdev->dev);
1170 
1171         ret = pm_runtime_get_sync(&pdev->dev);
1172         if (ret < 0)
1173                 goto err_rpm;
1174 
1175         ret = dma_async_device_register(&sdev->dma_dev);
1176         if (ret < 0) {
1177                 dev_err(&pdev->dev, "register dma device failed:%d\n", ret);
1178                 goto err_register;
1179         }
1180 
1181         sprd_dma_info.dma_cap = sdev->dma_dev.cap_mask;
1182         ret = of_dma_controller_register(np, of_dma_simple_xlate,
1183                                          &sprd_dma_info);
1184         if (ret)
1185                 goto err_of_register;
1186 
1187         pm_runtime_put(&pdev->dev);
1188         return 0;
1189 
1190 err_of_register:
1191         dma_async_device_unregister(&sdev->dma_dev);
1192 err_register:
1193         pm_runtime_put_noidle(&pdev->dev);
1194         pm_runtime_disable(&pdev->dev);
1195 err_rpm:
1196         sprd_dma_disable(sdev);
1197         return ret;
1198 }
1199 
1200 static int sprd_dma_remove(struct platform_device *pdev)
1201 {
1202         struct sprd_dma_dev *sdev = platform_get_drvdata(pdev);
1203         struct sprd_dma_chn *c, *cn;
1204         int ret;
1205 
1206         ret = pm_runtime_get_sync(&pdev->dev);
1207         if (ret < 0)
1208                 return ret;
1209 
1210         /* explicitly free the irq */
1211         if (sdev->irq > 0)
1212                 devm_free_irq(&pdev->dev, sdev->irq, sdev);
1213 
1214         list_for_each_entry_safe(c, cn, &sdev->dma_dev.channels,
1215                                  vc.chan.device_node) {
1216                 list_del(&c->vc.chan.device_node);
1217                 tasklet_kill(&c->vc.task);
1218         }
1219 
1220         of_dma_controller_free(pdev->dev.of_node);
1221         dma_async_device_unregister(&sdev->dma_dev);
1222         sprd_dma_disable(sdev);
1223 
1224         pm_runtime_put_noidle(&pdev->dev);
1225         pm_runtime_disable(&pdev->dev);
1226         return 0;
1227 }
1228 
1229 static const struct of_device_id sprd_dma_match[] = {
1230         { .compatible = "sprd,sc9860-dma", },
1231         {},
1232 };
1233 
1234 static int __maybe_unused sprd_dma_runtime_suspend(struct device *dev)
1235 {
1236         struct sprd_dma_dev *sdev = dev_get_drvdata(dev);
1237 
1238         sprd_dma_disable(sdev);
1239         return 0;
1240 }
1241 
1242 static int __maybe_unused sprd_dma_runtime_resume(struct device *dev)
1243 {
1244         struct sprd_dma_dev *sdev = dev_get_drvdata(dev);
1245         int ret;
1246 
1247         ret = sprd_dma_enable(sdev);
1248         if (ret)
1249                 dev_err(sdev->dma_dev.dev, "enable dma failed\n");
1250 
1251         return ret;
1252 }
1253 
1254 static const struct dev_pm_ops sprd_dma_pm_ops = {
1255         SET_RUNTIME_PM_OPS(sprd_dma_runtime_suspend,
1256                            sprd_dma_runtime_resume,
1257                            NULL)
1258 };
1259 
1260 static struct platform_driver sprd_dma_driver = {
1261         .probe = sprd_dma_probe,
1262         .remove = sprd_dma_remove,
1263         .driver = {
1264                 .name = "sprd-dma",
1265                 .of_match_table = sprd_dma_match,
1266                 .pm = &sprd_dma_pm_ops,
1267         },
1268 };
1269 module_platform_driver(sprd_dma_driver);
1270 
1271 MODULE_LICENSE("GPL v2");
1272 MODULE_DESCRIPTION("DMA driver for Spreadtrum");
1273 MODULE_AUTHOR("Baolin Wang <baolin.wang@spreadtrum.com>");
1274 MODULE_AUTHOR("Eric Long <eric.long@spreadtrum.com>");
1275 MODULE_ALIAS("platform:sprd-dma");
