root/drivers/i2c/busses/i2c-stm32.c


DEFINITIONS

This source file includes the following definitions:
  1. stm32_i2c_dma_request
  2. stm32_i2c_dma_free
  3. stm32_i2c_prep_dma_xfer

// SPDX-License-Identifier: GPL-2.0-only
/*
 * i2c-stm32.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2017
 * Author: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 */

#include "i2c-stm32.h"

/* Functions for DMA support */
struct stm32_i2c_dma *stm32_i2c_dma_request(struct device *dev,
                                            dma_addr_t phy_addr,
                                            u32 txdr_offset,
                                            u32 rxdr_offset)
{
        struct stm32_i2c_dma *dma;
        struct dma_slave_config dma_sconfig;
        int ret;

        dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
        if (!dma)
                return ERR_PTR(-ENOMEM);

        /* Request and configure I2C TX dma channel */
        dma->chan_tx = dma_request_chan(dev, "tx");
        if (IS_ERR(dma->chan_tx)) {
                dev_dbg(dev, "can't request DMA tx channel\n");
                ret = PTR_ERR(dma->chan_tx);
                goto fail_al;
        }

        memset(&dma_sconfig, 0, sizeof(dma_sconfig));
        dma_sconfig.dst_addr = phy_addr + txdr_offset;
        dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        dma_sconfig.dst_maxburst = 1;
        dma_sconfig.direction = DMA_MEM_TO_DEV;
        ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
        if (ret < 0) {
                dev_err(dev, "can't configure tx channel\n");
                goto fail_tx;
        }

        /* Request and configure I2C RX dma channel */
        dma->chan_rx = dma_request_chan(dev, "rx");
        if (IS_ERR(dma->chan_rx)) {
                dev_err(dev, "can't request DMA rx channel\n");
                ret = PTR_ERR(dma->chan_rx);
                goto fail_tx;
        }

        memset(&dma_sconfig, 0, sizeof(dma_sconfig));
        dma_sconfig.src_addr = phy_addr + rxdr_offset;
        dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        dma_sconfig.src_maxburst = 1;
        dma_sconfig.direction = DMA_DEV_TO_MEM;
        ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
        if (ret < 0) {
                dev_err(dev, "can't configure rx channel\n");
                goto fail_rx;
        }

        init_completion(&dma->dma_complete);

        dev_info(dev, "using %s (tx) and %s (rx) for DMA transfers\n",
                 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));

        return dma;

fail_rx:
        dma_release_channel(dma->chan_rx);
fail_tx:
        dma_release_channel(dma->chan_tx);
fail_al:
        devm_kfree(dev, dma);
        dev_info(dev, "can't use DMA\n");

        return ERR_PTR(ret);
}
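
/*
 * Illustrative sketch only (not part of the original file): how a bus
 * driver such as i2c-stm32f7 might call stm32_i2c_dma_request() from its
 * probe path.  The wrapper name, the out-pointer and the TXDR/RXDR
 * offsets below are assumptions made for the example; a real driver takes
 * the offsets from its own register definitions and treats DMA as
 * optional, falling back to interrupt-mode transfers when the channels
 * are unavailable.
 */
static int stm32_i2c_example_setup_dma(struct device *dev,
                                       struct stm32_i2c_dma **out,
                                       dma_addr_t phy_addr)
{
        /* Hypothetical data-register offsets, forwarded to the slave config */
        struct stm32_i2c_dma *dma = stm32_i2c_dma_request(dev, phy_addr,
                                                          0x28, 0x24);

        if (IS_ERR(dma)) {
                /* DMA is optional: keep probing without it unless we must defer */
                *out = NULL;
                return PTR_ERR(dma) == -EPROBE_DEFER ? -EPROBE_DEFER : 0;
        }

        *out = dma;
        return 0;
}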

void stm32_i2c_dma_free(struct stm32_i2c_dma *dma)
{
        dma->dma_buf = 0;
        dma->dma_len = 0;

        dma_release_channel(dma->chan_tx);
        dma->chan_tx = NULL;

        dma_release_channel(dma->chan_rx);
        dma->chan_rx = NULL;

        dma->chan_using = NULL;
}
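
/*
 * Illustrative sketch only (not part of the original file):
 * stm32_i2c_dma_free() is the counterpart of stm32_i2c_dma_request() and
 * would typically be called from the bus driver's remove path, or from a
 * probe error path once DMA has been set up.  The wrapper name below is
 * hypothetical.
 */
static void stm32_i2c_example_release_dma(struct stm32_i2c_dma *dma)
{
        if (dma)
                stm32_i2c_dma_free(dma);
}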

int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
                            bool rd_wr, u32 len, u8 *buf,
                            dma_async_tx_callback callback,
                            void *dma_async_param)
{
        struct dma_async_tx_descriptor *txdesc;
        struct device *chan_dev;
        int ret;

        if (rd_wr) {
                dma->chan_using = dma->chan_rx;
                dma->dma_transfer_dir = DMA_DEV_TO_MEM;
                dma->dma_data_dir = DMA_FROM_DEVICE;
        } else {
                dma->chan_using = dma->chan_tx;
                dma->dma_transfer_dir = DMA_MEM_TO_DEV;
                dma->dma_data_dir = DMA_TO_DEVICE;
        }

        dma->dma_len = len;
        chan_dev = dma->chan_using->device->dev;

        dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,
                                      dma->dma_data_dir);
        if (dma_mapping_error(chan_dev, dma->dma_buf)) {
                dev_err(dev, "DMA mapping failed\n");
                return -EINVAL;
        }

        txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
                                             dma->dma_len,
                                             dma->dma_transfer_dir,
                                             DMA_PREP_INTERRUPT);
        if (!txdesc) {
                dev_err(dev, "Not able to get desc for DMA xfer\n");
                ret = -EINVAL;
                goto err;
        }

        reinit_completion(&dma->dma_complete);

        txdesc->callback = callback;
        txdesc->callback_param = dma_async_param;
        ret = dma_submit_error(dmaengine_submit(txdesc));
        if (ret < 0) {
                dev_err(dev, "DMA submit failed\n");
                goto err;
        }

        dma_async_issue_pending(dma->chan_using);

        return 0;

err:
        dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,
                         dma->dma_data_dir);
        return ret;
}
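
/*
 * Illustrative sketch only (not part of the original file): how a transfer
 * path might pair stm32_i2c_prep_dma_xfer() with the dma_complete
 * completion initialised above.  The callback, the one-second timeout and
 * the function names are assumptions made for the example; a real driver
 * also has to start the I2C transfer and enable its DMA requests between
 * issuing the descriptor and waiting.
 */
static void stm32_i2c_example_dma_callback(void *arg)
{
        struct stm32_i2c_dma *dma = arg;

        /* Called by the DMA engine once the last byte has been transferred */
        complete(&dma->dma_complete);
}

static int stm32_i2c_example_xfer(struct device *dev,
                                  struct stm32_i2c_dma *dma,
                                  bool read, u8 *buf, u32 len)
{
        int ret;

        ret = stm32_i2c_prep_dma_xfer(dev, dma, read, len, buf,
                                      stm32_i2c_example_dma_callback, dma);
        if (ret)
                return ret;

        /* ... start the I2C transfer and enable the TX/RX DMA requests here ... */

        if (!wait_for_completion_timeout(&dma->dma_complete, HZ)) {
                dmaengine_terminate_sync(dma->chan_using);
                ret = -ETIMEDOUT;
        }

        /* The buffer was mapped by stm32_i2c_prep_dma_xfer(); unmap it here */
        dma_unmap_single(dma->chan_using->device->dev, dma->dma_buf,
                         dma->dma_len, dma->dma_data_dir);

        return ret;
}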
