root/drivers/crypto/ux500/cryp/cryp_core.c

DEFINITIONS

This source file includes the following definitions:
  1. uint8p_to_uint32_be
  2. swap_bits_in_byte
  3. swap_words_in_key_and_bits_in_byte
  4. add_session_id
  5. cryp_interrupt_handler
  6. mode_is_aes
  7. cfg_iv
  8. cfg_ivs
  9. set_key
  10. cfg_keys
  11. cryp_setup_context
  12. cryp_get_device_data
  13. cryp_dma_setup_channel
  14. cryp_dma_out_callback
  15. cryp_set_dma_transfer
  16. cryp_dma_done
  17. cryp_dma_write
  18. cryp_dma_read
  19. cryp_polling_mode
  20. cryp_disable_power
  21. cryp_enable_power
  22. hw_crypt_noxts
  23. get_nents
  24. ablk_dma_crypt
  25. ablk_crypt
  26. aes_ablkcipher_setkey
  27. des_ablkcipher_setkey
  28. des3_ablkcipher_setkey
  29. cryp_blk_encrypt
  30. cryp_blk_decrypt
  31. cryp_cra_init
  32. cryp_algs_register_all
  33. cryp_algs_unregister_all
  34. ux500_cryp_probe
  35. ux500_cryp_remove
  36. ux500_cryp_shutdown
  37. ux500_cryp_suspend
  38. ux500_cryp_resume
  39. ux500_cryp_mod_init
  40. ux500_cryp_mod_fini

   1 // SPDX-License-Identifier: GPL-2.0-only
   2 /*
   3  * Copyright (C) ST-Ericsson SA 2010
   4  * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
   5  * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
   6  * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
   7  * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
   8  * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
   9  * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
  10  */
  11 
  12 #include <linux/clk.h>
  13 #include <linux/completion.h>
  14 #include <linux/crypto.h>
  15 #include <linux/dmaengine.h>
  16 #include <linux/err.h>
  17 #include <linux/errno.h>
  18 #include <linux/interrupt.h>
  19 #include <linux/io.h>
  20 #include <linux/irqreturn.h>
  21 #include <linux/klist.h>
  22 #include <linux/module.h>
  23 #include <linux/mod_devicetable.h>
  24 #include <linux/platform_device.h>
  25 #include <linux/regulator/consumer.h>
  26 #include <linux/semaphore.h>
  27 #include <linux/platform_data/dma-ste-dma40.h>
  28 
  29 #include <crypto/aes.h>
  30 #include <crypto/algapi.h>
  31 #include <crypto/ctr.h>
  32 #include <crypto/internal/des.h>
  33 #include <crypto/scatterwalk.h>
  34 
  35 #include <linux/platform_data/crypto-ux500.h>
  36 
  37 #include "cryp_p.h"
  38 #include "cryp.h"
  39 
  40 #define CRYP_MAX_KEY_SIZE       32
  41 #define BYTES_PER_WORD          4
  42 
  43 static int cryp_mode;
  44 static atomic_t session_id;
  45 
  46 static struct stedma40_chan_cfg *mem_to_engine;
  47 static struct stedma40_chan_cfg *engine_to_mem;
  48 
  49 /**
  50  * struct cryp_driver_data - data specific to the driver.
  51  *
  52  * @device_list: A list of registered devices to choose from.
  53  * @device_allocation: A semaphore initialized with the number of devices.
  54  */
  55 struct cryp_driver_data {
  56         struct klist device_list;
  57         struct semaphore device_allocation;
  58 };
  59 
  60 /**
  61  * struct cryp_ctx - Crypto context
  62  * @config: Crypto mode.
  63  * @key: Key buffer (CRYP_MAX_KEY_SIZE bytes).
  64  * @keylen: Length of key.
  65  * @iv: Pointer to initialization vector.
  66  * @indata: Pointer to indata.
  67  * @outdata: Pointer to outdata.
  68  * @datalen: Length of indata.
  69  * @outlen: Length of outdata.
  70  * @blocksize: Size of blocks.
  71  * @updated: Updated flag.
  72  * @dev_ctx: Device dependent context.
  73  * @device: Pointer to the device.
      * @session_id: Session id used to tell whether the saved device
      *              context is still current.
  74  */
  75 struct cryp_ctx {
  76         struct cryp_config config;
  77         u8 key[CRYP_MAX_KEY_SIZE];
  78         u32 keylen;
  79         u8 *iv;
  80         const u8 *indata;
  81         u8 *outdata;
  82         u32 datalen;
  83         u32 outlen;
  84         u32 blocksize;
  85         u8 updated;
  86         struct cryp_device_context dev_ctx;
  87         struct cryp_device_data *device;
  88         u32 session_id;
  89 };
  90 
  91 static struct cryp_driver_data driver_data;
  92 
  93 /**
  94  * uint8p_to_uint32_be - 4*uint8 to uint32 big endian
  95  * @in: Data to convert.
  96  */
  97 static inline u32 uint8p_to_uint32_be(u8 *in)
  98 {
  99         u32 *data = (u32 *)in;
 100 
 101         return cpu_to_be32p(data);
 102 }
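
     /*
      * Example (illustrative): on the little-endian CPUs this driver runs
      * on, the byte sequence {0x01, 0x02, 0x03, 0x04} is read as the u32
      * 0x04030201 and byte-swapped by cpu_to_be32p(), so the value
      * returned here is 0x01020304, i.e. the first key byte ends up in
      * the most significant byte of the register word.
      */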
 103 
 104 /**
 105  * swap_bits_in_byte - mirror the bits in a byte
 106  * @b: the byte to be mirrored
 107  *
 108  * The bits are swapped the following way:
 109  *  Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
 110  *  nibble 2 (n2) bits 4-7.
 111  *
 112  *  Nibble 1 (n1):
 113  *  (The "old" (moved) bit is replaced with a zero)
 114  *  1. Move bit 6 and 7, 4 positions to the left.
 115  *  2. Move bit 3 and 5, 2 positions to the left.
 116  *  3. Move bit 1-4, 1 position to the left.
 117  *
 118  *  Nibble 2 (n2):
 119  *  1. Move bit 0 and 1, 4 positions to the right.
 120  *  2. Move bit 2 and 4, 2 positions to the right.
 121  *  3. Move bit 3-6, 1 position to the right.
 122  *
 123  *  Combine the two nibbles to a complete and swapped byte.
 124  */
 125 
 126 static inline u8 swap_bits_in_byte(u8 b)
 127 {
 128 #define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
 129 #define R_SHIFT_2_MASK  0x28 /* (After right shift 4) Bits 3 and 5,
 130                                   right shift 2 */
 131 #define R_SHIFT_1_MASK  0x1e /* (After right shift 2) Bits 1-4,
 132                                   right shift 1 */
 133 #define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
 134 #define L_SHIFT_2_MASK  0x14 /* (After left shift 4) Bits 2 and 4,
 135                                   left shift 2 */
 136 #define L_SHIFT_1_MASK  0x78 /* (After left shift 1) Bits 3-6,
 137                                   left shift 1 */
 138 
 139         u8 n1;
 140         u8 n2;
 141 
 142         /* Swap most significant nibble */
 143         /* Right shift 4, bits 6 and 7 */
 144         n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
 145         /* Right shift 2, bits 3 and 5 */
 146         n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
 147         /* Right shift 1, bits 1-4 */
 148         n1 = (n1  & R_SHIFT_1_MASK) >> 1;
 149 
 150         /* Swap least significant nibble */
 151         /* Left shift 4, bits 0 and 1 */
 152         n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
 153         /* Left shift 2, bits 2 and 4 */
 154         n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
 155         /* Left shift 1, bits 3-6 */
 156         n2 = (n2  & L_SHIFT_1_MASK) << 1;
 157 
 158         return n1 | n2;
 159 }
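
     /*
      * Worked example (illustrative): the net effect of the masks above
      * is a full bit mirror, mapping bit i to bit (7 - i). For b = 0xb2
      * (10110010b) the three n1 steps yield 0x0d and the three n2 steps
      * yield 0x40, giving 0x4d (01001101b); likewise
      * swap_bits_in_byte(0x01) == 0x80.
      */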
 160 
 161 static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
 162                                                       u8 *out, u32 len)
 163 {
 164         unsigned int i = 0;
 165         int j;
 166         int index = 0;
 167 
 168         j = len - BYTES_PER_WORD;
 169         while (j >= 0) {
 170                 for (i = 0; i < BYTES_PER_WORD; i++) {
 171                         index = len - j - BYTES_PER_WORD + i;
 172                         out[j + i] =
 173                                 swap_bits_in_byte(in[index]);
 174                 }
 175                 j -= BYTES_PER_WORD;
 176         }
 177 }
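
     /*
      * Illustration: for a 16-byte AES key the 32-bit words are emitted
      * in reverse order while the byte order inside each word is kept,
      * and every byte is bit-mirrored: out[12..15] = mirror(in[0..3]),
      * out[8..11] = mirror(in[4..7]), and so on. This is the key layout
      * that cfg_keys() programs into the hardware for the AES modes.
      */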
 178 
 179 static void add_session_id(struct cryp_ctx *ctx)
 180 {
 181         /*
 182          * We never want 0 to be a valid value, since this is the default value
 183          * for the software context.
 184          */
 185         if (unlikely(atomic_inc_and_test(&session_id)))
 186                 atomic_inc(&session_id);
 187 
 188         ctx->session_id = atomic_read(&session_id);
 189 }
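
     /*
      * Note: atomic_inc_and_test() returns true exactly when the counter
      * wraps to zero, so the extra atomic_inc() above makes the session
      * id skip the reserved value 0 and continue from 1.
      */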
 190 
 191 static irqreturn_t cryp_interrupt_handler(int irq, void *param)
 192 {
 193         struct cryp_ctx *ctx;
 194         int count;
 195         struct cryp_device_data *device_data;
 196 
 197         if (param == NULL) {
 198                 BUG_ON(!param);
 199                 return IRQ_HANDLED;
 200         }
 201 
 202         /* The device is coming from the one found in hw_crypt_noxts. */
 203         device_data = (struct cryp_device_data *)param;
 204 
 205         ctx = device_data->current_ctx;
 206 
 207         if (ctx == NULL) {
 208                 BUG_ON(!ctx);
 209                 return IRQ_HANDLED;
 210         }
 211 
 212         dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
 213                 cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
 214                 "out" : "in");
 215 
 216         if (cryp_pending_irq_src(device_data,
 217                                  CRYP_IRQ_SRC_OUTPUT_FIFO)) {
 218                 if (ctx->outlen / ctx->blocksize > 0) {
 219                         count = ctx->blocksize / 4;
 220 
 221                         readsl(&device_data->base->dout, ctx->outdata, count);
 222                         ctx->outdata += count * BYTES_PER_WORD;
 223                         ctx->outlen -= count * BYTES_PER_WORD;
 224 
 225                         if (ctx->outlen == 0) {
 226                                 cryp_disable_irq_src(device_data,
 227                                                      CRYP_IRQ_SRC_OUTPUT_FIFO);
 228                         }
 229                 }
 230         } else if (cryp_pending_irq_src(device_data,
 231                                         CRYP_IRQ_SRC_INPUT_FIFO)) {
 232                 if (ctx->datalen / ctx->blocksize > 0) {
 233                         count = ctx->blocksize / 4;
 234 
 235                         writesl(&device_data->base->din, ctx->indata, count);
 236 
 237                         ctx->indata += count * BYTES_PER_WORD;
 238                         ctx->datalen -= count * BYTES_PER_WORD;
 239 
 240                         if (ctx->datalen == 0)
 241                                 cryp_disable_irq_src(device_data,
 242                                                    CRYP_IRQ_SRC_INPUT_FIFO);
 243 
 244                         if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
 245                                 CRYP_PUT_BITS(&device_data->base->cr,
 246                                               CRYP_START_ENABLE,
 247                                               CRYP_CR_START_POS,
 248                                               CRYP_CR_START_MASK);
 249 
 250                                 cryp_wait_until_done(device_data);
 251                         }
 252                 }
 253         }
 254 
 255         return IRQ_HANDLED;
 256 }
 257 
 258 static int mode_is_aes(enum cryp_algo_mode mode)
 259 {
 260         return  CRYP_ALGO_AES_ECB == mode ||
 261                 CRYP_ALGO_AES_CBC == mode ||
 262                 CRYP_ALGO_AES_CTR == mode ||
 263                 CRYP_ALGO_AES_XTS == mode;
 264 }
 265 
 266 static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
 267                   enum cryp_init_vector_index index)
 268 {
 269         struct cryp_init_vector_value vector_value;
 270 
 271         dev_dbg(device_data->dev, "[%s]", __func__);
 272 
 273         vector_value.init_value_left = left;
 274         vector_value.init_value_right = right;
 275 
 276         return cryp_configure_init_vector(device_data,
 277                                           index,
 278                                           vector_value);
 279 }
 280 
 281 static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
 282 {
 283         int i;
 284         int status = 0;
 285         int num_of_regs = ctx->blocksize / 8;
 286         u32 iv[AES_BLOCK_SIZE / 4];
 287 
 288         dev_dbg(device_data->dev, "[%s]", __func__);
 289 
 290         /*
 291          * Since we loop on num_of_regs we need to have a check in case
 292          * someone provides an incorrect blocksize which would force calling
 293          * cfg_iv with i greater than 2 which is an error.
 294          */
 295         if (num_of_regs > 2) {
 296                 dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
 297                         __func__, ctx->blocksize);
 298                 return -EINVAL;
 299         }
 300 
 301         for (i = 0; i < ctx->blocksize / 4; i++)
 302                 iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);
 303 
 304         for (i = 0; i < num_of_regs; i++) {
 305                 status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
 306                                 (enum cryp_init_vector_index) i);
 307                 if (status != 0)
 308                         return status;
 309         }
 310         return status;
 311 }
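
     /*
      * Example (illustrative): a 16-byte AES IV gives num_of_regs = 2, so
      * iv[0]/iv[1] are written to IV register pair 0 and iv[2]/iv[3] to
      * pair 1; an 8-byte DES IV programs pair 0 only.
      */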
 312 
 313 static int set_key(struct cryp_device_data *device_data,
 314                    u32 left_key,
 315                    u32 right_key,
 316                    enum cryp_key_reg_index index)
 317 {
 318         struct cryp_key_value key_value;
 319         int cryp_error;
 320 
 321         dev_dbg(device_data->dev, "[%s]", __func__);
 322 
 323         key_value.key_value_left = left_key;
 324         key_value.key_value_right = right_key;
 325 
 326         cryp_error = cryp_configure_key_values(device_data,
 327                                                index,
 328                                                key_value);
 329         if (cryp_error != 0)
 330                 dev_err(device_data->dev, "[%s]: "
 331                         "cryp_configure_key_values() failed!", __func__);
 332 
 333         return cryp_error;
 334 }
 335 
 336 static int cfg_keys(struct cryp_ctx *ctx)
 337 {
 338         int i;
 339         int num_of_regs = ctx->keylen / 8;
 340         u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
 341         int cryp_error = 0;
 342 
 343         dev_dbg(ctx->device->dev, "[%s]", __func__);
 344 
 345         if (mode_is_aes(ctx->config.algomode)) {
 346                 swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
 347                                                    (u8 *)swapped_key,
 348                                                    ctx->keylen);
 349         } else {
 350                 for (i = 0; i < ctx->keylen / 4; i++)
 351                         swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
 352         }
 353 
 354         for (i = 0; i < num_of_regs; i++) {
 355                 cryp_error = set_key(ctx->device,
 356                                      swapped_key[i * 2],
 357                                      swapped_key[i * 2 + 1],
 358                                      (enum cryp_key_reg_index) i);
 359 
 360                 if (cryp_error != 0) {
 361                         dev_err(ctx->device->dev, "[%s]: set_key() failed!",
 362                                         __func__);
 363                         return cryp_error;
 364                 }
 365         }
 366         return cryp_error;
 367 }
 368 
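     /*
      * Overview of the context handling below: a fresh transform
      * (updated == 0) gets its keys and, for the non-ECB modes, its IVs
      * programmed and is given a new session id; a transform that has run
      * before but whose session id no longer matches the global counter
      * (another context used the hardware in between) has its saved
      * register context restored; otherwise the saved control register is
      * simply reused.
      */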
 369 static int cryp_setup_context(struct cryp_ctx *ctx,
 370                               struct cryp_device_data *device_data)
 371 {
 372         u32 control_register = CRYP_CR_DEFAULT;
 373 
 374         switch (cryp_mode) {
 375         case CRYP_MODE_INTERRUPT:
 376                 writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
 377                 break;
 378 
 379         case CRYP_MODE_DMA:
 380                 writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
 381                 break;
 382 
 383         default:
 384                 break;
 385         }
 386 
 387         if (ctx->updated == 0) {
 388                 cryp_flush_inoutfifo(device_data);
 389                 if (cfg_keys(ctx) != 0) {
 390                         dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
 391                                 __func__);
 392                         return -EINVAL;
 393                 }
 394 
 395                 if (ctx->iv &&
 396                     CRYP_ALGO_AES_ECB != ctx->config.algomode &&
 397                     CRYP_ALGO_DES_ECB != ctx->config.algomode &&
 398                     CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
 399                         if (cfg_ivs(device_data, ctx) != 0)
 400                                 return -EPERM;
 401                 }
 402 
 403                 cryp_set_configuration(device_data, &ctx->config,
 404                                        &control_register);
 405                 add_session_id(ctx);
 406         } else if (ctx->updated == 1 &&
 407                    ctx->session_id != atomic_read(&session_id)) {
 408                 cryp_flush_inoutfifo(device_data);
 409                 cryp_restore_device_context(device_data, &ctx->dev_ctx);
 410 
 411                 add_session_id(ctx);
 412                 control_register = ctx->dev_ctx.cr;
 413         } else
 414                 control_register = ctx->dev_ctx.cr;
 415 
 416         writel(control_register |
 417                (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
 418                &device_data->base->cr);
 419 
 420         return 0;
 421 }
 422 
 423 static int cryp_get_device_data(struct cryp_ctx *ctx,
 424                                 struct cryp_device_data **device_data)
 425 {
 426         int ret;
 427         struct klist_iter device_iterator;
 428         struct klist_node *device_node;
 429         struct cryp_device_data *local_device_data = NULL;
 430         pr_debug(DEV_DBG_NAME " [%s]", __func__);
 431 
 432         /* Wait until a device is available */
 433         ret = down_interruptible(&driver_data.device_allocation);
 434         if (ret)
 435                 return ret;  /* Interrupted */
 436 
 437         /* Select a device */
 438         klist_iter_init(&driver_data.device_list, &device_iterator);
 439 
 440         device_node = klist_next(&device_iterator);
 441         while (device_node) {
 442                 local_device_data = container_of(device_node,
 443                                            struct cryp_device_data, list_node);
 444                 spin_lock(&local_device_data->ctx_lock);
 445                 /* current_ctx allocates a device, NULL = unallocated */
 446                 if (local_device_data->current_ctx) {
 447                         device_node = klist_next(&device_iterator);
 448                 } else {
 449                         local_device_data->current_ctx = ctx;
 450                         ctx->device = local_device_data;
 451                         spin_unlock(&local_device_data->ctx_lock);
 452                         break;
 453                 }
 454                 spin_unlock(&local_device_data->ctx_lock);
 455         }
 456         klist_iter_exit(&device_iterator);
 457 
 458         if (!device_node) {
 459                 /*
 460                  * No free device found.
 461                  * Since we allocated a device with down_interruptible, this
 462                  * should not be able to happen.
 463                  * Number of available devices, which are contained in
 464                  * device_allocation, is therefore decremented by not doing
 465                  * an up(device_allocation).
 466                  */
 467                 return -EBUSY;
 468         }
 469 
 470         *device_data = local_device_data;
 471 
 472         return 0;
 473 }
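
     /*
      * Usage note: every successful cryp_get_device_data() call must be
      * paired with an up() on driver_data.device_allocation once the
      * caller releases the device again, as done at the end of
      * ablk_crypt() and ablk_dma_crypt(); the semaphore counts free
      * devices and the klist scan binds the context to one of them.
      */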
 474 
 475 static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
 476                                    struct device *dev)
 477 {
 478         struct dma_slave_config mem2cryp = {
 479                 .direction = DMA_MEM_TO_DEV,
 480                 .dst_addr = device_data->phybase + CRYP_DMA_TX_FIFO,
 481                 .dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
 482                 .dst_maxburst = 4,
 483         };
 484         struct dma_slave_config cryp2mem = {
 485                 .direction = DMA_DEV_TO_MEM,
 486                 .src_addr = device_data->phybase + CRYP_DMA_RX_FIFO,
 487                 .src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
 488                 .src_maxburst = 4,
 489         };
 490 
 491         dma_cap_zero(device_data->dma.mask);
 492         dma_cap_set(DMA_SLAVE, device_data->dma.mask);
 493 
 494         device_data->dma.cfg_mem2cryp = mem_to_engine;
 495         device_data->dma.chan_mem2cryp =
 496                 dma_request_channel(device_data->dma.mask,
 497                                     stedma40_filter,
 498                                     device_data->dma.cfg_mem2cryp);
 499 
 500         device_data->dma.cfg_cryp2mem = engine_to_mem;
 501         device_data->dma.chan_cryp2mem =
 502                 dma_request_channel(device_data->dma.mask,
 503                                     stedma40_filter,
 504                                     device_data->dma.cfg_cryp2mem);
 505 
 506         dmaengine_slave_config(device_data->dma.chan_mem2cryp, &mem2cryp);
 507         dmaengine_slave_config(device_data->dma.chan_cryp2mem, &cryp2mem);
 508 
 509         init_completion(&device_data->dma.cryp_dma_complete);
 510 }
 511 
 512 static void cryp_dma_out_callback(void *data)
 513 {
 514         struct cryp_ctx *ctx = (struct cryp_ctx *) data;
 515         dev_dbg(ctx->device->dev, "[%s]: ", __func__);
 516 
 517         complete(&ctx->device->dma.cryp_dma_complete);
 518 }
 519 
 520 static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
 521                                  struct scatterlist *sg,
 522                                  int len,
 523                                  enum dma_data_direction direction)
 524 {
 525         struct dma_async_tx_descriptor *desc;
 526         struct dma_chan *channel = NULL;
 527         dma_cookie_t cookie;
 528 
 529         dev_dbg(ctx->device->dev, "[%s]: ", __func__);
 530 
 531         if (unlikely(!IS_ALIGNED((unsigned long)sg, 4))) {
 532                 dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
 533                         "aligned! Addr: 0x%08lx", __func__, (unsigned long)sg);
 534                 return -EFAULT;
 535         }
 536 
 537         switch (direction) {
 538         case DMA_TO_DEVICE:
 539                 channel = ctx->device->dma.chan_mem2cryp;
 540                 ctx->device->dma.sg_src = sg;
 541                 ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
 542                                                  ctx->device->dma.sg_src,
 543                                                  ctx->device->dma.nents_src,
 544                                                  direction);
 545 
 546                 if (!ctx->device->dma.sg_src_len) {
 547                         dev_dbg(ctx->device->dev,
 548                                 "[%s]: Could not map the sg list (TO_DEVICE)",
 549                                 __func__);
 550                         return -EFAULT;
 551                 }
 552 
 553                 dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
 554                         "(TO_DEVICE)", __func__);
 555 
 556                 desc = dmaengine_prep_slave_sg(channel,
 557                                 ctx->device->dma.sg_src,
 558                                 ctx->device->dma.sg_src_len,
 559                                 DMA_MEM_TO_DEV, DMA_CTRL_ACK);
                     if (!desc) {
                             dev_dbg(ctx->device->dev,
                                     "[%s]: Could not prepare the sg list (TO_DEVICE)",
                                     __func__);
                             return -EFAULT;
                     }
 560                 break;
 561 
 562         case DMA_FROM_DEVICE:
 563                 channel = ctx->device->dma.chan_cryp2mem;
 564                 ctx->device->dma.sg_dst = sg;
 565                 ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
 566                                                  ctx->device->dma.sg_dst,
 567                                                  ctx->device->dma.nents_dst,
 568                                                  direction);
 569 
 570                 if (!ctx->device->dma.sg_dst_len) {
 571                         dev_dbg(ctx->device->dev,
 572                                 "[%s]: Could not map the sg list (FROM_DEVICE)",
 573                                 __func__);
 574                         return -EFAULT;
 575                 }
 576 
 577                 dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
 578                         "(FROM_DEVICE)", __func__);
 579 
 580                 desc = dmaengine_prep_slave_sg(channel,
 581                                 ctx->device->dma.sg_dst,
 582                                 ctx->device->dma.sg_dst_len,
 583                                 DMA_DEV_TO_MEM,
 584                                 DMA_CTRL_ACK |
 585                                 DMA_PREP_INTERRUPT);
                     if (!desc) {
                             dev_dbg(ctx->device->dev,
                                     "[%s]: Could not prepare the sg list (FROM_DEVICE)",
                                     __func__);
                             return -EFAULT;
                     }
 586 
 587                 desc->callback = cryp_dma_out_callback;
 588                 desc->callback_param = ctx;
 589                 break;
 590 
 591         default:
 592                 dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
 593                         __func__);
 594                 return -EFAULT;
 595         }
 596 
 597         cookie = dmaengine_submit(desc);
 598         if (dma_submit_error(cookie)) {
 599                 dev_dbg(ctx->device->dev, "[%s]: DMA submission failed\n",
 600                         __func__);
 601                 return cookie;
 602         }
 603 
 604         dma_async_issue_pending(channel);
 605 
 606         return 0;
 607 }
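
     /*
      * Note: only the DMA_DEV_TO_MEM descriptor asks for an interrupt
      * (DMA_PREP_INTERRUPT) and carries cryp_dma_out_callback(), so the
      * completion of a transfer is signalled by the outbound channel
      * alone; ablk_dma_crypt() waits on cryp_dma_complete accordingly.
      */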
 608 
 609 static void cryp_dma_done(struct cryp_ctx *ctx)
 610 {
 611         struct dma_chan *chan;
 612 
 613         dev_dbg(ctx->device->dev, "[%s]: ", __func__);
 614 
 615         chan = ctx->device->dma.chan_mem2cryp;
 616         dmaengine_terminate_all(chan);
 617         dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
 618                      ctx->device->dma.sg_src_len, DMA_TO_DEVICE);
 619 
 620         chan = ctx->device->dma.chan_cryp2mem;
 621         dmaengine_terminate_all(chan);
 622         dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
 623                      ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
 624 }
 625 
 626 static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
 627                           int len)
 628 {
 629         int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
 630         dev_dbg(ctx->device->dev, "[%s]: ", __func__);
 631 
 632         if (error) {
 633                 dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
 634                         "failed", __func__);
 635                 return error;
 636         }
 637 
 638         return len;
 639 }
 640 
 641 static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
 642 {
 643         int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
 644         if (error) {
 645                 dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
 646                         "failed", __func__);
 647                 return error;
 648         }
 649 
 650         return len;
 651 }
 652 
 653 static void cryp_polling_mode(struct cryp_ctx *ctx,
 654                               struct cryp_device_data *device_data)
 655 {
 656         int len = ctx->blocksize / BYTES_PER_WORD;
 657         int remaining_length = ctx->datalen;
 658         u32 *indata = (u32 *)ctx->indata;
 659         u32 *outdata = (u32 *)ctx->outdata;
 660 
 661         while (remaining_length > 0) {
 662                 writesl(&device_data->base->din, indata, len);
 663                 indata += len;
 664                 remaining_length -= (len * BYTES_PER_WORD);
 665                 cryp_wait_until_done(device_data);
 666 
 667                 readsl(&device_data->base->dout, outdata, len);
 668                 outdata += len;
 669                 cryp_wait_until_done(device_data);
 670         }
 671 }
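
     /*
      * Illustration: with AES (blocksize 16) each loop iteration pushes
      * four 32-bit words into the IN FIFO, waits for the engine, then
      * pops four words from the OUT FIFO, so remaining_length shrinks by
      * 16 bytes per block.
      */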
 672 
 673 static int cryp_disable_power(struct device *dev,
 674                               struct cryp_device_data *device_data,
 675                               bool save_device_context)
 676 {
 677         int ret = 0;
 678 
 679         dev_dbg(dev, "[%s]", __func__);
 680 
 681         spin_lock(&device_data->power_state_spinlock);
 682         if (!device_data->power_state)
 683                 goto out;
 684 
 685         spin_lock(&device_data->ctx_lock);
 686         if (save_device_context && device_data->current_ctx) {
 687                 cryp_save_device_context(device_data,
 688                                 &device_data->current_ctx->dev_ctx,
 689                                 cryp_mode);
 690                 device_data->restore_dev_ctx = true;
 691         }
 692         spin_unlock(&device_data->ctx_lock);
 693 
 694         clk_disable(device_data->clk);
 695         ret = regulator_disable(device_data->pwr_regulator);
 696         if (ret)
 697                 dev_err(dev, "[%s]: "
 698                                 "regulator_disable() failed!",
 699                                 __func__);
 700 
 701         device_data->power_state = false;
 702 
 703 out:
 704         spin_unlock(&device_data->power_state_spinlock);
 705 
 706         return ret;
 707 }
 708 
 709 static int cryp_enable_power(
 710                 struct device *dev,
 711                 struct cryp_device_data *device_data,
 712                 bool restore_device_context)
 713 {
 714         int ret = 0;
 715 
 716         dev_dbg(dev, "[%s]", __func__);
 717 
 718         spin_lock(&device_data->power_state_spinlock);
 719         if (!device_data->power_state) {
 720                 ret = regulator_enable(device_data->pwr_regulator);
 721                 if (ret) {
 722                         dev_err(dev, "[%s]: regulator_enable() failed!",
 723                                         __func__);
 724                         goto out;
 725                 }
 726 
 727                 ret = clk_enable(device_data->clk);
 728                 if (ret) {
 729                         dev_err(dev, "[%s]: clk_enable() failed!",
 730                                         __func__);
 731                         regulator_disable(device_data->pwr_regulator);
 732                         goto out;
 733                 }
 734                 device_data->power_state = true;
 735         }
 736 
 737         if (device_data->restore_dev_ctx) {
 738                 spin_lock(&device_data->ctx_lock);
 739                 if (restore_device_context && device_data->current_ctx) {
 740                         device_data->restore_dev_ctx = false;
 741                         cryp_restore_device_context(device_data,
 742                                         &device_data->current_ctx->dev_ctx);
 743                 }
 744                 spin_unlock(&device_data->ctx_lock);
 745         }
 746 out:
 747         spin_unlock(&device_data->power_state_spinlock);
 748 
 749         return ret;
 750 }
 751 
 752 static int hw_crypt_noxts(struct cryp_ctx *ctx,
 753                           struct cryp_device_data *device_data)
 754 {
 755         int ret = 0;
 756 
 757         const u8 *indata = ctx->indata;
 758         u8 *outdata = ctx->outdata;
 759         u32 datalen = ctx->datalen;
 760         u32 outlen = datalen;
 761 
 762         pr_debug(DEV_DBG_NAME " [%s]", __func__);
 763 
 764         ctx->outlen = ctx->datalen;
 765 
 766         if (unlikely(!IS_ALIGNED((unsigned long)indata, 4))) {
 767                 pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
 768                          "0x%08lx", __func__, (unsigned long)indata);
 769                 return -EINVAL;
 770         }
 771 
 772         ret = cryp_setup_context(ctx, device_data);
 773 
 774         if (ret)
 775                 goto out;
 776 
 777         if (cryp_mode == CRYP_MODE_INTERRUPT) {
 778                 cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
 779                                     CRYP_IRQ_SRC_OUTPUT_FIFO);
 780 
 781                 /*
 782                  * ctx->outlen is decremented in the cryp_interrupt_handler
 783                  * function. We had to add cpu_relax() (barrier) to make sure
 784                  * that gcc didn't optimize away this variable.
 785                  */
 786                 while (ctx->outlen > 0)
 787                         cpu_relax();
 788         } else if (cryp_mode == CRYP_MODE_POLLING ||
 789                    cryp_mode == CRYP_MODE_DMA) {
 790                 /*
 791                  * DMA also ends up in this branch when cryp_mode ==
 792                  * CRYP_MODE_DMA (2): the separate DMA routines are only used
 793                  * for cipher-/plaintext larger than the blocksize, while the
 794                  * plain CRYPTO_ALG_TYPE_CIPHER case keeps using polling mode,
 795                  * since the overhead of setting up DMA would eat up the
 796                  * benefit of using it.
 797                  */
 798                 cryp_polling_mode(ctx, device_data);
 799         } else {
 800                 dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
 801                         __func__);
 802                 ret = -EPERM;
 803                 goto out;
 804         }
 805 
 806         cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
 807         ctx->updated = 1;
 808 
 809 out:
 810         ctx->indata = indata;
 811         ctx->outdata = outdata;
 812         ctx->datalen = datalen;
 813         ctx->outlen = outlen;
 814 
 815         return ret;
 816 }
 817 
 818 static int get_nents(struct scatterlist *sg, int nbytes)
 819 {
 820         int nents = 0;
 821 
 822         while (nbytes > 0) {
 823                 nbytes -= sg->length;
 824                 sg = sg_next(sg);
 825                 nents++;
 826         }
 827 
 828         return nents;
 829 }
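
     /*
      * Note: get_nents() assumes the scatterlist covers at least nbytes;
      * it only counts how many entries are needed, and the result is
      * later handed to dma_map_sg() via dma.nents_src/dma.nents_dst in
      * ablk_dma_crypt().
      */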
 830 
 831 static int ablk_dma_crypt(struct ablkcipher_request *areq)
 832 {
 833         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
 834         struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
 835         struct cryp_device_data *device_data;
 836 
 837         int bytes_written = 0;
 838         int bytes_read = 0;
 839         int ret;
 840 
 841         pr_debug(DEV_DBG_NAME " [%s]", __func__);
 842 
 843         ctx->datalen = areq->nbytes;
 844         ctx->outlen = areq->nbytes;
 845 
 846         ret = cryp_get_device_data(ctx, &device_data);
 847         if (ret)
 848                 return ret;
 849 
 850         ret = cryp_setup_context(ctx, device_data);
 851         if (ret)
 852                 goto out;
 853 
 854         /* We have the device now, so store the nents in the dma struct. */
 855         ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
 856         ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);
 857 
 858         /* Enable DMA in- and output. */
 859         cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);
 860 
 861         bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
 862         bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);
 863 
 864         wait_for_completion(&ctx->device->dma.cryp_dma_complete);
 865         cryp_dma_done(ctx);
 866 
 867         cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
 868         ctx->updated = 1;
 869 
 870 out:
 871         spin_lock(&device_data->ctx_lock);
 872         device_data->current_ctx = NULL;
 873         ctx->device = NULL;
 874         spin_unlock(&device_data->ctx_lock);
 875 
 876         /*
 877          * The down_interruptible part for this semaphore is called in
 878          * cryp_get_device_data.
 879          */
 880         up(&driver_data.device_allocation);
 881 
 882         if (unlikely(bytes_written != bytes_read))
 883                 return -EPERM;
 884 
 885         return 0;
 886 }
 887 
 888 static int ablk_crypt(struct ablkcipher_request *areq)
 889 {
 890         struct ablkcipher_walk walk;
 891         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
 892         struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
 893         struct cryp_device_data *device_data;
 894         unsigned long src_paddr;
 895         unsigned long dst_paddr;
 896         int ret;
 897         int nbytes;
 898 
 899         pr_debug(DEV_DBG_NAME " [%s]", __func__);
 900 
 901         ret = cryp_get_device_data(ctx, &device_data);
 902         if (ret)
 903                 return ret;
 904 
 905         ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
 906         ret = ablkcipher_walk_phys(areq, &walk);
 907 
 908         if (ret) {
 909                 pr_err(DEV_DBG_NAME "[%s]: ablkcipher_walk_phys() failed!",
 910                         __func__);
 911                 goto out;
 912         }
 913 
 914         while ((nbytes = walk.nbytes) > 0) {
 915                 ctx->iv = walk.iv;
 916                 src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
 917                 ctx->indata = phys_to_virt(src_paddr);
 918 
 919                 dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
 920                 ctx->outdata = phys_to_virt(dst_paddr);
 921 
 922                 ctx->datalen = nbytes - (nbytes % ctx->blocksize);
 923 
 924                 ret = hw_crypt_noxts(ctx, device_data);
 925                 if (ret)
 926                         goto out;
 927 
 928                 nbytes -= ctx->datalen;
 929                 ret = ablkcipher_walk_done(areq, &walk, nbytes);
 930                 if (ret)
 931                         goto out;
 932         }
 933         ablkcipher_walk_complete(&walk);
 934 
 935 out:
 936         /* Release the device */
 937         spin_lock(&device_data->ctx_lock);
 938         device_data->current_ctx = NULL;
 939         ctx->device = NULL;
 940         spin_unlock(&device_data->ctx_lock);
 941 
 942         /*
 943          * The down_interruptible part for this semaphore is called in
 944          * cryp_get_device_data.
 945          */
 946         up(&driver_data.device_allocation);
 947 
 948         return ret;
 949 }
 950 
 951 static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
 952                                  const u8 *key, unsigned int keylen)
 953 {
 954         struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
 955         u32 *flags = &cipher->base.crt_flags;
 956 
 957         pr_debug(DEV_DBG_NAME " [%s]", __func__);
 958 
 959         switch (keylen) {
 960         case AES_KEYSIZE_128:
 961                 ctx->config.keysize = CRYP_KEY_SIZE_128;
 962                 break;
 963 
 964         case AES_KEYSIZE_192:
 965                 ctx->config.keysize = CRYP_KEY_SIZE_192;
 966                 break;
 967 
 968         case AES_KEYSIZE_256:
 969                 ctx->config.keysize = CRYP_KEY_SIZE_256;
 970                 break;
 971 
 972         default:
 973                 pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
 974                 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 975                 return -EINVAL;
 976         }
 977 
 978         memcpy(ctx->key, key, keylen);
 979         ctx->keylen = keylen;
 980 
 981         ctx->updated = 0;
 982 
 983         return 0;
 984 }
 985 
 986 static int des_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
 987                                  const u8 *key, unsigned int keylen)
 988 {
 989         struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
 990         int err;
 991 
 992         pr_debug(DEV_DBG_NAME " [%s]", __func__);
 993 
 994         err = verify_ablkcipher_des_key(cipher, key);
 995         if (err)
 996                 return err;
 997 
 998         memcpy(ctx->key, key, keylen);
 999         ctx->keylen = keylen;
1000 
1001         ctx->updated = 0;
1002         return 0;
1003 }
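
     /*
      * Note: verify_ablkcipher_des_key() already rejects keys of the
      * wrong length and, when the tfm has CRYPTO_TFM_REQ_FORBID_WEAK_KEYS
      * set, weak DES keys, so no extra checks are needed here (the same
      * holds for the DES3 variant below).
      */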
1004 
1005 static int des3_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
1006                                   const u8 *key, unsigned int keylen)
1007 {
1008         struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1009         int err;
1010 
1011         pr_debug(DEV_DBG_NAME " [%s]", __func__);
1012 
1013         err = verify_ablkcipher_des3_key(cipher, key);
1014         if (err)
1015                 return err;
1016 
1017         memcpy(ctx->key, key, keylen);
1018         ctx->keylen = keylen;
1019 
1020         ctx->updated = 0;
1021         return 0;
1022 }
1023 
1024 static int cryp_blk_encrypt(struct ablkcipher_request *areq)
1025 {
1026         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1027         struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1028 
1029         pr_debug(DEV_DBG_NAME " [%s]", __func__);
1030 
1031         ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;
1032 
1033         /* DMA does not work for DES due to a hw bug */
1035         if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
1036                 return ablk_dma_crypt(areq);
1037 
1038         /* For everything except DMA, we run the non DMA version. */
1039         return ablk_crypt(areq);
1040 }
1041 
1042 static int cryp_blk_decrypt(struct ablkcipher_request *areq)
1043 {
1044         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1045         struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1046 
1047         pr_debug(DEV_DBG_NAME " [%s]", __func__);
1048 
1049         ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;
1050 
1051         /* DMA does not work for DES due to a hw bug */
1052         if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
1053                 return ablk_dma_crypt(areq);
1054 
1055         /* For everything except DMA, we run the non DMA version. */
1056         return ablk_crypt(areq);
1057 }
1058 
1059 struct cryp_algo_template {
1060         enum cryp_algo_mode algomode;
1061         struct crypto_alg crypto;
1062 };
1063 
1064 static int cryp_cra_init(struct crypto_tfm *tfm)
1065 {
1066         struct cryp_ctx *ctx = crypto_tfm_ctx(tfm);
1067         struct crypto_alg *alg = tfm->__crt_alg;
1068         struct cryp_algo_template *cryp_alg = container_of(alg,
1069                         struct cryp_algo_template,
1070                         crypto);
1071 
1072         ctx->config.algomode = cryp_alg->algomode;
1073         ctx->blocksize = crypto_tfm_alg_blocksize(tfm);
1074 
1075         return 0;
1076 }
1077 
1078 static struct cryp_algo_template cryp_algs[] = {
1079         {
1080                 .algomode = CRYP_ALGO_AES_ECB,
1081                 .crypto = {
1082                         .cra_name = "aes",
1083                         .cra_driver_name = "aes-ux500",
1084                         .cra_priority = 300,
1085                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1086                                         CRYPTO_ALG_ASYNC,
1087                         .cra_blocksize = AES_BLOCK_SIZE,
1088                         .cra_ctxsize = sizeof(struct cryp_ctx),
1089                         .cra_alignmask = 3,
1090                         .cra_type = &crypto_ablkcipher_type,
1091                         .cra_init = cryp_cra_init,
1092                         .cra_module = THIS_MODULE,
1093                         .cra_u = {
1094                                 .ablkcipher = {
1095                                         .min_keysize = AES_MIN_KEY_SIZE,
1096                                         .max_keysize = AES_MAX_KEY_SIZE,
1097                                         .setkey = aes_ablkcipher_setkey,
1098                                         .encrypt = cryp_blk_encrypt,
1099                                         .decrypt = cryp_blk_decrypt
1100                                 }
1101                         }
1102                 }
1103         },
1104         {
1105                 .algomode = CRYP_ALGO_AES_ECB,
1106                 .crypto = {
1107                         .cra_name = "ecb(aes)",
1108                         .cra_driver_name = "ecb-aes-ux500",
1109                         .cra_priority = 300,
1110                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1111                                         CRYPTO_ALG_ASYNC,
1112                         .cra_blocksize = AES_BLOCK_SIZE,
1113                         .cra_ctxsize = sizeof(struct cryp_ctx),
1114                         .cra_alignmask = 3,
1115                         .cra_type = &crypto_ablkcipher_type,
1116                         .cra_init = cryp_cra_init,
1117                         .cra_module = THIS_MODULE,
1118                         .cra_u = {
1119                                 .ablkcipher = {
1120                                         .min_keysize = AES_MIN_KEY_SIZE,
1121                                         .max_keysize = AES_MAX_KEY_SIZE,
1122                                         .setkey = aes_ablkcipher_setkey,
1123                                         .encrypt = cryp_blk_encrypt,
1124                                         .decrypt = cryp_blk_decrypt,
1125                                 }
1126                         }
1127                 }
1128         },
1129         {
1130                 .algomode = CRYP_ALGO_AES_CBC,
1131                 .crypto = {
1132                         .cra_name = "cbc(aes)",
1133                         .cra_driver_name = "cbc-aes-ux500",
1134                         .cra_priority = 300,
1135                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1136                                         CRYPTO_ALG_ASYNC,
1137                         .cra_blocksize = AES_BLOCK_SIZE,
1138                         .cra_ctxsize = sizeof(struct cryp_ctx),
1139                         .cra_alignmask = 3,
1140                         .cra_type = &crypto_ablkcipher_type,
1141                         .cra_init = cryp_cra_init,
1142                         .cra_module = THIS_MODULE,
1143                         .cra_u = {
1144                                 .ablkcipher = {
1145                                         .min_keysize = AES_MIN_KEY_SIZE,
1146                                         .max_keysize = AES_MAX_KEY_SIZE,
1147                                         .setkey = aes_ablkcipher_setkey,
1148                                         .encrypt = cryp_blk_encrypt,
1149                                         .decrypt = cryp_blk_decrypt,
1150                                         .ivsize = AES_BLOCK_SIZE,
1151                                 }
1152                         }
1153                 }
1154         },
1155         {
1156                 .algomode = CRYP_ALGO_AES_CTR,
1157                 .crypto = {
1158                         .cra_name = "ctr(aes)",
1159                         .cra_driver_name = "ctr-aes-ux500",
1160                         .cra_priority = 300,
1161                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1162                                                 CRYPTO_ALG_ASYNC,
1163                         .cra_blocksize = AES_BLOCK_SIZE,
1164                         .cra_ctxsize = sizeof(struct cryp_ctx),
1165                         .cra_alignmask = 3,
1166                         .cra_type = &crypto_ablkcipher_type,
1167                         .cra_init = cryp_cra_init,
1168                         .cra_module = THIS_MODULE,
1169                         .cra_u = {
1170                                 .ablkcipher = {
1171                                         .min_keysize = AES_MIN_KEY_SIZE,
1172                                         .max_keysize = AES_MAX_KEY_SIZE,
1173                                         .setkey = aes_ablkcipher_setkey,
1174                                         .encrypt = cryp_blk_encrypt,
1175                                         .decrypt = cryp_blk_decrypt,
1176                                         .ivsize = AES_BLOCK_SIZE,
1177                                 }
1178                         }
1179                 }
1180         },
1181         {
1182                 .algomode = CRYP_ALGO_DES_ECB,
1183                 .crypto = {
1184                         .cra_name = "ecb(des)",
1185                         .cra_driver_name = "ecb-des-ux500",
1186                         .cra_priority = 300,
1187                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1188                                         CRYPTO_ALG_ASYNC,
1189                         .cra_blocksize = DES_BLOCK_SIZE,
1190                         .cra_ctxsize = sizeof(struct cryp_ctx),
1191                         .cra_alignmask = 3,
1192                         .cra_type = &crypto_ablkcipher_type,
1193                         .cra_init = cryp_cra_init,
1194                         .cra_module = THIS_MODULE,
1195                         .cra_u = {
1196                                 .ablkcipher = {
1197                                         .min_keysize = DES_KEY_SIZE,
1198                                         .max_keysize = DES_KEY_SIZE,
1199                                         .setkey = des_ablkcipher_setkey,
1200                                         .encrypt = cryp_blk_encrypt,
1201                                         .decrypt = cryp_blk_decrypt,
1202                                 }
1203                         }
1204                 }
1205         },
1206         {
1207                 .algomode = CRYP_ALGO_TDES_ECB,
1208                 .crypto = {
1209                         .cra_name = "ecb(des3_ede)",
1210                         .cra_driver_name = "ecb-des3_ede-ux500",
1211                         .cra_priority = 300,
1212                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1213                                         CRYPTO_ALG_ASYNC,
1214                         .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1215                         .cra_ctxsize = sizeof(struct cryp_ctx),
1216                         .cra_alignmask = 3,
1217                         .cra_type = &crypto_ablkcipher_type,
1218                         .cra_init = cryp_cra_init,
1219                         .cra_module = THIS_MODULE,
1220                         .cra_u = {
1221                                 .ablkcipher = {
1222                                         .min_keysize = DES3_EDE_KEY_SIZE,
1223                                         .max_keysize = DES3_EDE_KEY_SIZE,
1224                                         .setkey = des3_ablkcipher_setkey,
1225                                         .encrypt = cryp_blk_encrypt,
1226                                         .decrypt = cryp_blk_decrypt,
1227                                 }
1228                         }
1229                 }
1230         },
1231         {
1232                 .algomode = CRYP_ALGO_DES_CBC,
1233                 .crypto = {
1234                         .cra_name = "cbc(des)",
1235                         .cra_driver_name = "cbc-des-ux500",
1236                         .cra_priority = 300,
1237                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1238                                         CRYPTO_ALG_ASYNC,
1239                         .cra_blocksize = DES_BLOCK_SIZE,
1240                         .cra_ctxsize = sizeof(struct cryp_ctx),
1241                         .cra_alignmask = 3,
1242                         .cra_type = &crypto_ablkcipher_type,
1243                         .cra_init = cryp_cra_init,
1244                         .cra_module = THIS_MODULE,
1245                         .cra_u = {
1246                                 .ablkcipher = {
1247                                         .min_keysize = DES_KEY_SIZE,
1248                                         .max_keysize = DES_KEY_SIZE,
1249                                         .setkey = des_ablkcipher_setkey,
1250                                         .encrypt = cryp_blk_encrypt,
1251                                         .decrypt = cryp_blk_decrypt,
1252                                 }
1253                         }
1254                 }
1255         },
1256         {
1257                 .algomode = CRYP_ALGO_TDES_CBC,
1258                 .crypto = {
1259                         .cra_name = "cbc(des3_ede)",
1260                         .cra_driver_name = "cbc-des3_ede-ux500",
1261                         .cra_priority = 300,
1262                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1263                                         CRYPTO_ALG_ASYNC,
1264                         .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1265                         .cra_ctxsize = sizeof(struct cryp_ctx),
1266                         .cra_alignmask = 3,
1267                         .cra_type = &crypto_ablkcipher_type,
1268                         .cra_init = cryp_cra_init,
1269                         .cra_module = THIS_MODULE,
1270                         .cra_u = {
1271                                 .ablkcipher = {
1272                                         .min_keysize = DES3_EDE_KEY_SIZE,
1273                                         .max_keysize = DES3_EDE_KEY_SIZE,
1274                                         .setkey = des3_ablkcipher_setkey,
1275                                         .encrypt = cryp_blk_encrypt,
1276                                         .decrypt = cryp_blk_decrypt,
1277                                         .ivsize = DES3_EDE_BLOCK_SIZE,
1278                                 }
1279                         }
1280                 }
1281         }
1282 };
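
     /*
      * Usage sketch (illustrative only, not part of this file): once
      * registered, the algorithms above are reached through the
      * ablkcipher API of this kernel generation, e.g.:
      *
      *   struct crypto_ablkcipher *tfm =
      *           crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
      *   struct ablkcipher_request *req =
      *           ablkcipher_request_alloc(tfm, GFP_KERNEL);
      *
      *   crypto_ablkcipher_setkey(tfm, key, 16);
      *   ablkcipher_request_set_crypt(req, src, dst, nbytes, iv);
      *   crypto_ablkcipher_encrypt(req);
      *
      * setkey lands in aes_ablkcipher_setkey() and encrypt in
      * cryp_blk_encrypt(); "cbc-aes-ux500" is picked when its priority
      * (300) wins over other cbc(aes) implementations.
      */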
1283 
1284 /**
1285  * cryp_algs_register_all - register all supported cipher algorithms
1286  */
1287 static int cryp_algs_register_all(void)
1288 {
1289         int ret;
1290         int i;
1291         int count;
1292 
1293         pr_debug("[%s]", __func__);
1294 
1295         for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
1296                 ret = crypto_register_alg(&cryp_algs[i].crypto);
1297                 if (ret) {
1298                         count = i;
1299                         pr_err("[%s] alg registration failed",
1300                                         cryp_algs[i].crypto.cra_driver_name);
1301                         goto unreg;
1302                 }
1303         }
1304         return 0;
1305 unreg:
1306         for (i = 0; i < count; i++)
1307                 crypto_unregister_alg(&cryp_algs[i].crypto);
1308         return ret;
1309 }
1310 
1311 /**
1312  * cryp_algs_unregister_all - unregister all previously registered cipher algorithms
1313  */
1314 static void cryp_algs_unregister_all(void)
1315 {
1316         int i;
1317 
1318         pr_debug(DEV_DBG_NAME " [%s]", __func__);
1319 
1320         for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
1321                 crypto_unregister_alg(&cryp_algs[i].crypto);
1322 }
1323 
1324 static int ux500_cryp_probe(struct platform_device *pdev)
1325 {
1326         int ret;
1327         struct resource *res;
1328         struct resource *res_irq;
1329         struct cryp_device_data *device_data;
1330         struct cryp_protection_config prot = {
1331                 .privilege_access = CRYP_STATE_ENABLE
1332         };
1333         struct device *dev = &pdev->dev;
1334 
1335         dev_dbg(dev, "[%s]", __func__);
1336         device_data = devm_kzalloc(dev, sizeof(*device_data), GFP_KERNEL);
1337         if (!device_data) {
1338                 ret = -ENOMEM;
1339                 goto out;
1340         }
1341 
1342         device_data->dev = dev;
1343         device_data->current_ctx = NULL;
1344 
1345         /* Grab the DMA configuration from platform data. */
1346         mem_to_engine = &((struct cryp_platform_data *)
1347                          dev->platform_data)->mem_to_engine;
1348         engine_to_mem = &((struct cryp_platform_data *)
1349                          dev->platform_data)->engine_to_mem;
1350 
1351         res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1352         if (!res) {
1353                 dev_err(dev, "[%s]: platform_get_resource() failed",
1354                                 __func__);
1355                 ret = -ENODEV;
1356                 goto out;
1357         }
1358 
1359         device_data->phybase = res->start;
1360         device_data->base = devm_ioremap_resource(dev, res);
1361         if (IS_ERR(device_data->base)) {
1362                 dev_err(dev, "[%s]: ioremap failed!", __func__);
1363                 ret = PTR_ERR(device_data->base);
1364                 goto out;
1365         }
1366 
1367         spin_lock_init(&device_data->ctx_lock);
1368         spin_lock_init(&device_data->power_state_spinlock);
1369 
1370         /* Enable power for CRYP hardware block */
1371         device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
1372         if (IS_ERR(device_data->pwr_regulator)) {
1373                 dev_err(dev, "[%s]: could not get cryp regulator", __func__);
1374                 ret = PTR_ERR(device_data->pwr_regulator);
1375                 device_data->pwr_regulator = NULL;
1376                 goto out;
1377         }
1378 
1379         /* Enable the clk for CRYP hardware block */
1380         device_data->clk = devm_clk_get(&pdev->dev, NULL);
1381         if (IS_ERR(device_data->clk)) {
1382                 dev_err(dev, "[%s]: clk_get() failed!", __func__);
1383                 ret = PTR_ERR(device_data->clk);
1384                 goto out_regulator;
1385         }
1386 
1387         ret = clk_prepare(device_data->clk);
1388         if (ret) {
1389                 dev_err(dev, "[%s]: clk_prepare() failed!", __func__);
1390                 goto out_regulator;
1391         }
1392 
1393         /* Enable device power (and clock) */
1394         ret = cryp_enable_power(device_data->dev, device_data, false);
1395         if (ret) {
1396                 dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
1397                 goto out_clk_unprepare;
1398         }
1399 
1400         if (cryp_check(device_data)) {
1401                 dev_err(dev, "[%s]: cryp_check() failed!", __func__);
1402                 ret = -EINVAL;
1403                 goto out_power;
1404         }
1405 
1406         if (cryp_configure_protection(device_data, &prot)) {
1407                 dev_err(dev, "[%s]: cryp_configure_protection() failed!",
1408                         __func__);
1409                 ret = -EINVAL;
1410                 goto out_power;
1411         }
1412 
1413         res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1414         if (!res_irq) {
1415                 dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable",
1416                         __func__);
1417                 ret = -ENODEV;
1418                 goto out_power;
1419         }
1420 
1421         ret = devm_request_irq(&pdev->dev, res_irq->start,
1422                                cryp_interrupt_handler, 0, "cryp1", device_data);
1423         if (ret) {
1424                 dev_err(dev, "[%s]: Unable to request IRQ", __func__);
1425                 goto out_power;
1426         }
1427 
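             /*
              * Set up the DMA channels only when DMA transfers were
              * selected via the cryp_mode module parameter.
              */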
1428         if (cryp_mode == CRYP_MODE_DMA)
1429                 cryp_dma_setup_channel(device_data, dev);
1430 
1431         platform_set_drvdata(pdev, device_data);
1432 
1433         /* Put the new device into the device list... */
1434         klist_add_tail(&device_data->list_node, &driver_data.device_list);
1435 
1436         /* ... and signal that a new device is available. */
1437         up(&driver_data.device_allocation);
1438 
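             /* Reset the session-ID counter consumed by add_session_id(). */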
1439         atomic_set(&session_id, 1);
1440 
1441         ret = cryp_algs_register_all();
1442         if (ret) {
1443                 dev_err(dev, "[%s]: cryp_algs_register_all() failed!",
1444                         __func__);
1445                 goto out_power;
1446         }
1447 
1448         dev_info(dev, "successfully registered\n");
1449 
1450         return 0;
1451 
1452 out_power:
1453         cryp_disable_power(device_data->dev, device_data, false);
1454 
1455 out_clk_unprepare:
1456         clk_unprepare(device_data->clk);
1457 
1458 out_regulator:
1459         regulator_put(device_data->pwr_regulator);
1460 
1461 out:
1462         return ret;
1463 }
1464 
1465 static int ux500_cryp_remove(struct platform_device *pdev)
1466 {
1467         struct cryp_device_data *device_data;
1468 
1469         dev_dbg(&pdev->dev, "[%s]", __func__);
1470         device_data = platform_get_drvdata(pdev);
1471         if (!device_data) {
1472                 dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
1473                         __func__);
1474                 return -ENODEV;
1475         }
1476 
1477         /* Try to decrease the number of available devices. */
1478         if (down_trylock(&driver_data.device_allocation))
1479                 return -EBUSY;
1480 
1481         /* Check that the device is free */
1482         spin_lock(&device_data->ctx_lock);
1483         /* current_ctx allocates a device, NULL = unallocated */
1484         if (device_data->current_ctx) {
1485                 /* The device is busy */
1486                 spin_unlock(&device_data->ctx_lock);
1487                 /* Return the device to the pool. */
1488                 up(&driver_data.device_allocation);
1489                 return -EBUSY;
1490         }
1491 
1492         spin_unlock(&device_data->ctx_lock);
1493 
1494         /* Remove the device from the list */
1495         if (klist_node_attached(&device_data->list_node))
1496                 klist_remove(&device_data->list_node);
1497 
1498         /* If this was the last device, remove the services */
1499         if (list_empty(&driver_data.device_list.k_list))
1500                 cryp_algs_unregister_all();
1501 
1502         if (cryp_disable_power(&pdev->dev, device_data, false))
1503                 dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
1504                         __func__);
1505 
1506         clk_unprepare(device_data->clk);
1507         regulator_put(device_data->pwr_regulator);
1508 
1509         return 0;
1510 }
1511 
1512 static void ux500_cryp_shutdown(struct platform_device *pdev)
1513 {
1514         struct cryp_device_data *device_data;
1515 
1516         dev_dbg(&pdev->dev, "[%s]", __func__);
1517 
1518         device_data = platform_get_drvdata(pdev);
1519         if (!device_data) {
1520                 dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
1521                         __func__);
1522                 return;
1523         }
1524 
1525         /* Check that the device is free */
1526         spin_lock(&device_data->ctx_lock);
1527         /* current_ctx allocates a device, NULL = unallocated */
1528         if (!device_data->current_ctx) {
1529                 if (down_trylock(&driver_data.device_allocation))
1530                         dev_dbg(&pdev->dev, "[%s]: Cryp still in use! "
1531                                 "Shutting down anyway...", __func__);
1532                 /*
1533                  * "Allocate" the device: set current_ctx to a dummy
1534                  * non-NULL value so it cannot be handed out to a new
1535                  * context while we are shutting down.
1536                  */
1537                 device_data->current_ctx++;
1538         }
1539         spin_unlock(&device_data->ctx_lock);
1540 
1541         /* Remove the device from the list */
1542         if (klist_node_attached(&device_data->list_node))
1543                 klist_remove(&device_data->list_node);
1544 
1545         /* If this was the last device, remove the services */
1546         if (list_empty(&driver_data.device_list.k_list))
1547                 cryp_algs_unregister_all();
1548 
1549         if (cryp_disable_power(&pdev->dev, device_data, false))
1550                 dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
1551                         __func__);
1553 }
1554 
1555 #ifdef CONFIG_PM_SLEEP
1556 static int ux500_cryp_suspend(struct device *dev)
1557 {
1558         int ret;
1559         struct platform_device *pdev = to_platform_device(dev);
1560         struct cryp_device_data *device_data;
1561         struct resource *res_irq;
1562         struct cryp_ctx *temp_ctx = NULL;
1563 
1564         dev_dbg(dev, "[%s]", __func__);
1565 
1567         device_data = platform_get_drvdata(pdev);
1568         if (!device_data) {
1569                 dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
1570                 return -ENODEV;
1571         }
1572 
1573         res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1574         if (!res_irq)
1575                 dev_err(dev, "[%s]: IORESOURCE_IRQ, unavailable", __func__);
1576         else
1577                 disable_irq(res_irq->start);
1578 
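             /*
              * If the device is idle, claim it by bumping current_ctx from
              * NULL to a dummy non-NULL pointer value, so no new work can
              * grab the device while we suspend.
              */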
1579         spin_lock(&device_data->ctx_lock);
1580         if (!device_data->current_ctx)
1581                 device_data->current_ctx++;
1582         spin_unlock(&device_data->ctx_lock);
1583 
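             /*
              * temp_ctx starts at NULL, so ++temp_ctx reproduces the same
              * dummy pointer value as the increment above: a match means
              * the device was idle and we claimed it ourselves, so it can
              * be taken out of the pool and powered down completely.
              */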
1584         if (device_data->current_ctx == ++temp_ctx) {
1585                 if (down_interruptible(&driver_data.device_allocation))
1586                         dev_dbg(dev, "[%s]: down_interruptible() failed",
1587                                 __func__);
1588                 ret = cryp_disable_power(dev, device_data, false);
1589         } else {
1590                 ret = cryp_disable_power(dev, device_data, true);
1591         }
1592 
1593         if (ret)
1594                 dev_err(dev, "[%s]: cryp_disable_power()", __func__);
1595 
1596         return ret;
1597 }
1598 
1599 static int ux500_cryp_resume(struct device *dev)
1600 {
1601         int ret = 0;
1602         struct platform_device *pdev = to_platform_device(dev);
1603         struct cryp_device_data *device_data;
1604         struct resource *res_irq;
1605         struct cryp_ctx *temp_ctx = NULL;
1606 
1607         dev_dbg(dev, "[%s]", __func__);
1608 
1609         device_data = platform_get_drvdata(pdev);
1610         if (!device_data) {
1611                 dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
1612                 return -ENODEV;
1613         }
1614 
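             /*
              * ++temp_ctx recreates the dummy pointer value set by suspend;
              * if current_ctx still holds it, the device was idle across
              * suspend, so clear the marker and return it to the pool below.
              */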
1615         spin_lock(&device_data->ctx_lock);
1616         if (device_data->current_ctx == ++temp_ctx)
1617                 device_data->current_ctx = NULL;
1618         spin_unlock(&device_data->ctx_lock);
1619 
1621         if (!device_data->current_ctx)
1622                 up(&driver_data.device_allocation);
1623         else
1624                 ret = cryp_enable_power(dev, device_data, true);
1625 
1626         if (ret) {
1627                 dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
1628         } else {
1629                 res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1630                 if (res_irq)
1631                         enable_irq(res_irq->start);
1632         }
1633 
1634         return ret;
1635 }
1636 #endif
1637 
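/*
 * SIMPLE_DEV_PM_OPS only wires in the sleep callbacks when CONFIG_PM_SLEEP
 * is set, which is why the handlers above sit behind the matching #ifdef.
 */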
1638 static SIMPLE_DEV_PM_OPS(ux500_cryp_pm, ux500_cryp_suspend, ux500_cryp_resume);
1639 
1640 static const struct of_device_id ux500_cryp_match[] = {
1641         { .compatible = "stericsson,ux500-cryp" },
1642         { },
1643 };
1644 MODULE_DEVICE_TABLE(of, ux500_cryp_match);
1645 
1646 static struct platform_driver cryp_driver = {
1647         .probe  = ux500_cryp_probe,
1648         .remove = ux500_cryp_remove,
1649         .shutdown = ux500_cryp_shutdown,
1650         .driver = {
1651                 .name  = "cryp1",
1652                 .of_match_table = ux500_cryp_match,
1653                 .pm    = &ux500_cryp_pm,
1654         }
1655 };
1656 
1657 static int __init ux500_cryp_mod_init(void)
1658 {
1659         pr_debug("[%s] is called!", __func__);
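             /*
              * NULL get/put callbacks: the list nodes need no extra
              * reference counting, as device lifetime is managed by
              * the driver core.
              */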
1660         klist_init(&driver_data.device_list, NULL, NULL);
1661         /* Initialize the semaphore to 0 devices (locked state) */
1662         sema_init(&driver_data.device_allocation, 0);
1663         return platform_driver_register(&cryp_driver);
1664 }
1665 
1666 static void __exit ux500_cryp_mod_fini(void)
1667 {
1668         pr_debug("[%s] is called!", __func__);
1669         platform_driver_unregister(&cryp_driver);
1670 }
1671 
1672 module_init(ux500_cryp_mod_init);
1673 module_exit(ux500_cryp_mod_fini);
1674 
1675 module_param(cryp_mode, int, 0);
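/* Transfer mode for the engine; CRYP_MODE_DMA (checked in probe) enables DMA. */
MODULE_PARM_DESC(cryp_mode, "CRYP transfer mode (a CRYP_MODE_* value)");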
1676 
1677 MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
1678 MODULE_ALIAS_CRYPTO("aes-all");
1679 MODULE_ALIAS_CRYPTO("des-all");
1680 
1681 MODULE_LICENSE("GPL");
