root/drivers/crypto/ccree/cc_cipher.c

/* [<][>][^][v][top][bottom][index][help] */

DEFINITIONS

This source file includes the following definitions.
  1. validate_keys_sizes
  2. validate_data_size
  3. cc_cipher_init
  4. cc_cipher_exit
  5. cc_slot_to_hw_key
  6. cc_slot_to_cpp_key
  7. cc_slot_to_key_type
  8. cc_cipher_sethkey
  9. cc_cipher_setkey
  10. cc_out_setup_mode
  11. cc_setup_readiv_desc
  12. cc_setup_state_desc
  13. cc_setup_xex_state_desc
  14. cc_out_flow_mode
  15. cc_setup_key_desc
  16. cc_setup_mlli_desc
  17. cc_setup_flow_desc
  18. cc_cipher_complete
  19. cc_cipher_process
  20. cc_cipher_encrypt
  21. cc_cipher_decrypt
  22. cc_create_alg
  23. cc_cipher_free
  24. cc_cipher_alloc

   1 // SPDX-License-Identifier: GPL-2.0
   2 /* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */
   3 
   4 #include <linux/kernel.h>
   5 #include <linux/module.h>
   6 #include <crypto/algapi.h>
   7 #include <crypto/internal/skcipher.h>
   8 #include <crypto/internal/des.h>
   9 #include <crypto/xts.h>
  10 #include <crypto/sm4.h>
  11 #include <crypto/scatterwalk.h>
  12 
  13 #include "cc_driver.h"
  14 #include "cc_lli_defs.h"
  15 #include "cc_buffer_mgr.h"
  16 #include "cc_cipher.h"
  17 #include "cc_request_mgr.h"
  18 
  19 #define MAX_ABLKCIPHER_SEQ_LEN 6
  20 
  21 #define template_skcipher       template_u.skcipher
  22 
  23 struct cc_cipher_handle {
  24         struct list_head alg_list;
  25 };
  26 
  27 struct cc_user_key_info {
  28         u8 *key;
  29         dma_addr_t key_dma_addr;
  30 };
  31 
  32 struct cc_hw_key_info {
  33         enum cc_hw_crypto_key key1_slot;
  34         enum cc_hw_crypto_key key2_slot;
  35 };
  36 
  37 struct cc_cpp_key_info {
  38         u8 slot;
  39         enum cc_cpp_alg alg;
  40 };
  41 
  42 enum cc_key_type {
  43         CC_UNPROTECTED_KEY,             /* User key */
  44         CC_HW_PROTECTED_KEY,            /* HW (FDE) key */
  45         CC_POLICY_PROTECTED_KEY,        /* CPP key */
  46         CC_INVALID_PROTECTED_KEY        /* Invalid key */
  47 };
  48 
  49 struct cc_cipher_ctx {
  50         struct cc_drvdata *drvdata;
  51         int keylen;
  52         int key_round_number;
  53         int cipher_mode;
  54         int flow_mode;
  55         unsigned int flags;
  56         enum cc_key_type key_type;
  57         struct cc_user_key_info user;
  58         union {
  59                 struct cc_hw_key_info hw;
  60                 struct cc_cpp_key_info cpp;
  61         };
  62         struct crypto_shash *shash_tfm;
  63 };
  64 
  65 static void cc_cipher_complete(struct device *dev, void *cc_req, int err);
  66 
  67 static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
  68 {
  69         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
  70 
  71         return ctx_p->key_type;
  72 }
  73 
  74 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
  75 {
  76         switch (ctx_p->flow_mode) {
  77         case S_DIN_to_AES:
  78                 switch (size) {
  79                 case CC_AES_128_BIT_KEY_SIZE:
  80                 case CC_AES_192_BIT_KEY_SIZE:
  81                         if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
  82                             ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
  83                             ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
  84                                 return 0;
  85                         break;
  86                 case CC_AES_256_BIT_KEY_SIZE:
  87                         return 0;
  88                 case (CC_AES_192_BIT_KEY_SIZE * 2):
  89                 case (CC_AES_256_BIT_KEY_SIZE * 2):
  90                         if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
  91                             ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
  92                             ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
  93                                 return 0;
  94                         break;
  95                 default:
  96                         break;
  97                 }
  98                 break;
  99         case S_DIN_to_DES:
 100                 if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
 101                         return 0;
 102                 break;
 103         case S_DIN_to_SM4:
 104                 if (size == SM4_KEY_SIZE)
 105                         return 0;
 106         default:
 107                 break;
 108         }
 109         return -EINVAL;
 110 }
 111 
 112 static int validate_data_size(struct cc_cipher_ctx *ctx_p,
 113                               unsigned int size)
 114 {
 115         switch (ctx_p->flow_mode) {
 116         case S_DIN_to_AES:
 117                 switch (ctx_p->cipher_mode) {
 118                 case DRV_CIPHER_XTS:
 119                 case DRV_CIPHER_CBC_CTS:
 120                         if (size >= AES_BLOCK_SIZE)
 121                                 return 0;
 122                         break;
 123                 case DRV_CIPHER_OFB:
 124                 case DRV_CIPHER_CTR:
 125                                 return 0;
 126                 case DRV_CIPHER_ECB:
 127                 case DRV_CIPHER_CBC:
 128                 case DRV_CIPHER_ESSIV:
 129                 case DRV_CIPHER_BITLOCKER:
 130                         if (IS_ALIGNED(size, AES_BLOCK_SIZE))
 131                                 return 0;
 132                         break;
 133                 default:
 134                         break;
 135                 }
 136                 break;
 137         case S_DIN_to_DES:
 138                 if (IS_ALIGNED(size, DES_BLOCK_SIZE))
 139                         return 0;
 140                 break;
 141         case S_DIN_to_SM4:
 142                 switch (ctx_p->cipher_mode) {
 143                 case DRV_CIPHER_CTR:
 144                         return 0;
 145                 case DRV_CIPHER_ECB:
 146                 case DRV_CIPHER_CBC:
 147                         if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
 148                                 return 0;
 149                 default:
 150                         break;
 151                 }
 152         default:
 153                 break;
 154         }
 155         return -EINVAL;
 156 }
 157 
 158 static int cc_cipher_init(struct crypto_tfm *tfm)
 159 {
 160         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 161         struct cc_crypto_alg *cc_alg =
 162                         container_of(tfm->__crt_alg, struct cc_crypto_alg,
 163                                      skcipher_alg.base);
 164         struct device *dev = drvdata_to_dev(cc_alg->drvdata);
 165         unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
 166         int rc = 0;
 167 
 168         dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
 169                 crypto_tfm_alg_name(tfm));
 170 
 171         crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
 172                                     sizeof(struct cipher_req_ctx));
 173 
 174         ctx_p->cipher_mode = cc_alg->cipher_mode;
 175         ctx_p->flow_mode = cc_alg->flow_mode;
 176         ctx_p->drvdata = cc_alg->drvdata;
 177 
 178         /* Allocate key buffer, cache line aligned */
 179         ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
 180         if (!ctx_p->user.key)
 181                 return -ENOMEM;
 182 
 183         dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
 184                 ctx_p->user.key);
 185 
 186         /* Map key buffer */
 187         ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
 188                                                   max_key_buf_size,
 189                                                   DMA_TO_DEVICE);
 190         if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
 191                 dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
 192                         max_key_buf_size, ctx_p->user.key);
 193                 return -ENOMEM;
 194         }
 195         dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
 196                 max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);
 197 
 198         if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 199                 /* Alloc hash tfm for essiv */
 200                 ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
 201                 if (IS_ERR(ctx_p->shash_tfm)) {
 202                         dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
 203                         return PTR_ERR(ctx_p->shash_tfm);
 204                 }
 205         }
 206 
 207         return rc;
 208 }
 209 
 210 static void cc_cipher_exit(struct crypto_tfm *tfm)
 211 {
 212         struct crypto_alg *alg = tfm->__crt_alg;
 213         struct cc_crypto_alg *cc_alg =
 214                         container_of(alg, struct cc_crypto_alg,
 215                                      skcipher_alg.base);
 216         unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
 217         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 218         struct device *dev = drvdata_to_dev(ctx_p->drvdata);
 219 
 220         dev_dbg(dev, "Clearing context @%p for %s\n",
 221                 crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
 222 
 223         if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 224                 /* Free hash tfm for essiv */
 225                 crypto_free_shash(ctx_p->shash_tfm);
 226                 ctx_p->shash_tfm = NULL;
 227         }
 228 
 229         /* Unmap key buffer */
 230         dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
 231                          DMA_TO_DEVICE);
 232         dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
 233                 &ctx_p->user.key_dma_addr);
 234 
 235         /* Free key buffer in context */
 236         kzfree(ctx_p->user.key);
 237         dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
 238 }
 239 
 240 struct tdes_keys {
 241         u8      key1[DES_KEY_SIZE];
 242         u8      key2[DES_KEY_SIZE];
 243         u8      key3[DES_KEY_SIZE];
 244 };
 245 
 246 static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
 247 {
 248         switch (slot_num) {
 249         case 0:
 250                 return KFDE0_KEY;
 251         case 1:
 252                 return KFDE1_KEY;
 253         case 2:
 254                 return KFDE2_KEY;
 255         case 3:
 256                 return KFDE3_KEY;
 257         }
 258         return END_OF_KEYS;
 259 }
 260 
 261 static u8 cc_slot_to_cpp_key(u8 slot_num)
 262 {
 263         return (slot_num - CC_FIRST_CPP_KEY_SLOT);
 264 }
 265 
 266 static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
 267 {
 268         if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
 269                 return CC_HW_PROTECTED_KEY;
 270         else if (slot_num >=  CC_FIRST_CPP_KEY_SLOT &&
 271                  slot_num <=  CC_LAST_CPP_KEY_SLOT)
 272                 return CC_POLICY_PROTECTED_KEY;
 273         else
 274                 return CC_INVALID_PROTECTED_KEY;
 275 }
 276 
/*
 * cc_cipher_sethkey() - Set a protected (HW/FDE or CPP) key.
 * @sktfm: transform to program
 * @key: a struct cc_hkey_info key token, NOT raw key material
 * @keylen: length of the token; must equal sizeof(struct cc_hkey_info)
 *
 * The token names key slots held inside the hardware; the raw key never
 * crosses this interface. The token's embedded keylen (the size of the
 * HW-resident key) becomes the effective key length for crypto ops.
 *
 * Return: 0 on success, -EINVAL on any validation failure.
 */
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	/* NOTE(review): dumps only the slot token, not secret key material */
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This check the size of the protected key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->keylen = keylen;

	switch (cc_slot_to_key_type(hki.hw_key1)) {
	case CC_HW_PROTECTED_KEY:
		/* HW (FDE) slots are AES-only; reject SM4 flows */
		if (ctx_p->flow_mode == S_DIN_to_SM4) {
			dev_err(dev, "Only AES HW protected keys are supported\n");
			return -EINVAL;
		}

		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key1 number (%d)\n",
				hki.hw_key1);
			return -EINVAL;
		}

		/* Tweaked modes consume a second, distinct key slot */
		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
			if (hki.hw_key1 == hki.hw_key2) {
				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
					hki.hw_key1, hki.hw_key2);
				return -EINVAL;
			}

			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
				dev_err(dev, "Unsupported hw key2 number (%d)\n",
					hki.hw_key2);
				return -EINVAL;
			}
		}

		ctx_p->key_type = CC_HW_PROTECTED_KEY;
		dev_dbg(dev, "HW protected key  %d/%d set\n.",
			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
		break;

	case CC_POLICY_PROTECTED_KEY:
		/* CPP (policy protected) keys need CC713 or newer hardware */
		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
			return -EINVAL;
		}

		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
			return -EINVAL;
		}

		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
		if (ctx_p->flow_mode == S_DIN_to_AES)
			ctx_p->cpp.alg = CC_CPP_AES;
		else /* Must be SM4 since due to sethkey registration */
			ctx_p->cpp.alg = CC_CPP_SM4;
		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
			ctx_p->cpp.alg, ctx_p->cpp.slot);
		break;

	default:
		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	return 0;
}
 378 
 379 static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
 380                             unsigned int keylen)
 381 {
 382         struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
 383         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 384         struct device *dev = drvdata_to_dev(ctx_p->drvdata);
 385         struct cc_crypto_alg *cc_alg =
 386                         container_of(tfm->__crt_alg, struct cc_crypto_alg,
 387                                      skcipher_alg.base);
 388         unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
 389 
 390         dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
 391                 ctx_p, crypto_tfm_alg_name(tfm), keylen);
 392         dump_byte_array("key", (u8 *)key, keylen);
 393 
 394         /* STAT_PHASE_0: Init and sanity checks */
 395 
 396         if (validate_keys_sizes(ctx_p, keylen)) {
 397                 dev_err(dev, "Unsupported key size %d.\n", keylen);
 398                 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 399                 return -EINVAL;
 400         }
 401 
 402         ctx_p->key_type = CC_UNPROTECTED_KEY;
 403 
 404         /*
 405          * Verify DES weak keys
 406          * Note that we're dropping the expanded key since the
 407          * HW does the expansion on its own.
 408          */
 409         if (ctx_p->flow_mode == S_DIN_to_DES) {
 410                 if ((keylen == DES3_EDE_KEY_SIZE &&
 411                      verify_skcipher_des3_key(sktfm, key)) ||
 412                     verify_skcipher_des_key(sktfm, key)) {
 413                         dev_dbg(dev, "weak DES key");
 414                         return -EINVAL;
 415                 }
 416         }
 417 
 418         if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
 419             xts_check_key(tfm, key, keylen)) {
 420                 dev_dbg(dev, "weak XTS key");
 421                 return -EINVAL;
 422         }
 423 
 424         /* STAT_PHASE_1: Copy key to ctx */
 425         dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
 426                                 max_key_buf_size, DMA_TO_DEVICE);
 427 
 428         memcpy(ctx_p->user.key, key, keylen);
 429         if (keylen == 24)
 430                 memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
 431 
 432         if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 433                 /* sha256 for key2 - use sw implementation */
 434                 int key_len = keylen >> 1;
 435                 int err;
 436 
 437                 SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
 438 
 439                 desc->tfm = ctx_p->shash_tfm;
 440 
 441                 err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
 442                                           ctx_p->user.key + key_len);
 443                 if (err) {
 444                         dev_err(dev, "Failed to hash ESSIV key.\n");
 445                         return err;
 446                 }
 447         }
 448         dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
 449                                    max_key_buf_size, DMA_TO_DEVICE);
 450         ctx_p->keylen = keylen;
 451 
 452         dev_dbg(dev, "return safely");
 453         return 0;
 454 }
 455 
 456 static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
 457 {
 458         switch (ctx_p->flow_mode) {
 459         case S_DIN_to_AES:
 460                 return S_AES_to_DOUT;
 461         case S_DIN_to_DES:
 462                 return S_DES_to_DOUT;
 463         case S_DIN_to_SM4:
 464                 return S_SM4_to_DOUT;
 465         default:
 466                 return ctx_p->flow_mode;
 467         }
 468 }
 469 
/*
 * cc_setup_readiv_desc() - Append a descriptor that writes the updated IV
 * back to the request's IV DMA buffer after the cipher operation.
 * @tfm: transform
 * @req_ctx: per-request context (supplies direction and IV DMA address)
 * @ivsize: IV length in bytes
 * @desc: HW descriptor array to append to
 * @seq_size: in/out count of descriptors used so far
 *
 * CPP (policy protected) flows issue no IV readback. ECB has no IV, so
 * nothing is appended for it either.
 */
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = cc_out_setup_mode(ctx_p);
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	/* CPP keys: hardware manages state, no IV readback */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
		return;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		/* no IV to read back */
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Read next IV */
		hw_desc_init(&desc[*seq_size]);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		/* CTR/OFB keep their counter/state in STATE1, others in STATE0 */
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
		}
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/*  IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		/* tweaked modes always read back a full AES block */
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
			     NS_BIT, 1);
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
 525 
 526 
 527 static void cc_setup_state_desc(struct crypto_tfm *tfm,
 528                                  struct cipher_req_ctx *req_ctx,
 529                                  unsigned int ivsize, unsigned int nbytes,
 530                                  struct cc_hw_desc desc[],
 531                                  unsigned int *seq_size)
 532 {
 533         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 534         struct device *dev = drvdata_to_dev(ctx_p->drvdata);
 535         int cipher_mode = ctx_p->cipher_mode;
 536         int flow_mode = ctx_p->flow_mode;
 537         int direction = req_ctx->gen_ctx.op_type;
 538         dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
 539         unsigned int du_size = nbytes;
 540 
 541         struct cc_crypto_alg *cc_alg =
 542                 container_of(tfm->__crt_alg, struct cc_crypto_alg,
 543                              skcipher_alg.base);
 544 
 545         if (cc_alg->data_unit)
 546                 du_size = cc_alg->data_unit;
 547 
 548         switch (cipher_mode) {
 549         case DRV_CIPHER_ECB:
 550                 break;
 551         case DRV_CIPHER_CBC:
 552         case DRV_CIPHER_CBC_CTS:
 553         case DRV_CIPHER_CTR:
 554         case DRV_CIPHER_OFB:
 555                 /* Load IV */
 556                 hw_desc_init(&desc[*seq_size]);
 557                 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
 558                              NS_BIT);
 559                 set_cipher_config0(&desc[*seq_size], direction);
 560                 set_flow_mode(&desc[*seq_size], flow_mode);
 561                 set_cipher_mode(&desc[*seq_size], cipher_mode);
 562                 if (cipher_mode == DRV_CIPHER_CTR ||
 563                     cipher_mode == DRV_CIPHER_OFB) {
 564                         set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
 565                 } else {
 566                         set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
 567                 }
 568                 (*seq_size)++;
 569                 break;
 570         case DRV_CIPHER_XTS:
 571         case DRV_CIPHER_ESSIV:
 572         case DRV_CIPHER_BITLOCKER:
 573                 break;
 574         default:
 575                 dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
 576         }
 577 }
 578 
 579 
/*
 * cc_setup_xex_state_desc() - Append the descriptors that load the XEX
 * (tweak) key and IV for the tweaked modes (XTS/ESSIV/BITLOCKER).
 * @tfm: transform
 * @req_ctx: per-request context (supplies direction and IV DMA address)
 * @ivsize: IV length in bytes (tweaked modes load a full AES block)
 * @nbytes: request length; used as the data-unit size unless the
 *          algorithm template fixes one
 * @desc: HW descriptor array to append to
 * @seq_size: in/out count of descriptors used so far
 *
 * The second half of the key buffer (or HW key slot 2) is the XEX key.
 * Non-tweaked modes append nothing here.
 */
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	/* a fixed data-unit size from the template overrides the request size */
	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* non-tweaked modes: state loaded by cc_setup_state_desc() */
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			/* tweak key comes from the second HW key slot */
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			/* tweak key is the second half of the user key */
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
 647 
 648 static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
 649 {
 650         switch (ctx_p->flow_mode) {
 651         case S_DIN_to_AES:
 652                 return DIN_AES_DOUT;
 653         case S_DIN_to_DES:
 654                 return DIN_DES_DOUT;
 655         case S_DIN_to_SM4:
 656                 return DIN_SM4_DOUT;
 657         default:
 658                 return ctx_p->flow_mode;
 659         }
 660 }
 661 
/*
 * cc_setup_key_desc() - Append the descriptor(s) that load the cipher key.
 * @tfm: transform
 * @req_ctx: per-request context (supplies direction)
 * @nbytes: request length (currently unused by the key load itself)
 * @desc: HW descriptor array to append to
 * @seq_size: in/out count of descriptors used so far
 *
 * The key source depends on the key type: a CPP slot, a HW (FDE) slot, or
 * the DMA-mapped user key buffer. Tweaked modes (XTS/ESSIV/BITLOCKER)
 * load only the first half of the key here; the second half is loaded as
 * the XEX key by cc_setup_xex_state_desc().
 */
static void cc_setup_key_desc(struct crypto_tfm *tfm,
			      struct cipher_req_ctx *req_ctx,
			      unsigned int nbytes, struct cc_hw_desc desc[],
			      unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	unsigned int din_size;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);

		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
			/* We use the AES key size coding for all CPP algs */
			set_key_size_aes(&desc[*seq_size], key_len);
			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
			flow_mode = cc_out_flow_mode(ctx_p);
		} else {
			if (flow_mode == S_DIN_to_AES) {
				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
					set_hw_crypto_key(&desc[*seq_size],
							  ctx_p->hw.key1_slot);
				} else {
					/* CC_POLICY_UNPROTECTED_KEY
					 * Invalid keys are filtered out in
					 * sethkey()
					 */
					/* AES-192 keys were zero-padded to the
					 * max size at setkey time; feed the HW
					 * the padded length
					 */
					din_size = (key_len == 24) ?
						AES_MAX_KEY_SIZE : key_len;

					set_din_type(&desc[*seq_size], DMA_DLLI,
						     key_dma_addr, din_size,
						     NS_BIT);
				}
				set_key_size_aes(&desc[*seq_size], key_len);
			} else {
				/*des*/
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, key_len, NS_BIT);
				set_key_size_des(&desc[*seq_size], key_len);
			}
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			/* first half of the key; second half is the XEX key */
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
 744 
 745 static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
 746                                struct cipher_req_ctx *req_ctx,
 747                                struct scatterlist *dst, struct scatterlist *src,
 748                                unsigned int nbytes, void *areq,
 749                                struct cc_hw_desc desc[], unsigned int *seq_size)
 750 {
 751         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 752         struct device *dev = drvdata_to_dev(ctx_p->drvdata);
 753 
 754         if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
 755                 /* bypass */
 756                 dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
 757                         &req_ctx->mlli_params.mlli_dma_addr,
 758                         req_ctx->mlli_params.mlli_len,
 759                         (unsigned int)ctx_p->drvdata->mlli_sram_addr);
 760                 hw_desc_init(&desc[*seq_size]);
 761                 set_din_type(&desc[*seq_size], DMA_DLLI,
 762                              req_ctx->mlli_params.mlli_dma_addr,
 763                              req_ctx->mlli_params.mlli_len, NS_BIT);
 764                 set_dout_sram(&desc[*seq_size],
 765                               ctx_p->drvdata->mlli_sram_addr,
 766                               req_ctx->mlli_params.mlli_len);
 767                 set_flow_mode(&desc[*seq_size], BYPASS);
 768                 (*seq_size)++;
 769         }
 770 }
 771 
/*
 * cc_setup_flow_desc() - append the data-processing descriptor
 * @tfm:      crypto transformation handle
 * @req_ctx:  per-request context (DMA buffer type, MLLI entry counts)
 * @dst:      destination scatterlist
 * @src:      source scatterlist
 * @nbytes:   number of payload bytes to process
 * @desc:     HW descriptor sequence being built
 * @seq_size: in/out count of descriptors already in @desc
 *
 * Emits one descriptor that streams the payload through the engine:
 * either directly from/to single contiguous DMA buffers (DLLI) or via
 * the MLLI link table previously copied to SRAM by cc_setup_mlli_desc().
 */
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, struct cc_hw_desc desc[],
			       unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
	/* For CPP (policy-protected key) and ECB flows this data descriptor
	 * terminates the sequence — presumably no read-back IV descriptor
	 * follows for these modes (see cc_setup_readiv_desc() usage);
	 * the "last" indication must then be raised here.
	 */
	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
			  ctx_p->cipher_mode == DRV_CIPHER_ECB);

	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		/* Direct case: both src and dst are single DMA entries */
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* MLLI case: input is read through the table in SRAM */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			/* In-place: output reuses the input MLLI table */
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		} else {
			/* Distinct dst: its MLLI table sits right after the
			 * input table in SRAM, offset by in_mlli_nents entries.
			 */
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		}
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}
 832 
 833 static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
 834 {
 835         struct skcipher_request *req = (struct skcipher_request *)cc_req;
 836         struct scatterlist *dst = req->dst;
 837         struct scatterlist *src = req->src;
 838         struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
 839         struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
 840         unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
 841 
 842         if (err != -EINPROGRESS) {
 843                 /* Not a BACKLOG notification */
 844                 cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
 845                 memcpy(req->iv, req_ctx->iv, ivsize);
 846                 kzfree(req_ctx->iv);
 847         }
 848 
 849         skcipher_request_complete(req, err);
 850 }
 851 
/*
 * cc_cipher_process() - build and submit a cipher request to the HW queue
 * @req:       skcipher request (src/dst scatterlists, IV, length)
 * @direction: DRV_CRYPTO_DIRECTION_ENCRYPT or DRV_CRYPTO_DIRECTION_DECRYPT
 *
 * Validates the request, copies the IV to a DMA-able buffer, maps the
 * data buffers, builds the descriptor sequence (IV state, MLLI staging,
 * key load, XEX state, data flow, IV read-back) and pushes it to the
 * request manager.
 *
 * Return: -EINPROGRESS/-EBUSY when queued asynchronously (completion and
 * cleanup then happen in cc_cipher_complete()), 0 for a zero-length
 * request, or a negative error code on failure.
 */
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocted from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup CPP operation details */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
		cc_req.cpp.is_cpp = true;
		cc_req.cpp.alg = ctx_p->cpp.alg;
		cc_req.cpp.slot = ctx_p->cpp.slot;
	}

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				      req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup state (IV)  */
	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Setup MLLI line, if needed */
	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
	/* Setup key */
	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
	/* Setup state (IV and XEX key)  */
	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
	/* Read next IV */
	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Not queued: free the IV copy here. Before a successful
		 * kmemdup() req_ctx->iv is NULL (callers zero the context)
		 * and kzfree(NULL) is a no-op. When queued, ownership of
		 * the buffer passes to cc_cipher_complete().
		 */
		kzfree(req_ctx->iv);
	}

	return rc;
}
 955 
 956 static int cc_cipher_encrypt(struct skcipher_request *req)
 957 {
 958         struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
 959 
 960         memset(req_ctx, 0, sizeof(*req_ctx));
 961 
 962         return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
 963 }
 964 
 965 static int cc_cipher_decrypt(struct skcipher_request *req)
 966 {
 967         struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
 968 
 969         memset(req_ctx, 0, sizeof(*req_ctx));
 970 
 971         return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
 972 }
 973 
 974 /* Block cipher alg */
 975 static const struct cc_alg_template skcipher_algs[] = {
 976         {
 977                 .name = "xts(paes)",
 978                 .driver_name = "xts-paes-ccree",
 979                 .blocksize = 1,
 980                 .template_skcipher = {
 981                         .setkey = cc_cipher_sethkey,
 982                         .encrypt = cc_cipher_encrypt,
 983                         .decrypt = cc_cipher_decrypt,
 984                         .min_keysize = CC_HW_KEY_SIZE,
 985                         .max_keysize = CC_HW_KEY_SIZE,
 986                         .ivsize = AES_BLOCK_SIZE,
 987                         },
 988                 .cipher_mode = DRV_CIPHER_XTS,
 989                 .flow_mode = S_DIN_to_AES,
 990                 .min_hw_rev = CC_HW_REV_630,
 991                 .std_body = CC_STD_NIST,
 992                 .sec_func = true,
 993         },
 994         {
 995                 .name = "xts512(paes)",
 996                 .driver_name = "xts-paes-du512-ccree",
 997                 .blocksize = 1,
 998                 .template_skcipher = {
 999                         .setkey = cc_cipher_sethkey,
1000                         .encrypt = cc_cipher_encrypt,
1001                         .decrypt = cc_cipher_decrypt,
1002                         .min_keysize = CC_HW_KEY_SIZE,
1003                         .max_keysize = CC_HW_KEY_SIZE,
1004                         .ivsize = AES_BLOCK_SIZE,
1005                         },
1006                 .cipher_mode = DRV_CIPHER_XTS,
1007                 .flow_mode = S_DIN_to_AES,
1008                 .data_unit = 512,
1009                 .min_hw_rev = CC_HW_REV_712,
1010                 .std_body = CC_STD_NIST,
1011                 .sec_func = true,
1012         },
1013         {
1014                 .name = "xts4096(paes)",
1015                 .driver_name = "xts-paes-du4096-ccree",
1016                 .blocksize = 1,
1017                 .template_skcipher = {
1018                         .setkey = cc_cipher_sethkey,
1019                         .encrypt = cc_cipher_encrypt,
1020                         .decrypt = cc_cipher_decrypt,
1021                         .min_keysize = CC_HW_KEY_SIZE,
1022                         .max_keysize = CC_HW_KEY_SIZE,
1023                         .ivsize = AES_BLOCK_SIZE,
1024                         },
1025                 .cipher_mode = DRV_CIPHER_XTS,
1026                 .flow_mode = S_DIN_to_AES,
1027                 .data_unit = 4096,
1028                 .min_hw_rev = CC_HW_REV_712,
1029                 .std_body = CC_STD_NIST,
1030                 .sec_func = true,
1031         },
1032         {
1033                 .name = "essiv(paes)",
1034                 .driver_name = "essiv-paes-ccree",
1035                 .blocksize = AES_BLOCK_SIZE,
1036                 .template_skcipher = {
1037                         .setkey = cc_cipher_sethkey,
1038                         .encrypt = cc_cipher_encrypt,
1039                         .decrypt = cc_cipher_decrypt,
1040                         .min_keysize = CC_HW_KEY_SIZE,
1041                         .max_keysize = CC_HW_KEY_SIZE,
1042                         .ivsize = AES_BLOCK_SIZE,
1043                         },
1044                 .cipher_mode = DRV_CIPHER_ESSIV,
1045                 .flow_mode = S_DIN_to_AES,
1046                 .min_hw_rev = CC_HW_REV_712,
1047                 .std_body = CC_STD_NIST,
1048                 .sec_func = true,
1049         },
1050         {
1051                 .name = "essiv512(paes)",
1052                 .driver_name = "essiv-paes-du512-ccree",
1053                 .blocksize = AES_BLOCK_SIZE,
1054                 .template_skcipher = {
1055                         .setkey = cc_cipher_sethkey,
1056                         .encrypt = cc_cipher_encrypt,
1057                         .decrypt = cc_cipher_decrypt,
1058                         .min_keysize = CC_HW_KEY_SIZE,
1059                         .max_keysize = CC_HW_KEY_SIZE,
1060                         .ivsize = AES_BLOCK_SIZE,
1061                         },
1062                 .cipher_mode = DRV_CIPHER_ESSIV,
1063                 .flow_mode = S_DIN_to_AES,
1064                 .data_unit = 512,
1065                 .min_hw_rev = CC_HW_REV_712,
1066                 .std_body = CC_STD_NIST,
1067                 .sec_func = true,
1068         },
1069         {
1070                 .name = "essiv4096(paes)",
1071                 .driver_name = "essiv-paes-du4096-ccree",
1072                 .blocksize = AES_BLOCK_SIZE,
1073                 .template_skcipher = {
1074                         .setkey = cc_cipher_sethkey,
1075                         .encrypt = cc_cipher_encrypt,
1076                         .decrypt = cc_cipher_decrypt,
1077                         .min_keysize = CC_HW_KEY_SIZE,
1078                         .max_keysize = CC_HW_KEY_SIZE,
1079                         .ivsize = AES_BLOCK_SIZE,
1080                         },
1081                 .cipher_mode = DRV_CIPHER_ESSIV,
1082                 .flow_mode = S_DIN_to_AES,
1083                 .data_unit = 4096,
1084                 .min_hw_rev = CC_HW_REV_712,
1085                 .std_body = CC_STD_NIST,
1086                 .sec_func = true,
1087         },
1088         {
1089                 .name = "bitlocker(paes)",
1090                 .driver_name = "bitlocker-paes-ccree",
1091                 .blocksize = AES_BLOCK_SIZE,
1092                 .template_skcipher = {
1093                         .setkey = cc_cipher_sethkey,
1094                         .encrypt = cc_cipher_encrypt,
1095                         .decrypt = cc_cipher_decrypt,
1096                         .min_keysize = CC_HW_KEY_SIZE,
1097                         .max_keysize = CC_HW_KEY_SIZE,
1098                         .ivsize = AES_BLOCK_SIZE,
1099                         },
1100                 .cipher_mode = DRV_CIPHER_BITLOCKER,
1101                 .flow_mode = S_DIN_to_AES,
1102                 .min_hw_rev = CC_HW_REV_712,
1103                 .std_body = CC_STD_NIST,
1104                 .sec_func = true,
1105         },
1106         {
1107                 .name = "bitlocker512(paes)",
1108                 .driver_name = "bitlocker-paes-du512-ccree",
1109                 .blocksize = AES_BLOCK_SIZE,
1110                 .template_skcipher = {
1111                         .setkey = cc_cipher_sethkey,
1112                         .encrypt = cc_cipher_encrypt,
1113                         .decrypt = cc_cipher_decrypt,
1114                         .min_keysize = CC_HW_KEY_SIZE,
1115                         .max_keysize = CC_HW_KEY_SIZE,
1116                         .ivsize = AES_BLOCK_SIZE,
1117                         },
1118                 .cipher_mode = DRV_CIPHER_BITLOCKER,
1119                 .flow_mode = S_DIN_to_AES,
1120                 .data_unit = 512,
1121                 .min_hw_rev = CC_HW_REV_712,
1122                 .std_body = CC_STD_NIST,
1123                 .sec_func = true,
1124         },
1125         {
1126                 .name = "bitlocker4096(paes)",
1127                 .driver_name = "bitlocker-paes-du4096-ccree",
1128                 .blocksize = AES_BLOCK_SIZE,
1129                 .template_skcipher = {
1130                         .setkey = cc_cipher_sethkey,
1131                         .encrypt = cc_cipher_encrypt,
1132                         .decrypt = cc_cipher_decrypt,
1133                         .min_keysize = CC_HW_KEY_SIZE,
1134                         .max_keysize =  CC_HW_KEY_SIZE,
1135                         .ivsize = AES_BLOCK_SIZE,
1136                         },
1137                 .cipher_mode = DRV_CIPHER_BITLOCKER,
1138                 .flow_mode = S_DIN_to_AES,
1139                 .data_unit = 4096,
1140                 .min_hw_rev = CC_HW_REV_712,
1141                 .std_body = CC_STD_NIST,
1142                 .sec_func = true,
1143         },
1144         {
1145                 .name = "ecb(paes)",
1146                 .driver_name = "ecb-paes-ccree",
1147                 .blocksize = AES_BLOCK_SIZE,
1148                 .template_skcipher = {
1149                         .setkey = cc_cipher_sethkey,
1150                         .encrypt = cc_cipher_encrypt,
1151                         .decrypt = cc_cipher_decrypt,
1152                         .min_keysize = CC_HW_KEY_SIZE,
1153                         .max_keysize = CC_HW_KEY_SIZE,
1154                         .ivsize = 0,
1155                         },
1156                 .cipher_mode = DRV_CIPHER_ECB,
1157                 .flow_mode = S_DIN_to_AES,
1158                 .min_hw_rev = CC_HW_REV_712,
1159                 .std_body = CC_STD_NIST,
1160                 .sec_func = true,
1161         },
1162         {
1163                 .name = "cbc(paes)",
1164                 .driver_name = "cbc-paes-ccree",
1165                 .blocksize = AES_BLOCK_SIZE,
1166                 .template_skcipher = {
1167                         .setkey = cc_cipher_sethkey,
1168                         .encrypt = cc_cipher_encrypt,
1169                         .decrypt = cc_cipher_decrypt,
1170                         .min_keysize = CC_HW_KEY_SIZE,
1171                         .max_keysize = CC_HW_KEY_SIZE,
1172                         .ivsize = AES_BLOCK_SIZE,
1173                 },
1174                 .cipher_mode = DRV_CIPHER_CBC,
1175                 .flow_mode = S_DIN_to_AES,
1176                 .min_hw_rev = CC_HW_REV_712,
1177                 .std_body = CC_STD_NIST,
1178                 .sec_func = true,
1179         },
1180         {
1181                 .name = "ofb(paes)",
1182                 .driver_name = "ofb-paes-ccree",
1183                 .blocksize = AES_BLOCK_SIZE,
1184                 .template_skcipher = {
1185                         .setkey = cc_cipher_sethkey,
1186                         .encrypt = cc_cipher_encrypt,
1187                         .decrypt = cc_cipher_decrypt,
1188                         .min_keysize = CC_HW_KEY_SIZE,
1189                         .max_keysize = CC_HW_KEY_SIZE,
1190                         .ivsize = AES_BLOCK_SIZE,
1191                         },
1192                 .cipher_mode = DRV_CIPHER_OFB,
1193                 .flow_mode = S_DIN_to_AES,
1194                 .min_hw_rev = CC_HW_REV_712,
1195                 .std_body = CC_STD_NIST,
1196                 .sec_func = true,
1197         },
1198         {
1199                 .name = "cts(cbc(paes))",
1200                 .driver_name = "cts-cbc-paes-ccree",
1201                 .blocksize = AES_BLOCK_SIZE,
1202                 .template_skcipher = {
1203                         .setkey = cc_cipher_sethkey,
1204                         .encrypt = cc_cipher_encrypt,
1205                         .decrypt = cc_cipher_decrypt,
1206                         .min_keysize = CC_HW_KEY_SIZE,
1207                         .max_keysize = CC_HW_KEY_SIZE,
1208                         .ivsize = AES_BLOCK_SIZE,
1209                         },
1210                 .cipher_mode = DRV_CIPHER_CBC_CTS,
1211                 .flow_mode = S_DIN_to_AES,
1212                 .min_hw_rev = CC_HW_REV_712,
1213                 .std_body = CC_STD_NIST,
1214                 .sec_func = true,
1215         },
1216         {
1217                 .name = "ctr(paes)",
1218                 .driver_name = "ctr-paes-ccree",
1219                 .blocksize = 1,
1220                 .template_skcipher = {
1221                         .setkey = cc_cipher_sethkey,
1222                         .encrypt = cc_cipher_encrypt,
1223                         .decrypt = cc_cipher_decrypt,
1224                         .min_keysize = CC_HW_KEY_SIZE,
1225                         .max_keysize = CC_HW_KEY_SIZE,
1226                         .ivsize = AES_BLOCK_SIZE,
1227                         },
1228                 .cipher_mode = DRV_CIPHER_CTR,
1229                 .flow_mode = S_DIN_to_AES,
1230                 .min_hw_rev = CC_HW_REV_712,
1231                 .std_body = CC_STD_NIST,
1232                 .sec_func = true,
1233         },
1234         {
1235                 .name = "xts(aes)",
1236                 .driver_name = "xts-aes-ccree",
1237                 .blocksize = 1,
1238                 .template_skcipher = {
1239                         .setkey = cc_cipher_setkey,
1240                         .encrypt = cc_cipher_encrypt,
1241                         .decrypt = cc_cipher_decrypt,
1242                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1243                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1244                         .ivsize = AES_BLOCK_SIZE,
1245                         },
1246                 .cipher_mode = DRV_CIPHER_XTS,
1247                 .flow_mode = S_DIN_to_AES,
1248                 .min_hw_rev = CC_HW_REV_630,
1249                 .std_body = CC_STD_NIST,
1250         },
1251         {
1252                 .name = "xts512(aes)",
1253                 .driver_name = "xts-aes-du512-ccree",
1254                 .blocksize = 1,
1255                 .template_skcipher = {
1256                         .setkey = cc_cipher_setkey,
1257                         .encrypt = cc_cipher_encrypt,
1258                         .decrypt = cc_cipher_decrypt,
1259                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1260                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1261                         .ivsize = AES_BLOCK_SIZE,
1262                         },
1263                 .cipher_mode = DRV_CIPHER_XTS,
1264                 .flow_mode = S_DIN_to_AES,
1265                 .data_unit = 512,
1266                 .min_hw_rev = CC_HW_REV_712,
1267                 .std_body = CC_STD_NIST,
1268         },
1269         {
1270                 .name = "xts4096(aes)",
1271                 .driver_name = "xts-aes-du4096-ccree",
1272                 .blocksize = 1,
1273                 .template_skcipher = {
1274                         .setkey = cc_cipher_setkey,
1275                         .encrypt = cc_cipher_encrypt,
1276                         .decrypt = cc_cipher_decrypt,
1277                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1278                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1279                         .ivsize = AES_BLOCK_SIZE,
1280                         },
1281                 .cipher_mode = DRV_CIPHER_XTS,
1282                 .flow_mode = S_DIN_to_AES,
1283                 .data_unit = 4096,
1284                 .min_hw_rev = CC_HW_REV_712,
1285                 .std_body = CC_STD_NIST,
1286         },
1287         {
1288                 .name = "essiv(aes)",
1289                 .driver_name = "essiv-aes-ccree",
1290                 .blocksize = AES_BLOCK_SIZE,
1291                 .template_skcipher = {
1292                         .setkey = cc_cipher_setkey,
1293                         .encrypt = cc_cipher_encrypt,
1294                         .decrypt = cc_cipher_decrypt,
1295                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1296                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1297                         .ivsize = AES_BLOCK_SIZE,
1298                         },
1299                 .cipher_mode = DRV_CIPHER_ESSIV,
1300                 .flow_mode = S_DIN_to_AES,
1301                 .min_hw_rev = CC_HW_REV_712,
1302                 .std_body = CC_STD_NIST,
1303         },
1304         {
1305                 .name = "essiv512(aes)",
1306                 .driver_name = "essiv-aes-du512-ccree",
1307                 .blocksize = AES_BLOCK_SIZE,
1308                 .template_skcipher = {
1309                         .setkey = cc_cipher_setkey,
1310                         .encrypt = cc_cipher_encrypt,
1311                         .decrypt = cc_cipher_decrypt,
1312                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1313                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1314                         .ivsize = AES_BLOCK_SIZE,
1315                         },
1316                 .cipher_mode = DRV_CIPHER_ESSIV,
1317                 .flow_mode = S_DIN_to_AES,
1318                 .data_unit = 512,
1319                 .min_hw_rev = CC_HW_REV_712,
1320                 .std_body = CC_STD_NIST,
1321         },
1322         {
1323                 .name = "essiv4096(aes)",
1324                 .driver_name = "essiv-aes-du4096-ccree",
1325                 .blocksize = AES_BLOCK_SIZE,
1326                 .template_skcipher = {
1327                         .setkey = cc_cipher_setkey,
1328                         .encrypt = cc_cipher_encrypt,
1329                         .decrypt = cc_cipher_decrypt,
1330                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1331                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1332                         .ivsize = AES_BLOCK_SIZE,
1333                         },
1334                 .cipher_mode = DRV_CIPHER_ESSIV,
1335                 .flow_mode = S_DIN_to_AES,
1336                 .data_unit = 4096,
1337                 .min_hw_rev = CC_HW_REV_712,
1338                 .std_body = CC_STD_NIST,
1339         },
1340         {
1341                 .name = "bitlocker(aes)",
1342                 .driver_name = "bitlocker-aes-ccree",
1343                 .blocksize = AES_BLOCK_SIZE,
1344                 .template_skcipher = {
1345                         .setkey = cc_cipher_setkey,
1346                         .encrypt = cc_cipher_encrypt,
1347                         .decrypt = cc_cipher_decrypt,
1348                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1349                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1350                         .ivsize = AES_BLOCK_SIZE,
1351                         },
1352                 .cipher_mode = DRV_CIPHER_BITLOCKER,
1353                 .flow_mode = S_DIN_to_AES,
1354                 .min_hw_rev = CC_HW_REV_712,
1355                 .std_body = CC_STD_NIST,
1356         },
1357         {
1358                 .name = "bitlocker512(aes)",
1359                 .driver_name = "bitlocker-aes-du512-ccree",
1360                 .blocksize = AES_BLOCK_SIZE,
1361                 .template_skcipher = {
1362                         .setkey = cc_cipher_setkey,
1363                         .encrypt = cc_cipher_encrypt,
1364                         .decrypt = cc_cipher_decrypt,
1365                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1366                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1367                         .ivsize = AES_BLOCK_SIZE,
1368                         },
1369                 .cipher_mode = DRV_CIPHER_BITLOCKER,
1370                 .flow_mode = S_DIN_to_AES,
1371                 .data_unit = 512,
1372                 .min_hw_rev = CC_HW_REV_712,
1373                 .std_body = CC_STD_NIST,
1374         },
1375         {
1376                 .name = "bitlocker4096(aes)",
1377                 .driver_name = "bitlocker-aes-du4096-ccree",
1378                 .blocksize = AES_BLOCK_SIZE,
1379                 .template_skcipher = {
1380                         .setkey = cc_cipher_setkey,
1381                         .encrypt = cc_cipher_encrypt,
1382                         .decrypt = cc_cipher_decrypt,
1383                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1384                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1385                         .ivsize = AES_BLOCK_SIZE,
1386                         },
1387                 .cipher_mode = DRV_CIPHER_BITLOCKER,
1388                 .flow_mode = S_DIN_to_AES,
1389                 .data_unit = 4096,
1390                 .min_hw_rev = CC_HW_REV_712,
1391                 .std_body = CC_STD_NIST,
1392         },
1393         {
1394                 .name = "ecb(aes)",
1395                 .driver_name = "ecb-aes-ccree",
1396                 .blocksize = AES_BLOCK_SIZE,
1397                 .template_skcipher = {
1398                         .setkey = cc_cipher_setkey,
1399                         .encrypt = cc_cipher_encrypt,
1400                         .decrypt = cc_cipher_decrypt,
1401                         .min_keysize = AES_MIN_KEY_SIZE,
1402                         .max_keysize = AES_MAX_KEY_SIZE,
1403                         .ivsize = 0,
1404                         },
1405                 .cipher_mode = DRV_CIPHER_ECB,
1406                 .flow_mode = S_DIN_to_AES,
1407                 .min_hw_rev = CC_HW_REV_630,
1408                 .std_body = CC_STD_NIST,
1409         },
1410         {
1411                 .name = "cbc(aes)",
1412                 .driver_name = "cbc-aes-ccree",
1413                 .blocksize = AES_BLOCK_SIZE,
1414                 .template_skcipher = {
1415                         .setkey = cc_cipher_setkey,
1416                         .encrypt = cc_cipher_encrypt,
1417                         .decrypt = cc_cipher_decrypt,
1418                         .min_keysize = AES_MIN_KEY_SIZE,
1419                         .max_keysize = AES_MAX_KEY_SIZE,
1420                         .ivsize = AES_BLOCK_SIZE,
1421                 },
1422                 .cipher_mode = DRV_CIPHER_CBC,
1423                 .flow_mode = S_DIN_to_AES,
1424                 .min_hw_rev = CC_HW_REV_630,
1425                 .std_body = CC_STD_NIST,
1426         },
1427         {
1428                 .name = "ofb(aes)",
1429                 .driver_name = "ofb-aes-ccree",
1430                 .blocksize = AES_BLOCK_SIZE,
1431                 .template_skcipher = {
1432                         .setkey = cc_cipher_setkey,
1433                         .encrypt = cc_cipher_encrypt,
1434                         .decrypt = cc_cipher_decrypt,
1435                         .min_keysize = AES_MIN_KEY_SIZE,
1436                         .max_keysize = AES_MAX_KEY_SIZE,
1437                         .ivsize = AES_BLOCK_SIZE,
1438                         },
1439                 .cipher_mode = DRV_CIPHER_OFB,
1440                 .flow_mode = S_DIN_to_AES,
1441                 .min_hw_rev = CC_HW_REV_630,
1442                 .std_body = CC_STD_NIST,
1443         },
1444         {
1445                 .name = "cts(cbc(aes))",
1446                 .driver_name = "cts-cbc-aes-ccree",
1447                 .blocksize = AES_BLOCK_SIZE,
1448                 .template_skcipher = {
1449                         .setkey = cc_cipher_setkey,
1450                         .encrypt = cc_cipher_encrypt,
1451                         .decrypt = cc_cipher_decrypt,
1452                         .min_keysize = AES_MIN_KEY_SIZE,
1453                         .max_keysize = AES_MAX_KEY_SIZE,
1454                         .ivsize = AES_BLOCK_SIZE,
1455                         },
1456                 .cipher_mode = DRV_CIPHER_CBC_CTS,
1457                 .flow_mode = S_DIN_to_AES,
1458                 .min_hw_rev = CC_HW_REV_630,
1459                 .std_body = CC_STD_NIST,
1460         },
1461         {
1462                 .name = "ctr(aes)",
1463                 .driver_name = "ctr-aes-ccree",
1464                 .blocksize = 1,
1465                 .template_skcipher = {
1466                         .setkey = cc_cipher_setkey,
1467                         .encrypt = cc_cipher_encrypt,
1468                         .decrypt = cc_cipher_decrypt,
1469                         .min_keysize = AES_MIN_KEY_SIZE,
1470                         .max_keysize = AES_MAX_KEY_SIZE,
1471                         .ivsize = AES_BLOCK_SIZE,
1472                         },
1473                 .cipher_mode = DRV_CIPHER_CTR,
1474                 .flow_mode = S_DIN_to_AES,
1475                 .min_hw_rev = CC_HW_REV_630,
1476                 .std_body = CC_STD_NIST,
1477         },
1478         {
1479                 .name = "cbc(des3_ede)",
1480                 .driver_name = "cbc-3des-ccree",
1481                 .blocksize = DES3_EDE_BLOCK_SIZE,
1482                 .template_skcipher = {
1483                         .setkey = cc_cipher_setkey,
1484                         .encrypt = cc_cipher_encrypt,
1485                         .decrypt = cc_cipher_decrypt,
1486                         .min_keysize = DES3_EDE_KEY_SIZE,
1487                         .max_keysize = DES3_EDE_KEY_SIZE,
1488                         .ivsize = DES3_EDE_BLOCK_SIZE,
1489                         },
1490                 .cipher_mode = DRV_CIPHER_CBC,
1491                 .flow_mode = S_DIN_to_DES,
1492                 .min_hw_rev = CC_HW_REV_630,
1493                 .std_body = CC_STD_NIST,
1494         },
1495         {
1496                 .name = "ecb(des3_ede)",
1497                 .driver_name = "ecb-3des-ccree",
1498                 .blocksize = DES3_EDE_BLOCK_SIZE,
1499                 .template_skcipher = {
1500                         .setkey = cc_cipher_setkey,
1501                         .encrypt = cc_cipher_encrypt,
1502                         .decrypt = cc_cipher_decrypt,
1503                         .min_keysize = DES3_EDE_KEY_SIZE,
1504                         .max_keysize = DES3_EDE_KEY_SIZE,
1505                         .ivsize = 0,
1506                         },
1507                 .cipher_mode = DRV_CIPHER_ECB,
1508                 .flow_mode = S_DIN_to_DES,
1509                 .min_hw_rev = CC_HW_REV_630,
1510                 .std_body = CC_STD_NIST,
1511         },
1512         {
1513                 .name = "cbc(des)",
1514                 .driver_name = "cbc-des-ccree",
1515                 .blocksize = DES_BLOCK_SIZE,
1516                 .template_skcipher = {
1517                         .setkey = cc_cipher_setkey,
1518                         .encrypt = cc_cipher_encrypt,
1519                         .decrypt = cc_cipher_decrypt,
1520                         .min_keysize = DES_KEY_SIZE,
1521                         .max_keysize = DES_KEY_SIZE,
1522                         .ivsize = DES_BLOCK_SIZE,
1523                         },
1524                 .cipher_mode = DRV_CIPHER_CBC,
1525                 .flow_mode = S_DIN_to_DES,
1526                 .min_hw_rev = CC_HW_REV_630,
1527                 .std_body = CC_STD_NIST,
1528         },
1529         {
1530                 .name = "ecb(des)",
1531                 .driver_name = "ecb-des-ccree",
1532                 .blocksize = DES_BLOCK_SIZE,
1533                 .template_skcipher = {
1534                         .setkey = cc_cipher_setkey,
1535                         .encrypt = cc_cipher_encrypt,
1536                         .decrypt = cc_cipher_decrypt,
1537                         .min_keysize = DES_KEY_SIZE,
1538                         .max_keysize = DES_KEY_SIZE,
1539                         .ivsize = 0,
1540                         },
1541                 .cipher_mode = DRV_CIPHER_ECB,
1542                 .flow_mode = S_DIN_to_DES,
1543                 .min_hw_rev = CC_HW_REV_630,
1544                 .std_body = CC_STD_NIST,
1545         },
1546         {
1547                 .name = "cbc(sm4)",
1548                 .driver_name = "cbc-sm4-ccree",
1549                 .blocksize = SM4_BLOCK_SIZE,
1550                 .template_skcipher = {
1551                         .setkey = cc_cipher_setkey,
1552                         .encrypt = cc_cipher_encrypt,
1553                         .decrypt = cc_cipher_decrypt,
1554                         .min_keysize = SM4_KEY_SIZE,
1555                         .max_keysize = SM4_KEY_SIZE,
1556                         .ivsize = SM4_BLOCK_SIZE,
1557                         },
1558                 .cipher_mode = DRV_CIPHER_CBC,
1559                 .flow_mode = S_DIN_to_SM4,
1560                 .min_hw_rev = CC_HW_REV_713,
1561                 .std_body = CC_STD_OSCCA,
1562         },
1563         {
1564                 .name = "ecb(sm4)",
1565                 .driver_name = "ecb-sm4-ccree",
1566                 .blocksize = SM4_BLOCK_SIZE,
1567                 .template_skcipher = {
1568                         .setkey = cc_cipher_setkey,
1569                         .encrypt = cc_cipher_encrypt,
1570                         .decrypt = cc_cipher_decrypt,
1571                         .min_keysize = SM4_KEY_SIZE,
1572                         .max_keysize = SM4_KEY_SIZE,
1573                         .ivsize = 0,
1574                         },
1575                 .cipher_mode = DRV_CIPHER_ECB,
1576                 .flow_mode = S_DIN_to_SM4,
1577                 .min_hw_rev = CC_HW_REV_713,
1578                 .std_body = CC_STD_OSCCA,
1579         },
1580         {
1581                 .name = "ctr(sm4)",
1582                 .driver_name = "ctr-sm4-ccree",
1583                 .blocksize = SM4_BLOCK_SIZE,
1584                 .template_skcipher = {
1585                         .setkey = cc_cipher_setkey,
1586                         .encrypt = cc_cipher_encrypt,
1587                         .decrypt = cc_cipher_decrypt,
1588                         .min_keysize = SM4_KEY_SIZE,
1589                         .max_keysize = SM4_KEY_SIZE,
1590                         .ivsize = SM4_BLOCK_SIZE,
1591                         },
1592                 .cipher_mode = DRV_CIPHER_CTR,
1593                 .flow_mode = S_DIN_to_SM4,
1594                 .min_hw_rev = CC_HW_REV_713,
1595                 .std_body = CC_STD_OSCCA,
1596         },
1597         {
1598                 .name = "cbc(psm4)",
1599                 .driver_name = "cbc-psm4-ccree",
1600                 .blocksize = SM4_BLOCK_SIZE,
1601                 .template_skcipher = {
1602                         .setkey = cc_cipher_sethkey,
1603                         .encrypt = cc_cipher_encrypt,
1604                         .decrypt = cc_cipher_decrypt,
1605                         .min_keysize = CC_HW_KEY_SIZE,
1606                         .max_keysize = CC_HW_KEY_SIZE,
1607                         .ivsize = SM4_BLOCK_SIZE,
1608                         },
1609                 .cipher_mode = DRV_CIPHER_CBC,
1610                 .flow_mode = S_DIN_to_SM4,
1611                 .min_hw_rev = CC_HW_REV_713,
1612                 .std_body = CC_STD_OSCCA,
1613                 .sec_func = true,
1614         },
1615         {
1616                 .name = "ctr(psm4)",
1617                 .driver_name = "ctr-psm4-ccree",
1618                 .blocksize = SM4_BLOCK_SIZE,
1619                 .template_skcipher = {
1620                         .setkey = cc_cipher_sethkey,
1621                         .encrypt = cc_cipher_encrypt,
1622                         .decrypt = cc_cipher_decrypt,
1623                         .min_keysize = CC_HW_KEY_SIZE,
1624                         .max_keysize = CC_HW_KEY_SIZE,
1625                         .ivsize = SM4_BLOCK_SIZE,
1626                         },
1627                 .cipher_mode = DRV_CIPHER_CTR,
1628                 .flow_mode = S_DIN_to_SM4,
1629                 .min_hw_rev = CC_HW_REV_713,
1630                 .std_body = CC_STD_OSCCA,
1631                 .sec_func = true,
1632         },
1633 };
1634 
1635 static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
1636                                            struct device *dev)
1637 {
1638         struct cc_crypto_alg *t_alg;
1639         struct skcipher_alg *alg;
1640 
1641         t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
1642         if (!t_alg)
1643                 return ERR_PTR(-ENOMEM);
1644 
1645         alg = &t_alg->skcipher_alg;
1646 
1647         memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));
1648 
1649         snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
1650         snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1651                  tmpl->driver_name);
1652         alg->base.cra_module = THIS_MODULE;
1653         alg->base.cra_priority = CC_CRA_PRIO;
1654         alg->base.cra_blocksize = tmpl->blocksize;
1655         alg->base.cra_alignmask = 0;
1656         alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);
1657 
1658         alg->base.cra_init = cc_cipher_init;
1659         alg->base.cra_exit = cc_cipher_exit;
1660         alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
1661 
1662         t_alg->cipher_mode = tmpl->cipher_mode;
1663         t_alg->flow_mode = tmpl->flow_mode;
1664         t_alg->data_unit = tmpl->data_unit;
1665 
1666         return t_alg;
1667 }
1668 
1669 int cc_cipher_free(struct cc_drvdata *drvdata)
1670 {
1671         struct cc_crypto_alg *t_alg, *n;
1672         struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;
1673 
1674         if (cipher_handle) {
1675                 /* Remove registered algs */
1676                 list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
1677                                          entry) {
1678                         crypto_unregister_skcipher(&t_alg->skcipher_alg);
1679                         list_del(&t_alg->entry);
1680                         kfree(t_alg);
1681                 }
1682                 kfree(cipher_handle);
1683                 drvdata->cipher_handle = NULL;
1684         }
1685         return 0;
1686 }
1687 
1688 int cc_cipher_alloc(struct cc_drvdata *drvdata)
1689 {
1690         struct cc_cipher_handle *cipher_handle;
1691         struct cc_crypto_alg *t_alg;
1692         struct device *dev = drvdata_to_dev(drvdata);
1693         int rc = -ENOMEM;
1694         int alg;
1695 
1696         cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
1697         if (!cipher_handle)
1698                 return -ENOMEM;
1699 
1700         INIT_LIST_HEAD(&cipher_handle->alg_list);
1701         drvdata->cipher_handle = cipher_handle;
1702 
1703         /* Linux crypto */
1704         dev_dbg(dev, "Number of algorithms = %zu\n",
1705                 ARRAY_SIZE(skcipher_algs));
1706         for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
1707                 if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
1708                     !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
1709                     (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
1710                         continue;
1711 
1712                 dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
1713                 t_alg = cc_create_alg(&skcipher_algs[alg], dev);
1714                 if (IS_ERR(t_alg)) {
1715                         rc = PTR_ERR(t_alg);
1716                         dev_err(dev, "%s alg allocation failed\n",
1717                                 skcipher_algs[alg].driver_name);
1718                         goto fail0;
1719                 }
1720                 t_alg->drvdata = drvdata;
1721 
1722                 dev_dbg(dev, "registering %s\n",
1723                         skcipher_algs[alg].driver_name);
1724                 rc = crypto_register_skcipher(&t_alg->skcipher_alg);
1725                 dev_dbg(dev, "%s alg registration rc = %x\n",
1726                         t_alg->skcipher_alg.base.cra_driver_name, rc);
1727                 if (rc) {
1728                         dev_err(dev, "%s alg registration failed\n",
1729                                 t_alg->skcipher_alg.base.cra_driver_name);
1730                         kfree(t_alg);
1731                         goto fail0;
1732                 } else {
1733                         list_add_tail(&t_alg->entry,
1734                                       &cipher_handle->alg_list);
1735                         dev_dbg(dev, "Registered %s\n",
1736                                 t_alg->skcipher_alg.base.cra_driver_name);
1737                 }
1738         }
1739         return 0;
1740 
1741 fail0:
1742         cc_cipher_free(drvdata);
1743         return rc;
1744 }

/* [<][>][^][v][top][bottom][index][help] */