ctx_p 69 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 71 drivers/crypto/ccree/cc_cipher.c return ctx_p->key_type;
ctx_p 74 drivers/crypto/ccree/cc_cipher.c static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
ctx_p 76 drivers/crypto/ccree/cc_cipher.c switch (ctx_p->flow_mode) {
ctx_p 81 drivers/crypto/ccree/cc_cipher.c if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
ctx_p 82 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
ctx_p 83 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
ctx_p 90 drivers/crypto/ccree/cc_cipher.c if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
ctx_p 91 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
ctx_p 92 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
ctx_p 112 drivers/crypto/ccree/cc_cipher.c static int validate_data_size(struct cc_cipher_ctx *ctx_p,
ctx_p 115 drivers/crypto/ccree/cc_cipher.c switch (ctx_p->flow_mode) {
ctx_p 117 drivers/crypto/ccree/cc_cipher.c switch (ctx_p->cipher_mode) {
ctx_p 142 drivers/crypto/ccree/cc_cipher.c switch (ctx_p->cipher_mode) {
ctx_p 160 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 168 drivers/crypto/ccree/cc_cipher.c dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
ctx_p 174 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode = cc_alg->cipher_mode;
ctx_p 175 drivers/crypto/ccree/cc_cipher.c ctx_p->flow_mode = cc_alg->flow_mode;
ctx_p 176 drivers/crypto/ccree/cc_cipher.c ctx_p->drvdata = cc_alg->drvdata;
ctx_p 179 drivers/crypto/ccree/cc_cipher.c ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
ctx_p 180 drivers/crypto/ccree/cc_cipher.c if (!ctx_p->user.key)
ctx_p 184 drivers/crypto/ccree/cc_cipher.c ctx_p->user.key);
ctx_p 187 drivers/crypto/ccree/cc_cipher.c ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
ctx_p 190 drivers/crypto/ccree/cc_cipher.c if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
ctx_p 192 drivers/crypto/ccree/cc_cipher.c max_key_buf_size, ctx_p->user.key);
ctx_p 196 drivers/crypto/ccree/cc_cipher.c max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);
ctx_p 198 drivers/crypto/ccree/cc_cipher.c if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
ctx_p 200 drivers/crypto/ccree/cc_cipher.c ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
ctx_p 201 drivers/crypto/ccree/cc_cipher.c if (IS_ERR(ctx_p->shash_tfm)) {
ctx_p 203 drivers/crypto/ccree/cc_cipher.c return PTR_ERR(ctx_p->shash_tfm);
ctx_p 217 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 218 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata);
ctx_p 223 drivers/crypto/ccree/cc_cipher.c if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
ctx_p 225 drivers/crypto/ccree/cc_cipher.c crypto_free_shash(ctx_p->shash_tfm);
ctx_p 226 drivers/crypto/ccree/cc_cipher.c ctx_p->shash_tfm = NULL;
ctx_p 230 drivers/crypto/ccree/cc_cipher.c dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
ctx_p 233 drivers/crypto/ccree/cc_cipher.c &ctx_p->user.key_dma_addr);
ctx_p 236 drivers/crypto/ccree/cc_cipher.c kzfree(ctx_p->user.key);
ctx_p 237 drivers/crypto/ccree/cc_cipher.c dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
key=@%p\n", ctx_p->user.key); ctx_p 281 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); ctx_p 282 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata); ctx_p 286 drivers/crypto/ccree/cc_cipher.c ctx_p, crypto_tfm_alg_name(tfm), keylen); ctx_p 305 drivers/crypto/ccree/cc_cipher.c if (validate_keys_sizes(ctx_p, keylen)) { ctx_p 311 drivers/crypto/ccree/cc_cipher.c ctx_p->keylen = keylen; ctx_p 315 drivers/crypto/ccree/cc_cipher.c if (ctx_p->flow_mode == S_DIN_to_SM4) { ctx_p 320 drivers/crypto/ccree/cc_cipher.c ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); ctx_p 321 drivers/crypto/ccree/cc_cipher.c if (ctx_p->hw.key1_slot == END_OF_KEYS) { ctx_p 327 drivers/crypto/ccree/cc_cipher.c if (ctx_p->cipher_mode == DRV_CIPHER_XTS || ctx_p 328 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode == DRV_CIPHER_ESSIV || ctx_p 329 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) { ctx_p 336 drivers/crypto/ccree/cc_cipher.c ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); ctx_p 337 drivers/crypto/ccree/cc_cipher.c if (ctx_p->hw.key2_slot == END_OF_KEYS) { ctx_p 344 drivers/crypto/ccree/cc_cipher.c ctx_p->key_type = CC_HW_PROTECTED_KEY; ctx_p 346 drivers/crypto/ccree/cc_cipher.c ctx_p->hw.key1_slot, ctx_p->hw.key2_slot); ctx_p 350 drivers/crypto/ccree/cc_cipher.c if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) { ctx_p 355 drivers/crypto/ccree/cc_cipher.c if (ctx_p->cipher_mode != DRV_CIPHER_CBC && ctx_p 356 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode != DRV_CIPHER_CTR) { ctx_p 361 drivers/crypto/ccree/cc_cipher.c ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1); ctx_p 362 drivers/crypto/ccree/cc_cipher.c if (ctx_p->flow_mode == S_DIN_to_AES) ctx_p 363 drivers/crypto/ccree/cc_cipher.c ctx_p->cpp.alg = CC_CPP_AES; ctx_p 365 drivers/crypto/ccree/cc_cipher.c ctx_p->cpp.alg = CC_CPP_SM4; ctx_p 366 drivers/crypto/ccree/cc_cipher.c ctx_p->key_type = CC_POLICY_PROTECTED_KEY; ctx_p 368 drivers/crypto/ccree/cc_cipher.c ctx_p->cpp.alg, ctx_p->cpp.slot); ctx_p 383 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); ctx_p 384 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata); ctx_p 391 drivers/crypto/ccree/cc_cipher.c ctx_p, crypto_tfm_alg_name(tfm), keylen); ctx_p 396 drivers/crypto/ccree/cc_cipher.c if (validate_keys_sizes(ctx_p, keylen)) { ctx_p 402 drivers/crypto/ccree/cc_cipher.c ctx_p->key_type = CC_UNPROTECTED_KEY; ctx_p 409 drivers/crypto/ccree/cc_cipher.c if (ctx_p->flow_mode == S_DIN_to_DES) { ctx_p 418 drivers/crypto/ccree/cc_cipher.c if (ctx_p->cipher_mode == DRV_CIPHER_XTS && ctx_p 425 drivers/crypto/ccree/cc_cipher.c dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr, ctx_p 428 drivers/crypto/ccree/cc_cipher.c memcpy(ctx_p->user.key, key, keylen); ctx_p 430 drivers/crypto/ccree/cc_cipher.c memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24); ctx_p 432 drivers/crypto/ccree/cc_cipher.c if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { ctx_p 437 drivers/crypto/ccree/cc_cipher.c SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm); ctx_p 439 drivers/crypto/ccree/cc_cipher.c desc->tfm = ctx_p->shash_tfm; ctx_p 441 drivers/crypto/ccree/cc_cipher.c err = crypto_shash_digest(desc, ctx_p->user.key, key_len, ctx_p 442 drivers/crypto/ccree/cc_cipher.c ctx_p->user.key + key_len); ctx_p 448 drivers/crypto/ccree/cc_cipher.c dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, ctx_p 450 drivers/crypto/ccree/cc_cipher.c ctx_p->keylen = 
ctx_p 456 drivers/crypto/ccree/cc_cipher.c static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
ctx_p 458 drivers/crypto/ccree/cc_cipher.c switch (ctx_p->flow_mode) {
ctx_p 466 drivers/crypto/ccree/cc_cipher.c return ctx_p->flow_mode;
ctx_p 475 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 476 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata);
ctx_p 477 drivers/crypto/ccree/cc_cipher.c int cipher_mode = ctx_p->cipher_mode;
ctx_p 478 drivers/crypto/ccree/cc_cipher.c int flow_mode = cc_out_setup_mode(ctx_p);
ctx_p 482 drivers/crypto/ccree/cc_cipher.c if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
ctx_p 504 drivers/crypto/ccree/cc_cipher.c set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
ctx_p 518 drivers/crypto/ccree/cc_cipher.c set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
ctx_p 533 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 534 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata);
ctx_p 535 drivers/crypto/ccree/cc_cipher.c int cipher_mode = ctx_p->cipher_mode;
ctx_p 536 drivers/crypto/ccree/cc_cipher.c int flow_mode = ctx_p->flow_mode;
ctx_p 586 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 587 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata);
ctx_p 588 drivers/crypto/ccree/cc_cipher.c int cipher_mode = ctx_p->cipher_mode;
ctx_p 589 drivers/crypto/ccree/cc_cipher.c int flow_mode = ctx_p->flow_mode;
ctx_p 591 drivers/crypto/ccree/cc_cipher.c dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
ctx_p 592 drivers/crypto/ccree/cc_cipher.c unsigned int key_len = ctx_p->keylen;
ctx_p 620 drivers/crypto/ccree/cc_cipher.c ctx_p->hw.key2_slot);
ctx_p 648 drivers/crypto/ccree/cc_cipher.c static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
ctx_p 650 drivers/crypto/ccree/cc_cipher.c switch (ctx_p->flow_mode) {
ctx_p 658 drivers/crypto/ccree/cc_cipher.c return ctx_p->flow_mode;
ctx_p 667 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 668 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata);
ctx_p 669 drivers/crypto/ccree/cc_cipher.c int cipher_mode = ctx_p->cipher_mode;
ctx_p 670 drivers/crypto/ccree/cc_cipher.c int flow_mode = ctx_p->flow_mode;
ctx_p 672 drivers/crypto/ccree/cc_cipher.c dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
ctx_p 673 drivers/crypto/ccree/cc_cipher.c unsigned int key_len = ctx_p->keylen;
ctx_p 690 drivers/crypto/ccree/cc_cipher.c set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
ctx_p 691 drivers/crypto/ccree/cc_cipher.c flow_mode = cc_out_flow_mode(ctx_p);
ctx_p 696 drivers/crypto/ccree/cc_cipher.c ctx_p->hw.key1_slot);
ctx_p 730 drivers/crypto/ccree/cc_cipher.c ctx_p->hw.key1_slot);
ctx_p 751 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 752 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata);
ctx_p 759 drivers/crypto/ccree/cc_cipher.c (unsigned int)ctx_p->drvdata->mlli_sram_addr);
ctx_p 765 drivers/crypto/ccree/cc_cipher.c ctx_p->drvdata->mlli_sram_addr,
ctx_p 778 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 779 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata);
ctx_p 780 drivers/crypto/ccree/cc_cipher.c unsigned int flow_mode = cc_out_flow_mode(ctx_p);
ctx_p 781 drivers/crypto/ccree/cc_cipher.c bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
ctx_p 782 drivers/crypto/ccree/cc_cipher.c ctx_p->cipher_mode == DRV_CIPHER_ECB);
ctx_p 796 drivers/crypto/ccree/cc_cipher.c set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
ctx_p 803 drivers/crypto/ccree/cc_cipher.c ctx_p->drvdata->mlli_sram_addr,
ctx_p 807 drivers/crypto/ccree/cc_cipher.c (unsigned int)ctx_p->drvdata->mlli_sram_addr,
ctx_p 808 drivers/crypto/ccree/cc_cipher.c (unsigned int)ctx_p->drvdata->mlli_sram_addr);
ctx_p 810 drivers/crypto/ccree/cc_cipher.c ctx_p->drvdata->mlli_sram_addr,
ctx_p 815 drivers/crypto/ccree/cc_cipher.c (unsigned int)ctx_p->drvdata->mlli_sram_addr,
ctx_p 816 drivers/crypto/ccree/cc_cipher.c (unsigned int)ctx_p->drvdata->mlli_sram_addr +
ctx_p 819 drivers/crypto/ccree/cc_cipher.c (ctx_p->drvdata->mlli_sram_addr +
ctx_p 826 drivers/crypto/ccree/cc_cipher.c set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
ctx_p 863 drivers/crypto/ccree/cc_cipher.c struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
ctx_p 864 drivers/crypto/ccree/cc_cipher.c struct device *dev = drvdata_to_dev(ctx_p->drvdata);
ctx_p 878 drivers/crypto/ccree/cc_cipher.c if (validate_data_size(ctx_p, nbytes)) {
ctx_p 904 drivers/crypto/ccree/cc_cipher.c if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
ctx_p 906 drivers/crypto/ccree/cc_cipher.c cc_req.cpp.alg = ctx_p->cpp.alg;
ctx_p 907 drivers/crypto/ccree/cc_cipher.c cc_req.cpp.slot = ctx_p->cpp.slot;
ctx_p 915 drivers/crypto/ccree/cc_cipher.c rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
ctx_p 939 drivers/crypto/ccree/cc_cipher.c rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,