areq_ctx 128 crypto/authenc.c struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 129 crypto/authenc.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);
areq_ctx 149 crypto/authenc.c struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 150 crypto/authenc.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);
areq_ctx 151 crypto/authenc.c u8 *hash = areq_ctx->tail;
areq_ctx 208 crypto/authenc.c struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 211 crypto/authenc.c struct skcipher_request *skreq = (void *)(areq_ctx->tail +
areq_ctx 216 crypto/authenc.c src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen);
areq_ctx 224 crypto/authenc.c dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);
areq_ctx 246 crypto/authenc.c struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 247 crypto/authenc.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);
areq_ctx 248 crypto/authenc.c struct skcipher_request *skreq = (void *)(areq_ctx->tail +
areq_ctx 259 crypto/authenc.c src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen);
areq_ctx 263 crypto/authenc.c dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);
areq_ctx 296 crypto/authenc.c struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 297 crypto/authenc.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);
areq_ctx 298 crypto/authenc.c u8 *hash = areq_ctx->tail;

areq_ctx 101 crypto/authencesn.c struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 103 crypto/authencesn.c u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
areq_ctx 133 crypto/authencesn.c struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 136 crypto/authencesn.c u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
areq_ctx 138 crypto/authencesn.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
areq_ctx 153 crypto/authencesn.c sg_init_table(areq_ctx->dst, 2);
areq_ctx 154 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);
areq_ctx 194 crypto/authencesn.c struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 196 crypto/authencesn.c struct skcipher_request *skreq = (void *)(areq_ctx->tail +
areq_ctx 204 crypto/authencesn.c sg_init_table(areq_ctx->src, 2);
areq_ctx 205 crypto/authencesn.c src = scatterwalk_ffwd(areq_ctx->src, req->src, assoclen);
areq_ctx 213 crypto/authencesn.c sg_init_table(areq_ctx->dst, 2);
areq_ctx 214 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen);
areq_ctx 234 crypto/authencesn.c struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 236 crypto/authencesn.c struct skcipher_request *skreq = (void *)(areq_ctx->tail +
areq_ctx 239 crypto/authencesn.c u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
areq_ctx 260 crypto/authencesn.c sg_init_table(areq_ctx->dst, 2);
areq_ctx 261 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen);
areq_ctx 283 crypto/authencesn.c struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 285 crypto/authencesn.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
areq_ctx 288 crypto/authencesn.c u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
areq_ctx 316 crypto/authencesn.c sg_init_table(areq_ctx->dst, 2);
areq_ctx 317 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);
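The crypto/authenc.c and crypto/authencesn.c hits above all follow one layout trick: the per-request context ends in a tail byte array, and the ahash/skcipher sub-requests are carved out of it at a precomputed reqoff (the ESN variant additionally applies PTR_ALIGN to the hash buffer). A minimal userspace sketch of that layout, assuming a fixed 8-byte alignment where the kernel uses CRYPTO_MINALIGN and the transform's alignmask; all mock_* names are illustrative stand-ins, not kernel types:

/* Model of the "sub-request lives in the tail of the request context"
 * pattern seen at crypto/authenc.c:129/150/211/247. */
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

struct mock_hash_req { uint32_t state[8]; }; /* stand-in for ahash_request */

struct mock_authenc_req_ctx {
	/* the kernel guarantees context alignment via CRYPTO_MINALIGN;
	 * here we ask for 8-byte alignment explicitly */
	_Alignas(8) uint8_t tail[128];
};

int main(void)
{
	struct mock_authenc_req_ctx req_ctx;
	/* reqoff skips the digest area at the start of tail, rounded up
	 * for alignment, just as the instance precomputes it at init */
	size_t digest_size = 32;
	size_t reqoff = (digest_size + sizeof(void *) - 1) & ~(sizeof(void *) - 1);

	uint8_t *hash = req_ctx.tail;                        /* digest first   */
	struct mock_hash_req *ahreq = (void *)(req_ctx.tail + reqoff);

	printf("hash at %p, sub-request at %p (reqoff=%zu)\n",
	       (void *)hash, (void *)ahreq, reqoff);
	return 0;
}

The payoff of this layout is a single allocation per AEAD request: the outer request, the digest buffer, and the inner sub-request all share one memory block.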
areq_ctx 215 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(areq);
areq_ctx 226 drivers/crypto/ccree/cc_aead.c areq->iv = areq_ctx->backup_iv;
areq_ctx 231 drivers/crypto/ccree/cc_aead.c if (areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
areq_ctx 232 drivers/crypto/ccree/cc_aead.c if (memcmp(areq_ctx->mac_buf, areq_ctx->icv_virt_addr,
areq_ctx 244 drivers/crypto/ccree/cc_aead.c } else if (areq_ctx->is_icv_fragmented) {
areq_ctx 245 drivers/crypto/ccree/cc_aead.c u32 skip = areq->cryptlen + areq_ctx->dst_offset;
areq_ctx 247 drivers/crypto/ccree/cc_aead.c cc_copy_sg_portion(dev, areq_ctx->mac_buf, areq_ctx->dst_sgl,
areq_ctx 736 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(areq);
areq_ctx 737 drivers/crypto/ccree/cc_aead.c enum cc_req_dma_buf_type assoc_dma_type = areq_ctx->assoc_buff_type;
areq_ctx 746 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen, NS_BIT);
areq_ctx 749 drivers/crypto/ccree/cc_aead.c areq_ctx->cryptlen > 0)
areq_ctx 755 drivers/crypto/ccree/cc_aead.c set_din_type(&desc[idx], DMA_MLLI, areq_ctx->assoc.sram_addr,
areq_ctx 756 drivers/crypto/ccree/cc_aead.c areq_ctx->assoc.mlli_nents, NS_BIT);
areq_ctx 759 drivers/crypto/ccree/cc_aead.c areq_ctx->cryptlen > 0)
areq_ctx 775 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(areq);
areq_ctx 776 drivers/crypto/ccree/cc_aead.c enum cc_req_dma_buf_type data_dma_type = areq_ctx->data_buff_type;
areq_ctx 787 drivers/crypto/ccree/cc_aead.c areq_ctx->dst_sgl : areq_ctx->src_sgl;
areq_ctx 791 drivers/crypto/ccree/cc_aead.c areq_ctx->dst_offset : areq_ctx->src_offset;
areq_ctx 796 drivers/crypto/ccree/cc_aead.c areq_ctx->cryptlen, NS_BIT);
areq_ctx 806 drivers/crypto/ccree/cc_aead.c cc_sram_addr_t mlli_addr = areq_ctx->assoc.sram_addr;
areq_ctx 807 drivers/crypto/ccree/cc_aead.c u32 mlli_nents = areq_ctx->assoc.mlli_nents;
areq_ctx 809 drivers/crypto/ccree/cc_aead.c if (areq_ctx->is_single_pass) {
areq_ctx 811 drivers/crypto/ccree/cc_aead.c mlli_addr = areq_ctx->dst.sram_addr;
areq_ctx 812 drivers/crypto/ccree/cc_aead.c mlli_nents = areq_ctx->dst.mlli_nents;
areq_ctx 814 drivers/crypto/ccree/cc_aead.c mlli_addr = areq_ctx->src.sram_addr;
areq_ctx 815 drivers/crypto/ccree/cc_aead.c mlli_nents = areq_ctx->src.mlli_nents;
areq_ctx 840 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(areq);
areq_ctx 841 drivers/crypto/ccree/cc_aead.c enum cc_req_dma_buf_type data_dma_type = areq_ctx->data_buff_type;
areq_ctx 846 drivers/crypto/ccree/cc_aead.c if (areq_ctx->cryptlen == 0)
areq_ctx 854 drivers/crypto/ccree/cc_aead.c (sg_dma_address(areq_ctx->src_sgl) +
areq_ctx 855 drivers/crypto/ccree/cc_aead.c areq_ctx->src_offset), areq_ctx->cryptlen,
areq_ctx 858 drivers/crypto/ccree/cc_aead.c (sg_dma_address(areq_ctx->dst_sgl) +
areq_ctx 859 drivers/crypto/ccree/cc_aead.c areq_ctx->dst_offset),
areq_ctx 860 drivers/crypto/ccree/cc_aead.c areq_ctx->cryptlen, NS_BIT, 0);
areq_ctx 866 drivers/crypto/ccree/cc_aead.c set_din_type(&desc[idx], DMA_MLLI, areq_ctx->src.sram_addr,
areq_ctx 867 drivers/crypto/ccree/cc_aead.c areq_ctx->src.mlli_nents, NS_BIT);
areq_ctx 868 drivers/crypto/ccree/cc_aead.c set_dout_mlli(&desc[idx], areq_ctx->dst.sram_addr,
areq_ctx 869 drivers/crypto/ccree/cc_aead.c areq_ctx->dst.mlli_nents, NS_BIT, 0);
areq_ctx 1095 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 1099 drivers/crypto/ccree/cc_aead.c if (areq_ctx->assoclen > 0)
areq_ctx 1325 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 1327 drivers/crypto/ccree/cc_aead.c unsigned int assoclen = areq_ctx->assoclen;
areq_ctx 1335 drivers/crypto/ccree/cc_aead.c areq_ctx->is_single_pass = true; /*defaulted to fast flow*/
areq_ctx 1345 drivers/crypto/ccree/cc_aead.c if (areq_ctx->plaintext_authenticate_only)
areq_ctx 1346 drivers/crypto/ccree/cc_aead.c areq_ctx->is_single_pass = false;
areq_ctx 1351 drivers/crypto/ccree/cc_aead.c areq_ctx->is_single_pass = false;
areq_ctx 1355 drivers/crypto/ccree/cc_aead.c areq_ctx->is_single_pass = false;
areq_ctx 1362 drivers/crypto/ccree/cc_aead.c areq_ctx->is_single_pass = false;
areq_ctx 1604 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 1607 drivers/crypto/ccree/cc_aead.c memset(areq_ctx->ctr_iv, 0, AES_BLOCK_SIZE);
areq_ctx 1611 drivers/crypto/ccree/cc_aead.c areq_ctx->ctr_iv[0] = 3;
areq_ctx 1616 drivers/crypto/ccree/cc_aead.c memcpy(areq_ctx->ctr_iv + CCM_BLOCK_NONCE_OFFSET, ctx->ctr_nonce,
areq_ctx 1618 drivers/crypto/ccree/cc_aead.c memcpy(areq_ctx->ctr_iv + CCM_BLOCK_IV_OFFSET, req->iv,
areq_ctx 1620 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->ctr_iv;
areq_ctx 1621 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen -= CCM_BLOCK_IV_SIZE;
areq_ctx 1894 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 1896 drivers/crypto/ccree/cc_aead.c memcpy(areq_ctx->ctr_iv + GCM_BLOCK_RFC4_NONCE_OFFSET,
areq_ctx 1898 drivers/crypto/ccree/cc_aead.c memcpy(areq_ctx->ctr_iv + GCM_BLOCK_RFC4_IV_OFFSET, req->iv,
areq_ctx 1900 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->ctr_iv;
areq_ctx 1901 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen -= GCM_BLOCK_RFC4_IV_SIZE;
areq_ctx 1912 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 1926 drivers/crypto/ccree/cc_aead.c req->cryptlen, areq_ctx->assoclen);
areq_ctx 1936 drivers/crypto/ccree/cc_aead.c areq_ctx->gen_ctx.op_type = direct;
areq_ctx 1937 drivers/crypto/ccree/cc_aead.c areq_ctx->req_authsize = ctx->authsize;
areq_ctx 1938 drivers/crypto/ccree/cc_aead.c areq_ctx->cipher_mode = ctx->cipher_mode;
areq_ctx 1946 drivers/crypto/ccree/cc_aead.c memcpy(areq_ctx->ctr_iv, ctx->ctr_nonce,
areq_ctx 1948 drivers/crypto/ccree/cc_aead.c memcpy(areq_ctx->ctr_iv + CTR_RFC3686_NONCE_SIZE, req->iv,
areq_ctx 1951 drivers/crypto/ccree/cc_aead.c *(__be32 *)(areq_ctx->ctr_iv + CTR_RFC3686_NONCE_SIZE +
areq_ctx 1955 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->ctr_iv;
areq_ctx 1956 drivers/crypto/ccree/cc_aead.c areq_ctx->hw_iv_size = CTR_RFC3686_BLOCK_SIZE;
areq_ctx 1959 drivers/crypto/ccree/cc_aead.c areq_ctx->hw_iv_size = AES_BLOCK_SIZE;
areq_ctx 1960 drivers/crypto/ccree/cc_aead.c if (areq_ctx->ctr_iv != req->iv) {
areq_ctx 1961 drivers/crypto/ccree/cc_aead.c memcpy(areq_ctx->ctr_iv, req->iv,
areq_ctx 1963 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->ctr_iv;
areq_ctx 1966 drivers/crypto/ccree/cc_aead.c areq_ctx->hw_iv_size = crypto_aead_ivsize(tfm);
areq_ctx 1977 drivers/crypto/ccree/cc_aead.c areq_ctx->ccm_hdr_size = ccm_header_size_null;
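The hits at cc_aead.c 1604-1621 show the RFC 4309 CCM path assembling the counter block in areq_ctx->ctr_iv: zero the block, set the flags byte to 3 (a 4-octet counter length field, per RFC 3610), then copy in the 3-byte nonce and the 8-byte per-request IV, and finally repoint req->iv at the assembled block. A standalone sketch of that assembly; the offset and size constants here are assumptions mirroring what the CCM_BLOCK_* macros are expected to expand to:

/* Build a CCM A0 counter block the way cc_aead.c does around line 1607. */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define AES_BLOCK_SIZE   16
#define CCM_NONCE_OFFSET 1   /* assumed value of CCM_BLOCK_NONCE_OFFSET */
#define CCM_IV_OFFSET    4   /* assumed value of CCM_BLOCK_IV_OFFSET    */
#define CCM_NONCE_SIZE   3
#define CCM_IV_SIZE      8

int main(void)
{
	uint8_t ctr_iv[AES_BLOCK_SIZE];
	const uint8_t nonce[CCM_NONCE_SIZE] = { 0xaa, 0xbb, 0xcc }; /* from setkey */
	const uint8_t req_iv[CCM_IV_SIZE]   = { 1, 2, 3, 4, 5, 6, 7, 8 };

	memset(ctr_iv, 0, AES_BLOCK_SIZE);
	ctr_iv[0] = 3;  /* flags: L' = 3, i.e. a 4-byte counter field */
	memcpy(ctr_iv + CCM_NONCE_OFFSET, nonce, CCM_NONCE_SIZE);
	memcpy(ctr_iv + CCM_IV_OFFSET, req_iv, CCM_IV_SIZE);
	/* the trailing 4 bytes stay zero: counter starts at 0 */

	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		printf("%02x", ctr_iv[i]);
	printf("\n");
	return 0;
}

Note also the hit at 1621: because RFC 4309 carries the IV inside the associated data, the driver trims CCM_BLOCK_IV_SIZE off areq_ctx->assoclen after extracting it.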
areq_ctx 2037 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 2040 drivers/crypto/ccree/cc_aead.c memset(areq_ctx, 0, sizeof(*areq_ctx));
areq_ctx 2043 drivers/crypto/ccree/cc_aead.c areq_ctx->backup_iv = req->iv;
areq_ctx 2044 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
areq_ctx 2045 drivers/crypto/ccree/cc_aead.c areq_ctx->is_gcm4543 = false;
areq_ctx 2047 drivers/crypto/ccree/cc_aead.c areq_ctx->plaintext_authenticate_only = false;
areq_ctx 2051 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->backup_iv;
areq_ctx 2060 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 2071 drivers/crypto/ccree/cc_aead.c memset(areq_ctx, 0, sizeof(*areq_ctx));
areq_ctx 2074 drivers/crypto/ccree/cc_aead.c areq_ctx->backup_iv = req->iv;
areq_ctx 2075 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
areq_ctx 2076 drivers/crypto/ccree/cc_aead.c areq_ctx->is_gcm4543 = true;
areq_ctx 2082 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->backup_iv;
areq_ctx 2089 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 2092 drivers/crypto/ccree/cc_aead.c memset(areq_ctx, 0, sizeof(*areq_ctx));
areq_ctx 2095 drivers/crypto/ccree/cc_aead.c areq_ctx->backup_iv = req->iv;
areq_ctx 2096 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
areq_ctx 2097 drivers/crypto/ccree/cc_aead.c areq_ctx->is_gcm4543 = false;
areq_ctx 2099 drivers/crypto/ccree/cc_aead.c areq_ctx->plaintext_authenticate_only = false;
areq_ctx 2103 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->backup_iv;
areq_ctx 2113 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 2121 drivers/crypto/ccree/cc_aead.c memset(areq_ctx, 0, sizeof(*areq_ctx));
areq_ctx 2124 drivers/crypto/ccree/cc_aead.c areq_ctx->backup_iv = req->iv;
areq_ctx 2125 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
areq_ctx 2127 drivers/crypto/ccree/cc_aead.c areq_ctx->is_gcm4543 = true;
areq_ctx 2132 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->backup_iv;
areq_ctx 2232 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 2240 drivers/crypto/ccree/cc_aead.c memset(areq_ctx, 0, sizeof(*areq_ctx));
areq_ctx 2243 drivers/crypto/ccree/cc_aead.c areq_ctx->backup_iv = req->iv;
areq_ctx 2244 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
areq_ctx 2245 drivers/crypto/ccree/cc_aead.c areq_ctx->plaintext_authenticate_only = false;
areq_ctx 2248 drivers/crypto/ccree/cc_aead.c areq_ctx->is_gcm4543 = true;
areq_ctx 2252 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->backup_iv;
areq_ctx 2263 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 2271 drivers/crypto/ccree/cc_aead.c memset(areq_ctx, 0, sizeof(*areq_ctx));
areq_ctx 2274 drivers/crypto/ccree/cc_aead.c areq_ctx->plaintext_authenticate_only = true;
areq_ctx 2277 drivers/crypto/ccree/cc_aead.c areq_ctx->backup_iv = req->iv;
areq_ctx 2278 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
areq_ctx 2281 drivers/crypto/ccree/cc_aead.c areq_ctx->is_gcm4543 = true;
areq_ctx 2285 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->backup_iv;
areq_ctx 2297 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 2305 drivers/crypto/ccree/cc_aead.c memset(areq_ctx, 0, sizeof(*areq_ctx));
areq_ctx 2308 drivers/crypto/ccree/cc_aead.c areq_ctx->backup_iv = req->iv;
areq_ctx 2309 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
areq_ctx 2310 drivers/crypto/ccree/cc_aead.c areq_ctx->plaintext_authenticate_only = false;
areq_ctx 2313 drivers/crypto/ccree/cc_aead.c areq_ctx->is_gcm4543 = true;
areq_ctx 2317 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->backup_iv;
areq_ctx 2328 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 2336 drivers/crypto/ccree/cc_aead.c memset(areq_ctx, 0, sizeof(*areq_ctx));
areq_ctx 2339 drivers/crypto/ccree/cc_aead.c areq_ctx->plaintext_authenticate_only = true;
areq_ctx 2342 drivers/crypto/ccree/cc_aead.c areq_ctx->backup_iv = req->iv;
areq_ctx 2343 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
areq_ctx 2346 drivers/crypto/ccree/cc_aead.c areq_ctx->is_gcm4543 = true;
areq_ctx 2350 drivers/crypto/ccree/cc_aead.c req->iv = areq_ctx->backup_iv;
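Every encrypt/decrypt entry point in the 2037-2350 block repeats the same prologue: zero the per-request context, stash the caller's IV in backup_iv (the driver repoints req->iv at its own ctr_iv buffer later), latch assoclen, set the is_gcm4543 / plaintext_authenticate_only flags for the particular RFC variant, and restore req->iv whenever the request does not complete asynchronously. A compact userspace model of that flow, with mock types standing in for the ccree structures:

/* Sketch of the cc_aead entry-point boilerplate (cc_aead.c 2037-2350). */
#include <stdio.h>
#include <string.h>
#include <stdbool.h>

struct mock_aead_request { char *iv; unsigned int assoclen; };

struct mock_req_ctx {
	char *backup_iv;
	unsigned int assoclen;
	bool is_gcm4543;
	bool plaintext_authenticate_only;
};

static int mock_encrypt(struct mock_aead_request *req, struct mock_req_ctx *ctx)
{
	memset(ctx, 0, sizeof(*ctx));      /* no stale state between requests */
	ctx->backup_iv = req->iv;          /* the datapath may repoint req->iv */
	ctx->assoclen = req->assoclen;
	ctx->is_gcm4543 = false;           /* per-variant flags */
	ctx->plaintext_authenticate_only = false;

	int rc = -1;                       /* pretend the hardware path failed */
	if (rc != 0)                       /* kernel checks rc != -EINPROGRESS */
		req->iv = ctx->backup_iv;  /* hand the original IV back */
	return rc;
}

int main(void)
{
	char iv[12] = { 0 };
	struct mock_aead_request req = { .iv = iv, .assoclen = 20 };
	struct mock_req_ctx ctx;
	printf("rc=%d, iv restored: %s\n", mock_encrypt(&req, &ctx),
	       req.iv == iv ? "yes" : "no");
	return 0;
}

The backup/restore dance exists because req->iv belongs to the caller: the driver must never leave it pointing at driver-owned storage once the request has finished.

areq_ctx 66 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 68 drivers/crypto/ccree/cc_buffer_mgr.c u32 skip = areq_ctx->assoclen + req->cryptlen;
areq_ctx 70 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->is_gcm4543)
areq_ctx 73 drivers/crypto/ccree/cc_buffer_mgr.c cc_copy_sg_portion(dev, areq_ctx->backup_mac, req->src,
areq_ctx 74 drivers/crypto/ccree/cc_buffer_mgr.c (skip - areq_ctx->req_authsize), skip, dir);
areq_ctx 319 drivers/crypto/ccree/cc_buffer_mgr.c cc_set_aead_conf_buf(struct device *dev, struct aead_req_ctx *areq_ctx,
areq_ctx 325 drivers/crypto/ccree/cc_buffer_mgr.c sg_init_one(&areq_ctx->ccm_adata_sg, config_data,
areq_ctx 326 drivers/crypto/ccree/cc_buffer_mgr.c AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size);
areq_ctx 327 drivers/crypto/ccree/cc_buffer_mgr.c if (dma_map_sg(dev, &areq_ctx->ccm_adata_sg, 1, DMA_TO_DEVICE) != 1) {
areq_ctx 332 drivers/crypto/ccree/cc_buffer_mgr.c &sg_dma_address(&areq_ctx->ccm_adata_sg),
areq_ctx 333 drivers/crypto/ccree/cc_buffer_mgr.c sg_page(&areq_ctx->ccm_adata_sg),
areq_ctx 334 drivers/crypto/ccree/cc_buffer_mgr.c sg_virt(&areq_ctx->ccm_adata_sg),
areq_ctx 335 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->ccm_adata_sg.offset, areq_ctx->ccm_adata_sg.length);
areq_ctx 338 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, 1, &areq_ctx->ccm_adata_sg,
areq_ctx 339 drivers/crypto/ccree/cc_buffer_mgr.c (AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size),
areq_ctx 345 drivers/crypto/ccree/cc_buffer_mgr.c static int cc_set_hash_buf(struct device *dev, struct ahash_req_ctx *areq_ctx,
areq_ctx 351 drivers/crypto/ccree/cc_buffer_mgr.c sg_init_one(areq_ctx->buff_sg, curr_buff, curr_buff_cnt);
areq_ctx 352 drivers/crypto/ccree/cc_buffer_mgr.c if (dma_map_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE) != 1) {
areq_ctx 357 drivers/crypto/ccree/cc_buffer_mgr.c &sg_dma_address(areq_ctx->buff_sg), sg_page(areq_ctx->buff_sg),
areq_ctx 358 drivers/crypto/ccree/cc_buffer_mgr.c sg_virt(areq_ctx->buff_sg), areq_ctx->buff_sg->offset,
areq_ctx 359 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->buff_sg->length);
areq_ctx 360 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI;
areq_ctx 361 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->curr_sg = areq_ctx->buff_sg;
areq_ctx 362 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->in_nents = 0;
areq_ctx 364 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, 1, areq_ctx->buff_sg, curr_buff_cnt, 0,
areq_ctx 488 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 489 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int hw_iv_size = areq_ctx->hw_iv_size;
areq_ctx 492 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->mac_buf_dma_addr) {
areq_ctx 493 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_single(dev, areq_ctx->mac_buf_dma_addr,
areq_ctx 497 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->cipher_mode == DRV_CIPHER_GCTR) {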
areq_ctx 498 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->hkey_dma_addr) {
areq_ctx 499 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_single(dev, areq_ctx->hkey_dma_addr,
areq_ctx 503 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->gcm_block_len_dma_addr) {
areq_ctx 504 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_single(dev, areq_ctx->gcm_block_len_dma_addr,
areq_ctx 508 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->gcm_iv_inc1_dma_addr) {
areq_ctx 509 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_single(dev, areq_ctx->gcm_iv_inc1_dma_addr,
areq_ctx 513 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->gcm_iv_inc2_dma_addr) {
areq_ctx 514 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_single(dev, areq_ctx->gcm_iv_inc2_dma_addr,
areq_ctx 519 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->ccm_hdr_size != ccm_header_size_null) {
areq_ctx 520 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->ccm_iv0_dma_addr) {
areq_ctx 521 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_single(dev, areq_ctx->ccm_iv0_dma_addr,
areq_ctx 525 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, &areq_ctx->ccm_adata_sg, 1, DMA_TO_DEVICE);
areq_ctx 527 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->gen_ctx.iv_dma_addr) {
areq_ctx 528 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_single(dev, areq_ctx->gen_ctx.iv_dma_addr,
areq_ctx 530 drivers/crypto/ccree/cc_buffer_mgr.c kzfree(areq_ctx->gen_ctx.iv);
areq_ctx 534 drivers/crypto/ccree/cc_buffer_mgr.c if ((areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI ||
areq_ctx 535 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_buff_type == CC_DMA_BUF_MLLI) &&
areq_ctx 536 drivers/crypto/ccree/cc_buffer_mgr.c (areq_ctx->mlli_params.mlli_virt_addr)) {
areq_ctx 538 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->mlli_params.mlli_dma_addr,
areq_ctx 539 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->mlli_params.mlli_virt_addr);
areq_ctx 540 drivers/crypto/ccree/cc_buffer_mgr.c dma_pool_free(areq_ctx->mlli_params.curr_pool,
areq_ctx 541 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->mlli_params.mlli_virt_addr,
areq_ctx 542 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->mlli_params.mlli_dma_addr);
areq_ctx 546 drivers/crypto/ccree/cc_buffer_mgr.c sg_virt(req->src), areq_ctx->src.nents, areq_ctx->assoc.nents,
areq_ctx 547 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoclen, req->cryptlen);
areq_ctx 549 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, req->src, areq_ctx->src.mapped_nents,
areq_ctx 554 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, req->dst, areq_ctx->dst.mapped_nents,
areq_ctx 558 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT &&
areq_ctx 579 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 580 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int hw_iv_size = areq_ctx->hw_iv_size;
areq_ctx 586 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gen_ctx.iv_dma_addr = 0;
areq_ctx 587 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gen_ctx.iv = NULL;
areq_ctx 591 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gen_ctx.iv = kmemdup(req->iv, hw_iv_size, flags);
areq_ctx 592 drivers/crypto/ccree/cc_buffer_mgr.c if (!areq_ctx->gen_ctx.iv)
areq_ctx 595 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gen_ctx.iv_dma_addr =
areq_ctx 596 drivers/crypto/ccree/cc_buffer_mgr.c dma_map_single(dev, areq_ctx->gen_ctx.iv, hw_iv_size,
areq_ctx 598 drivers/crypto/ccree/cc_buffer_mgr.c if (dma_mapping_error(dev, areq_ctx->gen_ctx.iv_dma_addr)) {
areq_ctx 601 drivers/crypto/ccree/cc_buffer_mgr.c kzfree(areq_ctx->gen_ctx.iv);
areq_ctx 602 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gen_ctx.iv = NULL;
areq_ctx 608 drivers/crypto/ccree/cc_buffer_mgr.c hw_iv_size, req->iv, &areq_ctx->gen_ctx.iv_dma_addr);
areq_ctx 610 drivers/crypto/ccree/cc_buffer_mgr.c if (do_chain && areq_ctx->plaintext_authenticate_only) {
areq_ctx 616 drivers/crypto/ccree/cc_buffer_mgr.c (areq_ctx->gen_ctx.iv_dma_addr + iv_ofs),
areq_ctx 618 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->assoc.mlli_nents);
areq_ctx 619 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc_buff_type = CC_DMA_BUF_MLLI;
areq_ctx 631 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 635 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int size_of_assoc = areq_ctx->assoclen;
areq_ctx 638 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->is_gcm4543)
areq_ctx 646 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->assoclen == 0) {
areq_ctx 647 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc_buff_type = CC_DMA_BUF_NULL;
areq_ctx 648 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.nents = 0;
areq_ctx 649 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.mlli_nents = 0;
areq_ctx 651 drivers/crypto/ccree/cc_buffer_mgr.c cc_dma_buf_type(areq_ctx->assoc_buff_type),
areq_ctx 652 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.nents);
areq_ctx 665 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.nents = mapped_nents;
areq_ctx 670 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->ccm_hdr_size != ccm_header_size_null) {
areq_ctx 673 drivers/crypto/ccree/cc_buffer_mgr.c (areq_ctx->assoc.nents + 1),
areq_ctx 680 drivers/crypto/ccree/cc_buffer_mgr.c if (mapped_nents == 1 && areq_ctx->ccm_hdr_size == ccm_header_size_null)
areq_ctx 681 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc_buff_type = CC_DMA_BUF_DLLI;
areq_ctx 683 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc_buff_type = CC_DMA_BUF_MLLI;
areq_ctx 685 drivers/crypto/ccree/cc_buffer_mgr.c if (do_chain || areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI) {
areq_ctx 687 drivers/crypto/ccree/cc_buffer_mgr.c cc_dma_buf_type(areq_ctx->assoc_buff_type),
areq_ctx 688 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.nents);
areq_ctx 689 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->assoc.nents, req->src,
areq_ctx 690 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoclen, 0, is_last,
areq_ctx 691 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->assoc.mlli_nents);
areq_ctx 692 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc_buff_type = CC_DMA_BUF_MLLI;
areq_ctx 702 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 703 drivers/crypto/ccree/cc_buffer_mgr.c enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type;
areq_ctx 704 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int authsize = areq_ctx->req_authsize;
areq_ctx 708 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->is_icv_fragmented = false;
areq_ctx 711 drivers/crypto/ccree/cc_buffer_mgr.c sg = areq_ctx->src_sgl;
areq_ctx 714 drivers/crypto/ccree/cc_buffer_mgr.c sg = areq_ctx->dst_sgl;
areq_ctx 718 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_dma_addr = sg_dma_address(sg) + offset;
areq_ctx 719 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_virt_addr = sg_virt(sg) + offset;
areq_ctx 728 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 729 drivers/crypto/ccree/cc_buffer_mgr.c enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type;
areq_ctx 730 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int authsize = areq_ctx->req_authsize;
areq_ctx 736 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents,
areq_ctx 737 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_sgl, areq_ctx->cryptlen,
areq_ctx 738 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_offset, is_last_table,
areq_ctx 739 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->src.mlli_nents);
areq_ctx 741 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->is_icv_fragmented =
areq_ctx 742 drivers/crypto/ccree/cc_buffer_mgr.c cc_is_icv_frag(areq_ctx->src.nents, authsize,
areq_ctx 745 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->is_icv_fragmented) {
areq_ctx 759 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_virt_addr = areq_ctx->backup_mac;
areq_ctx 761 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_virt_addr = areq_ctx->mac_buf;
areq_ctx 762 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_dma_addr =
areq_ctx 763 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->mac_buf_dma_addr;
areq_ctx 766 drivers/crypto/ccree/cc_buffer_mgr.c sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1];
areq_ctx 768 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_dma_addr = sg_dma_address(sg) +
areq_ctx 770 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_virt_addr = sg_virt(sg) +
areq_ctx 776 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents,
areq_ctx 777 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_sgl, areq_ctx->cryptlen,
areq_ctx 778 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_offset, is_last_table,
areq_ctx 779 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->src.mlli_nents);
areq_ctx 780 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents,
areq_ctx 781 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_sgl, areq_ctx->cryptlen,
areq_ctx 782 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_offset, is_last_table,
areq_ctx 783 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->dst.mlli_nents);
areq_ctx 785 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->is_icv_fragmented =
areq_ctx 786 drivers/crypto/ccree/cc_buffer_mgr.c cc_is_icv_frag(areq_ctx->src.nents, authsize,
areq_ctx 793 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->is_icv_fragmented) {
areq_ctx 795 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_virt_addr = areq_ctx->backup_mac;
areq_ctx 798 drivers/crypto/ccree/cc_buffer_mgr.c sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1];
areq_ctx 800 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_dma_addr = sg_dma_address(sg) +
areq_ctx 802 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_virt_addr = sg_virt(sg) +
areq_ctx 808 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents,
areq_ctx 809 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_sgl, areq_ctx->cryptlen,
areq_ctx 810 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_offset, is_last_table,
areq_ctx 811 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->dst.mlli_nents);
areq_ctx 812 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents,
areq_ctx 813 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_sgl, areq_ctx->cryptlen,
areq_ctx 814 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_offset, is_last_table,
areq_ctx 815 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->src.mlli_nents);
areq_ctx 817 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->is_icv_fragmented =
areq_ctx 818 drivers/crypto/ccree/cc_buffer_mgr.c cc_is_icv_frag(areq_ctx->dst.nents, authsize,
areq_ctx 821 drivers/crypto/ccree/cc_buffer_mgr.c if (!areq_ctx->is_icv_fragmented) {
areq_ctx 822 drivers/crypto/ccree/cc_buffer_mgr.c sg = &areq_ctx->dst_sgl[areq_ctx->dst.nents - 1];
areq_ctx 824 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_dma_addr = sg_dma_address(sg) +
areq_ctx 826 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_virt_addr = sg_virt(sg) +
areq_ctx 829 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_dma_addr = areq_ctx->mac_buf_dma_addr;
areq_ctx 830 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->icv_virt_addr = areq_ctx->mac_buf;
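The 702-830 block turns on one question: does the authentication tag (ICV) land wholly inside the last scatter-gather entry? If so, icv_dma_addr/icv_virt_addr are pointed directly into that entry; if the tag straddles entries, the driver falls back to its linear mac_buf (or backup_mac on the decrypt path). A deliberately simplified model of that decision, where the driver's cc_is_icv_frag() inspects nents and segment lengths but is reduced here to a single last-segment check:

/* Sketch of the ICV placement decision (cc_buffer_mgr.c ~718-830). */
#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>

struct mock_seg { uint8_t *virt; unsigned int len; }; /* stand-in for scatterlist */

static bool icv_is_fragmented(const struct mock_seg *last, unsigned int authsize)
{
	return last->len < authsize;   /* tag spills into the previous segment */
}

int main(void)
{
	uint8_t buf[64], mac_buf[16];
	struct mock_seg last = { .virt = buf + 48, .len = 12 };
	unsigned int authsize = 16;
	uint8_t *icv_virt;

	if (icv_is_fragmented(&last, authsize))
		icv_virt = mac_buf;                          /* linear bounce buffer  */
	else
		icv_virt = last.virt + last.len - authsize;  /* address tag in place  */

	printf("fragmented=%d, icv at %p\n",
	       icv_is_fragmented(&last, authsize), (void *)icv_virt);
	return 0;
}

Addressing the tag in place avoids a copy on the fast path; the bounce buffer is the correctness fallback when the hardware needs the tag contiguous.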
areq_ctx 840 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 842 drivers/crypto/ccree/cc_buffer_mgr.c enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type;
areq_ctx 843 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int authsize = areq_ctx->req_authsize;
areq_ctx 849 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int size_for_map = areq_ctx->assoclen + req->cryptlen;
areq_ctx 852 drivers/crypto/ccree/cc_buffer_mgr.c bool is_gcm4543 = areq_ctx->is_gcm4543;
areq_ctx 853 drivers/crypto/ccree/cc_buffer_mgr.c u32 size_to_skip = areq_ctx->assoclen;
areq_ctx 864 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_sgl = req->src;
areq_ctx 865 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_sgl = req->dst;
areq_ctx 874 drivers/crypto/ccree/cc_buffer_mgr.c sg_index = areq_ctx->src_sgl->length;
areq_ctx 878 drivers/crypto/ccree/cc_buffer_mgr.c offset -= areq_ctx->src_sgl->length;
areq_ctx 879 drivers/crypto/ccree/cc_buffer_mgr.c sgl = sg_next(areq_ctx->src_sgl);
areq_ctx 882 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_sgl = sgl;
areq_ctx 883 drivers/crypto/ccree/cc_buffer_mgr.c sg_index += areq_ctx->src_sgl->length;
areq_ctx 891 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.nents = src_mapped_nents;
areq_ctx 893 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_offset = offset;
areq_ctx 896 drivers/crypto/ccree/cc_buffer_mgr.c size_for_map = areq_ctx->assoclen + req->cryptlen;
areq_ctx 907 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->dst.mapped_nents,
areq_ctx 916 drivers/crypto/ccree/cc_buffer_mgr.c sg_index = areq_ctx->dst_sgl->length;
areq_ctx 922 drivers/crypto/ccree/cc_buffer_mgr.c offset -= areq_ctx->dst_sgl->length;
areq_ctx 923 drivers/crypto/ccree/cc_buffer_mgr.c sgl = sg_next(areq_ctx->dst_sgl);
areq_ctx 926 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_sgl = sgl;
areq_ctx 927 drivers/crypto/ccree/cc_buffer_mgr.c sg_index += areq_ctx->dst_sgl->length;
areq_ctx 934 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.nents = dst_mapped_nents;
areq_ctx 935 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_offset = offset;
areq_ctx 939 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_buff_type = CC_DMA_BUF_MLLI;
areq_ctx 944 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_buff_type = CC_DMA_BUF_DLLI;
areq_ctx 956 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 959 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI) {
areq_ctx 960 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.sram_addr = drvdata->mlli_sram_addr;
areq_ctx 961 drivers/crypto/ccree/cc_buffer_mgr.c curr_mlli_size = areq_ctx->assoc.mlli_nents *
areq_ctx 965 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->data_buff_type == CC_DMA_BUF_MLLI) {
areq_ctx 968 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.mlli_nents = areq_ctx->src.mlli_nents;
areq_ctx 969 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.sram_addr = drvdata->mlli_sram_addr +
areq_ctx 971 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.sram_addr = areq_ctx->src.sram_addr;
areq_ctx 972 drivers/crypto/ccree/cc_buffer_mgr.c if (!areq_ctx->is_single_pass)
areq_ctx 973 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.mlli_nents +=
areq_ctx 974 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.mlli_nents;
areq_ctx 976 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->gen_ctx.op_type ==
areq_ctx 978 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.sram_addr =
areq_ctx 981 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.sram_addr =
areq_ctx 982 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.sram_addr +
areq_ctx 983 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.mlli_nents *
areq_ctx 985 drivers/crypto/ccree/cc_buffer_mgr.c if (!areq_ctx->is_single_pass)
areq_ctx 986 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.mlli_nents +=
areq_ctx 987 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.mlli_nents;
areq_ctx 989 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.sram_addr =
areq_ctx 992 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.sram_addr =
areq_ctx 993 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.sram_addr +
areq_ctx 994 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.mlli_nents *
areq_ctx 996 drivers/crypto/ccree/cc_buffer_mgr.c if (!areq_ctx->is_single_pass)
areq_ctx 997 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.mlli_nents +=
areq_ctx 998 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.mlli_nents;
areq_ctx 1006 drivers/crypto/ccree/cc_buffer_mgr.c struct aead_req_ctx *areq_ctx = aead_request_ctx(req);
areq_ctx 1007 drivers/crypto/ccree/cc_buffer_mgr.c struct mlli_params *mlli_params = &areq_ctx->mlli_params;
areq_ctx 1010 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int authsize = areq_ctx->req_authsize;
areq_ctx 1014 drivers/crypto/ccree/cc_buffer_mgr.c bool is_gcm4543 = areq_ctx->is_gcm4543;
areq_ctx 1028 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT &&
areq_ctx 1033 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->cryptlen = (areq_ctx->gen_ctx.op_type ==
areq_ctx 1038 drivers/crypto/ccree/cc_buffer_mgr.c dma_addr = dma_map_single(dev, areq_ctx->mac_buf, MAX_MAC_SIZE,
areq_ctx 1042 drivers/crypto/ccree/cc_buffer_mgr.c MAX_MAC_SIZE, areq_ctx->mac_buf);
areq_ctx 1046 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->mac_buf_dma_addr = dma_addr;
areq_ctx 1048 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->ccm_hdr_size != ccm_header_size_null) {
areq_ctx 1049 drivers/crypto/ccree/cc_buffer_mgr.c void *addr = areq_ctx->ccm_config + CCM_CTR_COUNT_0_OFFSET;
areq_ctx 1057 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->ccm_iv0_dma_addr = 0;
areq_ctx 1061 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->ccm_iv0_dma_addr = dma_addr;
areq_ctx 1063 drivers/crypto/ccree/cc_buffer_mgr.c rc = cc_set_aead_conf_buf(dev, areq_ctx, areq_ctx->ccm_config,
areq_ctx 1064 drivers/crypto/ccree/cc_buffer_mgr.c &sg_data, areq_ctx->assoclen);
areq_ctx 1069 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->cipher_mode == DRV_CIPHER_GCTR) {
areq_ctx 1070 drivers/crypto/ccree/cc_buffer_mgr.c dma_addr = dma_map_single(dev, areq_ctx->hkey, AES_BLOCK_SIZE,
areq_ctx 1074 drivers/crypto/ccree/cc_buffer_mgr.c AES_BLOCK_SIZE, areq_ctx->hkey);
areq_ctx 1078 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->hkey_dma_addr = dma_addr;
areq_ctx 1080 drivers/crypto/ccree/cc_buffer_mgr.c dma_addr = dma_map_single(dev, &areq_ctx->gcm_len_block,
areq_ctx 1084 drivers/crypto/ccree/cc_buffer_mgr.c AES_BLOCK_SIZE, &areq_ctx->gcm_len_block);
areq_ctx 1088 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gcm_block_len_dma_addr = dma_addr;
areq_ctx 1090 drivers/crypto/ccree/cc_buffer_mgr.c dma_addr = dma_map_single(dev, areq_ctx->gcm_iv_inc1,
areq_ctx 1095 drivers/crypto/ccree/cc_buffer_mgr.c AES_BLOCK_SIZE, (areq_ctx->gcm_iv_inc1));
areq_ctx 1096 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gcm_iv_inc1_dma_addr = 0;
areq_ctx 1100 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gcm_iv_inc1_dma_addr = dma_addr;
areq_ctx 1102 drivers/crypto/ccree/cc_buffer_mgr.c dma_addr = dma_map_single(dev, areq_ctx->gcm_iv_inc2,
areq_ctx 1107 drivers/crypto/ccree/cc_buffer_mgr.c AES_BLOCK_SIZE, (areq_ctx->gcm_iv_inc2));
areq_ctx 1108 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gcm_iv_inc2_dma_addr = 0;
areq_ctx 1112 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->gcm_iv_inc2_dma_addr = dma_addr;
areq_ctx 1115 drivers/crypto/ccree/cc_buffer_mgr.c size_to_map = req->cryptlen + areq_ctx->assoclen;
areq_ctx 1117 drivers/crypto/ccree/cc_buffer_mgr.c if ((areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_ENCRYPT) &&
areq_ctx 1124 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->src.mapped_nents,
areq_ctx 1131 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->is_single_pass) {
areq_ctx 1182 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI ||
areq_ctx 1183 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_buff_type == CC_DMA_BUF_MLLI) {
areq_ctx 1191 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.mlli_nents);
areq_ctx 1192 drivers/crypto/ccree/cc_buffer_mgr.c dev_dbg(dev, "src params mn %d\n", areq_ctx->src.mlli_nents);
areq_ctx 1193 drivers/crypto/ccree/cc_buffer_mgr.c dev_dbg(dev, "dst params mn %d\n", areq_ctx->dst.mlli_nents);
areq_ctx 1206 drivers/crypto/ccree/cc_buffer_mgr.c struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx;
areq_ctx 1208 drivers/crypto/ccree/cc_buffer_mgr.c u8 *curr_buff = cc_hash_buf(areq_ctx);
areq_ctx 1209 drivers/crypto/ccree/cc_buffer_mgr.c u32 *curr_buff_cnt = cc_hash_buf_cnt(areq_ctx);
areq_ctx 1210 drivers/crypto/ccree/cc_buffer_mgr.c struct mlli_params *mlli_params = &areq_ctx->mlli_params;
areq_ctx 1218 drivers/crypto/ccree/cc_buffer_mgr.c curr_buff, *curr_buff_cnt, nbytes, src, areq_ctx->buff_index;
areq_ctx 1220 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type = CC_DMA_BUF_NULL;
areq_ctx 1223 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->in_nents = 0;
areq_ctx 1233 drivers/crypto/ccree/cc_buffer_mgr.c rc = cc_set_hash_buf(dev, areq_ctx, curr_buff, *curr_buff_cnt,
areq_ctx 1241 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->in_nents, LLI_MAX_NUM_OF_DATA_ENTRIES,
areq_ctx 1246 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type == CC_DMA_BUF_NULL) {
areq_ctx 1247 drivers/crypto/ccree/cc_buffer_mgr.c memcpy(areq_ctx->buff_sg, src,
areq_ctx 1249 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->buff_sg->length = nbytes;
areq_ctx 1250 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->curr_sg = areq_ctx->buff_sg;
areq_ctx 1251 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI;
areq_ctx 1253 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type = CC_DMA_BUF_MLLI;
areq_ctx 1258 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->data_dma_buf_type == CC_DMA_BUF_MLLI) {
areq_ctx 1261 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, &sg_data, areq_ctx->in_nents, src, nbytes,
areq_ctx 1262 drivers/crypto/ccree/cc_buffer_mgr.c 0, true, &areq_ctx->mlli_nents);
areq_ctx 1268 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->buff_index = (areq_ctx->buff_index ^ 1);
areq_ctx 1270 drivers/crypto/ccree/cc_buffer_mgr.c cc_dma_buf_type(areq_ctx->data_dma_buf_type));
areq_ctx 1274 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, src, areq_ctx->in_nents, DMA_TO_DEVICE);
areq_ctx 1278 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE);
areq_ctx 1287 drivers/crypto/ccree/cc_buffer_mgr.c struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx;
areq_ctx 1289 drivers/crypto/ccree/cc_buffer_mgr.c u8 *curr_buff = cc_hash_buf(areq_ctx);
areq_ctx 1290 drivers/crypto/ccree/cc_buffer_mgr.c u32 *curr_buff_cnt = cc_hash_buf_cnt(areq_ctx);
areq_ctx 1291 drivers/crypto/ccree/cc_buffer_mgr.c u8 *next_buff = cc_next_buf(areq_ctx);
areq_ctx 1292 drivers/crypto/ccree/cc_buffer_mgr.c u32 *next_buff_cnt = cc_next_buf_cnt(areq_ctx);
areq_ctx 1293 drivers/crypto/ccree/cc_buffer_mgr.c struct mlli_params *mlli_params = &areq_ctx->mlli_params;
areq_ctx 1304 drivers/crypto/ccree/cc_buffer_mgr.c curr_buff, *curr_buff_cnt, nbytes, src, areq_ctx->buff_index);
areq_ctx 1306 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type = CC_DMA_BUF_NULL;
areq_ctx 1308 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->curr_sg = NULL;
areq_ctx 1310 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->in_nents = 0;
areq_ctx 1315 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->in_nents = sg_nents_for_len(src, nbytes);
areq_ctx 1316 drivers/crypto/ccree/cc_buffer_mgr.c sg_copy_to_buffer(src, areq_ctx->in_nents,
areq_ctx 1343 drivers/crypto/ccree/cc_buffer_mgr.c rc = cc_set_hash_buf(dev, areq_ctx, curr_buff, *curr_buff_cnt,
areq_ctx 1353 drivers/crypto/ccree/cc_buffer_mgr.c DMA_TO_DEVICE, &areq_ctx->in_nents,
areq_ctx 1359 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type == CC_DMA_BUF_NULL) {
areq_ctx 1361 drivers/crypto/ccree/cc_buffer_mgr.c memcpy(areq_ctx->buff_sg, src,
areq_ctx 1363 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->buff_sg->length = update_data_len;
areq_ctx 1364 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI;
areq_ctx 1365 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->curr_sg = areq_ctx->buff_sg;
areq_ctx 1367 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->data_dma_buf_type = CC_DMA_BUF_MLLI;
areq_ctx 1371 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->data_dma_buf_type == CC_DMA_BUF_MLLI) {
areq_ctx 1374 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, &sg_data, areq_ctx->in_nents, src,
areq_ctx 1376 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->mlli_nents);
areq_ctx 1381 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->buff_index = (areq_ctx->buff_index ^ swap_index);
areq_ctx 1386 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, src, areq_ctx->in_nents, DMA_TO_DEVICE);
areq_ctx 1390 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE);
areq_ctx 1398 drivers/crypto/ccree/cc_buffer_mgr.c struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx;
areq_ctx 1399 drivers/crypto/ccree/cc_buffer_mgr.c u32 *prev_len = cc_next_buf_cnt(areq_ctx);
areq_ctx 1404 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->mlli_params.curr_pool) {
areq_ctx 1406 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->mlli_params.mlli_dma_addr,
areq_ctx 1407 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->mlli_params.mlli_virt_addr);
areq_ctx 1408 drivers/crypto/ccree/cc_buffer_mgr.c dma_pool_free(areq_ctx->mlli_params.curr_pool,
areq_ctx 1409 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->mlli_params.mlli_virt_addr,
areq_ctx 1410 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->mlli_params.mlli_dma_addr);
areq_ctx 1413 drivers/crypto/ccree/cc_buffer_mgr.c if (src && areq_ctx->in_nents) {
areq_ctx 1417 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->in_nents, DMA_TO_DEVICE);
areq_ctx 1422 drivers/crypto/ccree/cc_buffer_mgr.c sg_virt(areq_ctx->buff_sg),
areq_ctx 1423 drivers/crypto/ccree/cc_buffer_mgr.c &sg_dma_address(areq_ctx->buff_sg),
areq_ctx 1424 drivers/crypto/ccree/cc_buffer_mgr.c sg_dma_len(areq_ctx->buff_sg));
areq_ctx 1425 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE);
areq_ctx 1432 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->buff_index ^= 1;

areq_ctx 96 drivers/crypto/ccree/cc_hash.c static void cc_set_desc(struct ahash_req_ctx *areq_ctx, struct cc_hash_ctx *ctx,
areq_ctx 2214 drivers/crypto/ccree/cc_hash.c static void cc_set_desc(struct ahash_req_ctx *areq_ctx,
areq_ctx 2222 drivers/crypto/ccree/cc_hash.c if (areq_ctx->data_dma_buf_type == CC_DMA_BUF_DLLI) {
areq_ctx 2225 drivers/crypto/ccree/cc_hash.c sg_dma_address(areq_ctx->curr_sg),
areq_ctx 2226 drivers/crypto/ccree/cc_hash.c areq_ctx->curr_sg->length, NS_BIT);
areq_ctx 2230 drivers/crypto/ccree/cc_hash.c if (areq_ctx->data_dma_buf_type == CC_DMA_BUF_NULL) {
areq_ctx 2238 drivers/crypto/ccree/cc_hash.c areq_ctx->mlli_params.mlli_dma_addr,
areq_ctx 2239 drivers/crypto/ccree/cc_hash.c areq_ctx->mlli_params.mlli_len, NS_BIT);
areq_ctx 2241 drivers/crypto/ccree/cc_hash.c areq_ctx->mlli_params.mlli_len);
areq_ctx 2248 drivers/crypto/ccree/cc_hash.c areq_ctx->mlli_nents, NS_BIT);
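One pattern worth calling out across the cc_buffer_mgr.c hits (591-602 for the IV, and the same shape for mac_buf, hkey, and the GCM length/counter blocks): copy the data into DMA-safe memory, map it, check dma_mapping_error(), and on failure free the copy and zero the context fields so the later unmap path stays a no-op. A userspace analogue of that flow, with mock_dma_map() standing in for dma_map_single()/dma_mapping_error() and malloc()/free() standing in for kmemdup()/kzfree():

/* Sketch of the IV map-with-cleanup pattern (cc_buffer_mgr.c 579-602). */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>

struct mock_gen_ctx { uint8_t *iv; uintptr_t iv_dma_addr; };

static uintptr_t mock_dma_map(void *p, size_t len)
{
	(void)len;
	return (uintptr_t)p;   /* identity "mapping"; 0 would signal failure */
}

static int chain_iv(struct mock_gen_ctx *gen, const uint8_t *req_iv, size_t ivsize)
{
	gen->iv_dma_addr = 0;   /* make the unmap path idempotent */
	gen->iv = NULL;
	if (!req_iv)
		return 0;       /* nothing to map */

	gen->iv = malloc(ivsize);               /* kmemdup() in the driver */
	if (!gen->iv)
		return -1;
	memcpy(gen->iv, req_iv, ivsize);

	gen->iv_dma_addr = mock_dma_map(gen->iv, ivsize);
	if (!gen->iv_dma_addr) {                /* dma_mapping_error() path */
		free(gen->iv);                  /* kzfree() in the driver  */
		gen->iv = NULL;
		return -1;
	}
	return 0;
}

int main(void)
{
	uint8_t iv[16] = { 0 };
	struct mock_gen_ctx gen;
	printf("rc=%d\n", chain_iv(&gen, iv, sizeof(iv)));
	free(gen.iv);
	return 0;
}

Zeroing the dma_addr and pointer fields up front is what lets the shared teardown code (the 488-558 block above) unconditionally test each field before unmapping, regardless of how far setup got before failing.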