edesc 943 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc,
edesc 947 drivers/crypto/caam/caamalg.c edesc->src_nents, edesc->dst_nents, 0, 0,
edesc 948 drivers/crypto/caam/caamalg.c edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
edesc 951 drivers/crypto/caam/caamalg.c static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
edesc 958 drivers/crypto/caam/caamalg.c edesc->src_nents, edesc->dst_nents,
edesc 959 drivers/crypto/caam/caamalg.c edesc->iv_dma, ivsize,
edesc 960 drivers/crypto/caam/caamalg.c edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
edesc 967 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 972 drivers/crypto/caam/caamalg.c edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
edesc 977 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 979 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 988 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 993 drivers/crypto/caam/caamalg.c edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
edesc 998 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 1000 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1009 drivers/crypto/caam/caamalg.c struct skcipher_edesc *edesc;
edesc 1016 drivers/crypto/caam/caamalg.c edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
edesc 1021 drivers/crypto/caam/caamalg.c skcipher_unmap(jrdev, edesc, req);
edesc 1029 drivers/crypto/caam/caamalg.c memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
edesc 1033 drivers/crypto/caam/caamalg.c edesc->src_nents > 1 ? 100 : ivsize, 1);
edesc 1038 drivers/crypto/caam/caamalg.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
edesc 1040 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1049 drivers/crypto/caam/caamalg.c struct skcipher_edesc *edesc;
edesc 1056 drivers/crypto/caam/caamalg.c edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
edesc 1060 drivers/crypto/caam/caamalg.c skcipher_unmap(jrdev, edesc, req);
edesc 1068 drivers/crypto/caam/caamalg.c memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
edesc 1078 drivers/crypto/caam/caamalg.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
edesc 1080 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1089 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc,
edesc 1095 drivers/crypto/caam/caamalg.c u32 *desc = edesc->hw_desc;
edesc 1109 drivers/crypto/caam/caamalg.c src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
edesc 1113 drivers/crypto/caam/caamalg.c src_dma = edesc->sec4_sg_dma;
edesc 1114 drivers/crypto/caam/caamalg.c sec4_sg_index += edesc->mapped_src_nents;
edesc 1125 drivers/crypto/caam/caamalg.c if (!edesc->mapped_dst_nents) {
edesc 1128 drivers/crypto/caam/caamalg.c } else if (edesc->mapped_dst_nents == 1) {
edesc 1132 drivers/crypto/caam/caamalg.c dst_dma = edesc->sec4_sg_dma +
edesc 1150 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc,
edesc 1156 drivers/crypto/caam/caamalg.c u32 *desc = edesc->hw_desc;
edesc 1160 drivers/crypto/caam/caamalg.c init_aead_job(req, edesc, all_contig, encrypt);
edesc 1180 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc, bool all_contig,
edesc 1186 drivers/crypto/caam/caamalg.c u32 *desc = edesc->hw_desc;
edesc 1189 drivers/crypto/caam/caamalg.c init_aead_job(req, edesc, all_contig, encrypt);
edesc 1214 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc,
edesc 1226 drivers/crypto/caam/caamalg.c u32 *desc = edesc->hw_desc;
edesc 1244 drivers/crypto/caam/caamalg.c init_aead_job(req, edesc, all_contig, encrypt);
edesc 1266 drivers/crypto/caam/caamalg.c struct skcipher_edesc *edesc,
edesc 1273 drivers/crypto/caam/caamalg.c u32 *desc = edesc->hw_desc;
edesc 1282 drivers/crypto/caam/caamalg.c (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
edesc 1286 drivers/crypto/caam/caamalg.c edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
edesc 1294 drivers/crypto/caam/caamalg.c if (ivsize || edesc->mapped_src_nents > 1) {
edesc 1295 drivers/crypto/caam/caamalg.c src_dma = edesc->sec4_sg_dma;
edesc 1296 drivers/crypto/caam/caamalg.c sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
edesc 1307 drivers/crypto/caam/caamalg.c } else if (!ivsize && edesc->mapped_dst_nents == 1) {
edesc 1310 drivers/crypto/caam/caamalg.c dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
edesc 1332 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 1414 drivers/crypto/caam/caamalg.c edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
edesc 1416 drivers/crypto/caam/caamalg.c if (!edesc) {
edesc 1422 drivers/crypto/caam/caamalg.c edesc->src_nents = src_nents;
edesc 1423 drivers/crypto/caam/caamalg.c edesc->dst_nents = dst_nents;
edesc 1424 drivers/crypto/caam/caamalg.c edesc->mapped_src_nents = mapped_src_nents;
edesc 1425 drivers/crypto/caam/caamalg.c edesc->mapped_dst_nents = mapped_dst_nents;
edesc 1426 drivers/crypto/caam/caamalg.c edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
edesc 1433 drivers/crypto/caam/caamalg.c edesc->sec4_sg + sec4_sg_index, 0);
edesc 1438 drivers/crypto/caam/caamalg.c edesc->sec4_sg + sec4_sg_index, 0);
edesc 1442 drivers/crypto/caam/caamalg.c return edesc;
edesc 1444 drivers/crypto/caam/caamalg.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
edesc 1446 drivers/crypto/caam/caamalg.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
edesc 1448 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 1449 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1453 drivers/crypto/caam/caamalg.c edesc->sec4_sg_bytes = sec4_sg_bytes;
edesc 1455 drivers/crypto/caam/caamalg.c return edesc;
edesc 1460 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 1469 drivers/crypto/caam/caamalg.c edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
edesc 1470 drivers/crypto/caam/caamalg.c if (IS_ERR(edesc))
edesc 1471 drivers/crypto/caam/caamalg.c return PTR_ERR(edesc);
edesc 1474 drivers/crypto/caam/caamalg.c init_gcm_job(req, edesc, all_contig, true);
edesc 1477 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
edesc 1478 drivers/crypto/caam/caamalg.c desc_bytes(edesc->hw_desc), 1);
edesc 1480 drivers/crypto/caam/caamalg.c desc = edesc->hw_desc;
edesc 1485 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 1486 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1494 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 1502 drivers/crypto/caam/caamalg.c edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
edesc 1504 drivers/crypto/caam/caamalg.c if (IS_ERR(edesc))
edesc 1505 drivers/crypto/caam/caamalg.c return PTR_ERR(edesc);
edesc 1507 drivers/crypto/caam/caamalg.c desc = edesc->hw_desc;
edesc 1509 drivers/crypto/caam/caamalg.c init_chachapoly_job(req, edesc, all_contig, true);
edesc 1518 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 1519 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1527 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 1535 drivers/crypto/caam/caamalg.c edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
edesc 1537 drivers/crypto/caam/caamalg.c if (IS_ERR(edesc))
edesc 1538 drivers/crypto/caam/caamalg.c return PTR_ERR(edesc);
edesc 1540 drivers/crypto/caam/caamalg.c desc = edesc->hw_desc;
edesc 1542 drivers/crypto/caam/caamalg.c init_chachapoly_job(req, edesc, all_contig, false);
edesc 1551 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 1552 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1565 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 1574 drivers/crypto/caam/caamalg.c edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
edesc 1576 drivers/crypto/caam/caamalg.c if (IS_ERR(edesc))
edesc 1577 drivers/crypto/caam/caamalg.c return PTR_ERR(edesc);
edesc 1580 drivers/crypto/caam/caamalg.c init_authenc_job(req, edesc, all_contig, true);
edesc 1583 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
edesc 1584 drivers/crypto/caam/caamalg.c desc_bytes(edesc->hw_desc), 1);
edesc 1586 drivers/crypto/caam/caamalg.c desc = edesc->hw_desc;
edesc 1591 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 1592 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1600 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 1609 drivers/crypto/caam/caamalg.c edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
edesc 1610 drivers/crypto/caam/caamalg.c if (IS_ERR(edesc))
edesc 1611 drivers/crypto/caam/caamalg.c return PTR_ERR(edesc);
edesc 1614 drivers/crypto/caam/caamalg.c init_gcm_job(req, edesc, all_contig, false);
edesc 1617 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
edesc 1618 drivers/crypto/caam/caamalg.c desc_bytes(edesc->hw_desc), 1);
edesc 1620 drivers/crypto/caam/caamalg.c desc = edesc->hw_desc;
edesc 1625 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 1626 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1639 drivers/crypto/caam/caamalg.c struct aead_edesc *edesc;
edesc 1652 drivers/crypto/caam/caamalg.c edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
edesc 1654 drivers/crypto/caam/caamalg.c if (IS_ERR(edesc))
edesc 1655 drivers/crypto/caam/caamalg.c return PTR_ERR(edesc);
edesc 1658 drivers/crypto/caam/caamalg.c init_authenc_job(req, edesc, all_contig, false);
edesc 1661 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
edesc 1662 drivers/crypto/caam/caamalg.c desc_bytes(edesc->hw_desc), 1);
edesc 1664 drivers/crypto/caam/caamalg.c desc = edesc->hw_desc;
edesc 1669 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
edesc 1670 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1688 drivers/crypto/caam/caamalg.c struct skcipher_edesc *edesc;
edesc 1766 drivers/crypto/caam/caamalg.c edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
edesc 1768 drivers/crypto/caam/caamalg.c if (!edesc) {
edesc 1775 drivers/crypto/caam/caamalg.c edesc->src_nents = src_nents;
edesc 1776 drivers/crypto/caam/caamalg.c edesc->dst_nents = dst_nents;
edesc 1777 drivers/crypto/caam/caamalg.c edesc->mapped_src_nents = mapped_src_nents;
edesc 1778 drivers/crypto/caam/caamalg.c edesc->mapped_dst_nents = mapped_dst_nents;
edesc 1779 drivers/crypto/caam/caamalg.c edesc->sec4_sg_bytes = sec4_sg_bytes;
edesc 1780 drivers/crypto/caam/caamalg.c edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
edesc 1785 drivers/crypto/caam/caamalg.c iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
edesc 1793 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1797 drivers/crypto/caam/caamalg.c dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
edesc 1800 drivers/crypto/caam/caamalg.c sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
edesc 1804 drivers/crypto/caam/caamalg.c sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
edesc 1808 drivers/crypto/caam/caamalg.c dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
edesc 1812 drivers/crypto/caam/caamalg.c sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
edesc 1816 drivers/crypto/caam/caamalg.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
edesc 1819 drivers/crypto/caam/caamalg.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
edesc 1823 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1828 drivers/crypto/caam/caamalg.c edesc->iv_dma = iv_dma;
edesc 1831 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
edesc 1834 drivers/crypto/caam/caamalg.c return edesc;
edesc 1839 drivers/crypto/caam/caamalg.c struct skcipher_edesc *edesc;
edesc 1850 drivers/crypto/caam/caamalg.c edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
edesc 1851 drivers/crypto/caam/caamalg.c if (IS_ERR(edesc))
edesc 1852 drivers/crypto/caam/caamalg.c return PTR_ERR(edesc);
edesc 1855 drivers/crypto/caam/caamalg.c init_skcipher_job(req, edesc, true);
edesc 1858 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
edesc 1859 drivers/crypto/caam/caamalg.c desc_bytes(edesc->hw_desc), 1);
edesc 1861 drivers/crypto/caam/caamalg.c desc = edesc->hw_desc;
edesc 1867 drivers/crypto/caam/caamalg.c skcipher_unmap(jrdev, edesc, req);
edesc 1868 drivers/crypto/caam/caamalg.c kfree(edesc);
edesc 1876 drivers/crypto/caam/caamalg.c struct skcipher_edesc *edesc;
edesc 1887 drivers/crypto/caam/caamalg.c edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
edesc 1888 drivers/crypto/caam/caamalg.c if (IS_ERR(edesc))
edesc 1889 drivers/crypto/caam/caamalg.c return PTR_ERR(edesc);
edesc 1892 drivers/crypto/caam/caamalg.c init_skcipher_job(req, edesc, false);
edesc 1893 drivers/crypto/caam/caamalg.c desc = edesc->hw_desc;
edesc 1896 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
edesc 1897 drivers/crypto/caam/caamalg.c desc_bytes(edesc->hw_desc), 1);
edesc 1903 drivers/crypto/caam/caamalg.c skcipher_unmap(jrdev, edesc, req);
edesc 1904 drivers/crypto/caam/caamalg.c kfree(edesc);
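Read together, the caamalg.c references above trace one lifecycle: a per-request extended descriptor (edesc) is allocated with the hardware job descriptor and sec4 scatter/gather table inline, enqueued on a job ring, and then recovered, unmapped and freed in the completion callback. The sketch below condenses that pattern for the GCM encrypt path. It is illustrative only: it reuses helpers named in the listing (aead_edesc_alloc, init_gcm_job, aead_unmap) whose bodies and driver-internal headers are omitted, and it simplifies status handling (the real driver decodes status with caam_jr_strstatus).

static void gcm_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
                             void *context)
{
        struct aead_request *req = context;
        /* hw_desc[0] is the first word of the descriptor that was
         * enqueued, so container_of() recovers the surrounding edesc. */
        struct aead_edesc *edesc = container_of(desc, struct aead_edesc,
                                                hw_desc[0]);

        aead_unmap(jrdev, edesc, req);  /* undo all DMA mappings */
        kfree(edesc);                   /* one edesc lives per request */
        aead_request_complete(req, err ? -EIO : 0); /* simplified */
}

static int gcm_encrypt(struct aead_request *req)
{
        struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct device *jrdev = ctx->jrdev;
        struct aead_edesc *edesc;
        bool all_contig;
        int ret;

        /* One kzalloc covers the edesc header, hw_desc[] and the sec4
         * S/G table (see lines 1414-1455 above). */
        edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        init_gcm_job(req, edesc, all_contig, true);

        ret = caam_jr_enqueue(jrdev, edesc->hw_desc, gcm_encrypt_done, req);
        if (!ret)
                return -EINPROGRESS;

        /* Enqueue failed, so the callback will never fire: clean up here. */
        aead_unmap(jrdev, edesc, req);
        kfree(edesc);
        return ret;
}

The decrypt and chachapoly/authenc entry points above differ only in the init_*_job() helper and descriptor-length constant they use.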
edesc 891 drivers/crypto/caam/caamalg_qi.c struct aead_edesc *edesc,
edesc 897 drivers/crypto/caam/caamalg_qi.c caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
edesc 898 drivers/crypto/caam/caamalg_qi.c edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
edesc 899 drivers/crypto/caam/caamalg_qi.c edesc->qm_sg_bytes);
edesc 900 drivers/crypto/caam/caamalg_qi.c dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
edesc 903 drivers/crypto/caam/caamalg_qi.c static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
edesc 909 drivers/crypto/caam/caamalg_qi.c caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
edesc 910 drivers/crypto/caam/caamalg_qi.c edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,
edesc 911 drivers/crypto/caam/caamalg_qi.c edesc->qm_sg_bytes);
edesc 917 drivers/crypto/caam/caamalg_qi.c struct aead_edesc *edesc;
edesc 928 drivers/crypto/caam/caamalg_qi.c edesc = container_of(drv_req, typeof(*edesc), drv_req);
edesc 929 drivers/crypto/caam/caamalg_qi.c aead_unmap(qidev, edesc, aead_req);
edesc 932 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 950 drivers/crypto/caam/caamalg_qi.c struct aead_edesc *edesc;
edesc 964 drivers/crypto/caam/caamalg_qi.c edesc = qi_cache_alloc(GFP_DMA | flags);
edesc 965 drivers/crypto/caam/caamalg_qi.c if (unlikely(!edesc)) {
edesc 978 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 986 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 997 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1005 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1014 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1029 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1061 drivers/crypto/caam/caamalg_qi.c sg_table = &edesc->sgt[0];
edesc 1069 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1084 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1089 drivers/crypto/caam/caamalg_qi.c edesc->src_nents = src_nents;
edesc 1090 drivers/crypto/caam/caamalg_qi.c edesc->dst_nents = dst_nents;
edesc 1091 drivers/crypto/caam/caamalg_qi.c edesc->iv_dma = iv_dma;
edesc 1092 drivers/crypto/caam/caamalg_qi.c edesc->drv_req.app_ctx = req;
edesc 1093 drivers/crypto/caam/caamalg_qi.c edesc->drv_req.cbk = aead_done;
edesc 1094 drivers/crypto/caam/caamalg_qi.c edesc->drv_req.drv_ctx = drv_ctx;
edesc 1096 drivers/crypto/caam/caamalg_qi.c edesc->assoclen = cpu_to_caam32(req->assoclen);
edesc 1097 drivers/crypto/caam/caamalg_qi.c edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
edesc 1099 drivers/crypto/caam/caamalg_qi.c if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
edesc 1103 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1107 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
edesc 1122 drivers/crypto/caam/caamalg_qi.c dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
edesc 1125 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1129 drivers/crypto/caam/caamalg_qi.c edesc->qm_sg_dma = qm_sg_dma;
edesc 1130 drivers/crypto/caam/caamalg_qi.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 1136 drivers/crypto/caam/caamalg_qi.c fd_sgt = &edesc->drv_req.fd_sgt[0];
edesc 1155 drivers/crypto/caam/caamalg_qi.c return edesc;
edesc 1160 drivers/crypto/caam/caamalg_qi.c struct aead_edesc *edesc;
edesc 1169 drivers/crypto/caam/caamalg_qi.c edesc = aead_edesc_alloc(req, encrypt);
edesc 1170 drivers/crypto/caam/caamalg_qi.c if (IS_ERR_OR_NULL(edesc))
edesc 1171 drivers/crypto/caam/caamalg_qi.c return PTR_ERR(edesc);
edesc 1174 drivers/crypto/caam/caamalg_qi.c ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
edesc 1178 drivers/crypto/caam/caamalg_qi.c aead_unmap(ctx->qidev, edesc, req);
edesc 1179 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1209 drivers/crypto/caam/caamalg_qi.c struct skcipher_edesc *edesc;
edesc 1219 drivers/crypto/caam/caamalg_qi.c edesc = container_of(drv_req, typeof(*edesc), drv_req);
edesc 1226 drivers/crypto/caam/caamalg_qi.c edesc->src_nents > 1 ? 100 : ivsize, 1);
edesc 1229 drivers/crypto/caam/caamalg_qi.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
edesc 1231 drivers/crypto/caam/caamalg_qi.c skcipher_unmap(qidev, edesc, req);
edesc 1239 drivers/crypto/caam/caamalg_qi.c memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes,
edesc 1242 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1255 drivers/crypto/caam/caamalg_qi.c struct skcipher_edesc *edesc;
edesc 1332 drivers/crypto/caam/caamalg_qi.c edesc = qi_cache_alloc(GFP_DMA | flags);
edesc 1333 drivers/crypto/caam/caamalg_qi.c if (unlikely(!edesc)) {
edesc 1341 drivers/crypto/caam/caamalg_qi.c sg_table = &edesc->sgt[0];
edesc 1350 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1354 drivers/crypto/caam/caamalg_qi.c edesc->src_nents = src_nents;
edesc 1355 drivers/crypto/caam/caamalg_qi.c edesc->dst_nents = dst_nents;
edesc 1356 drivers/crypto/caam/caamalg_qi.c edesc->iv_dma = iv_dma;
edesc 1357 drivers/crypto/caam/caamalg_qi.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 1358 drivers/crypto/caam/caamalg_qi.c edesc->drv_req.app_ctx = req;
edesc 1359 drivers/crypto/caam/caamalg_qi.c edesc->drv_req.cbk = skcipher_done;
edesc 1360 drivers/crypto/caam/caamalg_qi.c edesc->drv_req.drv_ctx = drv_ctx;
edesc 1371 drivers/crypto/caam/caamalg_qi.c edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
edesc 1373 drivers/crypto/caam/caamalg_qi.c if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
edesc 1377 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
edesc 1381 drivers/crypto/caam/caamalg_qi.c fd_sgt = &edesc->drv_req.fd_sgt[0];
edesc 1383 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
edesc 1387 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
edesc 1391 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
edesc 1395 drivers/crypto/caam/caamalg_qi.c return edesc;
edesc 1400 drivers/crypto/caam/caamalg_qi.c struct skcipher_edesc *edesc;
edesc 1412 drivers/crypto/caam/caamalg_qi.c edesc = skcipher_edesc_alloc(req, encrypt);
edesc 1413 drivers/crypto/caam/caamalg_qi.c if (IS_ERR(edesc))
edesc 1414 drivers/crypto/caam/caamalg_qi.c return PTR_ERR(edesc);
edesc 1416 drivers/crypto/caam/caamalg_qi.c ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
edesc 1420 drivers/crypto/caam/caamalg_qi.c skcipher_unmap(ctx->qidev, edesc, req);
edesc 1421 drivers/crypto/caam/caamalg_qi.c qi_cache_free(edesc);
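The caamalg_qi.c references show the same lifecycle on the queue-interface backend: edescs come from a dedicated cache (qi_cache_alloc/qi_cache_free) rather than kzalloc/kfree, and the completion hook travels inside the edesc as its embedded drv_req. A rough sketch, assuming the driver's own aead_edesc_alloc() has already mapped DMA and filled drv_req, and simplifying both status decoding and how the callback obtains qidev (the real driver derives it from drv_req->drv_ctx):

static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
        struct aead_request *aead_req = drv_req->app_ctx;
        struct caam_ctx *ctx =
                crypto_aead_ctx(crypto_aead_reqtfm(aead_req));
        struct aead_edesc *edesc;

        /* drv_req is embedded in the edesc, so container_of() finds it. */
        edesc = container_of(drv_req, typeof(*edesc), drv_req);
        aead_unmap(ctx->qidev, edesc, aead_req);
        qi_cache_free(edesc);
        aead_request_complete(aead_req, status ? -EIO : 0); /* simplified */
}

static int aead_crypt(struct aead_request *req, bool encrypt)
{
        struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct aead_edesc *edesc;
        int ret;

        /* Draws from the QI cache and fills drv_req (app_ctx, cbk =
         * aead_done, drv_ctx), as lines 1089-1094 above show. */
        edesc = aead_edesc_alloc(req, encrypt);
        if (IS_ERR_OR_NULL(edesc))
                return PTR_ERR(edesc);

        ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
        if (!ret)
                return -EINPROGRESS;

        aead_unmap(ctx->qidev, edesc, req);
        qi_cache_free(edesc);
        return ret;
}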
edesc 362 drivers/crypto/caam/caamalg_qi2.c struct aead_edesc *edesc;
edesc 371 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 372 drivers/crypto/caam/caamalg_qi2.c if (unlikely(!edesc)) {
edesc 385 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 393 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 402 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 416 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 430 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 438 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 468 drivers/crypto/caam/caamalg_qi2.c sg_table = &edesc->sgt[0];
edesc 476 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 491 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 496 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents = src_nents;
edesc 497 drivers/crypto/caam/caamalg_qi2.c edesc->dst_nents = dst_nents;
edesc 498 drivers/crypto/caam/caamalg_qi2.c edesc->iv_dma = iv_dma;
edesc 506 drivers/crypto/caam/caamalg_qi2.c edesc->assoclen = cpu_to_caam32(req->assoclen - ivsize);
edesc 508 drivers/crypto/caam/caamalg_qi2.c edesc->assoclen = cpu_to_caam32(req->assoclen);
edesc 509 drivers/crypto/caam/caamalg_qi2.c edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4,
edesc 511 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(dev, edesc->assoclen_dma)) {
edesc 515 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 519 drivers/crypto/caam/caamalg_qi2.c dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
edesc 534 drivers/crypto/caam/caamalg_qi2.c dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
edesc 537 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 541 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = qm_sg_dma;
edesc 542 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 583 drivers/crypto/caam/caamalg_qi2.c return edesc;
edesc 1127 drivers/crypto/caam/caamalg_qi2.c struct skcipher_edesc *edesc;
edesc 1199 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 1200 drivers/crypto/caam/caamalg_qi2.c if (unlikely(!edesc)) {
edesc 1208 drivers/crypto/caam/caamalg_qi2.c sg_table = &edesc->sgt[0];
edesc 1217 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1221 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents = src_nents;
edesc 1222 drivers/crypto/caam/caamalg_qi2.c edesc->dst_nents = dst_nents;
edesc 1223 drivers/crypto/caam/caamalg_qi2.c edesc->iv_dma = iv_dma;
edesc 1224 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 1235 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes,
edesc 1237 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(dev, edesc->qm_sg_dma)) {
edesc 1241 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1251 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
edesc 1256 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma +
edesc 1259 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + dst_sg_idx *
edesc 1262 drivers/crypto/caam/caamalg_qi2.c return edesc;
edesc 1265 drivers/crypto/caam/caamalg_qi2.c static void aead_unmap(struct device *dev, struct aead_edesc *edesc,
edesc 1271 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
edesc 1272 drivers/crypto/caam/caamalg_qi2.c edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
edesc 1273 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes);
edesc 1274 drivers/crypto/caam/caamalg_qi2.c dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
edesc 1277 drivers/crypto/caam/caamalg_qi2.c static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
edesc 1283 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
edesc 1284 drivers/crypto/caam/caamalg_qi2.c edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,
edesc 1285 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes);
edesc 1294 drivers/crypto/caam/caamalg_qi2.c struct aead_edesc *edesc = req_ctx->edesc;
edesc 1304 drivers/crypto/caam/caamalg_qi2.c aead_unmap(ctx->dev, edesc, req);
edesc 1305 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1315 drivers/crypto/caam/caamalg_qi2.c struct aead_edesc *edesc = req_ctx->edesc;
edesc 1325 drivers/crypto/caam/caamalg_qi2.c aead_unmap(ctx->dev, edesc, req);
edesc 1326 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1332 drivers/crypto/caam/caamalg_qi2.c struct aead_edesc *edesc;
edesc 1339 drivers/crypto/caam/caamalg_qi2.c edesc = aead_edesc_alloc(req, true);
edesc 1340 drivers/crypto/caam/caamalg_qi2.c if (IS_ERR(edesc))
edesc 1341 drivers/crypto/caam/caamalg_qi2.c return PTR_ERR(edesc);
edesc 1347 drivers/crypto/caam/caamalg_qi2.c caam_req->edesc = edesc;
edesc 1351 drivers/crypto/caam/caamalg_qi2.c aead_unmap(ctx->dev, edesc, req);
edesc 1352 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1360 drivers/crypto/caam/caamalg_qi2.c struct aead_edesc *edesc;
edesc 1367 drivers/crypto/caam/caamalg_qi2.c edesc = aead_edesc_alloc(req, false);
edesc 1368 drivers/crypto/caam/caamalg_qi2.c if (IS_ERR(edesc))
edesc 1369 drivers/crypto/caam/caamalg_qi2.c return PTR_ERR(edesc);
edesc 1375 drivers/crypto/caam/caamalg_qi2.c caam_req->edesc = edesc;
edesc 1379 drivers/crypto/caam/caamalg_qi2.c aead_unmap(ctx->dev, edesc, req);
edesc 1380 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1403 drivers/crypto/caam/caamalg_qi2.c struct skcipher_edesc *edesc = req_ctx->edesc;
edesc 1414 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents > 1 ? 100 : ivsize, 1);
edesc 1417 drivers/crypto/caam/caamalg_qi2.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
edesc 1419 drivers/crypto/caam/caamalg_qi2.c skcipher_unmap(ctx->dev, edesc, req);
edesc 1427 drivers/crypto/caam/caamalg_qi2.c memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes,
edesc 1430 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1441 drivers/crypto/caam/caamalg_qi2.c struct skcipher_edesc *edesc = req_ctx->edesc;
edesc 1452 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents > 1 ? 100 : ivsize, 1);
edesc 1455 drivers/crypto/caam/caamalg_qi2.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
edesc 1457 drivers/crypto/caam/caamalg_qi2.c skcipher_unmap(ctx->dev, edesc, req);
edesc 1465 drivers/crypto/caam/caamalg_qi2.c memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes,
edesc 1468 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1474 drivers/crypto/caam/caamalg_qi2.c struct skcipher_edesc *edesc;
edesc 1484 drivers/crypto/caam/caamalg_qi2.c edesc = skcipher_edesc_alloc(req);
edesc 1485 drivers/crypto/caam/caamalg_qi2.c if (IS_ERR(edesc))
edesc 1486 drivers/crypto/caam/caamalg_qi2.c return PTR_ERR(edesc);
edesc 1492 drivers/crypto/caam/caamalg_qi2.c caam_req->edesc = edesc;
edesc 1496 drivers/crypto/caam/caamalg_qi2.c skcipher_unmap(ctx->dev, edesc, req);
edesc 1497 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 1505 drivers/crypto/caam/caamalg_qi2.c struct skcipher_edesc *edesc;
edesc 1514 drivers/crypto/caam/caamalg_qi2.c edesc = skcipher_edesc_alloc(req);
edesc 1515 drivers/crypto/caam/caamalg_qi2.c if (IS_ERR(edesc))
edesc 1516 drivers/crypto/caam/caamalg_qi2.c return PTR_ERR(edesc);
edesc 1522 drivers/crypto/caam/caamalg_qi2.c caam_req->edesc = edesc;
edesc 1526 drivers/crypto/caam/caamalg_qi2.c skcipher_unmap(ctx->dev, edesc, req);
edesc 1527 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3311 drivers/crypto/caam/caamalg_qi2.c static inline void ahash_unmap(struct device *dev, struct ahash_edesc *edesc,
edesc 3316 drivers/crypto/caam/caamalg_qi2.c if (edesc->src_nents)
edesc 3317 drivers/crypto/caam/caamalg_qi2.c dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);
edesc 3319 drivers/crypto/caam/caamalg_qi2.c if (edesc->qm_sg_bytes)
edesc 3320 drivers/crypto/caam/caamalg_qi2.c dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes,
edesc 3331 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc,
edesc 3340 drivers/crypto/caam/caamalg_qi2.c ahash_unmap(dev, edesc, req);
edesc 3349 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc = state->caam_req.edesc;
edesc 3359 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
edesc 3361 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3376 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc = state->caam_req.edesc;
edesc 3385 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
edesc 3387 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3406 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc = state->caam_req.edesc;
edesc 3416 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
edesc 3418 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3433 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc = state->caam_req.edesc;
edesc 3442 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
edesc 3444 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3473 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc;
edesc 3502 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 3503 drivers/crypto/caam/caamalg_qi2.c if (!edesc) {
edesc 3509 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents = src_nents;
edesc 3513 drivers/crypto/caam/caamalg_qi2.c sg_table = &edesc->sgt[0];
edesc 3536 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
edesc 3538 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
edesc 3543 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 3548 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
edesc 3558 drivers/crypto/caam/caamalg_qi2.c req_ctx->edesc = edesc;
edesc 3580 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
edesc 3581 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3598 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc;
edesc 3603 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 3604 drivers/crypto/caam/caamalg_qi2.c if (!edesc)
edesc 3608 drivers/crypto/caam/caamalg_qi2.c sg_table = &edesc->sgt[0];
edesc 3621 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
edesc 3623 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
edesc 3628 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 3633 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
edesc 3643 drivers/crypto/caam/caamalg_qi2.c req_ctx->edesc = edesc;
edesc 3651 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
edesc 3652 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3670 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc;
edesc 3692 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 3693 drivers/crypto/caam/caamalg_qi2.c if (!edesc) {
edesc 3698 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents = src_nents;
edesc 3702 drivers/crypto/caam/caamalg_qi2.c sg_table = &edesc->sgt[0];
edesc 3715 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
edesc 3717 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
edesc 3722 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 3727 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
edesc 3737 drivers/crypto/caam/caamalg_qi2.c req_ctx->edesc = edesc;
edesc 3745 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
edesc 3746 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3762 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc;
edesc 3785 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 3786 drivers/crypto/caam/caamalg_qi2.c if (!edesc) {
edesc 3791 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents = src_nents;
edesc 3796 drivers/crypto/caam/caamalg_qi2.c struct dpaa2_sg_entry *sg_table = &edesc->sgt[0];
edesc 3800 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
edesc 3802 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
edesc 3806 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 3808 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
edesc 3833 drivers/crypto/caam/caamalg_qi2.c req_ctx->edesc = edesc;
edesc 3840 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
edesc 3841 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3858 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc;
edesc 3862 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 3863 drivers/crypto/caam/caamalg_qi2.c if (!edesc)
edesc 3905 drivers/crypto/caam/caamalg_qi2.c req_ctx->edesc = edesc;
edesc 3913 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
edesc 3914 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 3934 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc;
edesc 3962 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 3963 drivers/crypto/caam/caamalg_qi2.c if (!edesc) {
edesc 3969 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents = src_nents;
edesc 3972 drivers/crypto/caam/caamalg_qi2.c sg_table = &edesc->sgt[0];
edesc 3985 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
edesc 3987 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
edesc 3992 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 4007 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
edesc 4017 drivers/crypto/caam/caamalg_qi2.c req_ctx->edesc = edesc;
edesc 4043 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE);
edesc 4044 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 4061 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc;
edesc 4083 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 4084 drivers/crypto/caam/caamalg_qi2.c if (!edesc) {
edesc 4089 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents = src_nents;
edesc 4091 drivers/crypto/caam/caamalg_qi2.c sg_table = &edesc->sgt[0];
edesc 4099 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
edesc 4101 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
edesc 4106 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 4121 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
edesc 4131 drivers/crypto/caam/caamalg_qi2.c req_ctx->edesc = edesc;
edesc 4139 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
edesc 4140 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 4158 drivers/crypto/caam/caamalg_qi2.c struct ahash_edesc *edesc;
edesc 4187 drivers/crypto/caam/caamalg_qi2.c edesc = qi_cache_zalloc(GFP_DMA | flags);
edesc 4188 drivers/crypto/caam/caamalg_qi2.c if (!edesc) {
edesc 4194 drivers/crypto/caam/caamalg_qi2.c edesc->src_nents = src_nents;
edesc 4195 drivers/crypto/caam/caamalg_qi2.c sg_table = &edesc->sgt[0];
edesc 4207 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
edesc 4210 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
edesc 4215 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_bytes = qm_sg_bytes;
edesc 4217 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
edesc 4245 drivers/crypto/caam/caamalg_qi2.c req_ctx->edesc = edesc;
edesc 4271 drivers/crypto/caam/caamalg_qi2.c ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE);
edesc 4272 drivers/crypto/caam/caamalg_qi2.c qi_cache_free(edesc);
edesc 188 drivers/crypto/caam/caamalg_qi2.h void *edesc;
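On DPAA2 (caamalg_qi2.c) the pattern shifts again: there is no descriptor pointer to container_of() against, so the edesc is parked in the per-request struct caam_request (the void *edesc member at caamalg_qi2.h:188) and the completion callback retrieves it from there. A sketch of the hash digest path under those assumptions; S/G table construction, flc setup, backlog handling and GFP flag selection are elided or simplified:

static void ahash_done(void *cbk_ctx, u32 status)
{
        struct crypto_async_request *areq = cbk_ctx;
        struct ahash_request *req = ahash_request_cast(areq);
        struct caam_hash_ctx *ctx =
                crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct caam_hash_state *state = ahash_request_ctx(req);
        struct ahash_edesc *edesc = state->caam_req.edesc;

        /* ... copy the digest out of the mapped context buffer ... */
        ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
        qi_cache_free(edesc);
        req->base.complete(&req->base, status ? -EIO : 0); /* simplified */
}

static int ahash_digest(struct ahash_request *req)
{
        struct caam_hash_ctx *ctx =
                crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct caam_hash_state *state = ahash_request_ctx(req);
        struct caam_request *req_ctx = &state->caam_req;
        struct ahash_edesc *edesc;
        int ret;

        edesc = qi_cache_zalloc(GFP_DMA | GFP_KERNEL);
        if (!edesc)
                return -ENOMEM;

        /* ... build the dpaa2 S/G table in edesc->sgt[], dma_map_single()
         * it to edesc->qm_sg_dma and point the input frame-list entry at
         * it with dpaa2_fl_set_addr() (lines 3985-4007 above) ... */

        req_ctx->edesc = edesc;         /* so ahash_done() can find it */
        req_ctx->cbk = ahash_done;
        req_ctx->ctx = &req->base;

        ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
        if (ret != -EINPROGRESS) {
                ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
                qi_cache_free(edesc);
        }
        return ret;
}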
edesc 568 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc,
edesc 573 drivers/crypto/caam/caamhash.c if (edesc->src_nents)
edesc 574 drivers/crypto/caam/caamhash.c dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);
edesc 576 drivers/crypto/caam/caamhash.c if (edesc->sec4_sg_bytes)
edesc 577 drivers/crypto/caam/caamhash.c dma_unmap_single(dev, edesc->sec4_sg_dma,
edesc 578 drivers/crypto/caam/caamhash.c edesc->sec4_sg_bytes, DMA_TO_DEVICE);
edesc 588 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc,
edesc 597 drivers/crypto/caam/caamhash.c ahash_unmap(dev, edesc, req, dst_len);
edesc 604 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 613 drivers/crypto/caam/caamhash.c edesc = container_of(desc, struct ahash_edesc, hw_desc[0]);
edesc 617 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_FROM_DEVICE);
edesc 619 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 632 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 641 drivers/crypto/caam/caamhash.c edesc = container_of(desc, struct ahash_edesc, hw_desc[0]);
edesc 645 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL);
edesc 647 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 664 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 673 drivers/crypto/caam/caamhash.c edesc = container_of(desc, struct ahash_edesc, hw_desc[0]);
edesc 677 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL);
edesc 679 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 692 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 701 drivers/crypto/caam/caamhash.c edesc = container_of(desc, struct ahash_edesc, hw_desc[0]);
edesc 705 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_FROM_DEVICE);
edesc 707 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 729 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 732 drivers/crypto/caam/caamhash.c edesc = kzalloc(sizeof(*edesc) + sg_size, GFP_DMA | flags);
edesc 733 drivers/crypto/caam/caamhash.c if (!edesc) {
edesc 738 drivers/crypto/caam/caamhash.c init_job_desc_shared(edesc->hw_desc, sh_desc_dma, desc_len(sh_desc),
edesc 741 drivers/crypto/caam/caamhash.c return edesc;
edesc 745 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc,
edesc 754 drivers/crypto/caam/caamhash.c struct sec4_sg_entry *sg = edesc->sec4_sg;
edesc 766 drivers/crypto/caam/caamhash.c edesc->sec4_sg_bytes = sgsize;
edesc 767 drivers/crypto/caam/caamhash.c edesc->sec4_sg_dma = src_dma;
edesc 774 drivers/crypto/caam/caamhash.c append_seq_in_ptr(edesc->hw_desc, src_dma, first_bytes + to_hash,
edesc 797 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 844 drivers/crypto/caam/caamhash.c edesc = ahash_edesc_alloc(ctx, pad_nents, ctx->sh_desc_update,
edesc 846 drivers/crypto/caam/caamhash.c if (!edesc) {
edesc 851 drivers/crypto/caam/caamhash.c edesc->src_nents = src_nents;
edesc 852 drivers/crypto/caam/caamhash.c edesc->sec4_sg_bytes = sec4_sg_bytes;
edesc 855 drivers/crypto/caam/caamhash.c edesc->sec4_sg, DMA_BIDIRECTIONAL);
edesc 859 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state);
edesc 865 drivers/crypto/caam/caamhash.c edesc->sec4_sg + sec4_sg_src_index,
edesc 868 drivers/crypto/caam/caamhash.c sg_to_sec4_set_last(edesc->sec4_sg + sec4_sg_src_index -
edesc 875 drivers/crypto/caam/caamhash.c desc = edesc->hw_desc;
edesc 877 drivers/crypto/caam/caamhash.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
edesc 880 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
edesc 886 drivers/crypto/caam/caamhash.c append_seq_in_ptr(desc, edesc->sec4_sg_dma, ctx->ctx_len +
edesc 915 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL);
edesc 916 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 932 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 939 drivers/crypto/caam/caamhash.c edesc = ahash_edesc_alloc(ctx, 4, ctx->sh_desc_fin,
edesc 941 drivers/crypto/caam/caamhash.c if (!edesc)
edesc 944 drivers/crypto/caam/caamhash.c desc = edesc->hw_desc;
edesc 946 drivers/crypto/caam/caamhash.c edesc->sec4_sg_bytes = sec4_sg_bytes;
edesc 949 drivers/crypto/caam/caamhash.c edesc->sec4_sg, DMA_BIDIRECTIONAL);
edesc 953 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state);
edesc 957 drivers/crypto/caam/caamhash.c sg_to_sec4_set_last(edesc->sec4_sg + (buflen ? 1 : 0));
edesc 959 drivers/crypto/caam/caamhash.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
edesc 961 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
edesc 967 drivers/crypto/caam/caamhash.c append_seq_in_ptr(desc, edesc->sec4_sg_dma, ctx->ctx_len + buflen,
edesc 981 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL);
edesc 982 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 999 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 1022 drivers/crypto/caam/caamhash.c edesc = ahash_edesc_alloc(ctx, sec4_sg_src_index + mapped_nents,
edesc 1025 drivers/crypto/caam/caamhash.c if (!edesc) {
edesc 1030 drivers/crypto/caam/caamhash.c desc = edesc->hw_desc;
edesc 1032 drivers/crypto/caam/caamhash.c edesc->src_nents = src_nents;
edesc 1035 drivers/crypto/caam/caamhash.c edesc->sec4_sg, DMA_BIDIRECTIONAL);
edesc 1039 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state);
edesc 1043 drivers/crypto/caam/caamhash.c ret = ahash_edesc_add_src(ctx, edesc, req, mapped_nents,
edesc 1061 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL);
edesc 1062 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1077 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 1100 drivers/crypto/caam/caamhash.c edesc = ahash_edesc_alloc(ctx, mapped_nents > 1 ? mapped_nents : 0,
edesc 1103 drivers/crypto/caam/caamhash.c if (!edesc) {
edesc 1108 drivers/crypto/caam/caamhash.c edesc->src_nents = src_nents;
edesc 1110 drivers/crypto/caam/caamhash.c ret = ahash_edesc_add_src(ctx, edesc, req, mapped_nents, 0, 0,
edesc 1113 drivers/crypto/caam/caamhash.c ahash_unmap(jrdev, edesc, req, digestsize);
edesc 1114 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1118 drivers/crypto/caam/caamhash.c desc = edesc->hw_desc;
edesc 1122 drivers/crypto/caam/caamhash.c ahash_unmap(jrdev, edesc, req, digestsize);
edesc 1123 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1135 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_FROM_DEVICE);
edesc 1136 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1155 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 1159 drivers/crypto/caam/caamhash.c edesc = ahash_edesc_alloc(ctx, 0, ctx->sh_desc_digest,
edesc 1161 drivers/crypto/caam/caamhash.c if (!edesc)
edesc 1164 drivers/crypto/caam/caamhash.c desc = edesc->hw_desc;
edesc 1189 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_FROM_DEVICE);
edesc 1190 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1195 drivers/crypto/caam/caamhash.c ahash_unmap(jrdev, edesc, req, digestsize);
edesc 1196 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1217 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 1263 drivers/crypto/caam/caamhash.c edesc = ahash_edesc_alloc(ctx, pad_nents,
edesc 1267 drivers/crypto/caam/caamhash.c if (!edesc) {
edesc 1272 drivers/crypto/caam/caamhash.c edesc->src_nents = src_nents;
edesc 1273 drivers/crypto/caam/caamhash.c edesc->sec4_sg_bytes = sec4_sg_bytes;
edesc 1275 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg, state);
edesc 1279 drivers/crypto/caam/caamhash.c sg_to_sec4_sg_last(req->src, src_len, edesc->sec4_sg + 1, 0);
edesc 1287 drivers/crypto/caam/caamhash.c desc = edesc->hw_desc;
edesc 1289 drivers/crypto/caam/caamhash.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
edesc 1292 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
edesc 1298 drivers/crypto/caam/caamhash.c append_seq_in_ptr(desc, edesc->sec4_sg_dma, to_hash, LDST_SGF);
edesc 1331 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE);
edesc 1332 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1349 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 1374 drivers/crypto/caam/caamhash.c edesc = ahash_edesc_alloc(ctx, sec4_sg_src_index + mapped_nents,
edesc 1377 drivers/crypto/caam/caamhash.c if (!edesc) {
edesc 1382 drivers/crypto/caam/caamhash.c desc = edesc->hw_desc;
edesc 1384 drivers/crypto/caam/caamhash.c edesc->src_nents = src_nents;
edesc 1385 drivers/crypto/caam/caamhash.c edesc->sec4_sg_bytes = sec4_sg_bytes;
edesc 1387 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg, state);
edesc 1391 drivers/crypto/caam/caamhash.c ret = ahash_edesc_add_src(ctx, edesc, req, mapped_nents, 1, buflen,
edesc 1410 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_FROM_DEVICE);
edesc 1411 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1416 drivers/crypto/caam/caamhash.c ahash_unmap(jrdev, edesc, req, digestsize);
edesc 1417 drivers/crypto/caam/caamhash.c kfree(edesc);
edesc 1437 drivers/crypto/caam/caamhash.c struct ahash_edesc *edesc;
edesc 1477 drivers/crypto/caam/caamhash.c edesc = ahash_edesc_alloc(ctx, mapped_nents > 1 ?
edesc 1482 drivers/crypto/caam/caamhash.c if (!edesc) {
edesc 1487 drivers/crypto/caam/caamhash.c edesc->src_nents = src_nents;
edesc 1489 drivers/crypto/caam/caamhash.c ret = ahash_edesc_add_src(ctx, edesc, req, mapped_nents, 0, 0,
edesc 1498 drivers/crypto/caam/caamhash.c desc = edesc->hw_desc;
edesc 1531 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE);
edesc 1532 drivers/crypto/caam/caamhash.c kfree(edesc);
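The caamhash.c fragments at lines 729-741 above outline the allocator all of these hash paths share: the edesc carries only a short job descriptor that points at a pre-built shared descriptor, plus a trailing sec4 S/G table sized per request. Reconstructed below with the signature mainline used in this era; treat it as illustrative rather than authoritative:

static struct ahash_edesc *ahash_edesc_alloc(struct caam_hash_ctx *ctx,
                                             int sg_num, u32 *sh_desc,
                                             dma_addr_t sh_desc_dma,
                                             gfp_t flags)
{
        struct ahash_edesc *edesc;
        unsigned int sg_size = sg_num * sizeof(struct sec4_sg_entry);

        /* One allocation: edesc bookkeeping plus trailing S/G table. */
        edesc = kzalloc(sizeof(*edesc) + sg_size, GFP_DMA | flags);
        if (!edesc) {
                dev_err(ctx->jrdev, "could not allocate extended descriptor\n");
                return NULL;
        }

        /* The job descriptor only points at the pre-built shared
         * descriptor for this operation (update/final/digest). */
        init_job_desc_shared(edesc->hw_desc, sh_desc_dma, desc_len(sh_desc),
                             HDR_SHARE_DEFER | HDR_REVERSE);

        return edesc;
}

ahash_edesc_add_src() (lines 745-774) then decides whether the source can be handed to the hardware contiguously or must go through that trailing table, and appends the SEQ IN PTR command accordingly.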
edesc 43 drivers/crypto/caam/caampkc.c static void rsa_io_unmap(struct device *dev, struct rsa_edesc *edesc,
edesc 48 drivers/crypto/caam/caampkc.c dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);
edesc 49 drivers/crypto/caam/caampkc.c dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);
edesc 51 drivers/crypto/caam/caampkc.c if (edesc->sec4_sg_bytes)
edesc 52 drivers/crypto/caam/caampkc.c dma_unmap_single(dev, edesc->sec4_sg_dma, edesc->sec4_sg_bytes,
edesc 56 drivers/crypto/caam/caampkc.c static void rsa_pub_unmap(struct device *dev, struct rsa_edesc *edesc,
edesc 62 drivers/crypto/caam/caampkc.c struct rsa_pub_pdb *pdb = &edesc->pdb.pub;
edesc 68 drivers/crypto/caam/caampkc.c static void rsa_priv_f1_unmap(struct device *dev, struct rsa_edesc *edesc,
edesc 74 drivers/crypto/caam/caampkc.c struct rsa_priv_f1_pdb *pdb = &edesc->pdb.priv_f1;
edesc 80 drivers/crypto/caam/caampkc.c static void rsa_priv_f2_unmap(struct device *dev, struct rsa_edesc *edesc,
edesc 86 drivers/crypto/caam/caampkc.c struct rsa_priv_f2_pdb *pdb = &edesc->pdb.priv_f2;
edesc 97 drivers/crypto/caam/caampkc.c static void rsa_priv_f3_unmap(struct device *dev, struct rsa_edesc *edesc,
edesc 103 drivers/crypto/caam/caampkc.c struct rsa_priv_f3_pdb *pdb = &edesc->pdb.priv_f3;
edesc 120 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 126 drivers/crypto/caam/caampkc.c edesc = container_of(desc, struct rsa_edesc, hw_desc[0]);
edesc 128 drivers/crypto/caam/caampkc.c rsa_pub_unmap(dev, edesc, req);
edesc 129 drivers/crypto/caam/caampkc.c rsa_io_unmap(dev, edesc, req);
edesc 130 drivers/crypto/caam/caampkc.c kfree(edesc);
edesc 139 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 145 drivers/crypto/caam/caampkc.c edesc = container_of(desc, struct rsa_edesc, hw_desc[0]);
edesc 147 drivers/crypto/caam/caampkc.c rsa_priv_f1_unmap(dev, edesc, req);
edesc 148 drivers/crypto/caam/caampkc.c rsa_io_unmap(dev, edesc, req);
edesc 149 drivers/crypto/caam/caampkc.c kfree(edesc);
edesc 158 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 164 drivers/crypto/caam/caampkc.c edesc = container_of(desc, struct rsa_edesc, hw_desc[0]);
edesc 166 drivers/crypto/caam/caampkc.c rsa_priv_f2_unmap(dev, edesc, req);
edesc 167 drivers/crypto/caam/caampkc.c rsa_io_unmap(dev, edesc, req);
edesc 168 drivers/crypto/caam/caampkc.c kfree(edesc);
edesc 177 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 183 drivers/crypto/caam/caampkc.c edesc = container_of(desc, struct rsa_edesc, hw_desc[0]);
edesc 185 drivers/crypto/caam/caampkc.c rsa_priv_f3_unmap(dev, edesc, req);
edesc 186 drivers/crypto/caam/caampkc.c rsa_io_unmap(dev, edesc, req);
edesc 187 drivers/crypto/caam/caampkc.c kfree(edesc);
edesc 251 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 301 drivers/crypto/caam/caampkc.c edesc = kzalloc(sizeof(*edesc) + desclen + sec4_sg_bytes,
edesc 303 drivers/crypto/caam/caampkc.c if (!edesc)
edesc 318 drivers/crypto/caam/caampkc.c edesc->sec4_sg = (void *)edesc + sizeof(*edesc) + desclen;
edesc 320 drivers/crypto/caam/caampkc.c dma_to_sec4_sg_one(edesc->sec4_sg, ctx->padding_dma, diff_size,
edesc 325 drivers/crypto/caam/caampkc.c edesc->sec4_sg + !!diff_size, 0);
edesc 329 drivers/crypto/caam/caampkc.c edesc->sec4_sg + sec4_sg_index, 0);
edesc 332 drivers/crypto/caam/caampkc.c edesc->src_nents = src_nents;
edesc 333 drivers/crypto/caam/caampkc.c edesc->dst_nents = dst_nents;
edesc 336 drivers/crypto/caam/caampkc.c return edesc;
edesc 338 drivers/crypto/caam/caampkc.c edesc->sec4_sg_dma = dma_map_single(dev, edesc->sec4_sg,
edesc 340 drivers/crypto/caam/caampkc.c if (dma_mapping_error(dev, edesc->sec4_sg_dma)) {
edesc 345 drivers/crypto/caam/caampkc.c edesc->sec4_sg_bytes = sec4_sg_bytes;
edesc 348 drivers/crypto/caam/caampkc.c DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
edesc 349 drivers/crypto/caam/caampkc.c edesc->sec4_sg_bytes, 1);
edesc 351 drivers/crypto/caam/caampkc.c return edesc;
edesc 358 drivers/crypto/caam/caampkc.c kfree(edesc);
edesc 363 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc)
edesc 370 drivers/crypto/caam/caampkc.c struct rsa_pub_pdb *pdb = &edesc->pdb.pub;
edesc 386 drivers/crypto/caam/caampkc.c if (edesc->src_nents > 1) {
edesc 388 drivers/crypto/caam/caampkc.c pdb->f_dma = edesc->sec4_sg_dma;
edesc 389 drivers/crypto/caam/caampkc.c sec4_sg_index += edesc->src_nents;
edesc 394 drivers/crypto/caam/caampkc.c if (edesc->dst_nents > 1) {
edesc 396 drivers/crypto/caam/caampkc.c pdb->g_dma = edesc->sec4_sg_dma +
edesc 409 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc)
edesc 415 drivers/crypto/caam/caampkc.c struct rsa_priv_f1_pdb *pdb = &edesc->pdb.priv_f1;
edesc 431 drivers/crypto/caam/caampkc.c if (edesc->src_nents > 1) {
edesc 433 drivers/crypto/caam/caampkc.c pdb->g_dma = edesc->sec4_sg_dma;
edesc 434 drivers/crypto/caam/caampkc.c sec4_sg_index += edesc->src_nents;
edesc 441 drivers/crypto/caam/caampkc.c if (edesc->dst_nents > 1) {
edesc 443 drivers/crypto/caam/caampkc.c pdb->f_dma = edesc->sec4_sg_dma +
edesc 455 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc)
edesc 461 drivers/crypto/caam/caampkc.c struct rsa_priv_f2_pdb *pdb = &edesc->pdb.priv_f2;
edesc 496 drivers/crypto/caam/caampkc.c if (edesc->src_nents > 1) {
edesc 498 drivers/crypto/caam/caampkc.c pdb->g_dma = edesc->sec4_sg_dma;
edesc 499 drivers/crypto/caam/caampkc.c sec4_sg_index += edesc->src_nents;
edesc 506 drivers/crypto/caam/caampkc.c if (edesc->dst_nents > 1) {
edesc 508 drivers/crypto/caam/caampkc.c pdb->f_dma = edesc->sec4_sg_dma +
edesc 532 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc)
edesc 538 drivers/crypto/caam/caampkc.c struct rsa_priv_f3_pdb *pdb = &edesc->pdb.priv_f3;
edesc 585 drivers/crypto/caam/caampkc.c if (edesc->src_nents > 1) {
edesc 587 drivers/crypto/caam/caampkc.c pdb->g_dma = edesc->sec4_sg_dma;
edesc 588 drivers/crypto/caam/caampkc.c sec4_sg_index += edesc->src_nents;
edesc 595 drivers/crypto/caam/caampkc.c if (edesc->dst_nents > 1) {
edesc 597 drivers/crypto/caam/caampkc.c pdb->f_dma = edesc->sec4_sg_dma +
edesc 630 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 643 drivers/crypto/caam/caampkc.c edesc = rsa_edesc_alloc(req, DESC_RSA_PUB_LEN);
edesc 644 drivers/crypto/caam/caampkc.c if (IS_ERR(edesc))
edesc 645 drivers/crypto/caam/caampkc.c return PTR_ERR(edesc);
edesc 648 drivers/crypto/caam/caampkc.c ret = set_rsa_pub_pdb(req, edesc);
edesc 653 drivers/crypto/caam/caampkc.c init_rsa_pub_desc(edesc->hw_desc, &edesc->pdb.pub);
edesc 655 drivers/crypto/caam/caampkc.c ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_pub_done, req);
edesc 659 drivers/crypto/caam/caampkc.c rsa_pub_unmap(jrdev, edesc, req);
edesc 662 drivers/crypto/caam/caampkc.c rsa_io_unmap(jrdev, edesc, req);
edesc 663 drivers/crypto/caam/caampkc.c kfree(edesc);
edesc 672 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 676 drivers/crypto/caam/caampkc.c edesc = rsa_edesc_alloc(req, DESC_RSA_PRIV_F1_LEN);
edesc 677 drivers/crypto/caam/caampkc.c if (IS_ERR(edesc))
edesc 678 drivers/crypto/caam/caampkc.c return PTR_ERR(edesc);
edesc 681 drivers/crypto/caam/caampkc.c ret = set_rsa_priv_f1_pdb(req, edesc);
edesc 686 drivers/crypto/caam/caampkc.c init_rsa_priv_f1_desc(edesc->hw_desc, &edesc->pdb.priv_f1);
edesc 688 drivers/crypto/caam/caampkc.c ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_priv_f1_done, req);
edesc 692 drivers/crypto/caam/caampkc.c rsa_priv_f1_unmap(jrdev, edesc, req);
edesc 695 drivers/crypto/caam/caampkc.c rsa_io_unmap(jrdev, edesc, req);
edesc 696 drivers/crypto/caam/caampkc.c kfree(edesc);
edesc 705 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 709 drivers/crypto/caam/caampkc.c edesc = rsa_edesc_alloc(req, DESC_RSA_PRIV_F2_LEN);
edesc 710 drivers/crypto/caam/caampkc.c if (IS_ERR(edesc))
edesc 711 drivers/crypto/caam/caampkc.c return PTR_ERR(edesc);
edesc 714 drivers/crypto/caam/caampkc.c ret = set_rsa_priv_f2_pdb(req, edesc);
edesc 719 drivers/crypto/caam/caampkc.c init_rsa_priv_f2_desc(edesc->hw_desc, &edesc->pdb.priv_f2);
edesc 721 drivers/crypto/caam/caampkc.c ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_priv_f2_done, req);
edesc 725 drivers/crypto/caam/caampkc.c rsa_priv_f2_unmap(jrdev, edesc, req);
edesc 728 drivers/crypto/caam/caampkc.c rsa_io_unmap(jrdev, edesc, req);
edesc 729 drivers/crypto/caam/caampkc.c kfree(edesc);
edesc 738 drivers/crypto/caam/caampkc.c struct rsa_edesc *edesc;
edesc 742 drivers/crypto/caam/caampkc.c edesc = rsa_edesc_alloc(req, DESC_RSA_PRIV_F3_LEN);
edesc 743 drivers/crypto/caam/caampkc.c if (IS_ERR(edesc))
edesc 744 drivers/crypto/caam/caampkc.c return PTR_ERR(edesc);
edesc 747 drivers/crypto/caam/caampkc.c ret = set_rsa_priv_f3_pdb(req, edesc);
edesc 752 drivers/crypto/caam/caampkc.c init_rsa_priv_f3_desc(edesc->hw_desc, &edesc->pdb.priv_f3);
edesc 754 drivers/crypto/caam/caampkc.c ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_priv_f3_done, req);
edesc 758 drivers/crypto/caam/caampkc.c rsa_priv_f3_unmap(jrdev, edesc, req);
edesc 761 drivers/crypto/caam/caampkc.c rsa_io_unmap(jrdev, edesc, req);
edesc 762 drivers/crypto/caam/caampkc.c kfree(edesc);
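caampkc.c applies the identical job-ring lifecycle to RSA, with one twist visible above: the edesc embeds a union of protocol data blocks (pdb.pub, pdb.priv_f1/f2/f3), and each key form has its own set_*_pdb()/init_*_desc()/*_unmap() trio. The public-key encrypt path, assembled from the references at lines 630-663 with the error handling slightly condensed:

static int caam_rsa_enc(struct akcipher_request *req)
{
        struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
        struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
        struct device *jrdev = ctx->dev;
        struct rsa_edesc *edesc;
        int ret;

        /* One kzalloc covers hw_desc[], the pdb union and the S/G table. */
        edesc = rsa_edesc_alloc(req, DESC_RSA_PUB_LEN);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        /* Fill the RSA-pub protocol data block (key and f/g DMA addresses). */
        ret = set_rsa_pub_pdb(req, edesc);
        if (ret)
                goto init_fail;

        init_rsa_pub_desc(edesc->hw_desc, &edesc->pdb.pub);

        ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_pub_done, req);
        if (!ret)
                return -EINPROGRESS;

        rsa_pub_unmap(jrdev, edesc, req);       /* pdb key material */
init_fail:
        rsa_io_unmap(jrdev, edesc, req);        /* src/dst scatterlists */
        kfree(edesc);
        return ret;
}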
edesc 325 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 333 drivers/crypto/talitos.c edesc = container_of(request->desc, struct talitos_edesc, desc);
edesc 335 drivers/crypto/talitos.c return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
edesc 490 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 492 drivers/crypto/talitos.c edesc = container_of(priv->chan[ch].fifo[iter].desc,
edesc 495 drivers/crypto/talitos.c (edesc->buf + edesc->dma_len))->hdr;
edesc 964 drivers/crypto/talitos.c struct talitos_edesc *edesc,
edesc 971 drivers/crypto/talitos.c unsigned int src_nents = edesc->src_nents ? : 1;
edesc 972 drivers/crypto/talitos.c unsigned int dst_nents = edesc->dst_nents ? : 1;
edesc 975 drivers/crypto/talitos.c dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
edesc 977 drivers/crypto/talitos.c sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
edesc 992 drivers/crypto/talitos.c struct talitos_edesc *edesc,
edesc 1000 drivers/crypto/talitos.c bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
edesc 1001 drivers/crypto/talitos.c struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];
edesc 1004 drivers/crypto/talitos.c unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
edesc 1008 drivers/crypto/talitos.c talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
edesc 1011 drivers/crypto/talitos.c if (edesc->dma_len)
edesc 1012 drivers/crypto/talitos.c dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
edesc 1016 drivers/crypto/talitos.c unsigned int dst_nents = edesc->dst_nents ? : 1;
edesc 1033 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1035 drivers/crypto/talitos.c edesc = container_of(desc, struct talitos_edesc, desc);
edesc 1037 drivers/crypto/talitos.c ipsec_esp_unmap(dev, edesc, areq, true);
edesc 1039 drivers/crypto/talitos.c dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);
edesc 1041 drivers/crypto/talitos.c kfree(edesc);
edesc 1053 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1056 drivers/crypto/talitos.c edesc = container_of(desc, struct talitos_edesc, desc);
edesc 1058 drivers/crypto/talitos.c ipsec_esp_unmap(dev, edesc, req, false);
edesc 1062 drivers/crypto/talitos.c oicv = edesc->buf + edesc->dma_len;
edesc 1068 drivers/crypto/talitos.c kfree(edesc);
edesc 1078 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1080 drivers/crypto/talitos.c edesc = container_of(desc, struct talitos_edesc, desc);
edesc 1082 drivers/crypto/talitos.c ipsec_esp_unmap(dev, edesc, req, false);
edesc 1089 drivers/crypto/talitos.c kfree(edesc);
edesc 1148 drivers/crypto/talitos.c unsigned int len, struct talitos_edesc *edesc,
edesc 1166 drivers/crypto/talitos.c to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
edesc 1170 drivers/crypto/talitos.c &edesc->link_tbl[tbl_off]);
edesc 1173 drivers/crypto/talitos.c copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
edesc 1176 drivers/crypto/talitos.c to_talitos_ptr(ptr, edesc->dma_link_tbl +
edesc 1184 drivers/crypto/talitos.c unsigned int len, struct talitos_edesc *edesc,
edesc 1188 drivers/crypto/talitos.c return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
edesc 1195 drivers/crypto/talitos.c static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
edesc 1205 drivers/crypto/talitos.c struct talitos_desc *desc = &edesc->desc;
edesc 1217 drivers/crypto/talitos.c dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;
edesc 1222 drivers/crypto/talitos.c sg_count = edesc->src_nents ?: 1;
edesc 1224 drivers/crypto/talitos.c sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
edesc 1232 drivers/crypto/talitos.c ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
edesc 1241 drivers/crypto/talitos.c to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);
edesc 1256 drivers/crypto/talitos.c ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
edesc 1267 drivers/crypto/talitos.c sg_count = edesc->dst_nents ? : 1;
edesc 1276 drivers/crypto/talitos.c ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
edesc 1282 drivers/crypto/talitos.c struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
edesc 1296 drivers/crypto/talitos.c talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
edesc 1306 drivers/crypto/talitos.c dma_sync_single_for_device(dev, edesc->dma_link_tbl,
edesc 1307 drivers/crypto/talitos.c edesc->dma_len,
edesc 1312 drivers/crypto/talitos.c ipsec_esp_unmap(dev, edesc, areq, encrypt);
edesc 1313 drivers/crypto/talitos.c kfree(edesc);
edesc 1333 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1398 drivers/crypto/talitos.c edesc = kmalloc(alloc_len, GFP_DMA | flags);
edesc 1399 drivers/crypto/talitos.c if (!edesc)
edesc 1402 drivers/crypto/talitos.c iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
edesc 1405 drivers/crypto/talitos.c memset(&edesc->desc, 0, sizeof(edesc->desc));
edesc 1407 drivers/crypto/talitos.c edesc->src_nents = src_nents;
edesc 1408 drivers/crypto/talitos.c edesc->dst_nents = dst_nents;
edesc 1409 drivers/crypto/talitos.c edesc->iv_dma = iv_dma;
edesc 1410 drivers/crypto/talitos.c edesc->dma_len = dma_len;
edesc 1412 drivers/crypto/talitos.c edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
edesc 1413 drivers/crypto/talitos.c edesc->dma_len,
edesc 1416 drivers/crypto/talitos.c return edesc;
edesc 1438 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1441 drivers/crypto/talitos.c edesc = aead_edesc_alloc(req, req->iv, 0, true);
edesc 1442 drivers/crypto/talitos.c if (IS_ERR(edesc))
edesc 1443 drivers/crypto/talitos.c return PTR_ERR(edesc);
edesc 1446 drivers/crypto/talitos.c edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
edesc 1448 drivers/crypto/talitos.c return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
edesc 1457 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1461 drivers/crypto/talitos.c edesc = aead_edesc_alloc(req, req->iv, 1, false);
edesc 1462 drivers/crypto/talitos.c if (IS_ERR(edesc))
edesc 1463 drivers/crypto/talitos.c return PTR_ERR(edesc);
edesc 1465 drivers/crypto/talitos.c if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
edesc 1467 drivers/crypto/talitos.c ((!edesc->src_nents && !edesc->dst_nents) ||
edesc 1471 drivers/crypto/talitos.c edesc->desc.hdr = ctx->desc_hdr_template |
edesc 1477 drivers/crypto/talitos.c return ipsec_esp(edesc, req, false,
edesc 1482 drivers/crypto/talitos.c edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
edesc 1485 drivers/crypto/talitos.c icvdata = edesc->buf + edesc->dma_len;
edesc 1487 drivers/crypto/talitos.c sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
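
Lines 1398-1416 above show the allocator that makes this layout work: a single kmalloc carries the hardware descriptor, the scatter/gather link table, and a copy of the IV at the very end of the buffer. A condensed sketch of that allocator follows; the real talitos_edesc_alloc() also counts scatterlist entries and computes dma_len/alloc_len itself, which are taken as parameters here, and the _sketch name is hypothetical.

static struct talitos_edesc *edesc_alloc_sketch(struct device *dev, u8 *iv,
                                                unsigned int ivsize,
                                                int src_nents, int dst_nents,
                                                int dma_len, dma_addr_t iv_dma,
                                                gfp_t flags)
{
        size_t alloc_len = sizeof(struct talitos_edesc) + dma_len + ivsize;
        struct talitos_edesc *edesc;

        edesc = kmalloc(alloc_len, GFP_DMA | flags);
        if (!edesc)
                return ERR_PTR(-ENOMEM);

        /* Keep the IV in the same buffer so one allocation serves the job. */
        if (ivsize)
                memcpy((u8 *)edesc + alloc_len - ivsize, iv, ivsize);

        memset(&edesc->desc, 0, sizeof(edesc->desc));
        edesc->src_nents = src_nents;
        edesc->dst_nents = dst_nents;
        edesc->iv_dma = iv_dma;
        edesc->dma_len = dma_len;

        /* Map the embedded link table once; callbacks unmap it on done. */
        if (dma_len)
                edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
                                                     edesc->dma_len,
                                                     DMA_BIDIRECTIONAL);
        return edesc;
}
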
edesc 1490 drivers/crypto/talitos.c return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
edesc 1537 drivers/crypto/talitos.c struct talitos_edesc *edesc,
edesc 1540 drivers/crypto/talitos.c unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
edesc 1542 drivers/crypto/talitos.c talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
edesc 1543 drivers/crypto/talitos.c unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);
edesc 1545 drivers/crypto/talitos.c if (edesc->dma_len)
edesc 1546 drivers/crypto/talitos.c dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
edesc 1558 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1560 drivers/crypto/talitos.c edesc = container_of(desc, struct talitos_edesc, desc);
edesc 1562 drivers/crypto/talitos.c common_nonsnoop_unmap(dev, edesc, areq);
edesc 1565 drivers/crypto/talitos.c kfree(edesc);
edesc 1570 drivers/crypto/talitos.c static int common_nonsnoop(struct talitos_edesc *edesc,
edesc 1579 drivers/crypto/talitos.c struct talitos_desc *desc = &edesc->desc;
edesc 1590 drivers/crypto/talitos.c to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
edesc 1595 drivers/crypto/talitos.c sg_count = edesc->src_nents ?: 1;
edesc 1597 drivers/crypto/talitos.c sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
edesc 1606 drivers/crypto/talitos.c sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
edesc 1613 drivers/crypto/talitos.c sg_count = edesc->dst_nents ? : 1;
edesc 1618 drivers/crypto/talitos.c ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
edesc 1619 drivers/crypto/talitos.c sg_count, 0, (edesc->src_nents + 1));
edesc 1630 drivers/crypto/talitos.c dma_sync_single_for_device(dev, edesc->dma_link_tbl,
edesc 1631 drivers/crypto/talitos.c edesc->dma_len, DMA_BIDIRECTIONAL);
edesc 1635 drivers/crypto/talitos.c common_nonsnoop_unmap(dev, edesc, areq);
edesc 1636 drivers/crypto/talitos.c kfree(edesc);
edesc 1657 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1668 drivers/crypto/talitos.c edesc = ablkcipher_edesc_alloc(areq, true);
edesc 1669 drivers/crypto/talitos.c if (IS_ERR(edesc))
edesc 1670 drivers/crypto/talitos.c return PTR_ERR(edesc);
edesc 1673 drivers/crypto/talitos.c edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
edesc 1675 drivers/crypto/talitos.c return common_nonsnoop(edesc, areq, ablkcipher_done);
edesc 1682 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 1693 drivers/crypto/talitos.c edesc = ablkcipher_edesc_alloc(areq, false);
edesc 1694 drivers/crypto/talitos.c if (IS_ERR(edesc))
edesc 1695 drivers/crypto/talitos.c return PTR_ERR(edesc);
edesc 1697 drivers/crypto/talitos.c edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
edesc 1699 drivers/crypto/talitos.c return common_nonsnoop(edesc, areq, ablkcipher_done);
edesc 1703 drivers/crypto/talitos.c struct talitos_edesc *edesc,
edesc 1709 drivers/crypto/talitos.c struct talitos_desc *desc = &edesc->desc;
edesc 1711 drivers/crypto/talitos.c (edesc->buf + edesc->dma_len);
edesc 1713 drivers/crypto/talitos.c unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
edesc 1719 drivers/crypto/talitos.c talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
edesc 1722 drivers/crypto/talitos.c if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
edesc 1723 drivers/crypto/talitos.c unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
edesc 1733 drivers/crypto/talitos.c if (edesc->dma_len)
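
The ablkcipher entry points in this stretch (talitos.c lines 1657-1699) show how thin the per-algorithm code becomes once common_nonsnoop() owns mapping and submission: encrypt and decrypt differ by a single header bit. A sketch of the encrypt side, with the tfm/ctx lookup assumed from the usual crypto-API helpers of this kernel vintage:

static int ablkcipher_encrypt_sketch(struct ablkcipher_request *areq)
{
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        struct talitos_edesc *edesc;

        /* Allocate the extended descriptor (src/dst mapping included). */
        edesc = ablkcipher_edesc_alloc(areq, true);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        /* Direction is the only difference from the decrypt path. */
        edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

        return common_nonsnoop(edesc, areq, ablkcipher_done);
}

The decrypt path at line 1697 sets DESC_HDR_DIR_INBOUND instead and otherwise follows the same shape.
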
edesc 1734 drivers/crypto/talitos.c dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
edesc 1737 drivers/crypto/talitos.c if (edesc->desc.next_desc)
edesc 1738 drivers/crypto/talitos.c dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
edesc 1747 drivers/crypto/talitos.c struct talitos_edesc *edesc =
edesc 1756 drivers/crypto/talitos.c common_nonsnoop_hash_unmap(dev, edesc, areq);
edesc 1758 drivers/crypto/talitos.c kfree(edesc);
edesc 1768 drivers/crypto/talitos.c struct talitos_edesc *edesc,
edesc 1779 drivers/crypto/talitos.c edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
edesc 1784 drivers/crypto/talitos.c static int common_nonsnoop_hash(struct talitos_edesc *edesc,
edesc 1794 drivers/crypto/talitos.c struct talitos_desc *desc = &edesc->desc;
edesc 1822 drivers/crypto/talitos.c sg_count = edesc->src_nents ?: 1;
edesc 1824 drivers/crypto/talitos.c sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
edesc 1836 drivers/crypto/talitos.c sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
edesc 1858 drivers/crypto/talitos.c talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
edesc 1862 drivers/crypto/talitos.c (edesc->buf + edesc->dma_len);
edesc 1882 drivers/crypto/talitos.c sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
edesc 1899 drivers/crypto/talitos.c dma_sync_single_for_device(dev, edesc->dma_link_tbl,
edesc 1900 drivers/crypto/talitos.c edesc->dma_len, DMA_BIDIRECTIONAL);
edesc 1904 drivers/crypto/talitos.c common_nonsnoop_hash_unmap(dev, edesc, areq);
edesc 1905 drivers/crypto/talitos.c kfree(edesc);
edesc 1984 drivers/crypto/talitos.c struct talitos_edesc *edesc;
edesc 2066 drivers/crypto/talitos.c edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
edesc 2067 drivers/crypto/talitos.c if (IS_ERR(edesc))
edesc 2068 drivers/crypto/talitos.c return PTR_ERR(edesc);
edesc 2070 drivers/crypto/talitos.c edesc->desc.hdr = ctx->desc_hdr_template;
edesc 2074 drivers/crypto/talitos.c edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
edesc 2076 drivers/crypto/talitos.c edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
edesc 2080 drivers/crypto/talitos.c edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
edesc 2086 drivers/crypto/talitos.c edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
edesc 2088 drivers/crypto/talitos.c return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
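
The ahash fragments above (talitos.c lines 2066-2088) pick the MDEU header flags from where a request sits in the hash stream: pad only the final block, continue otherwise, initialize on the first pass, and request HMAC processing for keyed hashes. A simplified sketch of that selection; the is_last/is_first/is_hmac booleans stand in for the request-context state the real code consults, and the exact conditions there are somewhat more involved.

static int ahash_submit_sketch(struct ahash_request *areq,
                               struct talitos_ctx *ctx,
                               unsigned int nbytes_to_hash,
                               bool is_last, bool is_first, bool is_hmac)
{
        struct talitos_edesc *edesc;

        edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        edesc->desc.hdr = ctx->desc_hdr_template;

        /* Pad only the final block; otherwise ask for a continuation. */
        if (is_last)
                edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
        else
                edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;

        /* First pass: have the unit initialize the hash state itself. */
        if (is_first)
                edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;

        /* A keyed transform turns the job into an HMAC operation. */
        if (is_hmac)
                edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;

        return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
}
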
edesc 163 drivers/dma/fsl-edma-common.c fsl_chan->edesc = NULL;
edesc 178 drivers/dma/fsl-edma-common.c if (fsl_chan->edesc) {
edesc 194 drivers/dma/fsl-edma-common.c if (fsl_chan->edesc) {
edesc 268 drivers/dma/fsl-edma-common.c struct fsl_edma_desc *edesc = fsl_chan->edesc;
edesc 271 drivers/dma/fsl-edma-common.c enum dma_transfer_direction dir = edesc->dirn;
edesc 277 drivers/dma/fsl-edma-common.c for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++)
edesc 278 drivers/dma/fsl-edma-common.c len += le32_to_cpu(edesc->tcd[i].vtcd->nbytes)
edesc 279 drivers/dma/fsl-edma-common.c * le16_to_cpu(edesc->tcd[i].vtcd->biter);
edesc 290 drivers/dma/fsl-edma-common.c for (i = 0; i < fsl_chan->edesc->n_tcds; i++) {
edesc 291 drivers/dma/fsl-edma-common.c size = le32_to_cpu(edesc->tcd[i].vtcd->nbytes)
edesc 292 drivers/dma/fsl-edma-common.c * le16_to_cpu(edesc->tcd[i].vtcd->biter);
edesc 294 drivers/dma/fsl-edma-common.c dma_addr = le32_to_cpu(edesc->tcd[i].vtcd->saddr);
edesc 296 drivers/dma/fsl-edma-common.c dma_addr = le32_to_cpu(edesc->tcd[i].vtcd->daddr);
edesc 325 drivers/dma/fsl-edma-common.c if (fsl_chan->edesc && cookie == fsl_chan->edesc->vdesc.tx.cookie)
edesc 590 drivers/dma/fsl-edma-common.c fsl_chan->edesc = to_fsl_edma_desc(vdesc);
edesc 591 drivers/dma/fsl-edma-common.c fsl_edma_set_tcd_regs(fsl_chan, fsl_chan->edesc->tcd[0].vtcd);
edesc 611 drivers/dma/fsl-edma-common.c if (vchan_issue_pending(&fsl_chan->vchan) && !fsl_chan->edesc)
edesc 638 drivers/dma/fsl-edma-common.c fsl_chan->edesc = NULL;
edesc 121 drivers/dma/fsl-edma-common.h struct fsl_edma_desc *edesc;
edesc 48 drivers/dma/fsl-edma.c if (!fsl_chan->edesc->iscyclic) {
edesc 49 drivers/dma/fsl-edma.c list_del(&fsl_chan->edesc->vdesc.node);
edesc 50 drivers/dma/fsl-edma.c vchan_cookie_complete(&fsl_chan->edesc->vdesc);
edesc 51 drivers/dma/fsl-edma.c fsl_chan->edesc = NULL;
edesc 55 drivers/dma/fsl-edma.c vchan_cyclic_callback(&fsl_chan->edesc->vdesc);
edesc 58 drivers/dma/fsl-edma.c if (!fsl_chan->edesc)
edesc 38 drivers/dma/mcf-edma.c if (!mcf_chan->edesc->iscyclic) {
edesc 39 drivers/dma/mcf-edma.c list_del(&mcf_chan->edesc->vdesc.node);
edesc 40 drivers/dma/mcf-edma.c vchan_cookie_complete(&mcf_chan->edesc->vdesc);
edesc 41 drivers/dma/mcf-edma.c mcf_chan->edesc = NULL;
edesc 45 drivers/dma/mcf-edma.c vchan_cyclic_callback(&mcf_chan->edesc->vdesc);
edesc 48 drivers/dma/mcf-edma.c if (!mcf_chan->edesc)
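
The fsl-edma.c and mcf-edma.c hits above are the same interrupt-side pattern expressed through the virt-dma framework: a finished one-shot descriptor is retired and its cookie completed, while a cyclic descriptor only fires its period callback and stays installed. A minimal sketch of that branch, assuming the fsl_edma_chan layout from fsl-edma-common.h; the real handler also clears the interrupt and updates channel status.

static void fsl_edma_tx_done_sketch(struct fsl_edma_chan *fsl_chan)
{
        spin_lock(&fsl_chan->vchan.lock);

        if (!fsl_chan->edesc->iscyclic) {
                /* One-shot transfer: retire the cookie and descriptor. */
                list_del(&fsl_chan->edesc->vdesc.node);
                vchan_cookie_complete(&fsl_chan->edesc->vdesc);
                fsl_chan->edesc = NULL;
        } else {
                /* Cyclic transfer: notify the client, keep it running. */
                vchan_cyclic_callback(&fsl_chan->edesc->vdesc);
        }

        /* Channel idle again: start whatever was issued meanwhile. */
        if (!fsl_chan->edesc)
                fsl_edma_xfer_desc(fsl_chan);

        spin_unlock(&fsl_chan->vchan.lock);
}

mcf-edma.c lines 38-48 are a field-for-field copy of this logic on its own channel type, which is why both files land in the listing together.
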
edesc 227 drivers/dma/ti/edma.c struct edma_desc *edesc;
edesc 766 drivers/dma/ti/edma.c struct edma_desc *edesc;
edesc 770 drivers/dma/ti/edma.c if (!echan->edesc) {
edesc 776 drivers/dma/ti/edma.c echan->edesc = to_edma_desc(&vdesc->tx);
edesc 779 drivers/dma/ti/edma.c edesc = echan->edesc;
edesc 782 drivers/dma/ti/edma.c left = edesc->pset_nr - edesc->processed;
edesc 784 drivers/dma/ti/edma.c edesc->sg_len = 0;
edesc 788 drivers/dma/ti/edma.c j = i + edesc->processed;
edesc 789 drivers/dma/ti/edma.c edma_write_slot(ecc, echan->slot[i], &edesc->pset[j].param);
edesc 790 drivers/dma/ti/edma.c edesc->sg_len += edesc->pset[j].len;
edesc 804 drivers/dma/ti/edma.c edesc->pset[j].param.opt,
edesc 805 drivers/dma/ti/edma.c edesc->pset[j].param.src,
edesc 806 drivers/dma/ti/edma.c edesc->pset[j].param.dst,
edesc 807 drivers/dma/ti/edma.c edesc->pset[j].param.a_b_cnt,
edesc 808 drivers/dma/ti/edma.c edesc->pset[j].param.ccnt,
edesc 809 drivers/dma/ti/edma.c edesc->pset[j].param.src_dst_bidx,
edesc 810 drivers/dma/ti/edma.c edesc->pset[j].param.src_dst_cidx,
edesc 811 drivers/dma/ti/edma.c edesc->pset[j].param.link_bcntrld);
edesc 817 drivers/dma/ti/edma.c edesc->processed += nslots;
edesc 824 drivers/dma/ti/edma.c if (edesc->processed == edesc->pset_nr) {
edesc 825 drivers/dma/ti/edma.c if (edesc->cyclic)
edesc 844 drivers/dma/ti/edma.c } else if (edesc->processed <= MAX_NR_SG) {
edesc 850 drivers/dma/ti/edma.c echan->ch_num, edesc->processed);
edesc 868 drivers/dma/ti/edma.c if (echan->edesc) {
edesc 871 drivers/dma/ti/edma.c if (!echan->tc && echan->edesc->cyclic)
edesc 874 drivers/dma/ti/edma.c vchan_terminate_vdesc(&echan->edesc->vdesc);
edesc 875 drivers/dma/ti/edma.c echan->edesc = NULL;
edesc 914 drivers/dma/ti/edma.c if (!echan->edesc)
edesc 1060 drivers/dma/ti/edma.c struct edma_desc *edesc;
edesc 1088 drivers/dma/ti/edma.c edesc = kzalloc(struct_size(edesc, pset, sg_len), GFP_ATOMIC);
edesc 1089 drivers/dma/ti/edma.c if (!edesc)
edesc 1092 drivers/dma/ti/edma.c edesc->pset_nr = sg_len;
edesc 1093 drivers/dma/ti/edma.c edesc->residue = 0;
edesc 1094 drivers/dma/ti/edma.c edesc->direction = direction;
edesc 1095 drivers/dma/ti/edma.c edesc->echan = echan;
edesc 1105 drivers/dma/ti/edma.c kfree(edesc);
edesc 1121 drivers/dma/ti/edma.c ret = edma_config_pset(chan, &edesc->pset[i], src_addr,
edesc 1125 drivers/dma/ti/edma.c kfree(edesc);
edesc 1129 drivers/dma/ti/edma.c edesc->absync = ret;
edesc 1130 drivers/dma/ti/edma.c edesc->residue += sg_dma_len(sg);
edesc 1134 drivers/dma/ti/edma.c edesc->pset[i].param.opt |= TCINTEN;
edesc 1142 drivers/dma/ti/edma.c edesc->pset[i].param.opt |= (TCINTEN | TCCMODE);
edesc 1144 drivers/dma/ti/edma.c edesc->residue_stat = edesc->residue;
edesc 1146 drivers/dma/ti/edma.c return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags);
edesc 1154 drivers/dma/ti/edma.c struct edma_desc *edesc;
edesc 1205 drivers/dma/ti/edma.c edesc = kzalloc(struct_size(edesc, pset, nslots), GFP_ATOMIC);
edesc 1206 drivers/dma/ti/edma.c if (!edesc)
edesc 1209 drivers/dma/ti/edma.c edesc->pset_nr = nslots;
edesc 1210 drivers/dma/ti/edma.c edesc->residue = edesc->residue_stat = len;
edesc 1211 drivers/dma/ti/edma.c edesc->direction = DMA_MEM_TO_MEM;
edesc 1212 drivers/dma/ti/edma.c edesc->echan = echan;
edesc 1214 drivers/dma/ti/edma.c ret = edma_config_pset(chan, &edesc->pset[0], src, dest, 1,
edesc 1217 drivers/dma/ti/edma.c kfree(edesc);
edesc 1221 drivers/dma/ti/edma.c edesc->absync = ret;
edesc 1223 drivers/dma/ti/edma.c edesc->pset[0].param.opt |= ITCCHEN;
edesc 1227 drivers/dma/ti/edma.c edesc->pset[0].param.opt |= TCINTEN;
edesc 1230 drivers/dma/ti/edma.c edesc->pset[0].param.opt |= TCCHEN;
edesc 1236 drivers/dma/ti/edma.c kfree(edesc);
edesc 1246 drivers/dma/ti/edma.c ret = edma_config_pset(chan, &edesc->pset[1], src, dest, 1,
edesc 1249 drivers/dma/ti/edma.c kfree(edesc);
edesc 1253 drivers/dma/ti/edma.c edesc->pset[1].param.opt |= ITCCHEN;
edesc 1256 drivers/dma/ti/edma.c edesc->pset[1].param.opt |= TCINTEN;
edesc 1260 drivers/dma/ti/edma.c edesc->polled = true;
edesc 1262 drivers/dma/ti/edma.c return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags);
edesc 1272 drivers/dma/ti/edma.c struct edma_desc *edesc;
edesc 1333 drivers/dma/ti/edma.c edesc = kzalloc(struct_size(edesc, pset, nslots), GFP_ATOMIC);
edesc 1334 drivers/dma/ti/edma.c if (!edesc)
edesc 1337 drivers/dma/ti/edma.c edesc->cyclic = 1;
edesc 1338 drivers/dma/ti/edma.c edesc->pset_nr = nslots;
edesc 1339 drivers/dma/ti/edma.c edesc->residue = edesc->residue_stat = buf_len;
edesc 1340 drivers/dma/ti/edma.c edesc->direction = direction;
edesc 1341 drivers/dma/ti/edma.c edesc->echan = echan;
edesc 1352 drivers/dma/ti/edma.c kfree(edesc);
edesc 1360 drivers/dma/ti/edma.c memcpy(&edesc->pset[i], &edesc->pset[0],
edesc 1361 drivers/dma/ti/edma.c sizeof(edesc->pset[0]));
edesc 1365 drivers/dma/ti/edma.c ret = edma_config_pset(chan, &edesc->pset[i], src_addr,
edesc 1369 drivers/dma/ti/edma.c kfree(edesc);
edesc 1392 drivers/dma/ti/edma.c edesc->pset[i].param.opt,
edesc 1393 drivers/dma/ti/edma.c edesc->pset[i].param.src,
edesc 1394 drivers/dma/ti/edma.c edesc->pset[i].param.dst,
edesc 1395 drivers/dma/ti/edma.c edesc->pset[i].param.a_b_cnt,
edesc 1396 drivers/dma/ti/edma.c edesc->pset[i].param.ccnt,
edesc 1397 drivers/dma/ti/edma.c edesc->pset[i].param.src_dst_bidx,
edesc 1398 drivers/dma/ti/edma.c edesc->pset[i].param.src_dst_cidx,
edesc 1399 drivers/dma/ti/edma.c edesc->pset[i].param.link_bcntrld);
edesc 1401 drivers/dma/ti/edma.c edesc->absync = ret;
edesc 1407 drivers/dma/ti/edma.c edesc->pset[i].param.opt |= TCINTEN;
edesc 1411 drivers/dma/ti/edma.c edesc->pset[i].param.opt |= ITCINTEN;
edesc 1419 drivers/dma/ti/edma.c return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags);
edesc 1425 drivers/dma/ti/edma.c struct edma_desc *edesc;
edesc 1428 drivers/dma/ti/edma.c edesc = echan->edesc;
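
All three ti/edma prep routines above (slave_sg at 1060-1146, memcpy at 1154-1262, cyclic at 1272-1419) share one skeleton: size the variable-length edma_desc with struct_size(), fill each PaRAM set through edma_config_pset(), flag the tail set for a completion interrupt, and hand the descriptor to the virt-dma layer. A trimmed DMA_DEV_TO_MEM sketch of that skeleton; slot management and most error unwinding are elided, and the channel-config lookups and edma_config_pset() argument order are assumptions from the driver vintage of this listing.

static struct dma_async_tx_descriptor *
edma_prep_sketch(struct dma_chan *chan, struct scatterlist *sgl,
                 unsigned int sg_len, unsigned long tx_flags)
{
        struct edma_chan *echan = to_edma_chan(chan);
        enum dma_transfer_direction direction = DMA_DEV_TO_MEM;
        dma_addr_t src_addr = echan->cfg.src_addr;      /* device FIFO */
        enum dma_slave_buswidth dev_width = echan->cfg.src_addr_width;
        u32 burst = echan->cfg.src_maxburst;
        struct edma_desc *edesc;
        struct scatterlist *sg;
        int i, ret;

        /* One allocation covers the header plus sg_len PaRAM sets. */
        edesc = kzalloc(struct_size(edesc, pset, sg_len), GFP_ATOMIC);
        if (!edesc)
                return NULL;

        edesc->pset_nr = sg_len;
        edesc->residue = 0;
        edesc->direction = direction;
        edesc->echan = echan;

        for_each_sg(sgl, sg, sg_len, i) {
                /* The memory side of each chunk comes from the sg entry. */
                ret = edma_config_pset(chan, &edesc->pset[i], src_addr,
                                       sg_dma_address(sg), burst, dev_width,
                                       sg_dma_len(sg), direction);
                if (ret < 0) {
                        kfree(edesc);
                        return NULL;
                }
                edesc->absync = ret;    /* A- vs. AB-synchronized mode */
                edesc->residue += sg_dma_len(sg);
        }
        edesc->residue_stat = edesc->residue;

        /* Interrupt on the last set so completion can be signalled. */
        edesc->pset[sg_len - 1].param.opt |= TCINTEN;

        return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags);
}

The memcpy variant additionally chains two sets with TCCHEN/ITCCHEN and may mark the descriptor polled (line 1260) so it completes without an interrupt.
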
edesc 1429 drivers/dma/ti/edma.c if (edesc) {
edesc 1430 drivers/dma/ti/edma.c if (edesc->cyclic) {
edesc 1431 drivers/dma/ti/edma.c vchan_cyclic_callback(&edesc->vdesc);
edesc 1434 drivers/dma/ti/edma.c } else if (edesc->processed == edesc->pset_nr) {
edesc 1435 drivers/dma/ti/edma.c edesc->residue = 0;
edesc 1437 drivers/dma/ti/edma.c vchan_cookie_complete(&edesc->vdesc);
edesc 1438 drivers/dma/ti/edma.c echan->edesc = NULL;
edesc 1449 drivers/dma/ti/edma.c edesc->residue -= edesc->sg_len;
edesc 1450 drivers/dma/ti/edma.c edesc->residue_stat = edesc->residue;
edesc 1451 drivers/dma/ti/edma.c edesc->processed_stat = edesc->processed;
edesc 1512 drivers/dma/ti/edma.c if (!echan->edesc)
edesc 1721 drivers/dma/ti/edma.c if (vchan_issue_pending(&echan->vchan) && !echan->edesc)
edesc 1735 drivers/dma/ti/edma.c static u32 edma_residue(struct edma_desc *edesc)
edesc 1737 drivers/dma/ti/edma.c bool dst = edesc->direction == DMA_DEV_TO_MEM;
edesc 1739 drivers/dma/ti/edma.c struct edma_chan *echan = edesc->echan;
edesc 1740 drivers/dma/ti/edma.c struct edma_pset *pset = edesc->pset;
edesc 1762 drivers/dma/ti/edma.c if (is_slave_direction(edesc->direction))
edesc 1790 drivers/dma/ti/edma.c if (edesc->cyclic) {
edesc 1792 drivers/dma/ti/edma.c edesc->residue_stat = edesc->residue - done;
edesc 1793 drivers/dma/ti/edma.c return edesc->residue_stat;
edesc 1806 drivers/dma/ti/edma.c pset += edesc->processed_stat;
edesc 1808 drivers/dma/ti/edma.c for (i = edesc->processed_stat; i < edesc->processed; i++, pset++) {
edesc 1815 drivers/dma/ti/edma.c return edesc->residue_stat - (pos - pset->addr);
edesc 1818 drivers/dma/ti/edma.c edesc->processed_stat++;
edesc 1819 drivers/dma/ti/edma.c edesc->residue_stat -= pset->len;
edesc 1821 drivers/dma/ti/edma.c return edesc->residue_stat;
edesc 1844 drivers/dma/ti/edma.c if (echan->edesc && echan->edesc->vdesc.tx.cookie == cookie) {
edesc 1845 drivers/dma/ti/edma.c txstate->residue = edma_residue(echan->edesc);
edesc 1861 drivers/dma/ti/edma.c echan->edesc && echan->edesc->polled &&
edesc 1862 drivers/dma/ti/edma.c echan->edesc->vdesc.tx.cookie == cookie) {
edesc 1864 drivers/dma/ti/edma.c vchan_cookie_complete(&echan->edesc->vdesc);
edesc 1865 drivers/dma/ti/edma.c echan->edesc = NULL;
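
Closing the listing, the ti/edma status fragments (lines 1844-1865) show when residue is actually computed: only for the descriptor the hardware is currently working on; a descriptor still sitting in the queue simply reports its full prepared residue. A sketch of that dispatch, including the polled-memcpy retirement visible at lines 1861-1865; locking and helpers follow the usual virt-dma conventions, and the real function also reassigns the event queue when it stops a polled channel.

static enum dma_status edma_tx_status_sketch(struct dma_chan *chan,
                                             dma_cookie_t cookie,
                                             struct dma_tx_state *txstate)
{
        struct edma_chan *echan = to_edma_chan(chan);
        struct virt_dma_desc *vdesc;
        enum dma_status ret;
        unsigned long flags;

        ret = dma_cookie_status(chan, cookie, txstate);
        if (ret == DMA_COMPLETE || !txstate)
                return ret;

        spin_lock_irqsave(&echan->vchan.lock, flags);
        if (echan->edesc && echan->edesc->vdesc.tx.cookie == cookie)
                /* Running descriptor: derive residue from hw position. */
                txstate->residue = edma_residue(echan->edesc);
        else if ((vdesc = vchan_find_desc(&echan->vchan, cookie)))
                /* Still queued: nothing has transferred yet. */
                txstate->residue = to_edma_desc(&vdesc->tx)->residue;
        else
                txstate->residue = 0;

        /* Polled memcpy descriptors are retired here, not in the IRQ. */
        if (txstate->residue == 0 && echan->edesc && echan->edesc->polled &&
            echan->edesc->vdesc.tx.cookie == cookie) {
                edma_stop(echan);
                vchan_cookie_complete(&echan->edesc->vdesc);
                echan->edesc = NULL;
                ret = DMA_COMPLETE;
        }
        spin_unlock_irqrestore(&echan->vchan.lock, flags);

        return ret;
}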