qm_sg_dma 804 drivers/crypto/caam/caamalg_qi.c dma_addr_t qm_sg_dma;
qm_sg_dma 826 drivers/crypto/caam/caamalg_qi.c dma_addr_t qm_sg_dma;
qm_sg_dma 872 drivers/crypto/caam/caamalg_qi.c enum dma_data_direction iv_dir, dma_addr_t qm_sg_dma,
qm_sg_dma 887 drivers/crypto/caam/caamalg_qi.c dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
qm_sg_dma 898 drivers/crypto/caam/caamalg_qi.c edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
qm_sg_dma 910 drivers/crypto/caam/caamalg_qi.c edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,
qm_sg_dma 951 drivers/crypto/caam/caamalg_qi.c dma_addr_t qm_sg_dma, iv_dma = 0;
qm_sg_dma 1119 drivers/crypto/caam/caamalg_qi.c qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
qm_sg_dma 1120 drivers/crypto/caam/caamalg_qi.c if (dma_mapping_error(qidev, qm_sg_dma)) {
qm_sg_dma 1129 drivers/crypto/caam/caamalg_qi.c edesc->qm_sg_dma = qm_sg_dma;
qm_sg_dma 1137 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_last_ext(&fd_sgt[1], qm_sg_dma, in_len, 0);
qm_sg_dma 1144 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma +
qm_sg_dma 1151 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma + sizeof(*sg_table) *
qm_sg_dma 1371 drivers/crypto/caam/caamalg_qi.c edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
qm_sg_dma 1373 drivers/crypto/caam/caamalg_qi.c if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
qm_sg_dma 1383 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
qm_sg_dma 1387 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
qm_sg_dma 1391 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
qm_sg_dma 144 drivers/crypto/caam/caamalg_qi2.c enum dma_data_direction iv_dir, dma_addr_t qm_sg_dma,
qm_sg_dma 160 drivers/crypto/caam/caamalg_qi2.c dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
qm_sg_dma 363 drivers/crypto/caam/caamalg_qi2.c dma_addr_t qm_sg_dma, iv_dma = 0;
qm_sg_dma 531 drivers/crypto/caam/caamalg_qi2.c qm_sg_dma = dma_map_single(dev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
qm_sg_dma 532 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(dev, qm_sg_dma)) {
qm_sg_dma 541 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = qm_sg_dma;
qm_sg_dma 551 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, qm_sg_dma);
qm_sg_dma 560 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(out_fle, qm_sg_dma +
qm_sg_dma 576 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(out_fle, qm_sg_dma + qm_sg_index *
qm_sg_dma 1235 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes,
qm_sg_dma 1237 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(dev, edesc->qm_sg_dma)) {
qm_sg_dma 1251 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
qm_sg_dma 1256 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma +
qm_sg_dma 1259 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + dst_sg_idx *
qm_sg_dma 1272 drivers/crypto/caam/caamalg_qi2.c edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
qm_sg_dma 1284 drivers/crypto/caam/caamalg_qi2.c edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,
qm_sg_dma 3320 drivers/crypto/caam/caamalg_qi2.c dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes,
qm_sg_dma 3536 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
qm_sg_dma 3538 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
qm_sg_dma 3548 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
qm_sg_dma 3621 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
qm_sg_dma 3623 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
qm_sg_dma 3633 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
qm_sg_dma 3715 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
qm_sg_dma 3717 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
qm_sg_dma 3727 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
qm_sg_dma 3800 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
qm_sg_dma 3802 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
qm_sg_dma 3808 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
qm_sg_dma 3985 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
qm_sg_dma 3987 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
qm_sg_dma 4007 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
qm_sg_dma 4099 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
qm_sg_dma 4101 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
qm_sg_dma 4121 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
qm_sg_dma 4207 drivers/crypto/caam/caamalg_qi2.c edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
qm_sg_dma 4210 drivers/crypto/caam/caamalg_qi2.c if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
qm_sg_dma 4217 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
qm_sg_dma 114 drivers/crypto/caam/caamalg_qi2.h dma_addr_t qm_sg_dma;
qm_sg_dma 134 drivers/crypto/caam/caamalg_qi2.h dma_addr_t qm_sg_dma;
qm_sg_dma 146 drivers/crypto/caam/caamalg_qi2.h dma_addr_t qm_sg_dma;
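
The references above all follow one life cycle: the driver builds a QMan scatter/gather table in memory, maps it for the accelerator with dma_map_single(), checks the handle with dma_mapping_error(), stores it in the extended descriptor's qm_sg_dma field, points the frame descriptor or DPAA2 frame list entry at it (dma_to_qm_sg_one_ext() / dpaa2_fl_set_addr()), and releases the mapping with dma_unmap_single() when the job completes. A minimal sketch of that pattern follows; the example_edesc structure and the map/unmap helper names are hypothetical stand-ins for the driver's per-request descriptors, while dma_map_single(), dma_mapping_error() and dma_unmap_single() are the actual DMA API calls seen in the listing.

/*
 * Sketch only: example_edesc, map_qm_sg_table() and unmap_qm_sg_table()
 * are illustrative names, not the driver's real structures or helpers.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

struct example_edesc {
	int qm_sg_bytes;	/* size of the hardware S/G table */
	dma_addr_t qm_sg_dma;	/* bus address handed to the accelerator */
};

/* Map the in-memory S/G table so the hardware can fetch it. */
static int map_qm_sg_table(struct device *dev, struct example_edesc *edesc,
			   void *sg_table)
{
	edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, edesc->qm_sg_dma)) {
		dev_err(dev, "unable to map S/G table\n");
		return -ENOMEM;
	}
	return 0;
}

/* Undo the mapping on job completion or on the error path. */
static void unmap_qm_sg_table(struct device *dev, struct example_edesc *edesc)
{
	dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes,
			 DMA_TO_DEVICE);
}

Note that several references offset the same handle (qm_sg_dma + sizeof(*sg_table) * ..., qm_sg_dma + dst_sg_idx * ...), i.e. the source and destination S/G entries share a single mapped table and the output side is addressed at an offset into it rather than through a second mapping.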