Identifier search results for out_sg in linux-4.1.27

/linux-4.1.27/drivers/crypto/nx/
nx-aes-xcbc.c
      75  struct nx_sg *in_sg, *out_sg;   in nx_xcbc_empty() (local)
      99  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) keys, &len,   in nx_xcbc_empty()
     106  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);   in nx_xcbc_empty()
     128  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,   in nx_xcbc_empty()
     135  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);   in nx_xcbc_empty()
     187  struct nx_sg *out_sg;   in nx_xcbc_update() (local)
     216  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,   in nx_xcbc_update()
     224  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);   in nx_xcbc_update()
     312  struct nx_sg *in_sg, *out_sg;   in nx_xcbc_final() (local)
     348  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,   in nx_xcbc_final()
     [all …]
nx-sha256.c
      74  struct nx_sg *out_sg;   in nx_sha256_update() (local)
     105  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,   in nx_sha256_update()
     107  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);   in nx_sha256_update()
     194  struct nx_sg *in_sg, *out_sg;   in nx_sha256_final() (local)
     232  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len);   in nx_sha256_final()
     240  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);   in nx_sha256_final()
nx-sha512.c
      74  struct nx_sg *out_sg;   in nx_sha512_update() (local)
     105  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,   in nx_sha512_update()
     107  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);   in nx_sha512_update()
     197  struct nx_sg *in_sg, *out_sg;   in nx_sha512_final() (local)
     241  out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,   in nx_sha512_final()
     245  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);   in nx_sha512_final()
nx.c
     282  struct nx_sg *nx_outsg = nx_ctx->out_sg;   in nx_build_sg_lists()
     307  nx_ctx->op.outlen = trim_sg_list(nx_ctx->out_sg, nx_outsg, delta, nbytes);   in nx_build_sg_lists()
     327  nx_ctx->op.out = __pa(nx_ctx->out_sg);   in nx_ctx_init()
     335  nx_ctx->op_aead.out = __pa(nx_ctx->out_sg);   in nx_ctx_init()
     619  nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE);   in nx_crypto_ctx_init()
     623  (struct nx_csbcpb *)((u8 *)nx_ctx->out_sg +   in nx_crypto_ctx_init()
     695  nx_ctx->out_sg = NULL;   in nx_crypto_ctx_exit()
nx-aes-gcm.c
     272  struct nx_sg *in_sg, *out_sg;   in gcm_empty() (local)
     299  out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) out, &len,   in gcm_empty()
     306  nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);   in gcm_empty()
nx.h
     137  struct nx_sg *out_sg; /* aligned pointer into kmem to an sg list */   (member)
nx-aes-ccm.c
     180  struct nx_sg *nx_outsg = nx_ctx->out_sg;   in generate_pat()
     269  nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) *   in generate_pat()
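Two patterns repeat across these NX hits. First, nx_crypto_ctx_init() carves the input list, output list and control block out of one kmem allocation at page-sized offsets (nx.c line 619), and nx_ctx_init() hands the coprocessor physical addresses of those lists via __pa() (nx.c lines 327 and 335). Second, every algorithm file calls nx_build_sg_list() to fill entries starting at nx_ctx->out_sg and then derives the byte count for op.outlen from the pointer the builder returns. The sketch below is a hedged, userspace-compilable re-creation of both patterns; demo_* names, the simplified entry layout, and the use of plain virtual addresses (where the driver stores physical ones) are all assumptions for illustration, not the driver's actual code.

    #include <stdint.h>

    #define DEMO_PAGE_SIZE 4096u

    /* Simplified stand-ins for struct nx_sg and the context from nx.h. */
    struct demo_sg { uint64_t addr; uint32_t rsvd; uint32_t len; };

    struct demo_ctx {
        uint8_t *kmem;          /* single backing allocation */
        struct demo_sg *in_sg;  /* input sg list at the start of kmem */
        struct demo_sg *out_sg; /* output sg list one page later */
        void *csbcpb;           /* control block placed after the lists */
    };

    /* The carve-up done around nx.c line 619: all three views share kmem.
     * The real driver then stores __pa(nx_ctx->out_sg) into op.out. */
    static void demo_ctx_carve(struct demo_ctx *ctx)
    {
        ctx->in_sg  = (struct demo_sg *)ctx->kmem;
        ctx->out_sg = (struct demo_sg *)((uint8_t *)ctx->in_sg + DEMO_PAGE_SIZE);
        ctx->csbcpb = (uint8_t *)ctx->out_sg + DEMO_PAGE_SIZE;
    }

    /* The nx_build_sg_list() contract as the call sites above use it:
     * split [buf, buf + *len) into page-bounded fragments, emit one entry
     * per fragment, clamp *len to what fit in sgmax entries, and return a
     * pointer one past the last entry written.  Callers turn the distance
     * between that pointer and the list head into op.outlen bytes. */
    static struct demo_sg *demo_build_sg_list(struct demo_sg *sg, uint8_t *buf,
                                              unsigned int *len, uint32_t sgmax)
    {
        unsigned int remaining = *len, copied = 0;
        uint32_t used = 0;

        while (remaining && used < sgmax) {
            uintptr_t addr = (uintptr_t)buf;
            unsigned int chunk = DEMO_PAGE_SIZE - (addr % DEMO_PAGE_SIZE);

            if (chunk > remaining)
                chunk = remaining;
            sg->addr = (uint64_t)addr;  /* real code: a physical address */
            sg->len = chunk;
            sg++;
            used++;
            buf += chunk;
            copied += chunk;
            remaining -= chunk;
        }
        *len = copied;
        return sg;
    }

Splitting at page boundaries matters because the coprocessor walks physical addresses, and physically contiguous memory is only guaranteed within a page.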
/linux-4.1.27/drivers/crypto/
omap-des.c
     151  struct scatterlist *out_sg;   (member)
     401  struct scatterlist *in_sg, struct scatterlist *out_sg,   in omap_des_crypt_dma() (argument)
     412  scatterwalk_start(&dd->out_walk, dd->out_sg);   in omap_des_crypt_dma()
     458  tx_out = dmaengine_prep_slave_sg(dd->dma_lch_out, out_sg, out_sg_len,   in omap_des_crypt_dma()
     497  err = dma_map_sg(dd->dev, dd->out_sg, dd->out_sg_len,   in omap_des_crypt_dma_start()
     505  err = omap_des_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len,   in omap_des_crypt_dma_start()
     509  dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len,   in omap_des_crypt_dma_start()
     574  dd->orig_out = dd->out_sg;   in omap_des_copy_sgs()
     584  dd->out_sg = &dd->out_sgl;   in omap_des_copy_sgs()
     624  dd->out_sg = req->dst;   in omap_des_handle_queue()
     [all …]
omap-aes.c
     170  struct scatterlist *out_sg;   (member)
     417  struct scatterlist *in_sg, struct scatterlist *out_sg,   in omap_aes_crypt_dma() (argument)
     428  scatterwalk_start(&dd->out_walk, dd->out_sg);   in omap_aes_crypt_dma()
     474  tx_out = dmaengine_prep_slave_sg(dd->dma_lch_out, out_sg, out_sg_len,   in omap_aes_crypt_dma()
     513  err = dma_map_sg(dd->dev, dd->out_sg, dd->out_sg_len,   in omap_aes_crypt_dma_start()
     521  err = omap_aes_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len,   in omap_aes_crypt_dma_start()
     525  dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len,   in omap_aes_crypt_dma_start()
     592  dd->orig_out = dd->out_sg;   in omap_aes_copy_sgs()
     602  dd->out_sg = &dd->out_sgl;   in omap_aes_copy_sgs()
     642  dd->out_sg = req->dst;   in omap_aes_handle_queue()
     [all …]
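omap-des.c and omap-aes.c use out_sg in the same receive-side DMA sequence: map the destination scatterlist with dma_map_sg(), prepare a slave transfer on the rx channel with dmaengine_prep_slave_sg(), and unmap again if anything fails. A sketch of that sequence follows; demo_prep_out_dma and its dev/chan/done parameters are placeholder names, not the drivers' fields.

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    /* Map the output sg for the device and queue a dev-to-mem transfer. */
    static int demo_prep_out_dma(struct device *dev, struct dma_chan *chan,
                                 struct scatterlist *out_sg, int out_nents,
                                 dma_async_tx_callback done, void *ctx)
    {
        struct dma_async_tx_descriptor *tx;
        int mapped;

        mapped = dma_map_sg(dev, out_sg, out_nents, DMA_FROM_DEVICE);
        if (!mapped)
            return -EINVAL;

        tx = dmaengine_prep_slave_sg(chan, out_sg, mapped, DMA_DEV_TO_MEM,
                                     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tx) {
            /* unmap with the nents passed to dma_map_sg(), not the
             * mapped count it returned */
            dma_unmap_sg(dev, out_sg, out_nents, DMA_FROM_DEVICE);
            return -EINVAL;
        }

        tx->callback = done;        /* completion handler */
        tx->callback_param = ctx;
        dmaengine_submit(tx);
        dma_async_issue_pending(chan);
        return 0;
    }

The scatterwalk_start() hits (lines 412 and 428) belong to the non-DMA tail handling, where the drivers walk dd->out_sg byte-wise instead.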
atmel-tdes.c
     118  struct scatterlist *out_sg;   (member)
     328  dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE);   in atmel_tdes_crypt_pdc_stop()
     335  count = atmel_tdes_sg_copy(&dd->out_sg, &dd->out_offset,   in atmel_tdes_crypt_pdc_stop()
     518  out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32)) &&   in atmel_tdes_crypt_start()
     519  IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size);   in atmel_tdes_crypt_start()
     522  if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))   in atmel_tdes_crypt_start()
     529  count = min(count, sg_dma_len(dd->out_sg));   in atmel_tdes_crypt_start()
     537  err = dma_map_sg(dd->dev, dd->out_sg, 1,   in atmel_tdes_crypt_start()
     547  addr_out = sg_dma_address(dd->out_sg);   in atmel_tdes_crypt_start()
     571  dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE);   in atmel_tdes_crypt_start()
     [all …]
atmel-aes.c
     125  struct scatterlist *out_sg;   (member)
     408  dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg);   in atmel_aes_crypt_cpu_start()
     437  out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32)) &&   in atmel_aes_crypt_dma_start()
     438  IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size);   in atmel_aes_crypt_dma_start()
     441  if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))   in atmel_aes_crypt_dma_start()
     448  count = min(count, sg_dma_len(dd->out_sg));   in atmel_aes_crypt_dma_start()
     456  err = dma_map_sg(dd->dev, dd->out_sg, 1,   in atmel_aes_crypt_dma_start()
     466  addr_out = sg_dma_address(dd->out_sg);   in atmel_aes_crypt_dma_start()
     490  dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE);   in atmel_aes_crypt_dma_start()
     598  dd->out_sg = req->dst;   in atmel_aes_handle_queue()
     [all …]
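Both Atmel drivers gate their DMA fast path on the shape of out_sg: the fragment must start word-aligned and cover a whole number of cipher blocks, and the mapped in/out lengths must match, otherwise the driver falls back to a copy path. A one-function sketch of just the alignment test; the demo name is a placeholder and block_size is a plain parameter here rather than the drivers' dd->ctx->block_size.

    #include <linux/kernel.h>
    #include <linux/scatterlist.h>
    #include <linux/types.h>

    /* DMA eligibility: word-aligned start, whole-block length.
     * (The drivers additionally compare sg_dma_len() of the mapped
     * input and output lists before committing to DMA.) */
    static bool demo_out_sg_dma_ok(struct scatterlist *out_sg,
                                   unsigned int block_size)
    {
        return IS_ALIGNED(out_sg->offset, sizeof(u32)) &&
               IS_ALIGNED(out_sg->length, block_size);
    }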
sahara.c
     234  struct scatterlist *out_sg;   (member)
     506  dev->nb_out_sg = sahara_sg_length(dev->out_sg, dev->total);   in sahara_hw_descriptor_create()
     519  ret = dma_map_sg(dev->device, dev->out_sg, dev->nb_out_sg,   in sahara_hw_descriptor_create()
     542  sg = dev->out_sg;   in sahara_hw_descriptor_create()
     568  dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg,   in sahara_hw_descriptor_create()
     593  dev->out_sg = req->dst;   in sahara_aes_process()
     619  dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg,   in sahara_aes_process()
/linux-4.1.27/net/ceph/
crypto.c
     208  goto out_sg;   in ceph_aes_encrypt()
     215  out_sg:   in ceph_aes_encrypt()
     271  goto out_sg;   in ceph_aes_encrypt2()
     278  out_sg:   in ceph_aes_encrypt2()
     323  goto out_sg;   in ceph_aes_decrypt()
     342  out_sg:   in ceph_aes_decrypt()
     389  goto out_sg;   in ceph_aes_decrypt2()
     419  out_sg:   in ceph_aes_decrypt2()
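Unlike every other file in this list, the crypto.c hits are not a scatterlist variable: out_sg is a cleanup label in the goto-unwind idiom, releasing the scatterlist/iv state whether the cipher call succeeded or failed. A self-contained sketch of that idiom; the demo_* names are hypothetical, and kmalloc() stands in for the actual sg and ivec setup.

    #include <linux/errno.h>
    #include <linux/slab.h>
    #include <linux/string.h>

    /* Stand-in for the real cipher invocation. */
    static int demo_cipher(void *dst, const void *src, size_t len)
    {
        memcpy(dst, src, len);
        return 0;
    }

    /* Resources are released in reverse acquisition order through
     * fall-through labels; every failure after setup jumps to out_sg. */
    static int demo_encrypt(void *dst, const void *src, size_t len)
    {
        void *tmp, *sgtab;
        int ret = -ENOMEM;

        tmp = kmalloc(len, GFP_NOFS);
        if (!tmp)
            return ret;

        sgtab = kmalloc(len, GFP_NOFS); /* stands in for sg/ivec setup */
        if (!sgtab)
            goto out_tmp;

        ret = demo_cipher(dst, src, len);
        if (ret)
            goto out_sg;

        ret = demo_cipher(tmp, src, len); /* e.g. a second pass */

    out_sg:
        kfree(sgtab);
    out_tmp:
        kfree(tmp);
        return ret;
    }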
/linux-4.1.27/drivers/usb/wusbcore/
wa-xfer.c
    1023  struct scatterlist *out_sg;   in wa_xfer_create_subset_sg() (local)
    1053  out_sg = kmalloc((sizeof(struct scatterlist) * nents), GFP_ATOMIC);   in wa_xfer_create_subset_sg()
    1054  if (out_sg) {   in wa_xfer_create_subset_sg()
    1055  sg_init_table(out_sg, nents);   in wa_xfer_create_subset_sg()
    1059  last_seg_sg = current_seg_sg = out_sg;   in wa_xfer_create_subset_sg()
    1093  return out_sg;   in wa_xfer_create_subset_sg()
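The hits at lines 1053-1055 show the standard way to build a scatterlist from scratch: allocate the entry array, call sg_init_table() to zero it and mark the final entry, then describe each buffer fragment. A hedged sketch of just that allocation step; the demo name and the fixed two-fragment layout are illustrative assumptions, not what wa_xfer_create_subset_sg() actually copies.

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Build a two-entry scatterlist describing buffers a and b. */
    static struct scatterlist *demo_alloc_sg(void *a, unsigned int a_len,
                                             void *b, unsigned int b_len)
    {
        struct scatterlist *out_sg;

        out_sg = kmalloc_array(2, sizeof(*out_sg), GFP_ATOMIC);
        if (!out_sg)
            return NULL;

        sg_init_table(out_sg, 2);   /* zero entries, set the end marker */
        sg_set_buf(&out_sg[0], a, a_len);
        sg_set_buf(&out_sg[1], b, b_len);
        return out_sg;
    }

GFP_ATOMIC matches the original call site, which runs in a context that cannot sleep; a caller that may sleep would use GFP_KERNEL instead.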