Cross-reference of the identifier in_sg in linux-4.1.27

/linux-4.1.27/drivers/crypto/nx/
nx-aes-xcbc.c
    in nx_xcbc_empty():
        75:  struct nx_sg *in_sg, *out_sg;                                  (local)
        93:  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len,
       105:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
       121:  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len,
       134:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
    in nx_xcbc_update():
       186:  struct nx_sg *in_sg;                                           (local)
       209:  in_sg = nx_ctx->in_sg;
       244:  in_sg = nx_build_sg_list(nx_ctx->in_sg,
       255:  in_sg = nx_build_sg_list(in_sg,
       265:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) *
       295:  in_sg = nx_ctx->in_sg;
    in nx_xcbc_final():
       312:  struct nx_sg *in_sg, *out_sg;                                  (local)
       339:  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer,
       356:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
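The idiom repeated throughout this directory is already visible in the entry above: nx_build_sg_list() fills hardware scatter/gather entries and returns a pointer one past the last slot it used, and the driver then converts the distance between that end pointer and the start of the table into the byte count programmed into op.inlen. A minimal userspace sketch of that pointer arithmetic follows; struct toy_sg, toy_build_sg_list(), and the 64-byte chunk size are all invented stand-ins, not the kernel API.

```c
#include <stdio.h>

struct toy_sg {                 /* stand-in for struct nx_sg */
	unsigned long addr;
	unsigned int  len;
};

/* Fill entries for buf (at most max of them); return one past the
 * last slot used, the same convention as nx_build_sg_list(). */
static struct toy_sg *toy_build_sg_list(struct toy_sg *sg, unsigned char *buf,
					unsigned int *len, unsigned int max)
{
	const unsigned int chunk = 64;  /* pretend each entry covers 64 bytes */
	unsigned int left = *len;

	while (left && max--) {
		sg->addr = (unsigned long)buf;
		sg->len = left < chunk ? left : chunk;
		buf += sg->len;
		left -= sg->len;
		sg++;
	}
	*len -= left;                   /* report how much actually fit */
	return sg;
}

int main(void)
{
	struct toy_sg sgtab[8];
	unsigned char data[200];
	unsigned int len = sizeof(data);
	struct toy_sg *end = toy_build_sg_list(sgtab, data, &len, 8);

	/* the shape of the op.inlen computation: the pointer difference
	 * between end and start of the table, times sizeof(entry) */
	printf("%u bytes -> %ld entries, inlen = %zu bytes of sg list\n",
	       len, (long)(end - sgtab),
	       (end - sgtab) * sizeof(struct toy_sg));
	return 0;
}
```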
nx-sha256.c
    in nx_sha256_update():
       116:  struct nx_sg *in_sg = nx_ctx->in_sg;                           (local)
       120:  in_sg = nx_build_sg_list(in_sg,
       129:  used_sgs = in_sg - nx_ctx->in_sg;
       144:  in_sg = nx_build_sg_list(in_sg, (u8 *) data,
       147:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
    in nx_sha256_final():
       194:  struct nx_sg *in_sg, *out_sg;                                  (local)
       223:  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf,
       239:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
nx-sha512.c
    in nx_sha512_update():
       116:  struct nx_sg *in_sg = nx_ctx->in_sg;                           (local)
       120:  in_sg = nx_build_sg_list(in_sg,
       128:  used_sgs = in_sg - nx_ctx->in_sg;
       143:  in_sg = nx_build_sg_list(in_sg, (u8 *) data,
       146:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
    in nx_sha512_final():
       197:  struct nx_sg *in_sg, *out_sg;                                  (local)
       232:  in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len,
       244:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
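The two SHA update paths above also show the usual partial-block bookkeeping: data is submitted to the engine only in whole blocks, and the tail is buffered in sctx->buf until the next call (the sg list then describes the carried bytes first and the new data second). A sketch of that bookkeeping, assuming a 64-byte block size and using invented names (toy_sha_ctx, toy_update):

```c
#include <stdio.h>
#include <string.h>

#define BLOCK 64                        /* SHA-256 block size */

struct toy_sha_ctx {
	unsigned char buf[BLOCK];       /* partial-block carry */
	unsigned int  count;            /* bytes currently buffered */
};

/* Return how many bytes would be handed to the engine this call. */
static unsigned int toy_update(struct toy_sha_ctx *c,
			       const unsigned char *data, unsigned int len)
{
	unsigned int total = c->count + len;
	unsigned int to_process = total - (total % BLOCK);

	if (to_process == 0) {
		/* less than one block so far: append and wait */
		memcpy(c->buf + c->count, data, len);
		c->count = total;
		return 0;
	}
	/* the sg list would cover c->buf (c->count bytes) followed by
	 * the first to_process - c->count bytes of data */
	c->count = total % BLOCK;
	memcpy(c->buf, data + len - c->count, c->count);
	return to_process;
}

int main(void)
{
	struct toy_sha_ctx c = { .count = 0 };
	unsigned char chunk[100] = { 0 };
	unsigned int sent;

	/* 100 then 100 bytes: 64 go out first, then 128, 8 stay buffered */
	sent = toy_update(&c, chunk, 100);
	printf("sent %u, carry %u\n", sent, c.count);
	sent = toy_update(&c, chunk, 100);
	printf("sent %u, carry %u\n", sent, c.count);
	return 0;
}
```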
nx-aes-gcm.c
    in nx_gca():
       131:  struct nx_sg *nx_sg = nx_ctx->in_sg;
       161:  nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,
       169:  nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg)
    in gmac():
       227:  nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,
       235:  nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg)
    in gcm_empty():
       272:  struct nx_sg *in_sg, *out_sg;                                  (local)
       292:  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) desc->info,
       305:  nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
nx.c
    in nx_build_sg_lists():
       281:  struct nx_sg *nx_insg = nx_ctx->in_sg;
       306:  nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes);
    in nx_ctx_init():
       326:  nx_ctx->op.in = __pa(nx_ctx->in_sg);
       334:  nx_ctx->op_aead.in = __pa(nx_ctx->in_sg);
    in nx_crypto_ctx_init():
       618:  nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE);
       619:  nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE);
    in nx_crypto_ctx_exit():
       694:  nx_ctx->in_sg = NULL;
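Lines 618-619 show how the nx context is carved out of one allocation: the in_sg table sits one NX_PAGE_SIZE past the CSB/CPB control block, the out_sg table one page past that, and nx_ctx_init() then hands the hardware the physical addresses via __pa(). A toy userspace model of that carving; PAGE, the 3-page size, and the variable names are assumptions made for illustration only:

```c
#include <stdio.h>
#include <stdlib.h>

#define PAGE 4096                       /* stand-in for NX_PAGE_SIZE */

int main(void)
{
	/* one region split as: [csbcpb][in_sg table][out_sg table] */
	unsigned char *kmem = aligned_alloc(PAGE, 3 * PAGE);
	if (!kmem)
		return 1;

	unsigned char *csbcpb = kmem;
	unsigned char *in_sg  = csbcpb + PAGE;  /* cf. nx.c:618 */
	unsigned char *out_sg = in_sg + PAGE;   /* cf. nx.c:619 */

	/* nx_ctx_init() then programs the physical address of in_sg
	 * into op.in via __pa(); here we can only print the layout */
	printf("csbcpb at +%td, in_sg at +%td, out_sg at +%td\n",
	       csbcpb - kmem, in_sg - kmem, out_sg - kmem);
	free(kmem);
	return 0;
}
```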
nx-aes-ccm.c
    in generate_pat():
       179:  struct nx_sg *nx_insg = nx_ctx->in_sg;
       267:  nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) *
       300:  nx_insg = nx_walk_and_build(nx_ctx->in_sg,
       314:  nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) *
nx.h
    in struct nx_crypto_ctx:
       136:  struct nx_sg *in_sg;   /* aligned pointer into kmem to an sg list */   (member)
/linux-4.1.27/drivers/crypto/
omap-aes.c
    in struct omap_aes_dev:
       169:  struct scatterlist *in_sg;                                     (member)
    in omap_aes_crypt_dma():
       416:  omap_aes_crypt_dma(struct crypto_tfm *tfm, struct scatterlist *in_sg,
             struct scatterlist *out_sg, int in_sg_len, int out_sg_len)    (argument)
       427:  scatterwalk_start(&dd->in_walk, dd->in_sg);
       436:  dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE);
       455:  tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len,
    in omap_aes_crypt_dma_start():
       506:  err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len,
       521:  err = omap_aes_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len,
       524:  dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
    in omap_aes_copy_sgs():
       594:  sg_copy_buf(buf_in, dd->in_sg, 0, dd->total, 0);
       598:  dd->in_sg = &dd->in_sgl;
    in omap_aes_handle_queue():
       641:  dd->in_sg = req->src;
       644:  if (omap_aes_check_aligned(dd->in_sg, dd->total) ||
       653:  dd->in_sg_len = scatterwalk_bytes_sglen(dd->in_sg, dd->total);
    in omap_aes_done_task():
       688:  dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
    in omap_aes_irq():
       994:  BUG_ON(!dd->in_sg);
       996:  BUG_ON(_calc_walked(in) > dd->in_sg->length);
       998:  src = sg_virt(dd->in_sg) + _calc_walked(in);
      1004:  if (dd->in_sg->length == _calc_walked(in)) {
      1005:  dd->in_sg = sg_next(dd->in_sg);
      1006:  if (dd->in_sg) {
      1008:  dd->in_sg);
      1009:  src = sg_virt(dd->in_sg) +
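omap_aes_copy_sgs() (lines 594-598 above) is the fallback for input the DMA engine cannot take directly: the whole request is copied into one contiguous bounce buffer and in_sg is repointed at a single-entry list. A userspace sketch of that decision; the toy_sg type, its helpers, and the 4-byte/16-byte alignment rule are assumptions, not the kernel scatterlist API:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct toy_sg {                         /* stand-in for scatterlist */
	unsigned char *virt;
	unsigned int   length;
	struct toy_sg *next;
};

/* Every entry must be word-aligned and block-sized, else we bounce. */
static int toy_aligned(const struct toy_sg *sg, unsigned int block)
{
	for (; sg; sg = sg->next)
		if (((uintptr_t)sg->virt % 4) || (sg->length % block))
			return 0;
	return 1;
}

/* Flatten the chain into buf, sized by the caller to the total. */
static void toy_copy_sgs(unsigned char *buf, const struct toy_sg *sg)
{
	for (; sg; sg = sg->next) {
		memcpy(buf, sg->virt, sg->length);
		buf += sg->length;
	}
}

int main(void)
{
	unsigned char a[20] = { 0 }, b[12] = { 0 };  /* 20 % 16 != 0 */
	struct toy_sg s1 = { b, sizeof(b), NULL };
	struct toy_sg s0 = { a, sizeof(a), &s1 };
	unsigned char bounce[32];
	struct toy_sg single = { bounce, sizeof(bounce), NULL };

	if (!toy_aligned(&s0, 16)) {    /* 16 = AES block size */
		toy_copy_sgs(bounce, &s0);
		/* the driver would now DMA from &single instead */
		printf("bounced %u bytes into one entry\n", single.length);
	}
	return 0;
}
```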
omap-des.c
    in struct omap_des_dev:
       150:  struct scatterlist *in_sg;                                     (member)
    in omap_des_crypt_dma():
       400:  omap_des_crypt_dma(struct crypto_tfm *tfm, struct scatterlist *in_sg,
             struct scatterlist *out_sg, int in_sg_len, int out_sg_len)    (argument)
       411:  scatterwalk_start(&dd->in_walk, dd->in_sg);
       420:  dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE);
       439:  tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len,
    in omap_des_crypt_dma_start():
       490:  err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len,
       505:  err = omap_des_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len,
       508:  dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
    in omap_des_copy_sgs():
       576:  sg_copy_buf(buf_in, dd->in_sg, 0, dd->total, 0);
       580:  dd->in_sg = &dd->in_sgl;
    in omap_des_handle_queue():
       623:  dd->in_sg = req->src;
       626:  if (omap_des_copy_needed(dd->in_sg) ||
       635:  dd->in_sg_len = scatterwalk_bytes_sglen(dd->in_sg, dd->total);
    in omap_des_done_task():
       670:  dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
    in omap_des_irq():
       910:  BUG_ON(!dd->in_sg);
       912:  BUG_ON(_calc_walked(in) > dd->in_sg->length);
       914:  src = sg_virt(dd->in_sg) + _calc_walked(in);
       920:  if (dd->in_sg->length == _calc_walked(in)) {
       921:  dd->in_sg = sg_next(dd->in_sg);
       922:  if (dd->in_sg) {
       924:  dd->in_sg);
       925:  src = sg_virt(dd->in_sg) +
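The two interrupt handlers above (omap_aes_irq() and omap_des_irq()) drive the PIO path the same way: a walked-byte counter tracks the position inside the current scatterlist entry, and once the entry is exhausted the handler advances with sg_next(). A toy model of that walk, with invented names standing in for the scatterlist API:

```c
#include <stdio.h>

struct toy_sg {                         /* stand-in for scatterlist */
	const unsigned int *virt;
	unsigned int        length;     /* bytes */
	struct toy_sg      *next;
};

int main(void)
{
	unsigned int a[4] = { 1, 2, 3, 4 }, b[2] = { 5, 6 };
	struct toy_sg s1 = { b, sizeof(b), NULL };
	struct toy_sg s0 = { a, sizeof(a), &s1 };
	struct toy_sg *sg = &s0;
	unsigned int walked = 0;        /* bytes consumed in this entry */

	while (sg) {
		/* "write the next word to the data register" */
		printf("%u ", sg->virt[walked / sizeof(unsigned int)]);
		walked += sizeof(unsigned int);
		if (walked == sg->length) {     /* entry exhausted */
			sg = sg->next;          /* like sg_next() */
			walked = 0;
		}
	}
	printf("\n");
	return 0;
}
```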
sahara.c
    in struct sahara_sha_reqctx:
       174:  * @in_sg: scatterlist for input data                           (kerneldoc)
       192:  struct scatterlist *in_sg;                                     (member)
    in struct sahara_dev:
       232:  struct scatterlist *in_sg;                                     (member)
    in sahara_hw_descriptor_create():
       505:  dev->nb_in_sg = sahara_sg_length(dev->in_sg, dev->total);
       513:  ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg,
       528:  sg = dev->in_sg;
       571:  dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
    in sahara_aes_process():
       592:  dev->in_sg = req->src;
       621:  dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
    in sahara_sha_hw_links_create():
       819:  dev->in_sg = rctx->in_sg;
       821:  dev->nb_in_sg = sahara_sg_length(dev->in_sg, rctx->total);
       830:  sg = dev->in_sg;
       845:  sg = dev->in_sg;
       846:  ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg,
    in sahara_sha_prepare_request():
      1005:  rctx->in_sg = rctx->in_sg_chain;
      1012:  rctx->in_sg = req->src;
      1014:  rctx->in_sg = rctx->in_sg_chain;
      1016:  sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt);
      1021:  rctx->in_sg = req->src;
      1023:  req->src = rctx->in_sg;
    in sahara_sha_unmap_sg():
      1039:  sg = dev->in_sg;
      1045:  dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
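sahara_sha_prepare_request() (lines 1005-1023 above) stitches the tail of the previous update in front of the new request: sg_init_one() wraps the remainder buffer in a one-entry list, which is then chained ahead of req->src so the hardware sees one logical byte stream. A sketch of that stitching, with an invented toy_sg type standing in for scatterlist chaining:

```c
#include <stdio.h>

struct toy_sg {                         /* stand-in for scatterlist */
	const unsigned char *virt;
	unsigned int         length;
	struct toy_sg       *next;
};

int main(void)
{
	unsigned char rembuf[] = "carry";       /* tail of the last update */
	unsigned char fresh[]  = "new request data";
	struct toy_sg req_src = { fresh, sizeof(fresh) - 1, NULL };
	struct toy_sg chain;

	/* like sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt),
	 * then placing the request's own list behind it */
	chain.virt = rembuf;
	chain.length = sizeof(rembuf) - 1;
	chain.next = &req_src;

	for (struct toy_sg *sg = &chain; sg; sg = sg->next)
		printf("%u bytes: %.*s\n", sg->length,
		       (int)sg->length, sg->virt);
	return 0;
}
```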
atmel-aes.c
    in struct atmel_aes_dev:
       122:  struct scatterlist *in_sg;                                     (member)
    in atmel_aes_crypt_cpu_start():
       404:  dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg);
       412:  dd->bufcnt = sg_copy_to_buffer(dd->in_sg, dd->nb_in_sg,
    in atmel_aes_crypt_dma_start():
       435:  in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) &&
       436:       IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size);
       441:  if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))
       447:  count = min(dd->total, sg_dma_len(dd->in_sg));
       450:  err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
       460:  dma_unmap_sg(dd->dev, dd->in_sg, 1,
       465:  addr_in = sg_dma_address(dd->in_sg);
       475:  count = atmel_aes_sg_copy(&dd->in_sg, &dd->in_offset,
       489:  dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
    in atmel_aes_handle_queue():
       596:  dd->in_sg = req->src;
    in atmel_aes_crypt_dma_stop():
       632:  dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
    in atmel_aes_done_task():
      1182:  dd->in_sg = sg_next(dd->in_sg);
      1184:  if (!dd->in_sg || !dd->out_sg)
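The alignment test at lines 435-436 (mirrored at atmel-tdes.c lines 516-517, listed below) gates the DMA fast path: the entry's offset must be 32-bit aligned and its length a whole number of cipher blocks, otherwise the driver falls back to copying. A small sketch of that predicate; the IS_ALIGNED macro here mirrors the kernel's definition for power-of-two alignments, while the offsets and lengths are made-up sample values:

```c
#include <stdio.h>

/* same shape as the kernel's IS_ALIGNED() for power-of-two a */
#define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

static const char *path(unsigned int offset, unsigned int length,
			unsigned int block_size)
{
	int ok = IS_ALIGNED(offset, sizeof(unsigned int)) &&
		 IS_ALIGNED(length, block_size);
	return ok ? "DMA" : "copy fallback";
}

int main(void)
{
	/* aligned offset, whole blocks -> DMA */
	printf("offset 12, length 48: %s\n", path(12, 48, 16));
	/* misaligned offset -> fallback */
	printf("offset 13, length 48: %s\n", path(13, 48, 16));
	/* ragged length -> fallback */
	printf("offset 12, length 50: %s\n", path(12, 50, 16));
	return 0;
}
```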
atmel-tdes.c
    in struct atmel_tdes_dev:
       115:  struct scatterlist *in_sg;                                     (member)
    in atmel_tdes_crypt_pdc_stop():
       329:  dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
    in atmel_tdes_crypt_start():
       516:  in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) &&
       517:       IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size);
       522:  if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))
       528:  count = min(dd->total, sg_dma_len(dd->in_sg));
       531:  err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
       541:  dma_unmap_sg(dd->dev, dd->in_sg, 1,
       546:  addr_in = sg_dma_address(dd->in_sg);
       553:  count = atmel_tdes_sg_copy(&dd->in_sg, &dd->in_offset,
       570:  dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
    in atmel_tdes_handle_queue():
       622:  dd->in_sg = req->src;
    in atmel_tdes_crypt_dma_stop():
       654:  dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
    in atmel_tdes_done_task():
      1235:  dd->in_sg = sg_next(dd->in_sg);
      1237:  if (!dd->in_sg || !dd->out_sg)
/linux-4.1.27/drivers/usb/wusbcore/
wa-xfer.c
    in wa_xfer_create_subset_sg():
      1016:  * subset of the in_sg that matches the buffer subset           (comment)
      1019:  static struct scatterlist *wa_xfer_create_subset_sg(struct scatterlist *in_sg,   (argument)
      1026:  struct scatterlist *current_xfer_sg = in_sg;
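As its comment says, wa_xfer_create_subset_sg() builds a new scatterlist describing only a byte window of an existing one, so a transfer segment can reference a slice of the caller's buffer. A userspace sketch of the trimming logic; toy_sg and toy_subset_sg() are invented for illustration and only model the shape of the walk, not the kernel function:

```c
#include <stdio.h>

struct toy_sg {                         /* stand-in for scatterlist */
	const unsigned char *virt;
	unsigned int         length;
	struct toy_sg       *next;
};

/* Fill out[] with entries covering bytes [off, off+len); return count. */
static int toy_subset_sg(const struct toy_sg *sg, unsigned int off,
			 unsigned int len, struct toy_sg *out)
{
	int n = 0;

	/* skip whole entries that lie before the window */
	while (sg && off >= sg->length) {
		off -= sg->length;
		sg = sg->next;
	}
	while (sg && len) {
		unsigned int take = sg->length - off;

		if (take > len)
			take = len;
		out[n].virt = sg->virt + off;   /* trim the first entry */
		out[n].length = take;           /* and clamp the last  */
		out[n].next = NULL;
		if (n)
			out[n - 1].next = &out[n];
		n++;
		len -= take;
		off = 0;
		sg = sg->next;
	}
	return n;
}

int main(void)
{
	unsigned char a[10], b[10], c[10];
	struct toy_sg s2 = { c, 10, NULL };
	struct toy_sg s1 = { b, 10, &s2 };
	struct toy_sg s0 = { a, 10, &s1 };
	struct toy_sg out[3];

	/* window starting 4 bytes in, 14 bytes long: 6 from a, 8 from b */
	int n = toy_subset_sg(&s0, 4, 14, out);
	for (int i = 0; i < n; i++)
		printf("entry %d: %u bytes\n", i, out[i].length);
	return 0;
}
```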