tsgl 656 crypto/af_alg.c struct scatterlist *tsgl;
tsgl 668 crypto/af_alg.c tsgl = areq->tsgl;
tsgl 669 crypto/af_alg.c if (tsgl) {
tsgl 670 crypto/af_alg.c for_each_sg(tsgl, sg, areq->tsgl_entries, i) {
tsgl 676 crypto/af_alg.c sock_kfree_s(sk, tsgl, areq->tsgl_entries * sizeof(*tsgl));
tsgl 1095 crypto/af_alg.c areq->tsgl = NULL;
tsgl 101 crypto/algif_aead.c struct af_alg_tsgl *tsgl, *tmp;
tsgl 182 crypto/algif_aead.c list_for_each_entry_safe(tsgl, tmp, &ctx->tsgl_list, list) {
tsgl 183 crypto/algif_aead.c for (i = 0; i < tsgl->cur; i++) {
tsgl 184 crypto/algif_aead.c struct scatterlist *process_sg = tsgl->sg + i;
tsgl 254 crypto/algif_aead.c areq->tsgl = sock_kmalloc(sk, array_size(sizeof(*areq->tsgl),
tsgl 257 crypto/algif_aead.c if (!areq->tsgl) {
tsgl 261 crypto/algif_aead.c sg_init_table(areq->tsgl, areq->tsgl_entries);
tsgl 264 crypto/algif_aead.c af_alg_pull_tsgl(sk, processed, areq->tsgl, processed - as);
tsgl 273 crypto/algif_aead.c areq->tsgl);
tsgl 276 crypto/algif_aead.c rsgl_src = areq->tsgl;
tsgl 99 crypto/algif_skcipher.c areq->tsgl = sock_kmalloc(sk, array_size(sizeof(*areq->tsgl),
tsgl 102 crypto/algif_skcipher.c if (!areq->tsgl) {
tsgl 106 crypto/algif_skcipher.c sg_init_table(areq->tsgl, areq->tsgl_entries);
tsgl 107 crypto/algif_skcipher.c af_alg_pull_tsgl(sk, len, areq->tsgl, 0);
tsgl 111 crypto/algif_skcipher.c skcipher_request_set_crypt(&areq->cra_u.skcipher_req, areq->tsgl,
tsgl 499 crypto/testmgr.c static int init_test_sglist(struct test_sglist *tsgl)
tsgl 501 crypto/testmgr.c return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
tsgl 504 crypto/testmgr.c static void destroy_test_sglist(struct test_sglist *tsgl)
tsgl 506 crypto/testmgr.c return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
tsgl 527 crypto/testmgr.c static int build_test_sglist(struct test_sglist *tsgl,
tsgl 542 crypto/testmgr.c BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
tsgl 547 crypto/testmgr.c tsgl->nents = 0;
tsgl 555 crypto/testmgr.c partitions[tsgl->nents].div = &divs[i];
tsgl 556 crypto/testmgr.c partitions[tsgl->nents].length = len_this_sg;
tsgl 557 crypto/testmgr.c tsgl->nents++;
tsgl 561 crypto/testmgr.c if (tsgl->nents == 0) {
tsgl 562 crypto/testmgr.c partitions[tsgl->nents].div = &divs[0];
tsgl 563 crypto/testmgr.c partitions[tsgl->nents].length = 0;
tsgl 564 crypto/testmgr.c tsgl->nents++;
tsgl 566 crypto/testmgr.c partitions[tsgl->nents - 1].length += len_remaining;
tsgl 569 crypto/testmgr.c sg_init_table(tsgl->sgl, tsgl->nents);
tsgl 570 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) {
tsgl 584 crypto/testmgr.c addr = &tsgl->bufs[i][offset];
tsgl 585 crypto/testmgr.c sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);
tsgl 605 crypto/testmgr.c sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
tsgl 606 crypto/testmgr.c tsgl->sgl_ptr = tsgl->sgl;
tsgl 607 crypto/testmgr.c memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
tsgl 622 crypto/testmgr.c static int verify_correct_output(const struct test_sglist *tsgl,
tsgl 630 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) {
tsgl 631 crypto/testmgr.c struct scatterlist *sg = &tsgl->sgl_ptr[i];
tsgl 660 crypto/testmgr.c static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
tsgl 664 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) {
tsgl 665 crypto/testmgr.c if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
tsgl 667 crypto/testmgr.c if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
tsgl 669 crypto/testmgr.c if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
tsgl 1035 crypto/testmgr.c static int build_hash_sglist(struct test_sglist *tsgl,
tsgl 1047 crypto/testmgr.c return build_test_sglist(tsgl, cfg->src_divs, alignmask, vec->psize,
tsgl 1092 crypto/testmgr.c struct test_sglist *tsgl,
tsgl 1123 crypto/testmgr.c err = build_hash_sglist(tsgl, vec, cfg, alignmask, divs);
tsgl 1138 crypto/testmgr.c if (tsgl->nents != 1)
tsgl 1142 crypto/testmgr.c err = crypto_shash_digest(desc, sg_data(&tsgl->sgl[0]),
tsgl 1143 crypto/testmgr.c tsgl->sgl[0].length, result);
tsgl 1173 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) {
tsgl 1174 crypto/testmgr.c if (i + 1 == tsgl->nents &&
tsgl 1178 crypto/testmgr.c err = crypto_shash_finup(desc, sg_data(&tsgl->sgl[i]),
tsgl 1179 crypto/testmgr.c tsgl->sgl[i].length, result);
tsgl 1190 crypto/testmgr.c err = crypto_shash_update(desc, sg_data(&tsgl->sgl[i]),
tsgl 1191 crypto/testmgr.c tsgl->sgl[i].length);
tsgl 1275 crypto/testmgr.c struct test_sglist *tsgl,
tsgl 1310 crypto/testmgr.c err = build_hash_sglist(tsgl, vec, cfg, alignmask, divs);
tsgl 1327 crypto/testmgr.c ahash_request_set_crypt(req, tsgl->sgl, result, vec->psize);
tsgl 1357 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) {
tsgl 1401 crypto/testmgr.c pending_sgl = &tsgl->sgl[i];
tsgl 1402 crypto/testmgr.c pending_len += tsgl->sgl[i].length;
tsgl 1441 crypto/testmgr.c struct test_sglist *tsgl,
tsgl 1453 crypto/testmgr.c err = test_shash_vec_cfg(driver, vec, vec_name, cfg, desc, tsgl,
tsgl 1459 crypto/testmgr.c return test_ahash_vec_cfg(driver, vec, vec_name, cfg, req, tsgl,
tsgl 1465 crypto/testmgr.c struct shash_desc *desc, struct test_sglist *tsgl,
tsgl 1477 crypto/testmgr.c req, desc, tsgl, hashstate);
tsgl 1491 crypto/testmgr.c req, desc, tsgl, hashstate);
tsgl 1552 crypto/testmgr.c struct test_sglist *tsgl,
tsgl 1647 crypto/testmgr.c req, desc, tsgl, hashstate);
tsgl 1668 crypto/testmgr.c struct test_sglist *tsgl,
tsgl 1717 crypto/testmgr.c struct test_sglist *tsgl = NULL;
tsgl 1751 crypto/testmgr.c tsgl = kmalloc(sizeof(*tsgl), GFP_KERNEL);
tsgl 1752 crypto/testmgr.c if (!tsgl || init_test_sglist(tsgl) != 0) {
tsgl 1755 crypto/testmgr.c kfree(tsgl);
tsgl 1756 crypto/testmgr.c tsgl = NULL;
tsgl 1773 crypto/testmgr.c err = test_hash_vec(driver, &vecs[i], i, req, desc, tsgl,
tsgl 1780 crypto/testmgr.c desc, tsgl, hashstate);
tsgl 1783 crypto/testmgr.c if (tsgl) {
tsgl 1784 crypto/testmgr.c destroy_test_sglist(tsgl);
tsgl 1785 crypto/testmgr.c kfree(tsgl);
tsgl 103 include/crypto/if_alg.h struct scatterlist *tsgl;
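
The crypto/af_alg.c and crypto/algif_skcipher.c hits above trace the transmit-side path: data written to an AF_ALG socket is queued on the socket's tsgl_list, af_alg_pull_tsgl() moves it into the per-request areq->tsgl, and that scatterlist is handed to skcipher_request_set_crypt(); af_alg_free_areq_sgls() later releases it with sock_kfree_s(). The following is a minimal userspace sketch, not taken from the listed files, that drives this path through the documented AF_ALG socket interface. It assumes a kernel with the skcipher user API and "cbc(aes)" available; the all-zero key and IV are demo values only and error checking is omitted for brevity.

/*
 * Hedged sketch: push one 16-byte block through cbc(aes) over AF_ALG.
 * The plaintext sent with sendmsg() is what algif_skcipher.c later pulls
 * into areq->tsgl via af_alg_pull_tsgl() before running the cipher.
 */
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

#ifndef SOL_ALG
#define SOL_ALG 279		/* older libc headers may not define it */
#endif

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "skcipher",
		.salg_name   = "cbc(aes)",
	};
	unsigned char key[16] = { 0 };	/* demo key: all zeroes */
	unsigned char iv[16]  = { 0 };	/* demo IV: all zeroes */
	unsigned char pt[16]  = "0123456789abcde";
	unsigned char ct[16];
	char cbuf[CMSG_SPACE(4) + CMSG_SPACE(sizeof(struct af_alg_iv) + 16)] = { 0 };
	struct msghdr msg = { 0 };
	struct iovec iov = { .iov_base = pt, .iov_len = sizeof(pt) };
	struct cmsghdr *cmsg;
	struct af_alg_iv *algiv;
	int tfmfd, opfd, i;

	tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
	bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
	setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, sizeof(key));
	opfd = accept(tfmfd, NULL, 0);

	msg.msg_control = cbuf;
	msg.msg_controllen = sizeof(cbuf);
	msg.msg_iov = &iov;
	msg.msg_iovlen = 1;

	/* first cmsg: select the encrypt operation */
	cmsg = CMSG_FIRSTHDR(&msg);
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_OP;
	cmsg->cmsg_len = CMSG_LEN(4);
	*(__u32 *)CMSG_DATA(cmsg) = ALG_OP_ENCRYPT;

	/* second cmsg: pass the IV */
	cmsg = CMSG_NXTHDR(&msg, cmsg);
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_IV;
	cmsg->cmsg_len = CMSG_LEN(sizeof(struct af_alg_iv) + 16);
	algiv = (struct af_alg_iv *)CMSG_DATA(cmsg);
	algiv->ivlen = 16;
	memcpy(algiv->iv, iv, 16);

	sendmsg(opfd, &msg, 0);		/* plaintext lands on the socket's tsgl_list */
	read(opfd, ct, sizeof(ct));	/* recvmsg path runs the cipher over areq->tsgl */

	for (i = 0; i < 16; i++)
		printf("%02x", ct[i]);
	printf("\n");

	close(opfd);
	close(tfmfd);
	return 0;
}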
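
The crypto/testmgr.c hits (init_test_sglist(), build_test_sglist(), sg_set_buf(), sg_mark_end()) all use the generic scatterlist helpers to describe the test buffers. As a rough, self-contained illustration of that helper pattern only, not of testmgr's actual partitioning logic, a toy kernel module could build and tear down a small table as below; the module name, entry count, and 512-byte buffer size are invented for the example.

/* Hypothetical module: build a 3-entry scatterlist over kmalloc'd buffers,
 * mirroring the sg_init_table()/sg_set_buf()/sg_mark_end() sequence that
 * build_test_sglist() applies to tsgl->sgl and tsgl->bufs.
 */
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/scatterlist.h>

#define DEMO_NENTS	3
#define DEMO_BUFLEN	512

static struct scatterlist demo_sgl[DEMO_NENTS];
static void *demo_bufs[DEMO_NENTS];

static int __init sgl_demo_init(void)
{
	unsigned int i;

	sg_init_table(demo_sgl, DEMO_NENTS);	/* zero the table, terminate last entry */
	for (i = 0; i < DEMO_NENTS; i++) {
		demo_bufs[i] = kmalloc(DEMO_BUFLEN, GFP_KERNEL);
		if (!demo_bufs[i])
			goto err_free;
		sg_set_buf(&demo_sgl[i], demo_bufs[i], DEMO_BUFLEN);
	}
	sg_mark_end(&demo_sgl[DEMO_NENTS - 1]);	/* explicit terminator, as in testmgr.c */
	pr_info("sgl_demo: built %u-entry scatterlist\n", DEMO_NENTS);
	return 0;

err_free:
	while (i--)
		kfree(demo_bufs[i]);
	return -ENOMEM;
}

static void __exit sgl_demo_exit(void)
{
	unsigned int i;

	for (i = 0; i < DEMO_NENTS; i++)
		kfree(demo_bufs[i]);
}

module_init(sgl_demo_init);
module_exit(sgl_demo_exit);
MODULE_LICENSE("GPL");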