src_nents         229 drivers/crypto/bcm/cipher.c 				 rctx->src_nents, chunksize);
src_nents         352 drivers/crypto/bcm/cipher.c 	rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize);
src_nents         477 drivers/crypto/bcm/cipher.c 	tx_frag_num += rctx->src_nents;
src_nents         643 drivers/crypto/bcm/cipher.c 					 rctx->src_nents, new_data_len);
src_nents         805 drivers/crypto/bcm/cipher.c 	rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip,
src_nents         908 drivers/crypto/bcm/cipher.c 	tx_frag_num += rctx->src_nents;
src_nents        1255 drivers/crypto/bcm/cipher.c 					 rctx->src_nents, datalen);
src_nents        1381 drivers/crypto/bcm/cipher.c 	rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize);
src_nents        1540 drivers/crypto/bcm/cipher.c 	tx_frag_num += rctx->src_nents;
src_nents        1772 drivers/crypto/bcm/cipher.c 	rctx->src_nents = 0;
src_nents        1986 drivers/crypto/bcm/cipher.c 	rctx->src_nents = 0;
src_nents        2742 drivers/crypto/bcm/cipher.c 	rctx->src_nents = 0;
src_nents         284 drivers/crypto/bcm/cipher.h 	int src_nents;		/* Number of src entries with data */
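The Broadcom SPU hits above size rctx->src_nents per chunk with spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize). A minimal, self-contained sketch of that kind of counting (an illustrative helper, not the driver's spu_sg_count()):

	#include <linux/scatterlist.h>

	/*
	 * Illustrative only: count how many scatterlist entries are needed to
	 * cover 'nbytes' of payload starting 'skip' bytes into the list. The
	 * result is what ends up in fields such as rctx->src_nents above.
	 */
	static int example_sg_count(struct scatterlist *sg, unsigned int skip,
				    unsigned int nbytes)
	{
		int nents = 0;

		/* Walk past entries fully consumed by the skip offset. */
		while (sg && skip >= sg->length) {
			skip -= sg->length;
			sg = sg_next(sg);
		}

		/* Count entries until the requested payload is covered. */
		while (sg && nbytes) {
			unsigned int avail = sg->length - skip;

			skip = 0;
			nents++;
			if (avail >= nbytes)
				break;
			nbytes -= avail;
			sg = sg_next(sg);
		}

		return nents;
	}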
src_nents         885 drivers/crypto/caam/caamalg.c 	int src_nents;
src_nents         909 drivers/crypto/caam/caamalg.c 	int src_nents;
src_nents         921 drivers/crypto/caam/caamalg.c 		       struct scatterlist *dst, int src_nents,
src_nents         927 drivers/crypto/caam/caamalg.c 		if (src_nents)
src_nents         928 drivers/crypto/caam/caamalg.c 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
src_nents         932 drivers/crypto/caam/caamalg.c 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
src_nents         947 drivers/crypto/caam/caamalg.c 		   edesc->src_nents, edesc->dst_nents, 0, 0,
src_nents         958 drivers/crypto/caam/caamalg.c 		   edesc->src_nents, edesc->dst_nents,
src_nents        1033 drivers/crypto/caam/caamalg.c 				     edesc->src_nents > 1 ? 100 : ivsize, 1);
src_nents        1282 drivers/crypto/caam/caamalg.c 	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
src_nents        1286 drivers/crypto/caam/caamalg.c 		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
src_nents        1330 drivers/crypto/caam/caamalg.c 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
src_nents        1340 drivers/crypto/caam/caamalg.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents        1341 drivers/crypto/caam/caamalg.c 		if (unlikely(src_nents < 0)) {
src_nents        1344 drivers/crypto/caam/caamalg.c 			return ERR_PTR(src_nents);
src_nents        1357 drivers/crypto/caam/caamalg.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents        1358 drivers/crypto/caam/caamalg.c 		if (unlikely(src_nents < 0)) {
src_nents        1361 drivers/crypto/caam/caamalg.c 			return ERR_PTR(src_nents);
src_nents        1366 drivers/crypto/caam/caamalg.c 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents        1374 drivers/crypto/caam/caamalg.c 		if (src_nents) {
src_nents        1376 drivers/crypto/caam/caamalg.c 						      src_nents, DMA_TO_DEVICE);
src_nents        1392 drivers/crypto/caam/caamalg.c 				dma_unmap_sg(jrdev, req->src, src_nents,
src_nents        1417 drivers/crypto/caam/caamalg.c 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1422 drivers/crypto/caam/caamalg.c 	edesc->src_nents = src_nents;
src_nents        1687 drivers/crypto/caam/caamalg.c 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
src_nents        1694 drivers/crypto/caam/caamalg.c 	src_nents = sg_nents_for_len(req->src, req->cryptlen);
src_nents        1695 drivers/crypto/caam/caamalg.c 	if (unlikely(src_nents < 0)) {
src_nents        1698 drivers/crypto/caam/caamalg.c 		return ERR_PTR(src_nents);
src_nents        1711 drivers/crypto/caam/caamalg.c 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents        1718 drivers/crypto/caam/caamalg.c 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents        1728 drivers/crypto/caam/caamalg.c 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        1770 drivers/crypto/caam/caamalg.c 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1775 drivers/crypto/caam/caamalg.c 	edesc->src_nents = src_nents;
src_nents        1791 drivers/crypto/caam/caamalg.c 			caam_unmap(jrdev, req->src, req->dst, src_nents,
src_nents        1821 drivers/crypto/caam/caamalg.c 			caam_unmap(jrdev, req->src, req->dst, src_nents,
src_nents         800 drivers/crypto/caam/caamalg_qi.c 	int src_nents;
src_nents         822 drivers/crypto/caam/caamalg_qi.c 	int src_nents;
src_nents         870 drivers/crypto/caam/caamalg_qi.c 		       struct scatterlist *dst, int src_nents,
src_nents         876 drivers/crypto/caam/caamalg_qi.c 		if (src_nents)
src_nents         877 drivers/crypto/caam/caamalg_qi.c 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
src_nents         881 drivers/crypto/caam/caamalg_qi.c 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
src_nents         897 drivers/crypto/caam/caamalg_qi.c 	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
src_nents         909 drivers/crypto/caam/caamalg_qi.c 	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
src_nents         948 drivers/crypto/caam/caamalg_qi.c 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
src_nents         974 drivers/crypto/caam/caamalg_qi.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents         975 drivers/crypto/caam/caamalg_qi.c 		if (unlikely(src_nents < 0)) {
src_nents         979 drivers/crypto/caam/caamalg_qi.c 			return ERR_PTR(src_nents);
src_nents         982 drivers/crypto/caam/caamalg_qi.c 		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
src_nents         993 drivers/crypto/caam/caamalg_qi.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents         994 drivers/crypto/caam/caamalg_qi.c 		if (unlikely(src_nents < 0)) {
src_nents         998 drivers/crypto/caam/caamalg_qi.c 			return ERR_PTR(src_nents);
src_nents        1009 drivers/crypto/caam/caamalg_qi.c 		if (src_nents) {
src_nents        1011 drivers/crypto/caam/caamalg_qi.c 						      src_nents, DMA_TO_DEVICE);
src_nents        1027 drivers/crypto/caam/caamalg_qi.c 				dma_unmap_sg(qidev, req->src, src_nents,
src_nents        1067 drivers/crypto/caam/caamalg_qi.c 		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1082 drivers/crypto/caam/caamalg_qi.c 			caam_unmap(qidev, req->src, req->dst, src_nents,
src_nents        1089 drivers/crypto/caam/caamalg_qi.c 	edesc->src_nents = src_nents;
src_nents        1101 drivers/crypto/caam/caamalg_qi.c 		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
src_nents        1123 drivers/crypto/caam/caamalg_qi.c 		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
src_nents        1226 drivers/crypto/caam/caamalg_qi.c 			     edesc->src_nents > 1 ? 100 : ivsize, 1);
src_nents        1254 drivers/crypto/caam/caamalg_qi.c 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
src_nents        1267 drivers/crypto/caam/caamalg_qi.c 	src_nents = sg_nents_for_len(req->src, req->cryptlen);
src_nents        1268 drivers/crypto/caam/caamalg_qi.c 	if (unlikely(src_nents < 0)) {
src_nents        1271 drivers/crypto/caam/caamalg_qi.c 		return ERR_PTR(src_nents);
src_nents        1282 drivers/crypto/caam/caamalg_qi.c 		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
src_nents        1293 drivers/crypto/caam/caamalg_qi.c 			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        1297 drivers/crypto/caam/caamalg_qi.c 		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
src_nents        1326 drivers/crypto/caam/caamalg_qi.c 		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1335 drivers/crypto/caam/caamalg_qi.c 		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1348 drivers/crypto/caam/caamalg_qi.c 		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1354 drivers/crypto/caam/caamalg_qi.c 	edesc->src_nents = src_nents;
src_nents        1375 drivers/crypto/caam/caamalg_qi.c 		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
src_nents         142 drivers/crypto/caam/caamalg_qi2.c 		       struct scatterlist *dst, int src_nents,
src_nents         148 drivers/crypto/caam/caamalg_qi2.c 		if (src_nents)
src_nents         149 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
src_nents         153 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
src_nents         360 drivers/crypto/caam/caamalg_qi2.c 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
src_nents         381 drivers/crypto/caam/caamalg_qi2.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents         382 drivers/crypto/caam/caamalg_qi2.c 		if (unlikely(src_nents < 0)) {
src_nents         386 drivers/crypto/caam/caamalg_qi2.c 			return ERR_PTR(src_nents);
src_nents         397 drivers/crypto/caam/caamalg_qi2.c 		if (src_nents) {
src_nents         398 drivers/crypto/caam/caamalg_qi2.c 			mapped_src_nents = dma_map_sg(dev, req->src, src_nents,
src_nents         414 drivers/crypto/caam/caamalg_qi2.c 				dma_unmap_sg(dev, req->src, src_nents,
src_nents         426 drivers/crypto/caam/caamalg_qi2.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents         427 drivers/crypto/caam/caamalg_qi2.c 		if (unlikely(src_nents < 0)) {
src_nents         431 drivers/crypto/caam/caamalg_qi2.c 			return ERR_PTR(src_nents);
src_nents         434 drivers/crypto/caam/caamalg_qi2.c 		mapped_src_nents = dma_map_sg(dev, req->src, src_nents,
src_nents         474 drivers/crypto/caam/caamalg_qi2.c 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents         489 drivers/crypto/caam/caamalg_qi2.c 			caam_unmap(dev, req->src, req->dst, src_nents,
src_nents         496 drivers/crypto/caam/caamalg_qi2.c 	edesc->src_nents = src_nents;
src_nents         513 drivers/crypto/caam/caamalg_qi2.c 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents,
src_nents         535 drivers/crypto/caam/caamalg_qi2.c 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents,
src_nents        1126 drivers/crypto/caam/caamalg_qi2.c 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
src_nents        1134 drivers/crypto/caam/caamalg_qi2.c 	src_nents = sg_nents_for_len(req->src, req->cryptlen);
src_nents        1135 drivers/crypto/caam/caamalg_qi2.c 	if (unlikely(src_nents < 0)) {
src_nents        1138 drivers/crypto/caam/caamalg_qi2.c 		return ERR_PTR(src_nents);
src_nents        1149 drivers/crypto/caam/caamalg_qi2.c 		mapped_src_nents = dma_map_sg(dev, req->src, src_nents,
src_nents        1160 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_sg(dev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        1164 drivers/crypto/caam/caamalg_qi2.c 		mapped_src_nents = dma_map_sg(dev, req->src, src_nents,
src_nents        1193 drivers/crypto/caam/caamalg_qi2.c 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1202 drivers/crypto/caam/caamalg_qi2.c 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1215 drivers/crypto/caam/caamalg_qi2.c 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0,
src_nents        1221 drivers/crypto/caam/caamalg_qi2.c 	edesc->src_nents = src_nents;
src_nents        1239 drivers/crypto/caam/caamalg_qi2.c 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents,
src_nents        1271 drivers/crypto/caam/caamalg_qi2.c 	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
src_nents        1283 drivers/crypto/caam/caamalg_qi2.c 	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
src_nents        1414 drivers/crypto/caam/caamalg_qi2.c 			     edesc->src_nents > 1 ? 100 : ivsize, 1);
src_nents        1452 drivers/crypto/caam/caamalg_qi2.c 			     edesc->src_nents > 1 ? 100 : ivsize, 1);
src_nents        3316 drivers/crypto/caam/caamalg_qi2.c 	if (edesc->src_nents)
src_nents        3317 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);
src_nents        3472 drivers/crypto/caam/caamalg_qi2.c 	int src_nents, mapped_nents, qm_sg_bytes, qm_sg_src_index;
src_nents        3484 drivers/crypto/caam/caamalg_qi2.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents        3485 drivers/crypto/caam/caamalg_qi2.c 		if (src_nents < 0) {
src_nents        3487 drivers/crypto/caam/caamalg_qi2.c 			return src_nents;
src_nents        3490 drivers/crypto/caam/caamalg_qi2.c 		if (src_nents) {
src_nents        3491 drivers/crypto/caam/caamalg_qi2.c 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
src_nents        3504 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_sg(ctx->dev, req->src, src_nents,
src_nents        3509 drivers/crypto/caam/caamalg_qi2.c 		edesc->src_nents = src_nents;
src_nents        3668 drivers/crypto/caam/caamalg_qi2.c 	int src_nents, mapped_nents;
src_nents        3674 drivers/crypto/caam/caamalg_qi2.c 	src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents        3675 drivers/crypto/caam/caamalg_qi2.c 	if (src_nents < 0) {
src_nents        3677 drivers/crypto/caam/caamalg_qi2.c 		return src_nents;
src_nents        3680 drivers/crypto/caam/caamalg_qi2.c 	if (src_nents) {
src_nents        3681 drivers/crypto/caam/caamalg_qi2.c 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
src_nents        3694 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        3698 drivers/crypto/caam/caamalg_qi2.c 	edesc->src_nents = src_nents;
src_nents        3761 drivers/crypto/caam/caamalg_qi2.c 	int src_nents, mapped_nents;
src_nents        3767 drivers/crypto/caam/caamalg_qi2.c 	src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents        3768 drivers/crypto/caam/caamalg_qi2.c 	if (src_nents < 0) {
src_nents        3770 drivers/crypto/caam/caamalg_qi2.c 		return src_nents;
src_nents        3773 drivers/crypto/caam/caamalg_qi2.c 	if (src_nents) {
src_nents        3774 drivers/crypto/caam/caamalg_qi2.c 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
src_nents        3787 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        3791 drivers/crypto/caam/caamalg_qi2.c 	edesc->src_nents = src_nents;
src_nents        3933 drivers/crypto/caam/caamalg_qi2.c 	int qm_sg_bytes, src_nents, mapped_nents;
src_nents        3944 drivers/crypto/caam/caamalg_qi2.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents        3945 drivers/crypto/caam/caamalg_qi2.c 		if (src_nents < 0) {
src_nents        3947 drivers/crypto/caam/caamalg_qi2.c 			return src_nents;
src_nents        3950 drivers/crypto/caam/caamalg_qi2.c 		if (src_nents) {
src_nents        3951 drivers/crypto/caam/caamalg_qi2.c 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
src_nents        3964 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_sg(ctx->dev, req->src, src_nents,
src_nents        3969 drivers/crypto/caam/caamalg_qi2.c 		edesc->src_nents = src_nents;
src_nents        4059 drivers/crypto/caam/caamalg_qi2.c 	int qm_sg_bytes, src_nents, mapped_nents;
src_nents        4065 drivers/crypto/caam/caamalg_qi2.c 	src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents        4066 drivers/crypto/caam/caamalg_qi2.c 	if (src_nents < 0) {
src_nents        4068 drivers/crypto/caam/caamalg_qi2.c 		return src_nents;
src_nents        4071 drivers/crypto/caam/caamalg_qi2.c 	if (src_nents) {
src_nents        4072 drivers/crypto/caam/caamalg_qi2.c 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
src_nents        4085 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        4089 drivers/crypto/caam/caamalg_qi2.c 	edesc->src_nents = src_nents;
src_nents        4157 drivers/crypto/caam/caamalg_qi2.c 	int src_nents, mapped_nents;
src_nents        4169 drivers/crypto/caam/caamalg_qi2.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents        4170 drivers/crypto/caam/caamalg_qi2.c 		if (src_nents < 0) {
src_nents        4172 drivers/crypto/caam/caamalg_qi2.c 			return src_nents;
src_nents        4175 drivers/crypto/caam/caamalg_qi2.c 		if (src_nents) {
src_nents        4176 drivers/crypto/caam/caamalg_qi2.c 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
src_nents        4189 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_sg(ctx->dev, req->src, src_nents,
src_nents        4194 drivers/crypto/caam/caamalg_qi2.c 		edesc->src_nents = src_nents;
src_nents         110 drivers/crypto/caam/caamalg_qi2.h 	int src_nents;
src_nents         130 drivers/crypto/caam/caamalg_qi2.h 	int src_nents;
src_nents         147 drivers/crypto/caam/caamalg_qi2.h 	int src_nents;
src_nents         561 drivers/crypto/caam/caamhash.c 	int src_nents;
src_nents         573 drivers/crypto/caam/caamhash.c 	if (edesc->src_nents)
src_nents         574 drivers/crypto/caam/caamhash.c 		dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);
src_nents         796 drivers/crypto/caam/caamhash.c 	int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index;
src_nents         819 drivers/crypto/caam/caamhash.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents         820 drivers/crypto/caam/caamhash.c 		if (src_nents < 0) {
src_nents         822 drivers/crypto/caam/caamhash.c 			return src_nents;
src_nents         825 drivers/crypto/caam/caamhash.c 		if (src_nents) {
src_nents         826 drivers/crypto/caam/caamhash.c 			mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents         847 drivers/crypto/caam/caamhash.c 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
src_nents         851 drivers/crypto/caam/caamhash.c 		edesc->src_nents = src_nents;
src_nents         997 drivers/crypto/caam/caamhash.c 	int src_nents, mapped_nents;
src_nents        1002 drivers/crypto/caam/caamhash.c 	src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents        1003 drivers/crypto/caam/caamhash.c 	if (src_nents < 0) {
src_nents        1005 drivers/crypto/caam/caamhash.c 		return src_nents;
src_nents        1008 drivers/crypto/caam/caamhash.c 	if (src_nents) {
src_nents        1009 drivers/crypto/caam/caamhash.c 		mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents        1026 drivers/crypto/caam/caamhash.c 		dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        1032 drivers/crypto/caam/caamhash.c 	edesc->src_nents = src_nents;
src_nents        1076 drivers/crypto/caam/caamhash.c 	int src_nents, mapped_nents;
src_nents        1082 drivers/crypto/caam/caamhash.c 	src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents        1083 drivers/crypto/caam/caamhash.c 	if (src_nents < 0) {
src_nents        1085 drivers/crypto/caam/caamhash.c 		return src_nents;
src_nents        1088 drivers/crypto/caam/caamhash.c 	if (src_nents) {
src_nents        1089 drivers/crypto/caam/caamhash.c 		mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents        1104 drivers/crypto/caam/caamhash.c 		dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        1108 drivers/crypto/caam/caamhash.c 	edesc->src_nents = src_nents;
src_nents        1216 drivers/crypto/caam/caamhash.c 	int sec4_sg_bytes, src_nents, mapped_nents;
src_nents        1239 drivers/crypto/caam/caamhash.c 		src_nents = sg_nents_for_len(req->src, src_len);
src_nents        1240 drivers/crypto/caam/caamhash.c 		if (src_nents < 0) {
src_nents        1242 drivers/crypto/caam/caamhash.c 			return src_nents;
src_nents        1245 drivers/crypto/caam/caamhash.c 		if (src_nents) {
src_nents        1246 drivers/crypto/caam/caamhash.c 			mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents        1268 drivers/crypto/caam/caamhash.c 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        1272 drivers/crypto/caam/caamhash.c 		edesc->src_nents = src_nents;
src_nents        1347 drivers/crypto/caam/caamhash.c 	int sec4_sg_bytes, sec4_sg_src_index, src_nents, mapped_nents;
src_nents        1352 drivers/crypto/caam/caamhash.c 	src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents        1353 drivers/crypto/caam/caamhash.c 	if (src_nents < 0) {
src_nents        1355 drivers/crypto/caam/caamhash.c 		return src_nents;
src_nents        1358 drivers/crypto/caam/caamhash.c 	if (src_nents) {
src_nents        1359 drivers/crypto/caam/caamhash.c 		mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents        1378 drivers/crypto/caam/caamhash.c 		dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        1384 drivers/crypto/caam/caamhash.c 	edesc->src_nents = src_nents;
src_nents        1436 drivers/crypto/caam/caamhash.c 	int src_nents, mapped_nents;
src_nents        1455 drivers/crypto/caam/caamhash.c 		src_nents = sg_nents_for_len(req->src,
src_nents        1457 drivers/crypto/caam/caamhash.c 		if (src_nents < 0) {
src_nents        1459 drivers/crypto/caam/caamhash.c 			return src_nents;
src_nents        1462 drivers/crypto/caam/caamhash.c 		if (src_nents) {
src_nents        1463 drivers/crypto/caam/caamhash.c 			mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
src_nents        1483 drivers/crypto/caam/caamhash.c 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
src_nents        1487 drivers/crypto/caam/caamhash.c 		edesc->src_nents = src_nents;
src_nents          49 drivers/crypto/caam/caampkc.c 	dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);
src_nents         257 drivers/crypto/caam/caampkc.c 	int src_nents, dst_nents;
src_nents         284 drivers/crypto/caam/caampkc.c 	src_nents = sg_nents_for_len(req_ctx->fixup_src,
src_nents         288 drivers/crypto/caam/caampkc.c 	if (!diff_size && src_nents == 1)
src_nents         291 drivers/crypto/caam/caampkc.c 		sec4_sg_len = src_nents + !!diff_size;
src_nents         306 drivers/crypto/caam/caampkc.c 	sgc = dma_map_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE);
src_nents         332 drivers/crypto/caam/caampkc.c 	edesc->src_nents = src_nents;
src_nents         356 drivers/crypto/caam/caampkc.c 	dma_unmap_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE);
src_nents         386 drivers/crypto/caam/caampkc.c 	if (edesc->src_nents > 1) {
src_nents         389 drivers/crypto/caam/caampkc.c 		sec4_sg_index += edesc->src_nents;
src_nents         431 drivers/crypto/caam/caampkc.c 	if (edesc->src_nents > 1) {
src_nents         434 drivers/crypto/caam/caampkc.c 		sec4_sg_index += edesc->src_nents;
src_nents         496 drivers/crypto/caam/caampkc.c 	if (edesc->src_nents > 1) {
src_nents         499 drivers/crypto/caam/caampkc.c 		sec4_sg_index += edesc->src_nents;
src_nents         585 drivers/crypto/caam/caampkc.c 	if (edesc->src_nents > 1) {
src_nents         588 drivers/crypto/caam/caampkc.c 		sec4_sg_index += edesc->src_nents;
src_nents         124 drivers/crypto/caam/caampkc.h 	int src_nents;
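The CAAM hits above all follow the same count-then-map discipline: sg_nents_for_len() sizes the source list, dma_map_sg() maps it, the raw count is stashed in edesc->src_nents for the later dma_unmap_sg(), and the mapped count drives S/G table construction. A hedged sketch of that flow, using generic names rather than CAAM's extended-descriptor types:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>
	#include <linux/device.h>

	/*
	 * Sketch only: count and DMA-map a source scatterlist the way the
	 * caamalg/caamhash/caampkc call sites above do. 'src_nents' (the raw
	 * count) must be kept for dma_unmap_sg(); 'mapped_src_nents' (possibly
	 * smaller after IOMMU coalescing) is what S/G table building uses.
	 */
	static int example_map_src(struct device *dev, struct scatterlist *src,
				   unsigned int src_len, int *src_nents,
				   int *mapped_src_nents)
	{
		int nents, mapped = 0;

		nents = sg_nents_for_len(src, src_len);
		if (unlikely(nents < 0)) {
			dev_err(dev, "insufficient bytes in source S/G list\n");
			return nents;
		}

		if (nents) {
			mapped = dma_map_sg(dev, src, nents, DMA_TO_DEVICE);
			if (unlikely(!mapped)) {
				dev_err(dev, "unable to map source for DMA\n");
				return -ENOMEM;
			}
		}

		*src_nents = nents;
		*mapped_src_nents = mapped;
		return 0;
	}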
src_nents         357 drivers/crypto/ccp/ccp-dmaengine.c 					    unsigned int src_nents,
src_nents         376 drivers/crypto/ccp/ccp-dmaengine.c 	if (!dst_nents || !src_nents)
src_nents         393 drivers/crypto/ccp/ccp-dmaengine.c 			src_nents--;
src_nents         394 drivers/crypto/ccp/ccp-dmaengine.c 			if (!src_nents)
src_nents         564 drivers/crypto/marvell/cesa.h 	int src_nents;
src_nents         610 drivers/crypto/marvell/cesa.h 	int src_nents;
src_nents          64 drivers/crypto/marvell/cipher.c 		dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
src_nents          67 drivers/crypto/marvell/cipher.c 		dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
src_nents          92 drivers/crypto/marvell/cipher.c 	len = sg_pcopy_to_buffer(req->src, creq->src_nents,
src_nents         317 drivers/crypto/marvell/cipher.c 		ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
src_nents         329 drivers/crypto/marvell/cipher.c 		ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
src_nents         388 drivers/crypto/marvell/cipher.c 	dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
src_nents         421 drivers/crypto/marvell/cipher.c 	creq->src_nents = sg_nents_for_len(req->src, req->cryptlen);
src_nents         422 drivers/crypto/marvell/cipher.c 	if (creq->src_nents < 0) {
src_nents         424 drivers/crypto/marvell/cipher.c 		return creq->src_nents;
src_nents         102 drivers/crypto/marvell/hash.c 	dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);
src_nents         187 drivers/crypto/marvell/hash.c 		sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents,
src_nents         395 drivers/crypto/marvell/hash.c 		sg_pcopy_to_buffer(ahashreq->src, creq->src_nents,
src_nents         448 drivers/crypto/marvell/hash.c 		sg_pcopy_to_buffer(req->src, creq->src_nents,
src_nents         613 drivers/crypto/marvell/hash.c 	if (creq->src_nents) {
src_nents         614 drivers/crypto/marvell/hash.c 		ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
src_nents         719 drivers/crypto/marvell/hash.c 	dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);
src_nents         731 drivers/crypto/marvell/hash.c 	creq->src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents         732 drivers/crypto/marvell/hash.c 	if (creq->src_nents < 0) {
src_nents         734 drivers/crypto/marvell/hash.c 		return creq->src_nents;
src_nents         317 drivers/crypto/picoxcell_crypto.c 	int src_nents, dst_nents;
src_nents         325 drivers/crypto/picoxcell_crypto.c 	src_nents = sg_nents_for_len(areq->src, total);
src_nents         326 drivers/crypto/picoxcell_crypto.c 	if (src_nents < 0) {
src_nents         328 drivers/crypto/picoxcell_crypto.c 		return src_nents;
src_nents         330 drivers/crypto/picoxcell_crypto.c 	if (src_nents + 1 > MAX_DDT_LEN)
src_nents         340 drivers/crypto/picoxcell_crypto.c 		if (src_nents + 1 > MAX_DDT_LEN)
src_nents         356 drivers/crypto/picoxcell_crypto.c 		src_ents = dma_map_sg(engine->dev, areq->src, src_nents,
src_nents         365 drivers/crypto/picoxcell_crypto.c 			dma_unmap_sg(engine->dev, areq->src, src_nents,
src_nents         370 drivers/crypto/picoxcell_crypto.c 		src_ents = dma_map_sg(engine->dev, areq->src, src_nents,
src_nents          39 drivers/crypto/qce/ablkcipher.c 		dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
src_nents          73 drivers/crypto/qce/ablkcipher.c 	rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents          77 drivers/crypto/qce/ablkcipher.c 		rctx->dst_nents = rctx->src_nents;
src_nents          78 drivers/crypto/qce/ablkcipher.c 	if (rctx->src_nents < 0) {
src_nents          80 drivers/crypto/qce/ablkcipher.c 		return rctx->src_nents;
src_nents         118 drivers/crypto/qce/ablkcipher.c 		ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src);
src_nents         126 drivers/crypto/qce/ablkcipher.c 	ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, rctx->src_nents,
src_nents         144 drivers/crypto/qce/ablkcipher.c 		dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src);
src_nents          38 drivers/crypto/qce/cipher.h 	int src_nents;
src_nents          46 drivers/crypto/qce/sha.c 	dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
src_nents          86 drivers/crypto/qce/sha.c 	rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents          87 drivers/crypto/qce/sha.c 	if (rctx->src_nents < 0) {
src_nents          89 drivers/crypto/qce/sha.c 		return rctx->src_nents;
src_nents          92 drivers/crypto/qce/sha.c 	ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
src_nents         102 drivers/crypto/qce/sha.c 	ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents,
src_nents         120 drivers/crypto/qce/sha.c 	dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
src_nents          49 drivers/crypto/qce/sha.h 	int src_nents;
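The qce cipher hits above (and the marvell cipher ones earlier) also choose the DMA direction from whether the request is in-place: separate src and dst lists are mapped DMA_TO_DEVICE and DMA_FROM_DEVICE, while a shared list is mapped once as DMA_BIDIRECTIONAL, with the source unmapped again if the later step fails. A sketch with hypothetical names, not the drivers' code:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>
	#include <linux/types.h>

	static int example_map_for_cipher(struct device *dev,
					  struct scatterlist *src, int src_nents,
					  struct scatterlist *dst, int dst_nents)
	{
		bool diff_dst = src != dst;
		enum dma_data_direction dir_src = diff_dst ? DMA_TO_DEVICE
							   : DMA_BIDIRECTIONAL;

		if (!dma_map_sg(dev, src, src_nents, dir_src))
			return -ENOMEM;

		if (diff_dst && !dma_map_sg(dev, dst, dst_nents, DMA_FROM_DEVICE)) {
			/* Unwind the source mapping on error, as the drivers do. */
			dma_unmap_sg(dev, src, src_nents, dir_src);
			return -ENOMEM;
		}

		return 0;
	}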
src_nents         119 drivers/crypto/rockchip/rk3288_crypto.c 		if (!sg_pcopy_to_buffer(dev->first, dev->src_nents,
src_nents         210 drivers/crypto/rockchip/rk3288_crypto.h 	size_t				src_nents;
src_nents         260 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		sg_pcopy_to_buffer(dev->first, dev->src_nents, req->info,
src_nents         281 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	dev->src_nents = sg_nents(req->src);
src_nents         206 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	dev->src_nents = sg_nents(req->src);
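Several hits above (marvell hash, rockchip) consume source data with the CPU rather than describing it to the DMA engine, pulling bytes out of the scatterlist with sg_pcopy_to_buffer() and the saved src_nents. A minimal sketch with illustrative names:

	#include <linux/scatterlist.h>
	#include <linux/errno.h>

	/* Copy 'len' bytes out of the source list, starting 'skip' bytes in. */
	static int example_copy_from_src(struct scatterlist *src, int src_nents,
					 void *buf, size_t len, off_t skip)
	{
		/* sg_pcopy_to_buffer() returns the number of bytes it copied. */
		if (sg_pcopy_to_buffer(src, src_nents, buf, len, skip) != len)
			return -EINVAL;
		return 0;
	}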
src_nents         971 drivers/crypto/talitos.c 	unsigned int src_nents = edesc->src_nents ? : 1;
src_nents         981 drivers/crypto/talitos.c 		if (src_nents == 1 || !is_sec1)
src_nents         982 drivers/crypto/talitos.c 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
src_nents         986 drivers/crypto/talitos.c 	} else if (src_nents == 1 || !is_sec1) {
src_nents         987 drivers/crypto/talitos.c 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
src_nents        1222 drivers/crypto/talitos.c 	sg_count = edesc->src_nents ?: 1;
src_nents        1334 drivers/crypto/talitos.c 	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
src_nents        1349 drivers/crypto/talitos.c 		src_nents = sg_nents_for_len(src, src_len);
src_nents        1350 drivers/crypto/talitos.c 		if (src_nents < 0) {
src_nents        1354 drivers/crypto/talitos.c 		src_nents = (src_nents == 1) ? 0 : src_nents;
src_nents        1355 drivers/crypto/talitos.c 		dst_nents = dst ? src_nents : 0;
src_nents        1359 drivers/crypto/talitos.c 		src_nents = sg_nents_for_len(src, src_len);
src_nents        1360 drivers/crypto/talitos.c 		if (src_nents < 0) {
src_nents        1364 drivers/crypto/talitos.c 		src_nents = (src_nents == 1) ? 0 : src_nents;
src_nents        1380 drivers/crypto/talitos.c 	if (src_nents || dst_nents || !encrypt) {
src_nents        1382 drivers/crypto/talitos.c 			dma_len = (src_nents ? src_len : 0) +
src_nents        1385 drivers/crypto/talitos.c 			dma_len = (src_nents + dst_nents + 2) *
src_nents        1407 drivers/crypto/talitos.c 	edesc->src_nents = src_nents;
src_nents        1467 drivers/crypto/talitos.c 	    ((!edesc->src_nents && !edesc->dst_nents) ||
src_nents        1487 drivers/crypto/talitos.c 	sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
src_nents        1595 drivers/crypto/talitos.c 	sg_count = edesc->src_nents ?: 1;
src_nents        1619 drivers/crypto/talitos.c 			     sg_count, 0, (edesc->src_nents + 1));
src_nents        1822 drivers/crypto/talitos.c 	sg_count = edesc->src_nents ?: 1;
src_nents          61 drivers/crypto/talitos.h 	int src_nents;
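The talitos hits add one more convention: src_nents is stored as 0 when the source fits in a single contiguous entry and no S/G table is needed, so consumers recover the usable count with the ?: operator, as in the drivers/crypto/talitos.c lines above. A one-line illustration (hypothetical helper name):

	/* Illustrative: 0 means "one contiguous entry, no S/G table needed". */
	static inline int example_src_sg_count(int src_nents)
	{
		return src_nents ? : 1;
	}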
src_nents         352 drivers/crypto/virtio/virtio_crypto_algs.c 	int src_nents, dst_nents;
src_nents         362 drivers/crypto/virtio/virtio_crypto_algs.c 	src_nents = sg_nents_for_len(req->src, req->nbytes);
src_nents         363 drivers/crypto/virtio/virtio_crypto_algs.c 	if (src_nents < 0) {
src_nents         365 drivers/crypto/virtio/virtio_crypto_algs.c 		return src_nents;
src_nents         371 drivers/crypto/virtio/virtio_crypto_algs.c 			src_nents, dst_nents);
src_nents         374 drivers/crypto/virtio/virtio_crypto_algs.c 	sg_total = src_nents + dst_nents + 3;
src_nents         454 drivers/crypto/virtio/virtio_crypto_algs.c 	for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--)
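Finally, the teardown side mirrors the mapping side across the DMA-mapping drivers listed above: dma_unmap_sg() must be called with the same entry count that was handed to dma_map_sg() (the saved src_nents/dst_nents, not the mapped counts), and in-place requests are unmapped once as DMA_BIDIRECTIONAL, as the caam_unmap() and talitos unmap hits show. A hedged sketch with an illustrative name:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	/*
	 * Sketch of a caam_unmap()-style teardown. Always pass the original
	 * (pre-mapping) entry counts back to dma_unmap_sg().
	 */
	static void example_unmap(struct device *dev, struct scatterlist *src,
				  struct scatterlist *dst, int src_nents,
				  int dst_nents)
	{
		if (dst != src) {
			if (src_nents)
				dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
			if (dst_nents)
				dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
		} else {
			dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
		}
	}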