in_nents   362 drivers/crypto/ccree/cc_buffer_mgr.c    areq_ctx->in_nents = 0;
in_nents   389 drivers/crypto/ccree/cc_buffer_mgr.c    dma_unmap_sg(dev, src, req_ctx->in_nents, DMA_BIDIRECTIONAL);
in_nents   434 drivers/crypto/ccree/cc_buffer_mgr.c    rc = cc_map_sg(dev, src, nbytes, DMA_BIDIRECTIONAL, &req_ctx->in_nents,
in_nents   445 drivers/crypto/ccree/cc_buffer_mgr.c    cc_add_sg_entry(dev, &sg_data, req_ctx->in_nents, src,
in_nents   460 drivers/crypto/ccree/cc_buffer_mgr.c    cc_add_sg_entry(dev, &sg_data, req_ctx->in_nents, src,
in_nents  1223 drivers/crypto/ccree/cc_buffer_mgr.c    areq_ctx->in_nents = 0;
in_nents  1241 drivers/crypto/ccree/cc_buffer_mgr.c    &areq_ctx->in_nents, LLI_MAX_NUM_OF_DATA_ENTRIES,
in_nents  1261 drivers/crypto/ccree/cc_buffer_mgr.c    cc_add_sg_entry(dev, &sg_data, areq_ctx->in_nents, src, nbytes,
in_nents  1274 drivers/crypto/ccree/cc_buffer_mgr.c    dma_unmap_sg(dev, src, areq_ctx->in_nents, DMA_TO_DEVICE);
in_nents  1310 drivers/crypto/ccree/cc_buffer_mgr.c    areq_ctx->in_nents = 0;
in_nents  1315 drivers/crypto/ccree/cc_buffer_mgr.c    areq_ctx->in_nents = sg_nents_for_len(src, nbytes);
in_nents  1316 drivers/crypto/ccree/cc_buffer_mgr.c    sg_copy_to_buffer(src, areq_ctx->in_nents,
in_nents  1353 drivers/crypto/ccree/cc_buffer_mgr.c    DMA_TO_DEVICE, &areq_ctx->in_nents,
in_nents  1374 drivers/crypto/ccree/cc_buffer_mgr.c    cc_add_sg_entry(dev, &sg_data, areq_ctx->in_nents, src,
in_nents  1386 drivers/crypto/ccree/cc_buffer_mgr.c    dma_unmap_sg(dev, src, areq_ctx->in_nents, DMA_TO_DEVICE);
in_nents  1413 drivers/crypto/ccree/cc_buffer_mgr.c    if (src && areq_ctx->in_nents) {
in_nents  1417 drivers/crypto/ccree/cc_buffer_mgr.c    areq_ctx->in_nents, DMA_TO_DEVICE);
in_nents   817 drivers/crypto/ccree/cc_cipher.c        (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
in_nents    19 drivers/crypto/ccree/cc_cipher.h        u32 in_nents;
in_nents    54 drivers/crypto/ccree/cc_hash.h          u32 in_nents;
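
As the references above suggest, in_nents (declared as a u32 member in cc_cipher.h:19 and cc_hash.h:54) caches the number of scatterlist entries covering the request's source data, so the same count can be reused for DMA mapping, for building the MLLI table via cc_add_sg_entry(), and finally for dma_unmap_sg(). The fragment below is a minimal, hedged sketch of that pattern using only generic kernel DMA/scatterlist APIs; the struct and function names (example_req_ctx, example_map_src, example_unmap_src) are hypothetical and are not the driver's actual cc_map_sg()/cc_unmap_*() code.

/*
 * Sketch only: shows why in_nents is stored, i.e. dma_unmap_sg() must be
 * given the same entry count that was handed to dma_map_sg().
 */
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct example_req_ctx {            /* hypothetical stand-in for the request ctx */
	u32 in_nents;               /* scatterlist entries covering the payload */
};

static int example_map_src(struct device *dev, struct scatterlist *src,
			   unsigned int nbytes, struct example_req_ctx *ctx)
{
	int nents;

	ctx->in_nents = 0;          /* mirrors the "in_nents = 0" resets above */
	if (!nbytes)
		return 0;

	nents = sg_nents_for_len(src, nbytes);
	if (nents < 0)
		return nents;

	if (!dma_map_sg(dev, src, nents, DMA_TO_DEVICE))
		return -ENOMEM;

	ctx->in_nents = nents;      /* remembered for LLI build and unmap */
	return 0;
}

static void example_unmap_src(struct device *dev, struct scatterlist *src,
			      struct example_req_ctx *ctx)
{
	if (src && ctx->in_nents)
		dma_unmap_sg(dev, src, ctx->in_nents, DMA_TO_DEVICE);
}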