buff_sg   351 drivers/crypto/ccree/cc_buffer_mgr.c  sg_init_one(areq_ctx->buff_sg, curr_buff, curr_buff_cnt);
buff_sg   352 drivers/crypto/ccree/cc_buffer_mgr.c  if (dma_map_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE) != 1) {
buff_sg   357 drivers/crypto/ccree/cc_buffer_mgr.c  &sg_dma_address(areq_ctx->buff_sg), sg_page(areq_ctx->buff_sg),
buff_sg   358 drivers/crypto/ccree/cc_buffer_mgr.c  sg_virt(areq_ctx->buff_sg), areq_ctx->buff_sg->offset,
buff_sg   359 drivers/crypto/ccree/cc_buffer_mgr.c  areq_ctx->buff_sg->length);
buff_sg   361 drivers/crypto/ccree/cc_buffer_mgr.c  areq_ctx->curr_sg = areq_ctx->buff_sg;
buff_sg   364 drivers/crypto/ccree/cc_buffer_mgr.c  cc_add_sg_entry(dev, sg_data, 1, areq_ctx->buff_sg, curr_buff_cnt, 0,
buff_sg  1247 drivers/crypto/ccree/cc_buffer_mgr.c  memcpy(areq_ctx->buff_sg, src,
buff_sg  1249 drivers/crypto/ccree/cc_buffer_mgr.c  areq_ctx->buff_sg->length = nbytes;
buff_sg  1250 drivers/crypto/ccree/cc_buffer_mgr.c  areq_ctx->curr_sg = areq_ctx->buff_sg;
buff_sg  1278 drivers/crypto/ccree/cc_buffer_mgr.c  dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE);
buff_sg  1361 drivers/crypto/ccree/cc_buffer_mgr.c  memcpy(areq_ctx->buff_sg, src,
buff_sg  1363 drivers/crypto/ccree/cc_buffer_mgr.c  areq_ctx->buff_sg->length = update_data_len;
buff_sg  1365 drivers/crypto/ccree/cc_buffer_mgr.c  areq_ctx->curr_sg = areq_ctx->buff_sg;
buff_sg  1390 drivers/crypto/ccree/cc_buffer_mgr.c  dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE);
buff_sg  1422 drivers/crypto/ccree/cc_buffer_mgr.c  sg_virt(areq_ctx->buff_sg),
buff_sg  1423 drivers/crypto/ccree/cc_buffer_mgr.c  &sg_dma_address(areq_ctx->buff_sg),
buff_sg  1424 drivers/crypto/ccree/cc_buffer_mgr.c  sg_dma_len(areq_ctx->buff_sg));
buff_sg  1425 drivers/crypto/ccree/cc_buffer_mgr.c  dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE);
buff_sg    52 drivers/crypto/ccree/cc_hash.h        struct scatterlist buff_sg[2];
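
Taken together, the hits show one recurring pattern: a linear staging buffer is wrapped in the first entry of the two-entry buff_sg scatterlist declared at cc_hash.h:52, mapped for device-bound DMA, published through curr_sg, and later unmapped with the same entry count and direction. Below is a minimal sketch of that map/unmap pattern under a simplified request context; my_req_ctx, my_map_one_buf() and my_unmap_one_buf() are illustrative names, not part of the driver.

/*
 * Sketch of the pattern at cc_buffer_mgr.c:351-364 and the
 * dma_unmap_sg() sites. Not the driver's code; names prefixed
 * with my_ are hypothetical.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct my_req_ctx {
	struct scatterlist buff_sg[2];	/* as declared in cc_hash.h:52 */
	struct scatterlist *curr_sg;
};

static int my_map_one_buf(struct device *dev, struct my_req_ctx *ctx,
			  void *buf, unsigned int len)
{
	/* Wrap the linear staging buffer in a single-entry scatterlist. */
	sg_init_one(ctx->buff_sg, buf, len);

	/*
	 * dma_map_sg() returns the number of entries mapped, so for a
	 * one-entry list anything other than 1 means failure; this is
	 * the check at cc_buffer_mgr.c:352.
	 */
	if (dma_map_sg(dev, ctx->buff_sg, 1, DMA_TO_DEVICE) != 1) {
		dev_err(dev, "dma_map_sg() failed\n");
		return -ENOMEM;
	}

	dev_dbg(dev, "mapped: dma %pad virt %p len %u\n",
		&sg_dma_address(ctx->buff_sg), sg_virt(ctx->buff_sg),
		sg_dma_len(ctx->buff_sg));

	/* Publish the mapped entry, as at cc_buffer_mgr.c:361. */
	ctx->curr_sg = ctx->buff_sg;
	return 0;
}

static void my_unmap_one_buf(struct device *dev, struct my_req_ctx *ctx)
{
	/* Mirror of the map: same sg, same nents, same direction. */
	dma_unmap_sg(dev, ctx->buff_sg, 1, DMA_TO_DEVICE);
}

Two details the hits illustrate: dma_unmap_sg() must be passed the original nents (here 1), not the count returned by dma_map_sg(), which all four unmap sites respect; and the memcpy() sites at 1247 and 1361 take a different path, cloning the caller's first scatterlist entry into buff_sg and then clamping its length field to the bytes actually being processed, rather than building a new entry with sg_init_one().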