Searched refs:rctx (Results 1 - 41 of 41) sorted by relevance

/linux-4.4.14/drivers/crypto/ccp/
ccp-crypto-sha.c
30 struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req); ccp_sha_complete() local
36 if (rctx->hash_rem) { ccp_sha_complete()
38 unsigned int offset = rctx->nbytes - rctx->hash_rem; ccp_sha_complete()
40 scatterwalk_map_and_copy(rctx->buf, rctx->src, ccp_sha_complete()
41 offset, rctx->hash_rem, 0); ccp_sha_complete()
42 rctx->buf_count = rctx->hash_rem; ccp_sha_complete()
44 rctx->buf_count = 0; ccp_sha_complete()
49 memcpy(req->result, rctx->ctx, digest_size); ccp_sha_complete()
52 sg_free_table(&rctx->data_sg); ccp_sha_complete()
62 struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req); ccp_do_sha_update() local
71 len = (u64)rctx->buf_count + (u64)nbytes; ccp_do_sha_update()
74 scatterwalk_map_and_copy(rctx->buf + rctx->buf_count, req->src, ccp_do_sha_update()
76 rctx->buf_count += nbytes; ccp_do_sha_update()
81 rctx->src = req->src; ccp_do_sha_update()
82 rctx->nbytes = nbytes; ccp_do_sha_update()
84 rctx->final = final; ccp_do_sha_update()
85 rctx->hash_rem = final ? 0 : len & (block_size - 1); ccp_do_sha_update()
86 rctx->hash_cnt = len - rctx->hash_rem; ccp_do_sha_update()
87 if (!final && !rctx->hash_rem) { ccp_do_sha_update()
89 rctx->hash_cnt -= block_size; ccp_do_sha_update()
90 rctx->hash_rem = block_size; ccp_do_sha_update()
94 sg_init_one(&rctx->ctx_sg, rctx->ctx, sizeof(rctx->ctx)); ccp_do_sha_update()
97 if (rctx->buf_count && nbytes) { ccp_do_sha_update()
104 ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp); ccp_do_sha_update()
108 sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count); ccp_do_sha_update()
109 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg); ccp_do_sha_update()
114 sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src); ccp_do_sha_update()
121 sg = rctx->data_sg.sgl; ccp_do_sha_update()
122 } else if (rctx->buf_count) { ccp_do_sha_update()
123 sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count); ccp_do_sha_update()
125 sg = &rctx->buf_sg; ccp_do_sha_update()
130 rctx->msg_bits += (rctx->hash_cnt << 3); /* Total in bits */ ccp_do_sha_update()
132 memset(&rctx->cmd, 0, sizeof(rctx->cmd)); ccp_do_sha_update()
133 INIT_LIST_HEAD(&rctx->cmd.entry); ccp_do_sha_update()
134 rctx->cmd.engine = CCP_ENGINE_SHA; ccp_do_sha_update()
135 rctx->cmd.u.sha.type = rctx->type; ccp_do_sha_update()
136 rctx->cmd.u.sha.ctx = &rctx->ctx_sg; ccp_do_sha_update()
137 rctx->cmd.u.sha.ctx_len = sizeof(rctx->ctx); ccp_do_sha_update()
138 rctx->cmd.u.sha.src = sg; ccp_do_sha_update()
139 rctx->cmd.u.sha.src_len = rctx->hash_cnt; ccp_do_sha_update()
140 rctx->cmd.u.sha.opad = ctx->u.sha.key_len ? ccp_do_sha_update()
142 rctx->cmd.u.sha.opad_len = ctx->u.sha.key_len ? ccp_do_sha_update()
144 rctx->cmd.u.sha.first = rctx->first; ccp_do_sha_update()
145 rctx->cmd.u.sha.final = rctx->final; ccp_do_sha_update()
146 rctx->cmd.u.sha.msg_bits = rctx->msg_bits; ccp_do_sha_update()
148 rctx->first = 0; ccp_do_sha_update()
150 ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd); ccp_do_sha_update()
155 sg_free_table(&rctx->data_sg); ccp_do_sha_update()
164 struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req); ccp_sha_init() local
170 memset(rctx, 0, sizeof(*rctx)); ccp_sha_init()
172 rctx->type = alg->type; ccp_sha_init()
173 rctx->first = 1; ccp_sha_init()
177 memcpy(rctx->buf, ctx->u.sha.ipad, block_size); ccp_sha_init()
178 rctx->buf_count = block_size; ccp_sha_init()
212 struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req); ccp_sha_export() local
218 state.type = rctx->type; ccp_sha_export()
219 state.msg_bits = rctx->msg_bits; ccp_sha_export()
220 state.first = rctx->first; ccp_sha_export()
221 memcpy(state.ctx, rctx->ctx, sizeof(state.ctx)); ccp_sha_export()
222 state.buf_count = rctx->buf_count; ccp_sha_export()
223 memcpy(state.buf, rctx->buf, sizeof(state.buf)); ccp_sha_export()
233 struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req); ccp_sha_import() local
239 memset(rctx, 0, sizeof(*rctx)); ccp_sha_import()
240 rctx->type = state.type; ccp_sha_import()
241 rctx->msg_bits = state.msg_bits; ccp_sha_import()
242 rctx->first = state.first; ccp_sha_import()
243 memcpy(rctx->ctx, state.ctx, sizeof(rctx->ctx)); ccp_sha_import()
244 rctx->buf_count = state.buf_count; ccp_sha_import()
245 memcpy(rctx->buf, state.buf, sizeof(rctx->buf)); ccp_sha_import()
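The block-boundary bookkeeping in ccp_do_sha_update() above (hash_cnt/hash_rem, lines 85-90) is easiest to see in isolation: everything block-aligned is hashed now, the remainder is carried in rctx->buf, and when the total is exactly block-aligned on a non-final update one full block is held back so the final call always has data. A minimal userspace sketch of that arithmetic, assuming a 64-byte block size (function and variable names are illustrative, not from the driver):

#include <stdio.h>
#include <stdint.h>

static void split_update(uint64_t buf_count, uint64_t nbytes,
                         int final, unsigned int block_size,
                         uint64_t *hash_cnt, uint64_t *hash_rem)
{
    uint64_t len = buf_count + nbytes;

    *hash_rem = final ? 0 : len & (block_size - 1);
    *hash_cnt = len - *hash_rem;
    if (!final && !*hash_rem) {
        /* exactly block-aligned: hold one block back for the final call */
        *hash_cnt -= block_size;
        *hash_rem = block_size;
    }
}

int main(void)
{
    uint64_t cnt, rem;

    split_update(10, 118, 0, 64, &cnt, &rem);   /* 128 total, not final */
    printf("hash now %llu, carry %llu\n",
           (unsigned long long)cnt, (unsigned long long)rem);  /* 64, 64 */

    split_update(10, 118, 1, 64, &cnt, &rem);   /* final: hash everything */
    printf("hash now %llu, carry %llu\n",
           (unsigned long long)cnt, (unsigned long long)rem);  /* 128, 0 */
    return 0;
}

The same carry-the-tail pattern recurs in qce/sha.c and sahara.c below.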
ccp-crypto-aes-cmac.c
31 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req); ccp_aes_cmac_complete() local
37 if (rctx->hash_rem) { ccp_aes_cmac_complete()
39 unsigned int offset = rctx->nbytes - rctx->hash_rem; ccp_aes_cmac_complete()
41 scatterwalk_map_and_copy(rctx->buf, rctx->src, ccp_aes_cmac_complete()
42 offset, rctx->hash_rem, 0); ccp_aes_cmac_complete()
43 rctx->buf_count = rctx->hash_rem; ccp_aes_cmac_complete()
45 rctx->buf_count = 0; ccp_aes_cmac_complete()
50 memcpy(req->result, rctx->iv, digest_size); ccp_aes_cmac_complete()
53 sg_free_table(&rctx->data_sg); ccp_aes_cmac_complete()
63 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req); ccp_do_cmac_update() local
76 rctx->null_msg = 0; ccp_do_cmac_update()
78 len = (u64)rctx->buf_count + (u64)nbytes; ccp_do_cmac_update()
81 scatterwalk_map_and_copy(rctx->buf + rctx->buf_count, req->src, ccp_do_cmac_update()
83 rctx->buf_count += nbytes; ccp_do_cmac_update()
88 rctx->src = req->src; ccp_do_cmac_update()
89 rctx->nbytes = nbytes; ccp_do_cmac_update()
91 rctx->final = final; ccp_do_cmac_update()
92 rctx->hash_rem = final ? 0 : len & (block_size - 1); ccp_do_cmac_update()
93 rctx->hash_cnt = len - rctx->hash_rem; ccp_do_cmac_update()
94 if (!final && !rctx->hash_rem) { ccp_do_cmac_update()
96 rctx->hash_cnt -= block_size; ccp_do_cmac_update()
97 rctx->hash_rem = block_size; ccp_do_cmac_update()
100 if (final && (rctx->null_msg || (len & (block_size - 1)))) ccp_do_cmac_update()
105 sg_init_one(&rctx->iv_sg, rctx->iv, sizeof(rctx->iv)); ccp_do_cmac_update()
113 ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp); ccp_do_cmac_update()
118 if (rctx->buf_count) { ccp_do_cmac_update()
119 sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count); ccp_do_cmac_update()
120 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg); ccp_do_cmac_update()
128 sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src); ccp_do_cmac_update()
138 rctx->hash_cnt += pad_length; ccp_do_cmac_update()
140 memset(rctx->pad, 0, sizeof(rctx->pad)); ccp_do_cmac_update()
141 rctx->pad[0] = 0x80; ccp_do_cmac_update()
142 sg_init_one(&rctx->pad_sg, rctx->pad, pad_length); ccp_do_cmac_update()
143 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->pad_sg); ccp_do_cmac_update()
151 sg = rctx->data_sg.sgl; ccp_do_cmac_update()
159 memset(&rctx->cmd, 0, sizeof(rctx->cmd)); ccp_do_cmac_update()
160 INIT_LIST_HEAD(&rctx->cmd.entry); ccp_do_cmac_update()
161 rctx->cmd.engine = CCP_ENGINE_AES; ccp_do_cmac_update()
162 rctx->cmd.u.aes.type = ctx->u.aes.type; ccp_do_cmac_update()
163 rctx->cmd.u.aes.mode = ctx->u.aes.mode; ccp_do_cmac_update()
164 rctx->cmd.u.aes.action = CCP_AES_ACTION_ENCRYPT; ccp_do_cmac_update()
165 rctx->cmd.u.aes.key = &ctx->u.aes.key_sg; ccp_do_cmac_update()
166 rctx->cmd.u.aes.key_len = ctx->u.aes.key_len; ccp_do_cmac_update()
167 rctx->cmd.u.aes.iv = &rctx->iv_sg; ccp_do_cmac_update()
168 rctx->cmd.u.aes.iv_len = AES_BLOCK_SIZE; ccp_do_cmac_update()
169 rctx->cmd.u.aes.src = sg; ccp_do_cmac_update()
170 rctx->cmd.u.aes.src_len = rctx->hash_cnt; ccp_do_cmac_update()
171 rctx->cmd.u.aes.dst = NULL; ccp_do_cmac_update()
172 rctx->cmd.u.aes.cmac_key = cmac_key_sg; ccp_do_cmac_update()
173 rctx->cmd.u.aes.cmac_key_len = ctx->u.aes.kn_len; ccp_do_cmac_update()
174 rctx->cmd.u.aes.cmac_final = final; ccp_do_cmac_update()
176 ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd); ccp_do_cmac_update()
181 sg_free_table(&rctx->data_sg); ccp_do_cmac_update()
188 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req); ccp_aes_cmac_init() local
190 memset(rctx, 0, sizeof(*rctx)); ccp_aes_cmac_init()
192 rctx->null_msg = 1; ccp_aes_cmac_init()
225 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req); ccp_aes_cmac_export() local
231 state.null_msg = rctx->null_msg; ccp_aes_cmac_export()
232 memcpy(state.iv, rctx->iv, sizeof(state.iv)); ccp_aes_cmac_export()
233 state.buf_count = rctx->buf_count; ccp_aes_cmac_export()
234 memcpy(state.buf, rctx->buf, sizeof(state.buf)); ccp_aes_cmac_export()
244 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req); ccp_aes_cmac_import() local
250 memset(rctx, 0, sizeof(*rctx)); ccp_aes_cmac_import()
251 rctx->null_msg = state.null_msg; ccp_aes_cmac_import()
252 memcpy(rctx->iv, state.iv, sizeof(rctx->iv)); ccp_aes_cmac_import()
253 rctx->buf_count = state.buf_count; ccp_aes_cmac_import()
254 memcpy(rctx->buf, state.buf, sizeof(rctx->buf)); ccp_aes_cmac_import()
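Lines 138-143 of ccp_do_cmac_update() above show the CMAC padding step: a short or empty final block is extended with 0x80 followed by zeros up to the AES block size, and the padded length is added to hash_cnt. A sketch of just that step (the real driver also selects the K1/K2 CMAC subkey for the padded case via cmac_key_sg; that part is omitted here):

#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Returns the number of pad bytes to append after a partial block. */
static unsigned int cmac_pad(unsigned char *pad, unsigned int msg_len)
{
    unsigned int pad_length = AES_BLOCK_SIZE - (msg_len % AES_BLOCK_SIZE);

    memset(pad, 0, AES_BLOCK_SIZE);
    pad[0] = 0x80;
    return pad_length;
}

int main(void)
{
    unsigned char pad[AES_BLOCK_SIZE];
    unsigned int n = cmac_pad(pad, 13);   /* 13-byte final block */

    printf("append %u pad bytes, first byte 0x%02x\n", n, pad[0]);  /* 3, 0x80 */
    return 0;
}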
ccp-crypto-aes.c
29 struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); ccp_aes_complete() local
35 memcpy(req->info, rctx->iv, AES_BLOCK_SIZE); ccp_aes_complete()
73 struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); ccp_aes_crypt() local
91 memcpy(rctx->iv, req->info, AES_BLOCK_SIZE); ccp_aes_crypt()
92 iv_sg = &rctx->iv_sg; ccp_aes_crypt()
94 sg_init_one(iv_sg, rctx->iv, iv_len); ccp_aes_crypt()
97 memset(&rctx->cmd, 0, sizeof(rctx->cmd)); ccp_aes_crypt()
98 INIT_LIST_HEAD(&rctx->cmd.entry); ccp_aes_crypt()
99 rctx->cmd.engine = CCP_ENGINE_AES; ccp_aes_crypt()
100 rctx->cmd.u.aes.type = ctx->u.aes.type; ccp_aes_crypt()
101 rctx->cmd.u.aes.mode = ctx->u.aes.mode; ccp_aes_crypt()
102 rctx->cmd.u.aes.action = ccp_aes_crypt()
104 rctx->cmd.u.aes.key = &ctx->u.aes.key_sg; ccp_aes_crypt()
105 rctx->cmd.u.aes.key_len = ctx->u.aes.key_len; ccp_aes_crypt()
106 rctx->cmd.u.aes.iv = iv_sg; ccp_aes_crypt()
107 rctx->cmd.u.aes.iv_len = iv_len; ccp_aes_crypt()
108 rctx->cmd.u.aes.src = req->src; ccp_aes_crypt()
109 rctx->cmd.u.aes.src_len = req->nbytes; ccp_aes_crypt()
110 rctx->cmd.u.aes.dst = req->dst; ccp_aes_crypt()
112 ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd); ccp_aes_crypt()
147 struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); ccp_aes_rfc3686_complete() local
150 req->info = rctx->rfc3686_info; ccp_aes_rfc3686_complete()
172 struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); ccp_aes_rfc3686_crypt() local
176 iv = rctx->rfc3686_iv; ccp_aes_rfc3686_crypt()
186 rctx->rfc3686_info = req->info; ccp_aes_rfc3686_crypt()
187 req->info = rctx->rfc3686_iv; ccp_aes_rfc3686_crypt()
ccp-crypto-aes-xts.c
87 struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); ccp_aes_xts_complete() local
92 memcpy(req->info, rctx->iv, AES_BLOCK_SIZE); ccp_aes_xts_complete()
123 struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); ccp_aes_xts_crypt() local
160 memcpy(rctx->iv, req->info, AES_BLOCK_SIZE); ccp_aes_xts_crypt()
161 sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE); ccp_aes_xts_crypt()
163 memset(&rctx->cmd, 0, sizeof(rctx->cmd)); ccp_aes_xts_crypt()
164 INIT_LIST_HEAD(&rctx->cmd.entry); ccp_aes_xts_crypt()
165 rctx->cmd.engine = CCP_ENGINE_XTS_AES_128; ccp_aes_xts_crypt()
166 rctx->cmd.u.xts.action = (encrypt) ? CCP_AES_ACTION_ENCRYPT ccp_aes_xts_crypt()
168 rctx->cmd.u.xts.unit_size = unit_size; ccp_aes_xts_crypt()
169 rctx->cmd.u.xts.key = &ctx->u.aes.key_sg; ccp_aes_xts_crypt()
170 rctx->cmd.u.xts.key_len = ctx->u.aes.key_len; ccp_aes_xts_crypt()
171 rctx->cmd.u.xts.iv = &rctx->iv_sg; ccp_aes_xts_crypt()
172 rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE; ccp_aes_xts_crypt()
173 rctx->cmd.u.xts.src = req->src; ccp_aes_xts_crypt()
174 rctx->cmd.u.xts.src_len = req->nbytes; ccp_aes_xts_crypt()
175 rctx->cmd.u.xts.dst = req->dst; ccp_aes_xts_crypt()
177 ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd); ccp_aes_xts_crypt()
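All of the ccp-crypto-*.c fragments above follow one idiom: the per-request context embeds a command descriptor (rctx->cmd) that is zeroed, filled in from the request and the tfm context, and then enqueued for asynchronous processing by the engine. A minimal userspace analogue of that pattern, with illustrative struct and queue names:

#include <stdio.h>
#include <string.h>

struct cmd {
    struct cmd *next;
    int engine;          /* which hardware engine should run this */
    const void *src;
    size_t src_len;
};

struct req_ctx {
    struct cmd cmd;      /* embedded: one descriptor per in-flight request */
};

static struct cmd *queue_head, **queue_tail = &queue_head;

static void enqueue(struct cmd *c)
{
    c->next = NULL;
    *queue_tail = c;
    queue_tail = &c->next;
}

int main(void)
{
    static const char buf[32];
    struct req_ctx rctx;

    memset(&rctx.cmd, 0, sizeof(rctx.cmd));
    rctx.cmd.engine = 1;              /* stand-in for CCP_ENGINE_AES */
    rctx.cmd.src = buf;
    rctx.cmd.src_len = sizeof(buf);
    enqueue(&rctx.cmd);

    printf("queued cmd for engine %d, %zu bytes\n",
           queue_head->engine, queue_head->src_len);
    return 0;
}

Embedding the descriptor in the request context is what lets the driver service many requests concurrently without allocating per submission.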
/linux-4.4.14/drivers/crypto/qce/
sha.c
42 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_ahash_done() local
54 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); qce_ahash_done()
55 dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE); qce_ahash_done()
57 memcpy(rctx->digest, result->auth_iv, digestsize); qce_ahash_done()
61 rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]); qce_ahash_done()
62 rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]); qce_ahash_done()
68 req->src = rctx->src_orig; qce_ahash_done()
69 req->nbytes = rctx->nbytes_orig; qce_ahash_done()
70 rctx->last_blk = false; qce_ahash_done()
71 rctx->first_blk = false; qce_ahash_done()
79 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_ahash_async_req_handle() local
83 unsigned long flags = rctx->flags; qce_ahash_async_req_handle()
87 rctx->authkey = ctx->authkey; qce_ahash_async_req_handle()
88 rctx->authklen = QCE_SHA_HMAC_KEY_SIZE; qce_ahash_async_req_handle()
90 rctx->authkey = ctx->authkey; qce_ahash_async_req_handle()
91 rctx->authklen = AES_KEYSIZE_128; qce_ahash_async_req_handle()
94 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); qce_ahash_async_req_handle()
95 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); qce_ahash_async_req_handle()
99 sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ); qce_ahash_async_req_handle()
101 ret = dma_map_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE); qce_ahash_async_req_handle()
105 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents, qce_ahash_async_req_handle()
106 &rctx->result_sg, 1, qce_ahash_done, async_req); qce_ahash_async_req_handle()
121 dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE); qce_ahash_async_req_handle()
123 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); qce_ahash_async_req_handle()
129 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_ahash_init() local
133 memset(rctx, 0, sizeof(*rctx)); qce_ahash_init()
134 rctx->first_blk = true; qce_ahash_init()
135 rctx->last_blk = false; qce_ahash_init()
136 rctx->flags = tmpl->alg_flags; qce_ahash_init()
137 memcpy(rctx->digest, std_iv, sizeof(rctx->digest)); qce_ahash_init()
145 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_ahash_export() local
146 unsigned long flags = rctx->flags; qce_ahash_export()
154 out_state->count = rctx->count; qce_ahash_export()
156 rctx->digest, digestsize); qce_ahash_export()
157 memcpy(out_state->buffer, rctx->buf, blocksize); qce_ahash_export()
161 out_state->count = rctx->count; qce_ahash_export()
163 rctx->digest, digestsize); qce_ahash_export()
164 memcpy(out_state->buf, rctx->buf, blocksize); qce_ahash_export()
176 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_import_common() local
182 rctx->count = in_count; qce_import_common()
183 memcpy(rctx->buf, buffer, blocksize); qce_import_common()
186 rctx->first_blk = 1; qce_import_common()
188 rctx->first_blk = 0; qce_import_common()
198 rctx->byte_count[0] = (__force __be32)(count & ~SHA_PADDING_MASK); qce_import_common()
199 rctx->byte_count[1] = (__force __be32)(count >> 32); qce_import_common()
200 qce_cpu_to_be32p_array((__be32 *)rctx->digest, (const u8 *)state, qce_import_common()
202 rctx->buflen = (unsigned int)(in_count & (blocksize - 1)); qce_import_common()
209 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_ahash_import() local
210 unsigned long flags = rctx->flags; qce_ahash_import()
232 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_ahash_update() local
242 rctx->count += req->nbytes; qce_ahash_update()
245 total = req->nbytes + rctx->buflen; qce_ahash_update()
248 scatterwalk_map_and_copy(rctx->buf + rctx->buflen, req->src, qce_ahash_update()
250 rctx->buflen += req->nbytes; qce_ahash_update()
255 rctx->src_orig = req->src; qce_ahash_update()
256 rctx->nbytes_orig = req->nbytes; qce_ahash_update()
262 if (rctx->buflen) qce_ahash_update()
263 memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen); qce_ahash_update()
269 scatterwalk_map_and_copy(rctx->buf, req->src, src_offset, qce_ahash_update()
276 len = rctx->buflen; qce_ahash_update()
292 if (rctx->buflen) { qce_ahash_update()
293 sg_init_table(rctx->sg, 2); qce_ahash_update()
294 sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen); qce_ahash_update()
295 sg_chain(rctx->sg, 2, req->src); qce_ahash_update()
296 req->src = rctx->sg; qce_ahash_update()
300 rctx->buflen = hash_later; qce_ahash_update()
307 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_ahash_final() local
311 if (!rctx->buflen) qce_ahash_final()
314 rctx->last_blk = true; qce_ahash_final()
316 rctx->src_orig = req->src; qce_ahash_final()
317 rctx->nbytes_orig = req->nbytes; qce_ahash_final()
319 memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen); qce_ahash_final()
320 sg_init_one(rctx->sg, rctx->tmpbuf, rctx->buflen); qce_ahash_final()
322 req->src = rctx->sg; qce_ahash_final()
323 req->nbytes = rctx->buflen; qce_ahash_final()
330 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_ahash_digest() local
339 rctx->src_orig = req->src; qce_ahash_digest()
340 rctx->nbytes_orig = req->nbytes; qce_ahash_digest()
341 rctx->first_blk = true; qce_ahash_digest()
342 rctx->last_blk = true; qce_ahash_digest()
ablkcipher.c
29 struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req); qce_ablkcipher_done() local
47 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); qce_ablkcipher_done()
48 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); qce_ablkcipher_done()
50 sg_free_table(&rctx->dst_tbl); qce_ablkcipher_done()
63 struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req); qce_ablkcipher_async_req_handle() local
73 rctx->iv = req->info; qce_ablkcipher_async_req_handle()
74 rctx->ivsize = crypto_ablkcipher_ivsize(ablkcipher); qce_ablkcipher_async_req_handle()
75 rctx->cryptlen = req->nbytes; qce_ablkcipher_async_req_handle()
81 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); qce_ablkcipher_async_req_handle()
83 rctx->dst_nents = sg_nents_for_len(req->dst, req->nbytes); qce_ablkcipher_async_req_handle()
85 rctx->dst_nents = rctx->src_nents; qce_ablkcipher_async_req_handle()
87 rctx->dst_nents += 1; qce_ablkcipher_async_req_handle()
92 ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp); qce_ablkcipher_async_req_handle()
96 sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ); qce_ablkcipher_async_req_handle()
98 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst); qce_ablkcipher_async_req_handle()
104 sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg); qce_ablkcipher_async_req_handle()
111 rctx->dst_sg = rctx->dst_tbl.sgl; qce_ablkcipher_async_req_handle()
113 ret = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); qce_ablkcipher_async_req_handle()
118 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); qce_ablkcipher_async_req_handle()
121 rctx->src_sg = req->src; qce_ablkcipher_async_req_handle()
123 rctx->src_sg = rctx->dst_sg; qce_ablkcipher_async_req_handle()
126 ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, rctx->src_nents, qce_ablkcipher_async_req_handle()
127 rctx->dst_sg, rctx->dst_nents, qce_ablkcipher_async_req_handle()
144 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src); qce_ablkcipher_async_req_handle()
146 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); qce_ablkcipher_async_req_handle()
148 sg_free_table(&rctx->dst_tbl); qce_ablkcipher_async_req_handle()
198 struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req); qce_ablkcipher_crypt() local
202 rctx->flags = tmpl->alg_flags; qce_ablkcipher_crypt()
203 rctx->flags |= encrypt ? QCE_ENCRYPT : QCE_DECRYPT; qce_ablkcipher_crypt()
205 if (IS_AES(rctx->flags) && ctx->enc_keylen != AES_KEYSIZE_128 && qce_ablkcipher_crypt()
common.c
235 struct qce_sha_reqctx *rctx = ahash_request_ctx(req); qce_setup_regs_ahash() local
246 if (!rctx->last_blk && req->nbytes % blocksize) qce_setup_regs_ahash()
251 if (IS_CMAC(rctx->flags)) { qce_setup_regs_ahash()
259 auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen); qce_setup_regs_ahash()
262 if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) { qce_setup_regs_ahash()
263 u32 authkey_words = rctx->authklen / sizeof(u32); qce_setup_regs_ahash()
265 qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen); qce_setup_regs_ahash()
270 if (IS_CMAC(rctx->flags)) qce_setup_regs_ahash()
273 if (rctx->first_blk) qce_setup_regs_ahash()
274 memcpy(auth, rctx->digest, digestsize); qce_setup_regs_ahash()
276 qce_cpu_to_be32p_array(auth, rctx->digest, digestsize); qce_setup_regs_ahash()
278 iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8; qce_setup_regs_ahash()
281 if (rctx->first_blk) qce_setup_regs_ahash()
285 (u32 *)rctx->byte_count, 2); qce_setup_regs_ahash()
287 auth_cfg = qce_auth_cfg(rctx->flags, 0); qce_setup_regs_ahash()
289 if (rctx->last_blk) qce_setup_regs_ahash()
294 if (rctx->first_blk) qce_setup_regs_ahash()
319 struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req); qce_setup_regs_ablkcipher() local
328 unsigned int ivsize = rctx->ivsize; qce_setup_regs_ablkcipher()
329 unsigned long flags = rctx->flags; qce_setup_regs_ablkcipher()
354 rctx->cryptlen); qce_setup_regs_ablkcipher()
364 qce_xts_swapiv(enciv, rctx->iv, ivsize); qce_setup_regs_ablkcipher()
366 qce_cpu_to_be32p_array(enciv, rctx->iv, ivsize); qce_setup_regs_ablkcipher()
375 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen); qce_setup_regs_ablkcipher()
/linux-4.4.14/arch/x86/crypto/sha-mb/
sha1_mb.c
93 static void req_ctx_init(struct mcryptd_hash_request_ctx *rctx, req_ctx_init() argument
96 rctx->flag = HASH_UPDATE; req_ctx_init()
363 static int sha1_mb_set_results(struct mcryptd_hash_request_ctx *rctx) sha1_mb_set_results() argument
366 struct sha1_hash_ctx *sctx = shash_desc_ctx(&rctx->desc); sha1_mb_set_results()
367 __be32 *dst = (__be32 *) rctx->out; sha1_mb_set_results()
380 struct mcryptd_hash_request_ctx *rctx = *ret_rctx; sha_finish_walk() local
384 while (!(rctx->flag & HASH_DONE)) { sha_finish_walk()
385 nbytes = crypto_ahash_walk_done(&rctx->walk, 0); sha_finish_walk()
391 if (crypto_ahash_walk_last(&rctx->walk)) { sha_finish_walk()
392 rctx->flag |= HASH_DONE; sha_finish_walk()
393 if (rctx->flag & HASH_FINAL) sha_finish_walk()
397 sha_ctx = (struct sha1_hash_ctx *) shash_desc_ctx(&rctx->desc); sha_finish_walk()
399 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag); sha_finish_walk()
406 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); sha_finish_walk()
408 rctx = NULL; sha_finish_walk()
414 if (rctx->flag & HASH_FINAL) sha_finish_walk()
415 sha1_mb_set_results(rctx); sha_finish_walk()
418 *ret_rctx = rctx; sha_finish_walk()
422 static int sha_complete_job(struct mcryptd_hash_request_ctx *rctx, sha_complete_job() argument
426 struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx); sha_complete_job()
433 list_del(&rctx->waiter); sha_complete_job()
437 rctx->complete(&req->base, err); sha_complete_job()
440 rctx->complete(&req->base, err); sha_complete_job()
469 static void sha1_mb_add_list(struct mcryptd_hash_request_ctx *rctx, sha1_mb_add_list() argument
476 rctx->tag.arrival = jiffies; /* tag the arrival time */ sha1_mb_add_list()
477 rctx->tag.seq_num = cstate->next_seq_num++; sha1_mb_add_list()
478 next_flush = rctx->tag.arrival + delay; sha1_mb_add_list()
479 rctx->tag.expire = next_flush; sha1_mb_add_list()
482 list_add_tail(&rctx->waiter, &cstate->work_list); sha1_mb_add_list()
491 struct mcryptd_hash_request_ctx *rctx = sha1_mb_update() local
496 struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx); sha1_mb_update()
502 if (rctx->tag.cpu != smp_processor_id()) { sha1_mb_update()
508 req_ctx_init(rctx, desc); sha1_mb_update()
510 nbytes = crypto_ahash_walk_first(req, &rctx->walk); sha1_mb_update()
517 if (crypto_ahash_walk_last(&rctx->walk)) sha1_mb_update()
518 rctx->flag |= HASH_DONE; sha1_mb_update()
522 sha1_mb_add_list(rctx, cstate); sha1_mb_update()
524 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, HASH_UPDATE); sha1_mb_update()
533 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); sha1_mb_update()
537 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); sha1_mb_update()
538 ret = sha_finish_walk(&rctx, cstate, false); sha1_mb_update()
540 if (!rctx) sha1_mb_update()
543 sha_complete_job(rctx, cstate, ret); sha1_mb_update()
550 struct mcryptd_hash_request_ctx *rctx = sha1_mb_finup() local
555 struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx); sha1_mb_finup()
560 if (rctx->tag.cpu != smp_processor_id()) { sha1_mb_finup()
566 req_ctx_init(rctx, desc); sha1_mb_finup()
568 nbytes = crypto_ahash_walk_first(req, &rctx->walk); sha1_mb_finup()
575 if (crypto_ahash_walk_last(&rctx->walk)) { sha1_mb_finup()
576 rctx->flag |= HASH_DONE; sha1_mb_finup()
579 rctx->out = out; sha1_mb_finup()
582 rctx->flag |= HASH_FINAL; sha1_mb_finup()
584 sha1_mb_add_list(rctx, cstate); sha1_mb_finup()
587 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag); sha1_mb_finup()
599 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); sha1_mb_finup()
600 ret = sha_finish_walk(&rctx, cstate, false); sha1_mb_finup()
601 if (!rctx) sha1_mb_finup()
604 sha_complete_job(rctx, cstate, ret); sha1_mb_finup()
610 struct mcryptd_hash_request_ctx *rctx = sha1_mb_final() local
620 if (rctx->tag.cpu != smp_processor_id()) { sha1_mb_final()
626 req_ctx_init(rctx, desc); sha1_mb_final()
628 rctx->out = out; sha1_mb_final()
629 rctx->flag |= HASH_DONE | HASH_FINAL; sha1_mb_final()
633 sha1_mb_add_list(rctx, cstate); sha1_mb_final()
644 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); sha1_mb_final()
648 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); sha1_mb_final()
649 ret = sha_finish_walk(&rctx, cstate, false); sha1_mb_final()
650 if (!rctx) sha1_mb_final()
653 sha_complete_job(rctx, cstate, ret); sha1_mb_final()
820 struct mcryptd_hash_request_ctx *rctx; sha1_mb_flusher() local
829 rctx = list_entry(cstate->work_list.next, sha1_mb_flusher()
831 if (time_before(cur_time, rctx->tag.expire)) sha1_mb_flusher()
840 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); sha1_mb_flusher()
841 sha_finish_walk(&rctx, cstate, true); sha1_mb_flusher()
842 sha_complete_job(rctx, cstate, 0); sha1_mb_flusher()
846 rctx = list_entry(cstate->work_list.next, sha1_mb_flusher()
849 next_flush = rctx->tag.expire; sha1_mb_flusher()
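sha1_mb_add_list() and sha1_mb_flusher() above implement a lazy-flush policy: each request is tagged with its arrival time and an expiry (arrival + delay), appended FIFO to the work list, and the flusher walks the list head-first, stopping at the first entry that has not yet expired (the time_before() check at line 831). A compact sketch of that policy using plain integers as the clock:

#include <stdio.h>

#define NREQ   3
#define DELAY  5

struct tag { unsigned long arrival, expire; };

int main(void)
{
    struct tag work[NREQ];
    unsigned long now;
    int i, head = 0;

    for (i = 0; i < NREQ; i++) {            /* arrivals at t = 0, 4, 8 */
        work[i].arrival = i * 4;
        work[i].expire  = work[i].arrival + DELAY;
    }

    now = 7;                                /* the flusher runs at t = 7 */
    while (head < NREQ && work[head].expire <= now) {
        printf("flush request %d (expired at %lu)\n", head, work[head].expire);
        head++;
    }
    if (head < NREQ)                        /* next_flush = expire of new head */
        printf("next flush at t = %lu\n", work[head].expire);
    return 0;
}

FIFO order means the head always holds the oldest (soonest-expiring) entry, so the scan can stop at the first unexpired one.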
/linux-4.4.14/crypto/
chacha20poly1305.c
99 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_verify_tag() local
100 u8 tag[sizeof(rctx->tag)]; poly_verify_tag()
103 req->assoclen + rctx->cryptlen, poly_verify_tag()
105 if (crypto_memneq(tag, rctx->tag, sizeof(tag))) poly_verify_tag()
112 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_copy_tag() local
114 scatterwalk_map_and_copy(rctx->tag, req->dst, poly_copy_tag()
115 req->assoclen + rctx->cryptlen, poly_copy_tag()
116 sizeof(rctx->tag), 1); poly_copy_tag()
128 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); chacha_decrypt() local
129 struct chacha_req *creq = &rctx->u.chacha; chacha_decrypt()
135 sg_init_table(rctx->src, 2); chacha_decrypt()
136 src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen); chacha_decrypt()
140 sg_init_table(rctx->dst, 2); chacha_decrypt()
141 dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen); chacha_decrypt()
148 rctx->cryptlen, creq->iv); chacha_decrypt()
158 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_tail_continue() local
160 if (rctx->cryptlen == req->cryptlen) /* encrypting */ poly_tail_continue()
175 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_tail() local
176 struct poly_req *preq = &rctx->u.poly; poly_tail()
181 len = cpu_to_le64(rctx->assoclen); poly_tail()
183 len = cpu_to_le64(rctx->cryptlen); poly_tail()
191 rctx->tag, sizeof(preq->tail)); poly_tail()
208 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_cipherpad() local
209 struct poly_req *preq = &rctx->u.poly; poly_cipherpad()
213 padlen = (bs - (rctx->cryptlen % bs)) % bs; poly_cipherpad()
238 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_cipher() local
239 struct poly_req *preq = &rctx->u.poly; poly_cipher()
243 if (rctx->cryptlen == req->cryptlen) /* encrypting */ poly_cipher()
246 sg_init_table(rctx->src, 2); poly_cipher()
247 crypt = scatterwalk_ffwd(rctx->src, crypt, req->assoclen); poly_cipher()
252 ahash_request_set_crypt(&preq->req, crypt, NULL, rctx->cryptlen); poly_cipher()
269 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_adpad() local
270 struct poly_req *preq = &rctx->u.poly; poly_adpad()
274 padlen = (bs - (rctx->assoclen % bs)) % bs; poly_adpad()
299 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_ad() local
300 struct poly_req *preq = &rctx->u.poly; poly_ad()
306 ahash_request_set_crypt(&preq->req, req->src, NULL, rctx->assoclen); poly_ad()
323 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_setkey() local
324 struct poly_req *preq = &rctx->u.poly; poly_setkey()
328 sg_set_buf(preq->src, rctx->key, sizeof(rctx->key)); poly_setkey()
333 ahash_request_set_crypt(&preq->req, preq->src, NULL, sizeof(rctx->key)); poly_setkey()
350 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_init() local
351 struct poly_req *preq = &rctx->u.poly; poly_init()
374 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); poly_genkey() local
375 struct chacha_req *creq = &rctx->u.chacha; poly_genkey()
378 rctx->assoclen = req->assoclen; poly_genkey()
381 if (rctx->assoclen < 8) poly_genkey()
383 rctx->assoclen -= 8; poly_genkey()
387 memset(rctx->key, 0, sizeof(rctx->key)); poly_genkey()
388 sg_set_buf(creq->src, rctx->key, sizeof(rctx->key)); poly_genkey()
413 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); chacha_encrypt() local
414 struct chacha_req *creq = &rctx->u.chacha; chacha_encrypt()
420 sg_init_table(rctx->src, 2); chacha_encrypt()
421 src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen); chacha_encrypt()
425 sg_init_table(rctx->dst, 2); chacha_encrypt()
426 dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen); chacha_encrypt()
443 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); chachapoly_encrypt() local
445 rctx->cryptlen = req->cryptlen; chachapoly_encrypt()
464 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); chachapoly_decrypt() local
466 rctx->cryptlen = req->cryptlen - POLY1305_DIGEST_SIZE; chachapoly_decrypt()
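poly_tail() above (lines 181-183) finishes the Poly1305 MAC by hashing a 16-byte length block: the associated-data length and the ciphertext length, each encoded as a 64-bit little-endian value, as RFC 7539 requires. A sketch of just that encoding step:

#include <stdio.h>
#include <stdint.h>

static void put_le64(uint8_t *p, uint64_t v)
{
    for (int i = 0; i < 8; i++)
        p[i] = (uint8_t)(v >> (8 * i));     /* least-significant byte first */
}

int main(void)
{
    uint8_t tail[16];
    uint64_t assoclen = 12, cryptlen = 265;

    put_le64(tail, assoclen);
    put_le64(tail + 8, cryptlen);

    for (int i = 0; i < 16; i++)
        printf("%02x%s", tail[i], i == 15 ? "\n" : " ");
    return 0;
}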
rmd256.c
237 struct rmd256_ctx *rctx = shash_desc_ctx(desc); rmd256_init() local
239 rctx->byte_count = 0; rmd256_init()
241 rctx->state[0] = RMD_H0; rmd256_init()
242 rctx->state[1] = RMD_H1; rmd256_init()
243 rctx->state[2] = RMD_H2; rmd256_init()
244 rctx->state[3] = RMD_H3; rmd256_init()
245 rctx->state[4] = RMD_H5; rmd256_init()
246 rctx->state[5] = RMD_H6; rmd256_init()
247 rctx->state[6] = RMD_H7; rmd256_init()
248 rctx->state[7] = RMD_H8; rmd256_init()
250 memset(rctx->buffer, 0, sizeof(rctx->buffer)); rmd256_init()
258 struct rmd256_ctx *rctx = shash_desc_ctx(desc); rmd256_update() local
259 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); rmd256_update()
261 rctx->byte_count += len; rmd256_update()
265 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), rmd256_update()
270 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), rmd256_update()
273 rmd256_transform(rctx->state, rctx->buffer); rmd256_update()
277 while (len >= sizeof(rctx->buffer)) { rmd256_update()
278 memcpy(rctx->buffer, data, sizeof(rctx->buffer)); rmd256_update()
279 rmd256_transform(rctx->state, rctx->buffer); rmd256_update()
280 data += sizeof(rctx->buffer); rmd256_update()
281 len -= sizeof(rctx->buffer); rmd256_update()
284 memcpy(rctx->buffer, data, len); rmd256_update()
293 struct rmd256_ctx *rctx = shash_desc_ctx(desc); rmd256_final() local
299 bits = cpu_to_le64(rctx->byte_count << 3); rmd256_final()
302 index = rctx->byte_count & 0x3f; rmd256_final()
311 dst[i] = cpu_to_le32p(&rctx->state[i]); rmd256_final()
314 memset(rctx, 0, sizeof(*rctx)); rmd256_final()
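rmd256_update() above (and rmd128/rmd160/rmd320 below, which repeat the same logic byte for byte) is the classic Merkle-Damgard buffering loop: top up the 64-byte buffer, transform it when full, run remaining whole blocks through the buffer, and stash the tail. A runnable sketch with a dummy transform standing in for rmd*_transform:

#include <stdio.h>
#include <string.h>
#include <stdint.h>

struct ctx {
    uint64_t byte_count;
    uint8_t  buffer[64];
};

static unsigned int nblocks;

static void transform(struct ctx *c)        /* stand-in for rmd*_transform */
{
    (void)c;
    printf("transform: block %u\n", ++nblocks);
}

static void update(struct ctx *c, const uint8_t *data, size_t len)
{
    const size_t avail = sizeof(c->buffer) - (c->byte_count & 0x3f);

    c->byte_count += len;

    if (avail > len) {                      /* tail fits in the buffer */
        memcpy(c->buffer + (sizeof(c->buffer) - avail), data, len);
        return;
    }

    memcpy(c->buffer + (sizeof(c->buffer) - avail), data, avail);
    transform(c);
    data += avail;
    len -= avail;

    while (len >= sizeof(c->buffer)) {      /* whole blocks, still via buffer */
        memcpy(c->buffer, data, sizeof(c->buffer));
        transform(c);
        data += sizeof(c->buffer);
        len -= sizeof(c->buffer);
    }

    memcpy(c->buffer, data, len);           /* stash the tail */
}

int main(void)
{
    struct ctx c = { 0 };
    uint8_t data[150] = { 0 };

    update(&c, data, 10);    /* buffered, no transform */
    update(&c, data, 150);   /* 160 total: two transforms, 32 bytes carried */
    return 0;
}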
rmd128.c
222 struct rmd128_ctx *rctx = shash_desc_ctx(desc); rmd128_init() local
224 rctx->byte_count = 0; rmd128_init()
226 rctx->state[0] = RMD_H0; rmd128_init()
227 rctx->state[1] = RMD_H1; rmd128_init()
228 rctx->state[2] = RMD_H2; rmd128_init()
229 rctx->state[3] = RMD_H3; rmd128_init()
231 memset(rctx->buffer, 0, sizeof(rctx->buffer)); rmd128_init()
239 struct rmd128_ctx *rctx = shash_desc_ctx(desc); rmd128_update() local
240 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); rmd128_update()
242 rctx->byte_count += len; rmd128_update()
246 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), rmd128_update()
251 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), rmd128_update()
254 rmd128_transform(rctx->state, rctx->buffer); rmd128_update()
258 while (len >= sizeof(rctx->buffer)) { rmd128_update()
259 memcpy(rctx->buffer, data, sizeof(rctx->buffer)); rmd128_update()
260 rmd128_transform(rctx->state, rctx->buffer); rmd128_update()
261 data += sizeof(rctx->buffer); rmd128_update()
262 len -= sizeof(rctx->buffer); rmd128_update()
265 memcpy(rctx->buffer, data, len); rmd128_update()
274 struct rmd128_ctx *rctx = shash_desc_ctx(desc); rmd128_final() local
280 bits = cpu_to_le64(rctx->byte_count << 3); rmd128_final()
283 index = rctx->byte_count & 0x3f; rmd128_final()
292 dst[i] = cpu_to_le32p(&rctx->state[i]); rmd128_final()
295 memset(rctx, 0, sizeof(*rctx)); rmd128_final()
mcryptd.c
101 struct mcryptd_hash_request_ctx *rctx) mcryptd_enqueue_request()
108 rctx->tag.cpu = cpu; mcryptd_enqueue_request()
322 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_hash_enqueue() local
327 rctx->complete = req->base.complete; mcryptd_hash_enqueue()
330 ret = mcryptd_enqueue_request(queue, &req->base, rctx); mcryptd_hash_enqueue()
340 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_hash_init() local
341 struct shash_desc *desc = &rctx->desc; mcryptd_hash_init()
351 req->base.complete = rctx->complete; mcryptd_hash_init()
355 rctx->complete(&req->base, err); mcryptd_hash_init()
367 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_hash_update() local
372 err = shash_ahash_mcryptd_update(req, &rctx->desc); mcryptd_hash_update()
374 req->base.complete = rctx->complete; mcryptd_hash_update()
381 rctx->complete(&req->base, err); mcryptd_hash_update()
393 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_hash_final() local
398 err = shash_ahash_mcryptd_final(req, &rctx->desc); mcryptd_hash_final()
400 req->base.complete = rctx->complete; mcryptd_hash_final()
407 rctx->complete(&req->base, err); mcryptd_hash_final()
419 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_hash_finup() local
424 err = shash_ahash_mcryptd_finup(req, &rctx->desc); mcryptd_hash_finup()
427 req->base.complete = rctx->complete; mcryptd_hash_finup()
434 rctx->complete(&req->base, err); mcryptd_hash_finup()
448 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_hash_digest() local
449 struct shash_desc *desc = &rctx->desc; mcryptd_hash_digest()
460 req->base.complete = rctx->complete; mcryptd_hash_digest()
467 rctx->complete(&req->base, err); mcryptd_hash_digest()
478 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_hash_export() local
480 return crypto_shash_export(&rctx->desc, out); mcryptd_hash_export()
485 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_hash_import() local
487 return crypto_shash_import(&rctx->desc, in); mcryptd_hash_import()
676 struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req); mcryptd_shash_desc() local
677 return &rctx->desc; mcryptd_shash_desc()
99 mcryptd_enqueue_request(struct mcryptd_queue *queue, struct crypto_async_request *request, struct mcryptd_hash_request_ctx *rctx) mcryptd_enqueue_request() argument
cryptd.c
208 struct cryptd_blkcipher_request_ctx *rctx; cryptd_blkcipher_crypt() local
211 rctx = ablkcipher_request_ctx(req); cryptd_blkcipher_crypt()
222 req->base.complete = rctx->complete; cryptd_blkcipher_crypt()
226 rctx->complete(&req->base, err); cryptd_blkcipher_crypt()
251 struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req); cryptd_blkcipher_enqueue() local
256 rctx->complete = req->base.complete; cryptd_blkcipher_enqueue()
448 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); cryptd_hash_enqueue() local
453 rctx->complete = req->base.complete; cryptd_hash_enqueue()
464 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); cryptd_hash_init() local
465 struct shash_desc *desc = &rctx->desc; cryptd_hash_init()
475 req->base.complete = rctx->complete; cryptd_hash_init()
479 rctx->complete(&req->base, err); cryptd_hash_init()
491 struct cryptd_hash_request_ctx *rctx; cryptd_hash_update() local
493 rctx = ahash_request_ctx(req); cryptd_hash_update()
498 err = shash_ahash_update(req, &rctx->desc); cryptd_hash_update()
500 req->base.complete = rctx->complete; cryptd_hash_update()
504 rctx->complete(&req->base, err); cryptd_hash_update()
516 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); cryptd_hash_final() local
521 err = crypto_shash_final(&rctx->desc, req->result); cryptd_hash_final()
523 req->base.complete = rctx->complete; cryptd_hash_final()
527 rctx->complete(&req->base, err); cryptd_hash_final()
539 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); cryptd_hash_finup() local
544 err = shash_ahash_finup(req, &rctx->desc); cryptd_hash_finup()
546 req->base.complete = rctx->complete; cryptd_hash_finup()
550 rctx->complete(&req->base, err); cryptd_hash_finup()
564 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); cryptd_hash_digest() local
565 struct shash_desc *desc = &rctx->desc; cryptd_hash_digest()
575 req->base.complete = rctx->complete; cryptd_hash_digest()
579 rctx->complete(&req->base, err); cryptd_hash_digest()
590 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); cryptd_hash_export() local
592 return crypto_shash_export(&rctx->desc, out); cryptd_hash_export()
597 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); cryptd_hash_import() local
599 return crypto_shash_import(&rctx->desc, in); cryptd_hash_import()
689 struct cryptd_aead_request_ctx *rctx; cryptd_aead_crypt() local
692 rctx = aead_request_ctx(req); cryptd_aead_crypt()
693 compl = rctx->complete; cryptd_aead_crypt()
728 struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req); cryptd_aead_enqueue() local
732 rctx->complete = req->base.complete; cryptd_aead_enqueue()
950 struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); cryptd_shash_desc() local
951 return &rctx->desc; cryptd_shash_desc()
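The mcryptd.c and cryptd.c fragments above share one idiom: at enqueue time the caller's completion callback is saved in the request context (rctx->complete = req->base.complete) and the request is handed to the daemon; when the worker finishes it restores req->base.complete and invokes the saved callback. A minimal sketch of that save/restore swap, with illustrative types:

#include <stdio.h>

struct request {
    void (*complete)(struct request *, int);
};

struct req_ctx {
    void (*complete)(struct request *, int);  /* saved user callback */
};

static void user_complete(struct request *req, int err)
{
    (void)req;
    printf("user callback, err = %d\n", err);
}

static void worker(struct request *req, struct req_ctx *rctx)
{
    int err = 0;                     /* pretend the operation succeeded */

    req->complete = rctx->complete;  /* restore before completing */
    rctx->complete(req, err);
}

int main(void)
{
    struct request req = { user_complete };
    struct req_ctx rctx;

    rctx.complete = req.complete;    /* save, as cryptd_hash_enqueue() does */
    /* ... req.complete would now point at the daemon's own handler ... */
    worker(&req, &rctx);
    return 0;
}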
rmd320.c
284 struct rmd320_ctx *rctx = shash_desc_ctx(desc); rmd320_init() local
286 rctx->byte_count = 0; rmd320_init()
288 rctx->state[0] = RMD_H0; rmd320_init()
289 rctx->state[1] = RMD_H1; rmd320_init()
290 rctx->state[2] = RMD_H2; rmd320_init()
291 rctx->state[3] = RMD_H3; rmd320_init()
292 rctx->state[4] = RMD_H4; rmd320_init()
293 rctx->state[5] = RMD_H5; rmd320_init()
294 rctx->state[6] = RMD_H6; rmd320_init()
295 rctx->state[7] = RMD_H7; rmd320_init()
296 rctx->state[8] = RMD_H8; rmd320_init()
297 rctx->state[9] = RMD_H9; rmd320_init()
299 memset(rctx->buffer, 0, sizeof(rctx->buffer)); rmd320_init()
307 struct rmd320_ctx *rctx = shash_desc_ctx(desc); rmd320_update() local
308 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); rmd320_update()
310 rctx->byte_count += len; rmd320_update()
314 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), rmd320_update()
319 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), rmd320_update()
322 rmd320_transform(rctx->state, rctx->buffer); rmd320_update()
326 while (len >= sizeof(rctx->buffer)) { rmd320_update()
327 memcpy(rctx->buffer, data, sizeof(rctx->buffer)); rmd320_update()
328 rmd320_transform(rctx->state, rctx->buffer); rmd320_update()
329 data += sizeof(rctx->buffer); rmd320_update()
330 len -= sizeof(rctx->buffer); rmd320_update()
333 memcpy(rctx->buffer, data, len); rmd320_update()
342 struct rmd320_ctx *rctx = shash_desc_ctx(desc); rmd320_final() local
348 bits = cpu_to_le64(rctx->byte_count << 3); rmd320_final()
351 index = rctx->byte_count & 0x3f; rmd320_final()
360 dst[i] = cpu_to_le32p(&rctx->state[i]); rmd320_final()
363 memset(rctx, 0, sizeof(*rctx)); rmd320_final()
rmd160.c
265 struct rmd160_ctx *rctx = shash_desc_ctx(desc); rmd160_init() local
267 rctx->byte_count = 0; rmd160_init()
269 rctx->state[0] = RMD_H0; rmd160_init()
270 rctx->state[1] = RMD_H1; rmd160_init()
271 rctx->state[2] = RMD_H2; rmd160_init()
272 rctx->state[3] = RMD_H3; rmd160_init()
273 rctx->state[4] = RMD_H4; rmd160_init()
275 memset(rctx->buffer, 0, sizeof(rctx->buffer)); rmd160_init()
283 struct rmd160_ctx *rctx = shash_desc_ctx(desc); rmd160_update() local
284 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); rmd160_update()
286 rctx->byte_count += len; rmd160_update()
290 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), rmd160_update()
295 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), rmd160_update()
298 rmd160_transform(rctx->state, rctx->buffer); rmd160_update()
302 while (len >= sizeof(rctx->buffer)) { rmd160_update()
303 memcpy(rctx->buffer, data, sizeof(rctx->buffer)); rmd160_update()
304 rmd160_transform(rctx->state, rctx->buffer); rmd160_update()
305 data += sizeof(rctx->buffer); rmd160_update()
306 len -= sizeof(rctx->buffer); rmd160_update()
309 memcpy(rctx->buffer, data, len); rmd160_update()
318 struct rmd160_ctx *rctx = shash_desc_ctx(desc); rmd160_final() local
324 bits = cpu_to_le64(rctx->byte_count << 3); rmd160_final()
327 index = rctx->byte_count & 0x3f; rmd160_final()
336 dst[i] = cpu_to_le32p(&rctx->state[i]); rmd160_final()
339 memset(rctx, 0, sizeof(*rctx)); rmd160_final()
ccm.c
707 struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req); crypto_rfc4309_crypt() local
708 struct aead_request *subreq = &rctx->subreq; crypto_rfc4309_crypt()
724 sg_init_table(rctx->src, 3); crypto_rfc4309_crypt()
725 sg_set_buf(rctx->src, iv + 16, req->assoclen - 8); crypto_rfc4309_crypt()
726 sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen); crypto_rfc4309_crypt()
727 if (sg != rctx->src + 1) crypto_rfc4309_crypt()
728 sg_chain(rctx->src, 2, sg); crypto_rfc4309_crypt()
731 sg_init_table(rctx->dst, 3); crypto_rfc4309_crypt()
732 sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8); crypto_rfc4309_crypt()
733 sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen); crypto_rfc4309_crypt()
734 if (sg != rctx->dst + 1) crypto_rfc4309_crypt()
735 sg_chain(rctx->dst, 2, sg); crypto_rfc4309_crypt()
741 aead_request_set_crypt(subreq, rctx->src, crypto_rfc4309_crypt()
742 req->src == req->dst ? rctx->src : rctx->dst, crypto_rfc4309_crypt()
gcm.c
825 struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req); crypto_rfc4106_crypt() local
828 struct aead_request *subreq = &rctx->subreq; crypto_rfc4106_crypt()
839 sg_init_table(rctx->src, 3); crypto_rfc4106_crypt()
840 sg_set_buf(rctx->src, iv + 12, req->assoclen - 8); crypto_rfc4106_crypt()
841 sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen); crypto_rfc4106_crypt()
842 if (sg != rctx->src + 1) crypto_rfc4106_crypt()
843 sg_chain(rctx->src, 2, sg); crypto_rfc4106_crypt()
846 sg_init_table(rctx->dst, 3); crypto_rfc4106_crypt()
847 sg_set_buf(rctx->dst, iv + 12, req->assoclen - 8); crypto_rfc4106_crypt()
848 sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen); crypto_rfc4106_crypt()
849 if (sg != rctx->dst + 1) crypto_rfc4106_crypt()
850 sg_chain(rctx->dst, 2, sg); crypto_rfc4106_crypt()
856 aead_request_set_crypt(subreq, rctx->src, crypto_rfc4106_crypt()
857 req->src == req->dst ? rctx->src : rctx->dst, crypto_rfc4106_crypt()
1053 struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req); crypto_rfc4543_crypt() local
1054 struct aead_request *subreq = &rctx->subreq; crypto_rfc4543_crypt()
1056 u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child), crypto_rfc4543_crypt()
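crypto_rfc4106_crypt() above builds the 12-byte GCM nonce that RFC 4106 defines: a 4-byte salt taken from the end of the key material at setkey time, followed by the 8-byte explicit IV that travels with each request (ccm.c's rfc4309 wrapper does the analogous salt-plus-IV splice for CCM). A sketch of that nonce assembly, with illustrative values:

#include <stdio.h>
#include <string.h>

int main(void)
{
    unsigned char salt[4] = { 0xca, 0xfe, 0xba, 0xbe };  /* from setkey */
    unsigned char expl[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };  /* per request */
    unsigned char nonce[12];

    memcpy(nonce, salt, 4);
    memcpy(nonce + 4, expl, 8);

    for (int i = 0; i < 12; i++)
        printf("%02x%s", nonce[i], i == 11 ? "\n" : " ");
    return 0;
}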
ctr.c
284 struct crypto_rfc3686_req_ctx *rctx = crypto_rfc3686_crypt() local
286 struct ablkcipher_request *subreq = &rctx->subreq; crypto_rfc3686_crypt()
287 u8 *iv = rctx->iv; crypto_rfc3686_crypt()
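crypto_rfc3686_crypt() above assembles the 16-byte counter block defined by RFC 3686: a 4-byte nonce (from setkey) followed by the 8-byte per-request IV and a 32-bit big-endian block counter starting at 1. A sketch of that layout, reusing the kernel's size constants:

#include <stdio.h>
#include <string.h>

#define CTR_RFC3686_NONCE_SIZE 4
#define CTR_RFC3686_IV_SIZE    8

int main(void)
{
    unsigned char nonce[CTR_RFC3686_NONCE_SIZE] = { 0xde, 0xad, 0xbe, 0xef };
    unsigned char iv[CTR_RFC3686_IV_SIZE]       = { 1, 2, 3, 4, 5, 6, 7, 8 };
    unsigned char ctrblk[16];

    memcpy(ctrblk, nonce, sizeof(nonce));
    memcpy(ctrblk + sizeof(nonce), iv, sizeof(iv));
    /* initial block counter = 1, big endian */
    ctrblk[12] = 0; ctrblk[13] = 0; ctrblk[14] = 0; ctrblk[15] = 1;

    for (int i = 0; i < 16; i++)
        printf("%02x%s", ctrblk[i], i == 15 ? "\n" : " ");
    return 0;
}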
/linux-4.4.14/drivers/crypto/
sahara.c
555 struct sahara_aes_reqctx *rctx; sahara_aes_process() local
569 rctx = ablkcipher_request_ctx(req); sahara_aes_process()
571 rctx->mode &= FLAGS_MODE_MASK; sahara_aes_process()
572 dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode; sahara_aes_process()
640 struct sahara_aes_reqctx *rctx = ablkcipher_request_ctx(req); sahara_aes_crypt() local
653 rctx->mode = mode; sahara_aes_crypt()
763 struct sahara_sha_reqctx *rctx) sahara_sha_init_hdr()
767 hdr = rctx->mode; sahara_sha_init_hdr()
769 if (rctx->first) { sahara_sha_init_hdr()
776 if (rctx->last) sahara_sha_init_hdr()
786 struct sahara_sha_reqctx *rctx, sahara_sha_hw_links_create()
793 dev->in_sg = rctx->in_sg; sahara_sha_hw_links_create()
795 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, rctx->total); sahara_sha_hw_links_create()
822 struct sahara_sha_reqctx *rctx, sahara_sha_hw_data_descriptor_create()
829 if (rctx->first) sahara_sha_hw_data_descriptor_create()
831 dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx); sahara_sha_hw_data_descriptor_create()
836 dev->hw_desc[index]->len1 = rctx->total; sahara_sha_hw_data_descriptor_create()
840 rctx->sg_in_idx = 0; sahara_sha_hw_data_descriptor_create()
844 i = sahara_sha_hw_links_create(dev, rctx, index); sahara_sha_hw_data_descriptor_create()
846 rctx->sg_in_idx = index; sahara_sha_hw_data_descriptor_create()
854 result_len = rctx->context_size; sahara_sha_hw_data_descriptor_create()
875 struct sahara_sha_reqctx *rctx, sahara_sha_hw_context_descriptor_create()
879 dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx); sahara_sha_hw_context_descriptor_create()
881 dev->hw_desc[index]->len1 = rctx->context_size; sahara_sha_hw_context_descriptor_create()
886 dev->hw_link[index]->len = rctx->context_size; sahara_sha_hw_context_descriptor_create()
914 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); sahara_sha_prepare_request() local
922 len = rctx->buf_cnt + req->nbytes; sahara_sha_prepare_request()
925 if (!rctx->last && (len < block_size)) { sahara_sha_prepare_request()
927 scatterwalk_map_and_copy(rctx->buf + rctx->buf_cnt, req->src, sahara_sha_prepare_request()
929 rctx->buf_cnt += req->nbytes; sahara_sha_prepare_request()
935 if (rctx->buf_cnt) sahara_sha_prepare_request()
936 memcpy(rctx->rembuf, rctx->buf, rctx->buf_cnt); sahara_sha_prepare_request()
939 hash_later = rctx->last ? 0 : len & (block_size - 1); sahara_sha_prepare_request()
943 scatterwalk_map_and_copy(rctx->buf, req->src, offset, sahara_sha_prepare_request()
953 if (rctx->buf_cnt && req->nbytes) { sahara_sha_prepare_request()
954 sg_init_table(rctx->in_sg_chain, 2); sahara_sha_prepare_request()
955 sg_set_buf(rctx->in_sg_chain, rctx->rembuf, rctx->buf_cnt); sahara_sha_prepare_request()
957 sg_chain(rctx->in_sg_chain, 2, req->src); sahara_sha_prepare_request()
959 rctx->total = req->nbytes + rctx->buf_cnt; sahara_sha_prepare_request()
960 rctx->in_sg = rctx->in_sg_chain; sahara_sha_prepare_request()
962 req->src = rctx->in_sg_chain; sahara_sha_prepare_request()
964 } else if (rctx->buf_cnt) { sahara_sha_prepare_request()
966 rctx->in_sg = req->src; sahara_sha_prepare_request()
968 rctx->in_sg = rctx->in_sg_chain; sahara_sha_prepare_request()
970 sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt); sahara_sha_prepare_request()
971 rctx->total = rctx->buf_cnt; sahara_sha_prepare_request()
974 rctx->in_sg = req->src; sahara_sha_prepare_request()
975 rctx->total = req->nbytes; sahara_sha_prepare_request()
976 req->src = rctx->in_sg; sahara_sha_prepare_request()
980 rctx->buf_cnt = hash_later; sahara_sha_prepare_request()
988 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); sahara_sha_process() local
996 if (rctx->first) { sahara_sha_process()
997 sahara_sha_hw_data_descriptor_create(dev, rctx, req, 0); sahara_sha_process()
999 rctx->first = 0; sahara_sha_process()
1001 memcpy(dev->context_base, rctx->context, rctx->context_size); sahara_sha_process()
1003 sahara_sha_hw_context_descriptor_create(dev, rctx, req, 0); sahara_sha_process()
1005 sahara_sha_hw_data_descriptor_create(dev, rctx, req, 1); sahara_sha_process()
1023 if (rctx->sg_in_idx) sahara_sha_process()
1027 memcpy(rctx->context, dev->context_base, rctx->context_size); sahara_sha_process()
1030 memcpy(req->result, rctx->context, rctx->digest_size); sahara_sha_process()
1080 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); sahara_sha_enqueue() local
1087 mutex_lock(&rctx->mutex); sahara_sha_enqueue()
1088 rctx->last = last; sahara_sha_enqueue()
1090 if (!rctx->active) { sahara_sha_enqueue()
1091 rctx->active = 1; sahara_sha_enqueue()
1092 rctx->first = 1; sahara_sha_enqueue()
1100 mutex_unlock(&rctx->mutex); sahara_sha_enqueue()
1108 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); sahara_sha_init() local
1110 memset(rctx, 0, sizeof(*rctx)); sahara_sha_init()
1114 rctx->mode |= SAHARA_HDR_MDHA_ALG_SHA1; sahara_sha_init()
1115 rctx->digest_size = SHA1_DIGEST_SIZE; sahara_sha_init()
1118 rctx->mode |= SAHARA_HDR_MDHA_ALG_SHA256; sahara_sha_init()
1119 rctx->digest_size = SHA256_DIGEST_SIZE; sahara_sha_init()
1125 rctx->context_size = rctx->digest_size + 4; sahara_sha_init()
1126 rctx->active = 0; sahara_sha_init()
1128 mutex_init(&rctx->mutex); sahara_sha_init()
1160 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); sahara_sha_export() local
1163 memcpy(out + sizeof(struct sahara_sha_reqctx), rctx, sahara_sha_export()
1173 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); sahara_sha_import() local
1176 memcpy(rctx, in + sizeof(struct sahara_sha_reqctx), sahara_sha_import()
762 sahara_sha_init_hdr(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx) sahara_sha_init_hdr() argument
785 sahara_sha_hw_links_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, int start) sahara_sha_hw_links_create() argument
821 sahara_sha_hw_data_descriptor_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, struct ahash_request *req, int index) sahara_sha_hw_data_descriptor_create() argument
874 sahara_sha_hw_context_descriptor_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, struct ahash_request *req, int index) sahara_sha_hw_context_descriptor_create() argument
n2_core.c
305 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); n2_hash_async_init() local
309 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); n2_hash_async_init()
310 rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; n2_hash_async_init()
312 return crypto_ahash_init(&rctx->fallback_req); n2_hash_async_init()
317 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); n2_hash_async_update() local
321 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); n2_hash_async_update()
322 rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; n2_hash_async_update()
323 rctx->fallback_req.nbytes = req->nbytes; n2_hash_async_update()
324 rctx->fallback_req.src = req->src; n2_hash_async_update()
326 return crypto_ahash_update(&rctx->fallback_req); n2_hash_async_update()
331 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); n2_hash_async_final() local
335 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); n2_hash_async_final()
336 rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; n2_hash_async_final()
337 rctx->fallback_req.result = req->result; n2_hash_async_final()
339 return crypto_ahash_final(&rctx->fallback_req); n2_hash_async_final()
344 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); n2_hash_async_finup() local
348 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); n2_hash_async_finup()
349 rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; n2_hash_async_finup()
350 rctx->fallback_req.nbytes = req->nbytes; n2_hash_async_finup()
351 rctx->fallback_req.src = req->src; n2_hash_async_finup()
352 rctx->fallback_req.result = req->result; n2_hash_async_finup()
354 return crypto_ahash_finup(&rctx->fallback_req); n2_hash_async_finup()
523 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); n2_do_async_digest() local
526 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); n2_do_async_digest()
527 rctx->fallback_req.base.flags = n2_do_async_digest()
529 rctx->fallback_req.nbytes = req->nbytes; n2_do_async_digest()
530 rctx->fallback_req.src = req->src; n2_do_async_digest()
531 rctx->fallback_req.result = req->result; n2_do_async_digest()
533 return crypto_ahash_digest(&rctx->fallback_req); n2_do_async_digest()
598 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); n2_hash_async_digest() local
606 memcpy(&rctx->u, n2alg->hash_init, n2alg->hw_op_hashsz); n2_hash_async_digest()
610 &rctx->u, 0UL, 0); n2_hash_async_digest()
616 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); n2_hmac_async_digest() local
624 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); n2_hmac_async_digest() local
627 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); n2_hmac_async_digest()
628 rctx->fallback_req.base.flags = n2_hmac_async_digest()
630 rctx->fallback_req.nbytes = req->nbytes; n2_hmac_async_digest()
631 rctx->fallback_req.src = req->src; n2_hmac_async_digest()
632 rctx->fallback_req.result = req->result; n2_hmac_async_digest()
634 return crypto_ahash_digest(&rctx->fallback_req); n2_hmac_async_digest()
636 memcpy(&rctx->u, n2alg->derived.hash_init, n2_hmac_async_digest()
641 &rctx->u, n2_hmac_async_digest()
875 struct n2_request_context *rctx = ablkcipher_request_ctx(req); n2_compute_chunks() local
876 struct ablkcipher_walk *walk = &rctx->walk; n2_compute_chunks()
888 INIT_LIST_HEAD(&rctx->chunk_list); n2_compute_chunks()
890 chunk = &rctx->chunk; n2_compute_chunks()
921 &rctx->chunk_list); n2_compute_chunks()
948 list_add_tail(&chunk->entry, &rctx->chunk_list); n2_compute_chunks()
956 struct n2_request_context *rctx = ablkcipher_request_ctx(req); n2_chunk_complete() local
960 memcpy(rctx->walk.iv, final_iv, rctx->walk.blocksize); n2_chunk_complete()
962 ablkcipher_walk_complete(&rctx->walk); n2_chunk_complete()
963 list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) { n2_chunk_complete()
965 if (unlikely(c != &rctx->chunk)) n2_chunk_complete()
973 struct n2_request_context *rctx = ablkcipher_request_ctx(req); n2_do_ecb() local
990 list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) { n2_do_ecb()
995 if (unlikely(c != &rctx->chunk)) n2_do_ecb()
1025 struct n2_request_context *rctx = ablkcipher_request_ctx(req); n2_do_chaining() local
1046 iv_paddr = __pa(rctx->walk.iv); n2_do_chaining()
1047 list_for_each_entry_safe(c, tmp, &rctx->chunk_list, n2_do_chaining()
1053 iv_paddr = c->dest_final - rctx->walk.blocksize; n2_do_chaining()
1055 if (unlikely(c != &rctx->chunk)) n2_do_chaining()
1060 list_for_each_entry_safe_reverse(c, tmp, &rctx->chunk_list, n2_do_chaining()
1062 if (c == &rctx->chunk) { n2_do_chaining()
1063 iv_paddr = __pa(rctx->walk.iv); n2_do_chaining()
1067 rctx->walk.blocksize); n2_do_chaining()
1074 rctx->walk.blocksize); n2_do_chaining()
1075 final_iv_addr = rctx->temp_iv; n2_do_chaining()
1076 memcpy(rctx->temp_iv, __va(pa), n2_do_chaining()
1077 rctx->walk.blocksize); n2_do_chaining()
1084 if (unlikely(c != &rctx->chunk)) n2_do_chaining()
mxs-dcp.c
201 struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); mxs_dcp_run_aes() local
220 if (rctx->enc) mxs_dcp_run_aes()
227 if (rctx->ecb) mxs_dcp_run_aes()
255 struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); mxs_dcp_aes_block_crypt() local
280 if (!rctx->ecb) { mxs_dcp_aes_block_crypt()
400 struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req); mxs_dcp_aes_enqueue() local
406 rctx->enc = enc; mxs_dcp_aes_enqueue()
407 rctx->ecb = ecb; mxs_dcp_aes_enqueue()
516 struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req); mxs_dcp_run_sha() local
529 if (rctx->init) mxs_dcp_run_sha()
541 if (rctx->fini) { mxs_dcp_run_sha()
550 if (rctx->fini) mxs_dcp_run_sha()
566 struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req); dcp_sha_req_to_buf() local
579 int fin = rctx->fini; dcp_sha_req_to_buf()
581 rctx->fini = 0; dcp_sha_req_to_buf()
607 rctx->init = 0; dcp_sha_req_to_buf()
613 rctx->fini = 1; dcp_sha_req_to_buf()
643 struct dcp_sha_req_ctx *rctx; dcp_chan_thread_sha() local
661 rctx = ahash_request_ctx(req); dcp_chan_thread_sha()
664 fini = rctx->fini; dcp_chan_thread_sha()
707 struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req); dcp_sha_update_fx() local
722 rctx->fini = fini; dcp_sha_update_fx()
726 rctx->init = 1; dcp_sha_update_fx()
img-hash.c
484 struct img_hash_request_ctx *rctx = ahash_request_ctx(req); img_hash_init() local
487 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback); img_hash_init()
488 rctx->fallback_req.base.flags = req->base.flags img_hash_init()
491 return crypto_ahash_init(&rctx->fallback_req); img_hash_init()
547 struct img_hash_request_ctx *rctx = ahash_request_ctx(req); img_hash_update() local
551 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback); img_hash_update()
552 rctx->fallback_req.base.flags = req->base.flags img_hash_update()
554 rctx->fallback_req.nbytes = req->nbytes; img_hash_update()
555 rctx->fallback_req.src = req->src; img_hash_update()
557 return crypto_ahash_update(&rctx->fallback_req); img_hash_update()
562 struct img_hash_request_ctx *rctx = ahash_request_ctx(req); img_hash_final() local
566 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback); img_hash_final()
567 rctx->fallback_req.base.flags = req->base.flags img_hash_final()
569 rctx->fallback_req.result = req->result; img_hash_final()
571 return crypto_ahash_final(&rctx->fallback_req); img_hash_final()
576 struct img_hash_request_ctx *rctx = ahash_request_ctx(req); img_hash_finup() local
580 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback); img_hash_finup()
581 rctx->fallback_req.base.flags = req->base.flags img_hash_finup()
583 rctx->fallback_req.nbytes = req->nbytes; img_hash_finup()
584 rctx->fallback_req.src = req->src; img_hash_finup()
585 rctx->fallback_req.result = req->result; img_hash_finup()
587 return crypto_ahash_finup(&rctx->fallback_req); img_hash_finup()
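img-hash is a pure delegation example: the request context embeds a complete fallback ahash_request, and init/update/final/finup all re-target it at the software tfm, copy over the relevant fields, then call the matching crypto_ahash_*() helper. A sketch of the update path, assuming the fallback tfm was allocated at tfm-init time as the driver does:

    #include <crypto/internal/hash.h>

    struct my_hash_ctx {
            struct crypto_ahash     *fallback;      /* software ahash tfm */
    };

    struct my_hash_req_ctx {
            struct ahash_request    fallback_req;   /* kept last: the fallback's
                                                     * own request ctx follows */
    };

    static int my_hash_update(struct ahash_request *req)
    {
            struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
            struct my_hash_ctx *ctx = crypto_ahash_ctx(tfm);
            struct my_hash_req_ctx *rctx = ahash_request_ctx(req);

            /* Route the operation to the software implementation, keeping
             * only the caller's MAY_SLEEP behaviour in the flags. */
            ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
            rctx->fallback_req.base.flags = req->base.flags &
                                            CRYPTO_TFM_REQ_MAY_SLEEP;
            rctx->fallback_req.nbytes = req->nbytes;
            rctx->fallback_req.src = req->src;

            return crypto_ahash_update(&rctx->fallback_req);
    }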
H A Dhifn_795x.c1164 struct hifn_context *ctx, struct hifn_request_context *rctx, hifn_setup_cmd_desc()
1176 switch (rctx->op) { hifn_setup_cmd_desc()
1193 if (rctx->op == ACRYPTO_OP_ENCRYPT || rctx->op == ACRYPTO_OP_DECRYPT) { hifn_setup_cmd_desc()
1198 if (rctx->iv && rctx->mode != ACRYPTO_MODE_ECB) hifn_setup_cmd_desc()
1201 switch (rctx->mode) { hifn_setup_cmd_desc()
1218 switch (rctx->type) { hifn_setup_cmd_desc()
1253 rctx->iv, rctx->ivsize, md); hifn_setup_cmd_desc()
1369 struct hifn_context *ctx, struct hifn_request_context *rctx, hifn_setup_dma()
1390 t = &rctx->walk.cache[0]; hifn_setup_dma()
1393 if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) { hifn_setup_dma()
1413 hifn_setup_cmd_desc(dev, ctx, rctx, priv, nbytes); hifn_setup_dma()
1576 struct hifn_request_context *rctx = ablkcipher_request_ctx(req); hifn_setup_session() local
1583 if (rctx->iv && !rctx->ivsize && rctx->mode != ACRYPTO_MODE_ECB) hifn_setup_session()
1586 rctx->walk.flags = 0; hifn_setup_session()
1594 rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED; hifn_setup_session()
1600 if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) { hifn_setup_session()
1601 err = hifn_cipher_walk_init(&rctx->walk, idx, GFP_ATOMIC); hifn_setup_session()
1606 sg_num = hifn_cipher_walk(req, &rctx->walk); hifn_setup_session()
1618 err = hifn_setup_dma(dev, ctx, rctx, req->src, req->dst, req->nbytes, req); hifn_setup_session()
1635 dev->name, rctx->iv, rctx->ivsize, hifn_setup_session()
1637 rctx->mode, rctx->op, rctx->type, err); hifn_setup_session()
1648 struct hifn_request_context rctx; hifn_test() local
1661 rctx.ivsize = 0; hifn_test()
1662 rctx.iv = NULL; hifn_test()
1663 rctx.op = encdec ? ACRYPTO_OP_ENCRYPT : ACRYPTO_OP_DECRYPT; hifn_test()
1664 rctx.mode = ACRYPTO_MODE_ECB; hifn_test()
1665 rctx.type = ACRYPTO_TYPE_AES_128; hifn_test()
1666 rctx.walk.cache[0].length = 0; hifn_test()
1670 err = hifn_setup_dma(dev, &ctx, &rctx, &sg, &sg, sizeof(src), NULL); hifn_test()
1770 struct hifn_request_context *rctx = ablkcipher_request_ctx(req); hifn_process_ready() local
1772 if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) { hifn_process_ready()
1779 t = &rctx->walk.cache[idx]; hifn_process_ready()
1807 hifn_cipher_walk_exit(&rctx->walk); hifn_process_ready()
2099 struct hifn_request_context *rctx = ablkcipher_request_ctx(req); hifn_setup_crypto_req() local
2120 rctx->op = op; hifn_setup_crypto_req()
2121 rctx->mode = mode; hifn_setup_crypto_req()
2122 rctx->type = type; hifn_setup_crypto_req()
2123 rctx->iv = req->info; hifn_setup_crypto_req()
2124 rctx->ivsize = ivsize; hifn_setup_crypto_req()
1163 hifn_setup_cmd_desc(struct hifn_device *dev, struct hifn_context *ctx, struct hifn_request_context *rctx, void *priv, unsigned int nbytes) hifn_setup_cmd_desc() argument
1368 hifn_setup_dma(struct hifn_device *dev, struct hifn_context *ctx, struct hifn_request_context *rctx, struct scatterlist *src, struct scatterlist *dst, unsigned int nbytes, void *priv) hifn_setup_dma() argument
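hifn_795x keeps the full operation description (op, mode, type, iv, ivsize) in the request context: hifn_setup_crypto_req() fills it from the ablkcipher request, hifn_setup_cmd_desc() consumes it when writing command descriptors, and hifn_test() even fabricates one on the stack to self-test the engine. A sketch of the fill plus the IV sanity check mirrored from hifn_setup_session(); the struct and the ecb_mode parameter are illustrative, not driver code:

    #include <linux/types.h>
    #include <linux/errno.h>

    struct my_req_ctx {
            u8              op;     /* ACRYPTO_OP_ENCRYPT / _DECRYPT / ... */
            u8              type;   /* cipher type, e.g. AES-128 */
            u8              mode;   /* ECB / CBC / ... */
            void            *iv;    /* req->info in the ablkcipher case */
            unsigned int    ivsize;
    };

    /* Mirrors the check in hifn_setup_session(): a chained mode that
     * supplies an IV pointer without a length is a malformed request. */
    static int my_check_iv(const struct my_req_ctx *rctx, u8 ecb_mode)
    {
            if (rctx->iv && !rctx->ivsize && rctx->mode != ecb_mode)
                    return -EINVAL;
            return 0;
    }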
H A Datmel-aes.c567 struct atmel_aes_reqctx *rctx; atmel_aes_handle_queue() local
600 rctx = ablkcipher_request_ctx(req); atmel_aes_handle_queue()
602 rctx->mode &= AES_FLAGS_MODE_MASK; atmel_aes_handle_queue()
603 dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode; atmel_aes_handle_queue()
710 struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req); atmel_aes_crypt() local
749 rctx->mode = mode; atmel_aes_crypt()
H A Datmel-tdes.c593 struct atmel_tdes_reqctx *rctx; atmel_tdes_handle_queue() local
626 rctx = ablkcipher_request_ctx(req); atmel_tdes_handle_queue()
628 rctx->mode &= TDES_FLAGS_MODE_MASK; atmel_tdes_handle_queue()
629 dd->flags = (dd->flags & ~TDES_FLAGS_MODE_MASK) | rctx->mode; atmel_tdes_handle_queue()
675 struct atmel_tdes_reqctx *rctx = ablkcipher_request_ctx(req); atmel_tdes_crypt() local
703 rctx->mode = mode; atmel_tdes_crypt()
H A Domap-aes.c613 struct omap_aes_reqctx *rctx; omap_aes_handle_queue() local
659 rctx = ablkcipher_request_ctx(req); omap_aes_handle_queue()
661 rctx->mode &= FLAGS_MODE_MASK; omap_aes_handle_queue()
662 dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode; omap_aes_handle_queue()
725 struct omap_aes_reqctx *rctx = ablkcipher_request_ctx(req); omap_aes_crypt() local
736 rctx->mode = mode; omap_aes_crypt()
H A Domap-des.c594 struct omap_des_reqctx *rctx; omap_des_handle_queue() local
639 rctx = ablkcipher_request_ctx(req); omap_des_handle_queue()
641 rctx->mode &= FLAGS_MODE_MASK; omap_des_handle_queue()
642 dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode; omap_des_handle_queue()
704 struct omap_des_reqctx *rctx = ablkcipher_request_ctx(req); omap_des_crypt() local
720 rctx->mode = mode; omap_des_crypt()
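atmel-aes, atmel-tdes, omap-aes and omap-des all use the same two-step handoff: the crypt entry point stores the requested mode bits in rctx->mode, and the queue handler later splices them into the device flags under a mask, so only the mode field changes while busy/DMA state bits survive. A sketch of that read-modify-write, with hypothetical flag values:

    #include <linux/bitops.h>
    #include <linux/crypto.h>

    #define MY_FLAGS_ENCRYPT        BIT(0)
    #define MY_FLAGS_CBC            BIT(1)
    #define MY_FLAGS_MODE_MASK      (MY_FLAGS_ENCRYPT | MY_FLAGS_CBC)

    struct my_dev    { unsigned long flags; };
    struct my_reqctx { unsigned long mode; };

    /* Crypt entry point: remember what this request asked for. */
    static void my_set_mode(struct ablkcipher_request *req, unsigned long mode)
    {
            struct my_reqctx *rctx = ablkcipher_request_ctx(req);

            rctx->mode = mode;
    }

    /* Queue handler: replace only the mode bits in the device state. */
    static void my_load_mode(struct my_dev *dd, struct my_reqctx *rctx)
    {
            rctx->mode &= MY_FLAGS_MODE_MASK;
            dd->flags = (dd->flags & ~MY_FLAGS_MODE_MASK) | rctx->mode;
    }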
/linux-4.4.14/kernel/events/
H A Dcallchain.c135 static struct perf_callchain_entry *get_callchain_entry(int *rctx) get_callchain_entry() argument
140 *rctx = get_recursion_context(this_cpu_ptr(callchain_recursion)); get_callchain_entry()
141 if (*rctx == -1) get_callchain_entry()
150 return &entries->cpu_entries[cpu][*rctx]; get_callchain_entry()
154 put_callchain_entry(int rctx) put_callchain_entry() argument
156 put_recursion_context(this_cpu_ptr(callchain_recursion), rctx); put_callchain_entry() local
162 int rctx; perf_callchain() local
171 entry = get_callchain_entry(&rctx); perf_callchain()
172 if (rctx == -1) perf_callchain()
206 put_callchain_entry(rctx); perf_callchain()
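get_callchain_entry() hands out the per-CPU callchain buffer for the current recursion context and returns the context index through *rctx; the caller must hand that index back to put_callchain_entry() on every path, including the no-buffer case. A sketch mirroring the perf_callchain() flow above; the two helpers are the static functions from callchain.c, so this is a shape, not linkable code:

    #include <linux/perf_event.h>

    static struct perf_callchain_entry *my_get_callchain(void)
    {
            struct perf_callchain_entry *entry;
            int rctx;

            entry = get_callchain_entry(&rctx);
            if (rctx == -1)                 /* already recursing at this level */
                    return NULL;
            if (!entry)
                    goto exit_put;          /* no buffers, but rctx is held */

            entry->nr = 0;
            /* ... walk the stack into entry->ip[] ... */

    exit_put:
            put_callchain_entry(rctx);      /* always release the context */
            return entry;
    }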
H A Dinternal.h190 int rctx; get_recursion_context() local
193 rctx = 3; get_recursion_context()
195 rctx = 2; get_recursion_context()
197 rctx = 1; get_recursion_context()
199 rctx = 0; get_recursion_context()
201 if (recursion[rctx]) get_recursion_context()
204 recursion[rctx]++; get_recursion_context()
207 return rctx; get_recursion_context()
210 static inline void put_recursion_context(int *recursion, int rctx) put_recursion_context() argument
213 recursion[rctx]--; put_recursion_context()
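get_recursion_context() maps the current execution context onto a fixed slot (task = 0, softirq = 1, hardirq = 2, NMI = 3) and uses a per-CPU counter array to refuse re-entry at the same level, which is what makes one raw buffer per level safe. A simplified rendering of that logic:

    #include <linux/hardirq.h>

    /* Pick a slot for the current context; refuse to nest within it. */
    static int my_get_recursion_context(int *recursion)
    {
            int rctx;

            if (in_nmi())
                    rctx = 3;
            else if (in_irq())
                    rctx = 2;
            else if (in_serving_softirq())
                    rctx = 1;
            else
                    rctx = 0;

            if (recursion[rctx])
                    return -1;      /* this level is already live on this CPU */

            recursion[rctx]++;
            barrier();              /* order the marker before the event body */
            return rctx;
    }

    static inline void my_put_recursion_context(int *recursion, int rctx)
    {
            barrier();
            recursion[rctx]--;
    }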
H A Dcore.c4984 int rctx; perf_pending_event() local
4986 rctx = perf_swevent_get_recursion_context(); perf_pending_event()
5002 if (rctx >= 0) perf_pending_event()
5003 perf_swevent_put_recursion_context(rctx); perf_pending_event()
6687 inline void perf_swevent_put_recursion_context(int rctx) perf_swevent_put_recursion_context() argument
6691 put_recursion_context(swhash->recursion, rctx); perf_swevent_put_recursion_context()
6707 int rctx; __perf_sw_event() local
6710 rctx = perf_swevent_get_recursion_context(); __perf_sw_event()
6711 if (unlikely(rctx < 0)) __perf_sw_event()
6716 perf_swevent_put_recursion_context(rctx); __perf_sw_event()
6951 struct pt_regs *regs, struct hlist_head *head, int rctx, perf_tp_event()
6995 perf_swevent_put_recursion_context(rctx);
6950 perf_tp_event(u64 addr, u64 count, void *record, int entry_size, struct pt_regs *regs, struct hlist_head *head, int rctx, struct task_struct *task) perf_tp_event() argument
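Software-event emission brackets its work with the exported pair perf_swevent_get_recursion_context() / perf_swevent_put_recursion_context(). __perf_sw_event() above is the strict variant that bails out when no context is available; perf_pending_event() is the tolerant one that only puts when rctx >= 0. A sketch of the strict shape:

    #include <linux/perf_event.h>

    /* The rctx token from the get must reach the matching put on every
     * path; a failed get means we are nested and must drop the event. */
    static void my_emit_sw_event(void)
    {
            int rctx;

            rctx = perf_swevent_get_recursion_context();
            if (unlikely(rctx < 0))
                    return;

            /* ... the real code runs ___perf_sw_event() here ... */

            perf_swevent_put_recursion_context(rctx);
    }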
/linux-4.4.14/drivers/crypto/sunxi-ss/
H A Dsun4i-ss-cipher.c323 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_cbc_aes_encrypt() local
325 rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_ENCRYPTION | sun4i_ss_cbc_aes_encrypt()
334 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_cbc_aes_decrypt() local
336 rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_DECRYPTION | sun4i_ss_cbc_aes_decrypt()
346 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_ecb_aes_encrypt() local
348 rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_ENCRYPTION | sun4i_ss_ecb_aes_encrypt()
357 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_ecb_aes_decrypt() local
359 rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_DECRYPTION | sun4i_ss_ecb_aes_decrypt()
369 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_cbc_des_encrypt() local
371 rctx->mode = SS_OP_DES | SS_CBC | SS_ENABLED | SS_ENCRYPTION | sun4i_ss_cbc_des_encrypt()
380 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_cbc_des_decrypt() local
382 rctx->mode = SS_OP_DES | SS_CBC | SS_ENABLED | SS_DECRYPTION | sun4i_ss_cbc_des_decrypt()
392 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_ecb_des_encrypt() local
394 rctx->mode = SS_OP_DES | SS_ECB | SS_ENABLED | SS_ENCRYPTION | sun4i_ss_ecb_des_encrypt()
403 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_ecb_des_decrypt() local
405 rctx->mode = SS_OP_DES | SS_ECB | SS_ENABLED | SS_DECRYPTION | sun4i_ss_ecb_des_decrypt()
415 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_cbc_des3_encrypt() local
417 rctx->mode = SS_OP_3DES | SS_CBC | SS_ENABLED | SS_ENCRYPTION | sun4i_ss_cbc_des3_encrypt()
426 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_cbc_des3_decrypt() local
428 rctx->mode = SS_OP_3DES | SS_CBC | SS_ENABLED | SS_DECRYPTION | sun4i_ss_cbc_des3_decrypt()
438 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_ecb_des3_encrypt() local
440 rctx->mode = SS_OP_3DES | SS_ECB | SS_ENABLED | SS_ENCRYPTION | sun4i_ss_ecb_des3_encrypt()
449 struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq); sun4i_ss_ecb_des3_decrypt() local
451 rctx->mode = SS_OP_3DES | SS_ECB | SS_ENABLED | SS_DECRYPTION | sun4i_ss_ecb_des3_decrypt()
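Every sun4i-ss entry point above differs only in the constant OR-ed into rctx->mode before the common poll routine runs. A parameterised helper makes the pattern explicit; the helper itself is an assumption, and it leans on driver-internal names visible in 4.4 (sun4i_tfm_ctx, op->keymode, sun4i_ss_cipher_poll) plus the SS_* bit definitions from sun4i-ss.h:

    #include <linux/crypto.h>
    #include "sun4i-ss.h"           /* driver-local bit definitions */

    static int my_ss_crypt(struct ablkcipher_request *areq, u32 mode)
    {
            struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(areq);
            struct sun4i_tfm_ctx *op = crypto_ablkcipher_ctx(tfm);
            struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq);

            /* keymode carries the key-size bits, as in the originals'
             * trailing continuation lines. */
            rctx->mode = mode | SS_ENABLED | op->keymode;
            return sun4i_ss_cipher_poll(areq);      /* driver's common path */
    }

    static int my_cbc_aes_encrypt(struct ablkcipher_request *areq)
    {
            return my_ss_crypt(areq, SS_OP_AES | SS_CBC | SS_ENCRYPTION);
    }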
/linux-4.4.14/include/trace/
H A Dperf.h46 int rctx; \
60 event_call->event.type, &__regs, &rctx); \
70 perf_trace_buf_submit(entry, __entry_size, rctx, __addr, \
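The perf_trace_##call template in perf.h declares rctx on the stack, lets perf_trace_buf_prepare() claim a recursion slot plus the per-context raw buffer, and hands the same rctx to perf_trace_buf_submit(), which releases the slot after pushing the event; the trace_syscalls.c, trace_kprobe.c and trace_uprobe.c excerpts below follow the identical prepare/fill/submit shape. A sketch using the 4.4 signatures shown in the trace_events.h excerpt; MY_EVENT_TYPE and the record layout are assumptions:

    #include <linux/trace_events.h>

    #define MY_EVENT_TYPE   1234    /* assumed registered event type id */

    struct my_record {
            struct trace_entry      ent;    /* common header, set by prepare */
            u64                     payload;
    };

    static void my_emit_trace_event(u64 addr, struct pt_regs *regs,
                                    struct hlist_head *head)
    {
            struct my_record *entry;
            int rctx;

            entry = perf_trace_buf_prepare(sizeof(*entry), MY_EVENT_TYPE,
                                           NULL, &rctx);
            if (!entry)
                    return;         /* recursion, or no buffer available */

            entry->payload = 42;

            perf_trace_buf_submit(entry, sizeof(*entry), rctx, addr,
                                  1, regs, head, NULL);
    }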
/linux-4.4.14/drivers/crypto/nx/
H A Dnx-aes-gcm.c325 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm_aes_nx_crypt() local
335 desc.info = rctx->iv; gcm_aes_nx_crypt()
433 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm_aes_nx_encrypt() local
434 char *iv = rctx->iv; gcm_aes_nx_encrypt()
443 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm_aes_nx_decrypt() local
444 char *iv = rctx->iv; gcm_aes_nx_decrypt()
455 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm4106_aes_nx_encrypt() local
456 char *iv = rctx->iv; gcm4106_aes_nx_encrypt()
472 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm4106_aes_nx_decrypt() local
473 char *iv = rctx->iv; gcm4106_aes_nx_decrypt()
H A Dnx-aes-ccm.c495 struct nx_gcm_rctx *rctx = aead_request_ctx(req); ccm4309_aes_nx_encrypt() local
497 u8 *iv = rctx->iv; ccm4309_aes_nx_encrypt()
525 struct nx_gcm_rctx *rctx = aead_request_ctx(req); ccm4309_aes_nx_decrypt() local
527 u8 *iv = rctx->iv; ccm4309_aes_nx_decrypt()
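The NX AEAD paths keep the working IV inside the request context instead of using req->iv directly: gcm_aes_nx_crypt() points desc.info at rctx->iv, and the RFC4106/RFC4309 wrappers first assemble the counter block there from the salt fixed at setkey time plus the caller's explicit per-request IV. A sketch of the rfc4106-style assembly (4-byte salt, 8-byte explicit IV); the struct and helper names are illustrative:

    #include <linux/string.h>
    #include <crypto/internal/aead.h>

    struct my_gcm_rctx {
            u8      iv[16];         /* working IV lives with the request */
    };

    static void my_gcm4106_build_iv(struct aead_request *req, const u8 *salt)
    {
            struct my_gcm_rctx *rctx = aead_request_ctx(req);
            u8 *iv = rctx->iv;

            memcpy(iv, salt, 4);            /* salt from the key material */
            memcpy(iv + 4, req->iv, 8);     /* explicit per-request IV */
    }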
/linux-4.4.14/kernel/trace/
H A Dtrace_syscalls.c555 int rctx; perf_syscall_enter() local
578 sys_data->enter_event->event.type, NULL, &rctx); perf_syscall_enter()
585 perf_trace_buf_submit(rec, size, rctx, 0, 1, regs, head, NULL); perf_syscall_enter()
629 int rctx; perf_syscall_exit() local
651 sys_data->exit_event->event.type, NULL, &rctx); perf_syscall_exit()
657 perf_trace_buf_submit(rec, size, rctx, 0, 1, regs, head, NULL); perf_syscall_exit()
H A Dtrace_event_perf.c308 int rctx; perf_ftrace_function_call() local
321 entry = perf_trace_buf_prepare(ENTRY_SIZE, TRACE_FN, NULL, &rctx); perf_ftrace_function_call()
327 perf_trace_buf_submit(entry, ENTRY_SIZE, rctx, 0, perf_ftrace_function_call()
H A Dtrace_kprobe.c1130 int rctx; kprobe_perf_func() local
1144 entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx); kprobe_perf_func()
1151 perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL); kprobe_perf_func()
1165 int rctx; kretprobe_perf_func() local
1179 entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx); kretprobe_perf_func()
1186 perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL); kretprobe_perf_func()
H A Dtrace_uprobe.c1117 int rctx; __uprobe_perf_func() local
1134 entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx); __uprobe_perf_func()
1155 perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL); __uprobe_perf_func()
/linux-4.4.14/drivers/crypto/amcc/
H A Dcrypto4xx_core.h172 struct crypto4xx_ctx *rctx);
173 extern void crypto4xx_free_sa_rctx(struct crypto4xx_ctx *rctx);
/linux-4.4.14/include/linux/
H A Dtrace_events.h621 perf_trace_buf_submit(void *raw_data, int size, int rctx, u64 addr, perf_trace_buf_submit() argument
625 perf_tp_event(addr, count, raw_data, size, regs, head, rctx, task); perf_trace_buf_submit()
H A Dperf_event.h1010 struct hlist_head *head, int rctx,
1043 extern void perf_swevent_put_recursion_context(int rctx);
1105 static inline void perf_swevent_put_recursion_context(int rctx) { } perf_swevent_put_recursion_context() argument
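perf_event.h pairs the extern declarations with empty static inline stubs for !CONFIG_PERF_EVENTS builds, so call sites like the ones above never need #ifdef guards around the get/put pair. The shape, with MY_FEATURE standing in for CONFIG_PERF_EVENTS:

    #ifdef MY_FEATURE
    extern int  my_get_recursion_context(void);
    extern void my_put_recursion_context(int rctx);
    #else
    static inline int  my_get_recursion_context(void)       { return -1; }
    static inline void my_put_recursion_context(int rctx)   { }
    #endif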

Completed in 1192 milliseconds