/linux-4.1.27/drivers/crypto/ccp/

ccp-crypto-sha.c
     30  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);  ccp_sha_complete() local
     36  if (rctx->hash_rem) {  ccp_sha_complete()
     38  unsigned int offset = rctx->nbytes - rctx->hash_rem;  ccp_sha_complete()
     40  scatterwalk_map_and_copy(rctx->buf, rctx->src,  ccp_sha_complete()
     41  offset, rctx->hash_rem, 0);  ccp_sha_complete()
     42  rctx->buf_count = rctx->hash_rem;  ccp_sha_complete()
     44  rctx->buf_count = 0;  ccp_sha_complete()
     49  memcpy(req->result, rctx->ctx, digest_size);  ccp_sha_complete()
     52  sg_free_table(&rctx->data_sg);  ccp_sha_complete()
     62  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);  ccp_do_sha_update() local
     71  len = (u64)rctx->buf_count + (u64)nbytes;  ccp_do_sha_update()
     74  scatterwalk_map_and_copy(rctx->buf + rctx->buf_count, req->src,  ccp_do_sha_update()
     76  rctx->buf_count += nbytes;  ccp_do_sha_update()
     81  rctx->src = req->src;  ccp_do_sha_update()
     82  rctx->nbytes = nbytes;  ccp_do_sha_update()
     84  rctx->final = final;  ccp_do_sha_update()
     85  rctx->hash_rem = final ? 0 : len & (block_size - 1);  ccp_do_sha_update()
     86  rctx->hash_cnt = len - rctx->hash_rem;  ccp_do_sha_update()
     87  if (!final && !rctx->hash_rem) {  ccp_do_sha_update()
     89  rctx->hash_cnt -= block_size;  ccp_do_sha_update()
     90  rctx->hash_rem = block_size;  ccp_do_sha_update()
     94  sg_init_one(&rctx->ctx_sg, rctx->ctx, sizeof(rctx->ctx));  ccp_do_sha_update()
     97  if (rctx->buf_count && nbytes) {  ccp_do_sha_update()
    104  ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp);  ccp_do_sha_update()
    108  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  ccp_do_sha_update()
    109  sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg);  ccp_do_sha_update()
    110  sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src);  ccp_do_sha_update()
    113  sg = rctx->data_sg.sgl;  ccp_do_sha_update()
    114  } else if (rctx->buf_count) {  ccp_do_sha_update()
    115  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  ccp_do_sha_update()
    117  sg = &rctx->buf_sg;  ccp_do_sha_update()
    122  rctx->msg_bits += (rctx->hash_cnt << 3);  /* Total in bits */  ccp_do_sha_update()
    124  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  ccp_do_sha_update()
    125  INIT_LIST_HEAD(&rctx->cmd.entry);  ccp_do_sha_update()
    126  rctx->cmd.engine = CCP_ENGINE_SHA;  ccp_do_sha_update()
    127  rctx->cmd.u.sha.type = rctx->type;  ccp_do_sha_update()
    128  rctx->cmd.u.sha.ctx = &rctx->ctx_sg;  ccp_do_sha_update()
    129  rctx->cmd.u.sha.ctx_len = sizeof(rctx->ctx);  ccp_do_sha_update()
    130  rctx->cmd.u.sha.src = sg;  ccp_do_sha_update()
    131  rctx->cmd.u.sha.src_len = rctx->hash_cnt;  ccp_do_sha_update()
    132  rctx->cmd.u.sha.opad = ctx->u.sha.key_len ?  ccp_do_sha_update()
    134  rctx->cmd.u.sha.opad_len = ctx->u.sha.key_len ?  ccp_do_sha_update()
    136  rctx->cmd.u.sha.first = rctx->first;  ccp_do_sha_update()
    137  rctx->cmd.u.sha.final = rctx->final;  ccp_do_sha_update()
    138  rctx->cmd.u.sha.msg_bits = rctx->msg_bits;  ccp_do_sha_update()
    140  rctx->first = 0;  ccp_do_sha_update()
    142  ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);  ccp_do_sha_update()
    151  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);  ccp_sha_init() local
    157  memset(rctx, 0, sizeof(*rctx));  ccp_sha_init()
    159  rctx->type = alg->type;  ccp_sha_init()
    160  rctx->first = 1;  ccp_sha_init()
    164  memcpy(rctx->buf, ctx->u.sha.ipad, block_size);  ccp_sha_init()
    165  rctx->buf_count = block_size;  ccp_sha_init()
    199  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);  ccp_sha_export() local
    205  state.type = rctx->type;  ccp_sha_export()
    206  state.msg_bits = rctx->msg_bits;  ccp_sha_export()
    207  state.first = rctx->first;  ccp_sha_export()
    208  memcpy(state.ctx, rctx->ctx, sizeof(state.ctx));  ccp_sha_export()
    209  state.buf_count = rctx->buf_count;  ccp_sha_export()
    210  memcpy(state.buf, rctx->buf, sizeof(state.buf));  ccp_sha_export()
    220  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);  ccp_sha_import() local
    226  memset(rctx, 0, sizeof(*rctx));  ccp_sha_import()
    227  rctx->type = state.type;  ccp_sha_import()
    228  rctx->msg_bits = state.msg_bits;  ccp_sha_import()
    229  rctx->first = state.first;  ccp_sha_import()
    230  memcpy(rctx->ctx, state.ctx, sizeof(rctx->ctx));  ccp_sha_import()
    231  rctx->buf_count = state.buf_count;  ccp_sha_import()
    232  memcpy(rctx->buf, state.buf, sizeof(rctx->buf));  ccp_sha_import()

ccp-crypto-aes-cmac.c
     31  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);  ccp_aes_cmac_complete() local
     37  if (rctx->hash_rem) {  ccp_aes_cmac_complete()
     39  unsigned int offset = rctx->nbytes - rctx->hash_rem;  ccp_aes_cmac_complete()
     41  scatterwalk_map_and_copy(rctx->buf, rctx->src,  ccp_aes_cmac_complete()
     42  offset, rctx->hash_rem, 0);  ccp_aes_cmac_complete()
     43  rctx->buf_count = rctx->hash_rem;  ccp_aes_cmac_complete()
     45  rctx->buf_count = 0;  ccp_aes_cmac_complete()
     50  memcpy(req->result, rctx->iv, digest_size);  ccp_aes_cmac_complete()
     53  sg_free_table(&rctx->data_sg);  ccp_aes_cmac_complete()
     63  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);  ccp_do_cmac_update() local
     76  rctx->null_msg = 0;  ccp_do_cmac_update()
     78  len = (u64)rctx->buf_count + (u64)nbytes;  ccp_do_cmac_update()
     81  scatterwalk_map_and_copy(rctx->buf + rctx->buf_count, req->src,  ccp_do_cmac_update()
     83  rctx->buf_count += nbytes;  ccp_do_cmac_update()
     88  rctx->src = req->src;  ccp_do_cmac_update()
     89  rctx->nbytes = nbytes;  ccp_do_cmac_update()
     91  rctx->final = final;  ccp_do_cmac_update()
     92  rctx->hash_rem = final ? 0 : len & (block_size - 1);  ccp_do_cmac_update()
     93  rctx->hash_cnt = len - rctx->hash_rem;  ccp_do_cmac_update()
     94  if (!final && !rctx->hash_rem) {  ccp_do_cmac_update()
     96  rctx->hash_cnt -= block_size;  ccp_do_cmac_update()
     97  rctx->hash_rem = block_size;  ccp_do_cmac_update()
    100  if (final && (rctx->null_msg || (len & (block_size - 1))))  ccp_do_cmac_update()
    105  sg_init_one(&rctx->iv_sg, rctx->iv, sizeof(rctx->iv));  ccp_do_cmac_update()
    113  ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp);  ccp_do_cmac_update()
    118  if (rctx->buf_count) {  ccp_do_cmac_update()
    119  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  ccp_do_cmac_update()
    120  sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg);  ccp_do_cmac_update()
    124  sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src);  ccp_do_cmac_update()
    129  rctx->hash_cnt += pad_length;  ccp_do_cmac_update()
    131  memset(rctx->pad, 0, sizeof(rctx->pad));  ccp_do_cmac_update()
    132  rctx->pad[0] = 0x80;  ccp_do_cmac_update()
    133  sg_init_one(&rctx->pad_sg, rctx->pad, pad_length);  ccp_do_cmac_update()
    134  sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->pad_sg);  ccp_do_cmac_update()
    138  sg = rctx->data_sg.sgl;  ccp_do_cmac_update()
    146  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  ccp_do_cmac_update()
    147  INIT_LIST_HEAD(&rctx->cmd.entry);  ccp_do_cmac_update()
    148  rctx->cmd.engine = CCP_ENGINE_AES;  ccp_do_cmac_update()
    149  rctx->cmd.u.aes.type = ctx->u.aes.type;  ccp_do_cmac_update()
    150  rctx->cmd.u.aes.mode = ctx->u.aes.mode;  ccp_do_cmac_update()
    151  rctx->cmd.u.aes.action = CCP_AES_ACTION_ENCRYPT;  ccp_do_cmac_update()
    152  rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;  ccp_do_cmac_update()
    153  rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;  ccp_do_cmac_update()
    154  rctx->cmd.u.aes.iv = &rctx->iv_sg;  ccp_do_cmac_update()
    155  rctx->cmd.u.aes.iv_len = AES_BLOCK_SIZE;  ccp_do_cmac_update()
    156  rctx->cmd.u.aes.src = sg;  ccp_do_cmac_update()
    157  rctx->cmd.u.aes.src_len = rctx->hash_cnt;  ccp_do_cmac_update()
    158  rctx->cmd.u.aes.dst = NULL;  ccp_do_cmac_update()
    159  rctx->cmd.u.aes.cmac_key = cmac_key_sg;  ccp_do_cmac_update()
    160  rctx->cmd.u.aes.cmac_key_len = ctx->u.aes.kn_len;  ccp_do_cmac_update()
    161  rctx->cmd.u.aes.cmac_final = final;  ccp_do_cmac_update()
    163  ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);  ccp_do_cmac_update()
    170  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);  ccp_aes_cmac_init() local
    172  memset(rctx, 0, sizeof(*rctx));  ccp_aes_cmac_init()
    174  rctx->null_msg = 1;  ccp_aes_cmac_init()
    207  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);  ccp_aes_cmac_export() local
    213  state.null_msg = rctx->null_msg;  ccp_aes_cmac_export()
    214  memcpy(state.iv, rctx->iv, sizeof(state.iv));  ccp_aes_cmac_export()
    215  state.buf_count = rctx->buf_count;  ccp_aes_cmac_export()
    216  memcpy(state.buf, rctx->buf, sizeof(state.buf));  ccp_aes_cmac_export()
    226  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);  ccp_aes_cmac_import() local
    232  memset(rctx, 0, sizeof(*rctx));  ccp_aes_cmac_import()
    233  rctx->null_msg = state.null_msg;  ccp_aes_cmac_import()
    234  memcpy(rctx->iv, state.iv, sizeof(rctx->iv));  ccp_aes_cmac_import()
    235  rctx->buf_count = state.buf_count;  ccp_aes_cmac_import()
    236  memcpy(rctx->buf, state.buf, sizeof(rctx->buf));  ccp_aes_cmac_import()

ccp-crypto-aes.c
     29  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  ccp_aes_complete() local
     35  memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);  ccp_aes_complete()
     73  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  ccp_aes_crypt() local
     91  memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);  ccp_aes_crypt()
     92  iv_sg = &rctx->iv_sg;  ccp_aes_crypt()
     94  sg_init_one(iv_sg, rctx->iv, iv_len);  ccp_aes_crypt()
     97  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  ccp_aes_crypt()
     98  INIT_LIST_HEAD(&rctx->cmd.entry);  ccp_aes_crypt()
     99  rctx->cmd.engine = CCP_ENGINE_AES;  ccp_aes_crypt()
    100  rctx->cmd.u.aes.type = ctx->u.aes.type;  ccp_aes_crypt()
    101  rctx->cmd.u.aes.mode = ctx->u.aes.mode;  ccp_aes_crypt()
    102  rctx->cmd.u.aes.action =  ccp_aes_crypt()
    104  rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;  ccp_aes_crypt()
    105  rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;  ccp_aes_crypt()
    106  rctx->cmd.u.aes.iv = iv_sg;  ccp_aes_crypt()
    107  rctx->cmd.u.aes.iv_len = iv_len;  ccp_aes_crypt()
    108  rctx->cmd.u.aes.src = req->src;  ccp_aes_crypt()
    109  rctx->cmd.u.aes.src_len = req->nbytes;  ccp_aes_crypt()
    110  rctx->cmd.u.aes.dst = req->dst;  ccp_aes_crypt()
    112  ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);  ccp_aes_crypt()
    147  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  ccp_aes_rfc3686_complete() local
    150  req->info = rctx->rfc3686_info;  ccp_aes_rfc3686_complete()
    172  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  ccp_aes_rfc3686_crypt() local
    176  iv = rctx->rfc3686_iv;  ccp_aes_rfc3686_crypt()
    186  rctx->rfc3686_info = req->info;  ccp_aes_rfc3686_crypt()
    187  req->info = rctx->rfc3686_iv;  ccp_aes_rfc3686_crypt()

ccp-crypto-aes-xts.c
     87  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  ccp_aes_xts_complete() local
     92  memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);  ccp_aes_xts_complete()
    123  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  ccp_aes_xts_crypt() local
    160  memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);  ccp_aes_xts_crypt()
    161  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);  ccp_aes_xts_crypt()
    163  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  ccp_aes_xts_crypt()
    164  INIT_LIST_HEAD(&rctx->cmd.entry);  ccp_aes_xts_crypt()
    165  rctx->cmd.engine = CCP_ENGINE_XTS_AES_128;  ccp_aes_xts_crypt()
    166  rctx->cmd.u.xts.action = (encrypt) ? CCP_AES_ACTION_ENCRYPT  ccp_aes_xts_crypt()
    168  rctx->cmd.u.xts.unit_size = unit_size;  ccp_aes_xts_crypt()
    169  rctx->cmd.u.xts.key = &ctx->u.aes.key_sg;  ccp_aes_xts_crypt()
    170  rctx->cmd.u.xts.key_len = ctx->u.aes.key_len;  ccp_aes_xts_crypt()
    171  rctx->cmd.u.xts.iv = &rctx->iv_sg;  ccp_aes_xts_crypt()
    172  rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE;  ccp_aes_xts_crypt()
    173  rctx->cmd.u.xts.src = req->src;  ccp_aes_xts_crypt()
    174  rctx->cmd.u.xts.src_len = req->nbytes;  ccp_aes_xts_crypt()
    175  rctx->cmd.u.xts.dst = req->dst;  ccp_aes_xts_crypt()
    177  ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);  ccp_aes_xts_crypt()

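Taken together, the four ccp listings show one pattern: each request carries its own scratch state (rctx), fetched with ahash_request_ctx()/ablkcipher_request_ctx(), and ccp_do_sha_update()/ccp_do_cmac_update() split the running length into a hardware part (hash_cnt) and a remainder (hash_rem) carried in rctx->buf, always holding back one full block on a non-final update so a later final() still has data to pad. A minimal stand-alone C model of that split; the helper name and the power-of-two block_size are illustration-only assumptions, not driver API:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Model of the hash_cnt/hash_rem split from ccp_do_sha_update(). */
    static void split_update(uint64_t buffered, uint64_t nbytes, int final,
                             unsigned int block_size,
                             uint64_t *hash_cnt, uint64_t *hash_rem)
    {
            uint64_t len = buffered + nbytes;

            /* On final, everything goes to the engine; otherwise the
             * sub-block tail stays in the context buffer. */
            *hash_rem = final ? 0 : len & (block_size - 1);
            *hash_cnt = len - *hash_rem;
            if (!final && !*hash_rem) {
                    /* Exact multiple: hold back one block for final(). */
                    *hash_cnt -= block_size;
                    *hash_rem = block_size;
            }
    }

    int main(void)
    {
            uint64_t cnt, rem;

            split_update(10, 200, 0, 64, &cnt, &rem);   /* 210 bytes total */
            printf("cnt=%llu rem=%llu\n", (unsigned long long)cnt,
                   (unsigned long long)rem);            /* 192 and 18 */
            split_update(0, 128, 0, 64, &cnt, &rem);    /* exact multiple */
            assert(cnt == 64 && rem == 64);             /* one block held back */
            return 0;
    }

The same computation appears verbatim at ccp-crypto-sha.c:85-90 and ccp-crypto-aes-cmac.c:92-97 above.
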
/linux-4.1.27/drivers/crypto/qce/

sha.c
     42  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_ahash_done() local
     54  qce_unmapsg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE,  qce_ahash_done()
     55  rctx->src_chained);  qce_ahash_done()
     56  qce_unmapsg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE, 0);  qce_ahash_done()
     58  memcpy(rctx->digest, result->auth_iv, digestsize);  qce_ahash_done()
     62  rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);  qce_ahash_done()
     63  rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);  qce_ahash_done()
     69  req->src = rctx->src_orig;  qce_ahash_done()
     70  req->nbytes = rctx->nbytes_orig;  qce_ahash_done()
     71  rctx->last_blk = false;  qce_ahash_done()
     72  rctx->first_blk = false;  qce_ahash_done()
     80  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_ahash_async_req_handle() local
     84  unsigned long flags = rctx->flags;  qce_ahash_async_req_handle()
     88  rctx->authkey = ctx->authkey;  qce_ahash_async_req_handle()
     89  rctx->authklen = QCE_SHA_HMAC_KEY_SIZE;  qce_ahash_async_req_handle()
     91  rctx->authkey = ctx->authkey;  qce_ahash_async_req_handle()
     92  rctx->authklen = AES_KEYSIZE_128;  qce_ahash_async_req_handle()
     95  rctx->src_nents = qce_countsg(req->src, req->nbytes,  qce_ahash_async_req_handle()
     96  &rctx->src_chained);  qce_ahash_async_req_handle()
     97  ret = qce_mapsg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE,  qce_ahash_async_req_handle()
     98  rctx->src_chained);  qce_ahash_async_req_handle()
    102  sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);  qce_ahash_async_req_handle()
    104  ret = qce_mapsg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE, 0);  qce_ahash_async_req_handle()
    108  ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents,  qce_ahash_async_req_handle()
    109  &rctx->result_sg, 1, qce_ahash_done, async_req);  qce_ahash_async_req_handle()
    124  qce_unmapsg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE, 0);  qce_ahash_async_req_handle()
    126  qce_unmapsg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE,  qce_ahash_async_req_handle()
    127  rctx->src_chained);  qce_ahash_async_req_handle()
    133  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_ahash_init() local
    137  memset(rctx, 0, sizeof(*rctx));  qce_ahash_init()
    138  rctx->first_blk = true;  qce_ahash_init()
    139  rctx->last_blk = false;  qce_ahash_init()
    140  rctx->flags = tmpl->alg_flags;  qce_ahash_init()
    141  memcpy(rctx->digest, std_iv, sizeof(rctx->digest));  qce_ahash_init()
    149  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_ahash_export() local
    150  unsigned long flags = rctx->flags;  qce_ahash_export()
    158  out_state->count = rctx->count;  qce_ahash_export()
    160  rctx->digest, digestsize);  qce_ahash_export()
    161  memcpy(out_state->buffer, rctx->buf, blocksize);  qce_ahash_export()
    165  out_state->count = rctx->count;  qce_ahash_export()
    167  rctx->digest, digestsize);  qce_ahash_export()
    168  memcpy(out_state->buf, rctx->buf, blocksize);  qce_ahash_export()
    180  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_import_common() local
    186  rctx->count = in_count;  qce_import_common()
    187  memcpy(rctx->buf, buffer, blocksize);  qce_import_common()
    190  rctx->first_blk = 1;  qce_import_common()
    192  rctx->first_blk = 0;  qce_import_common()
    202  rctx->byte_count[0] = (__force __be32)(count & ~SHA_PADDING_MASK);  qce_import_common()
    203  rctx->byte_count[1] = (__force __be32)(count >> 32);  qce_import_common()
    204  qce_cpu_to_be32p_array((__be32 *)rctx->digest, (const u8 *)state,  qce_import_common()
    206  rctx->buflen = (unsigned int)(in_count & (blocksize - 1));  qce_import_common()
    213  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_ahash_import() local
    214  unsigned long flags = rctx->flags;  qce_ahash_import()
    236  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_ahash_update() local
    246  rctx->count += req->nbytes;  qce_ahash_update()
    249  total = req->nbytes + rctx->buflen;  qce_ahash_update()
    252  scatterwalk_map_and_copy(rctx->buf + rctx->buflen, req->src,  qce_ahash_update()
    254  rctx->buflen += req->nbytes;  qce_ahash_update()
    259  rctx->src_orig = req->src;  qce_ahash_update()
    260  rctx->nbytes_orig = req->nbytes;  qce_ahash_update()
    266  if (rctx->buflen)  qce_ahash_update()
    267  memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);  qce_ahash_update()
    273  scatterwalk_map_and_copy(rctx->buf, req->src, src_offset,  qce_ahash_update()
    280  len = rctx->buflen;  qce_ahash_update()
    296  if (rctx->buflen) {  qce_ahash_update()
    297  sg_init_table(rctx->sg, 2);  qce_ahash_update()
    298  sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen);  qce_ahash_update()
    299  scatterwalk_sg_chain(rctx->sg, 2, req->src);  qce_ahash_update()
    300  req->src = rctx->sg;  qce_ahash_update()
    304  rctx->buflen = hash_later;  qce_ahash_update()
    311  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_ahash_final() local
    315  if (!rctx->buflen)  qce_ahash_final()
    318  rctx->last_blk = true;  qce_ahash_final()
    320  rctx->src_orig = req->src;  qce_ahash_final()
    321  rctx->nbytes_orig = req->nbytes;  qce_ahash_final()
    323  memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);  qce_ahash_final()
    324  sg_init_one(rctx->sg, rctx->tmpbuf, rctx->buflen);  qce_ahash_final()
    326  req->src = rctx->sg;  qce_ahash_final()
    327  req->nbytes = rctx->buflen;  qce_ahash_final()
    334  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_ahash_digest() local
    343  rctx->src_orig = req->src;  qce_ahash_digest()
    344  rctx->nbytes_orig = req->nbytes;  qce_ahash_digest()
    345  rctx->first_blk = true;  qce_ahash_digest()
    346  rctx->last_blk = true;  qce_ahash_digest()

ablkcipher.c
     29  struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);  qce_ablkcipher_done() local
     47  qce_unmapsg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src,  qce_ablkcipher_done()
     48  rctx->dst_chained);  qce_ablkcipher_done()
     49  qce_unmapsg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst,  qce_ablkcipher_done()
     50  rctx->dst_chained);  qce_ablkcipher_done()
     52  sg_free_table(&rctx->dst_tbl);  qce_ablkcipher_done()
     65  struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);  qce_ablkcipher_async_req_handle() local
     75  rctx->iv = req->info;  qce_ablkcipher_async_req_handle()
     76  rctx->ivsize = crypto_ablkcipher_ivsize(ablkcipher);  qce_ablkcipher_async_req_handle()
     77  rctx->cryptlen = req->nbytes;  qce_ablkcipher_async_req_handle()
     83  rctx->src_nents = qce_countsg(req->src, req->nbytes,  qce_ablkcipher_async_req_handle()
     84  &rctx->src_chained);  qce_ablkcipher_async_req_handle()
     86  rctx->dst_nents = qce_countsg(req->dst, req->nbytes,  qce_ablkcipher_async_req_handle()
     87  &rctx->dst_chained);  qce_ablkcipher_async_req_handle()
     89  rctx->dst_nents = rctx->src_nents;  qce_ablkcipher_async_req_handle()
     90  rctx->dst_chained = rctx->src_chained;  qce_ablkcipher_async_req_handle()
     93  rctx->dst_nents += 1;  qce_ablkcipher_async_req_handle()
     98  ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);  qce_ablkcipher_async_req_handle()
    102  sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);  qce_ablkcipher_async_req_handle()
    104  sg = qce_sgtable_add(&rctx->dst_tbl, req->dst);  qce_ablkcipher_async_req_handle()
    110  sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg);  qce_ablkcipher_async_req_handle()
    117  rctx->dst_sg = rctx->dst_tbl.sgl;  qce_ablkcipher_async_req_handle()
    119  ret = qce_mapsg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst,  qce_ablkcipher_async_req_handle()
    120  rctx->dst_chained);  qce_ablkcipher_async_req_handle()
    125  ret = qce_mapsg(qce->dev, req->src, rctx->src_nents, dir_src,  qce_ablkcipher_async_req_handle()
    126  rctx->src_chained);  qce_ablkcipher_async_req_handle()
    129  rctx->src_sg = req->src;  qce_ablkcipher_async_req_handle()
    131  rctx->src_sg = rctx->dst_sg;  qce_ablkcipher_async_req_handle()
    134  ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, rctx->src_nents,  qce_ablkcipher_async_req_handle()
    135  rctx->dst_sg, rctx->dst_nents,  qce_ablkcipher_async_req_handle()
    152  qce_unmapsg(qce->dev, req->src, rctx->src_nents, dir_src,  qce_ablkcipher_async_req_handle()
    153  rctx->src_chained);  qce_ablkcipher_async_req_handle()
    155  qce_unmapsg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst,  qce_ablkcipher_async_req_handle()
    156  rctx->dst_chained);  qce_ablkcipher_async_req_handle()
    158  sg_free_table(&rctx->dst_tbl);  qce_ablkcipher_async_req_handle()
    208  struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);  qce_ablkcipher_crypt() local
    212  rctx->flags = tmpl->alg_flags;  qce_ablkcipher_crypt()
    213  rctx->flags |= encrypt ? QCE_ENCRYPT : QCE_DECRYPT;  qce_ablkcipher_crypt()
    215  if (IS_AES(rctx->flags) && ctx->enc_keylen != AES_KEYSIZE_128 &&  qce_ablkcipher_crypt()

common.c
    235  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  qce_setup_regs_ahash() local
    246  if (!rctx->last_blk && req->nbytes % blocksize)  qce_setup_regs_ahash()
    251  if (IS_CMAC(rctx->flags)) {  qce_setup_regs_ahash()
    259  auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen);  qce_setup_regs_ahash()
    262  if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {  qce_setup_regs_ahash()
    263  u32 authkey_words = rctx->authklen / sizeof(u32);  qce_setup_regs_ahash()
    265  qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);  qce_setup_regs_ahash()
    270  if (IS_CMAC(rctx->flags))  qce_setup_regs_ahash()
    273  if (rctx->first_blk)  qce_setup_regs_ahash()
    274  memcpy(auth, rctx->digest, digestsize);  qce_setup_regs_ahash()
    276  qce_cpu_to_be32p_array(auth, rctx->digest, digestsize);  qce_setup_regs_ahash()
    278  iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8;  qce_setup_regs_ahash()
    281  if (rctx->first_blk)  qce_setup_regs_ahash()
    285  (u32 *)rctx->byte_count, 2);  qce_setup_regs_ahash()
    287  auth_cfg = qce_auth_cfg(rctx->flags, 0);  qce_setup_regs_ahash()
    289  if (rctx->last_blk)  qce_setup_regs_ahash()
    294  if (rctx->first_blk)  qce_setup_regs_ahash()
    319  struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);  qce_setup_regs_ablkcipher() local
    328  unsigned int ivsize = rctx->ivsize;  qce_setup_regs_ablkcipher()
    329  unsigned long flags = rctx->flags;  qce_setup_regs_ablkcipher()
    354  rctx->cryptlen);  qce_setup_regs_ablkcipher()
    364  qce_xts_swapiv(enciv, rctx->iv, ivsize);  qce_setup_regs_ablkcipher()
    366  qce_cpu_to_be32p_array(enciv, rctx->iv, ivsize);  qce_setup_regs_ablkcipher()
    375  qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);  qce_setup_regs_ablkcipher()

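A second pattern runs through the qce listings: before handing a request to the engine, qce_ahash_update()/qce_ahash_final() save the caller's req->src and req->nbytes into rctx->src_orig/rctx->nbytes_orig and repoint the request at a staged scatterlist, and qce_ahash_done() restores the saved values so the caller gets its request back unmodified. A stand-alone sketch of that save/rewrite/restore; the toy types and names are illustrative, not the crypto API:

    #include <assert.h>
    #include <stddef.h>

    struct toy_req { const void *src; size_t nbytes; };
    struct toy_rctx { const void *src_orig; size_t nbytes_orig; };

    static void stage(struct toy_req *req, struct toy_rctx *rctx,
                      const void *staged, size_t staged_len)
    {
            rctx->src_orig = req->src;       /* remember the caller's view */
            rctx->nbytes_orig = req->nbytes;
            req->src = staged;               /* hardware sees staged data */
            req->nbytes = staged_len;
    }

    static void unstage(struct toy_req *req, struct toy_rctx *rctx)
    {
            req->src = rctx->src_orig;       /* cf. qce_ahash_done() */
            req->nbytes = rctx->nbytes_orig;
    }

    int main(void)
    {
            char user[32], staged[64];
            struct toy_req req = { user, sizeof(user) };
            struct toy_rctx rctx;

            stage(&req, &rctx, staged, sizeof(staged));
            unstage(&req, &rctx);
            assert(req.src == user && req.nbytes == sizeof(user));
            return 0;
    }
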
/linux-4.1.27/arch/x86/crypto/sha-mb/

sha1_mb.c
     96  static void req_ctx_init(struct mcryptd_hash_request_ctx *rctx,  req_ctx_init() argument
     99  rctx->flag = HASH_UPDATE;  req_ctx_init()
    366  static int sha1_mb_set_results(struct mcryptd_hash_request_ctx *rctx)  sha1_mb_set_results() argument
    369  struct sha1_hash_ctx *sctx = shash_desc_ctx(&rctx->desc);  sha1_mb_set_results()
    370  __be32 *dst = (__be32 *) rctx->out;  sha1_mb_set_results()
    383  struct mcryptd_hash_request_ctx *rctx = *ret_rctx;  sha_finish_walk() local
    387  while (!(rctx->flag & HASH_DONE)) {  sha_finish_walk()
    388  nbytes = crypto_ahash_walk_done(&rctx->walk, 0);  sha_finish_walk()
    394  if (crypto_ahash_walk_last(&rctx->walk)) {  sha_finish_walk()
    395  rctx->flag |= HASH_DONE;  sha_finish_walk()
    396  if (rctx->flag & HASH_FINAL)  sha_finish_walk()
    400  sha_ctx = (struct sha1_hash_ctx *) shash_desc_ctx(&rctx->desc);  sha_finish_walk()
    402  sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag);  sha_finish_walk()
    409  rctx = cast_hash_to_mcryptd_ctx(sha_ctx);  sha_finish_walk()
    411  rctx = NULL;  sha_finish_walk()
    417  if (rctx->flag & HASH_FINAL)  sha_finish_walk()
    418  sha1_mb_set_results(rctx);  sha_finish_walk()
    421  *ret_rctx = rctx;  sha_finish_walk()
    425  static int sha_complete_job(struct mcryptd_hash_request_ctx *rctx,  sha_complete_job() argument
    429  struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx);  sha_complete_job()
    436  list_del(&rctx->waiter);  sha_complete_job()
    440  rctx->complete(&req->base, err);  sha_complete_job()
    443  rctx->complete(&req->base, err);  sha_complete_job()
    472  static void sha1_mb_add_list(struct mcryptd_hash_request_ctx *rctx,  sha1_mb_add_list() argument
    479  rctx->tag.arrival = jiffies;  /* tag the arrival time */  sha1_mb_add_list()
    480  rctx->tag.seq_num = cstate->next_seq_num++;  sha1_mb_add_list()
    481  next_flush = rctx->tag.arrival + delay;  sha1_mb_add_list()
    482  rctx->tag.expire = next_flush;  sha1_mb_add_list()
    485  list_add_tail(&rctx->waiter, &cstate->work_list);  sha1_mb_add_list()
    494  struct mcryptd_hash_request_ctx *rctx =  sha1_mb_update() local
    499  struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx);  sha1_mb_update()
    505  if (rctx->tag.cpu != smp_processor_id()) {  sha1_mb_update()
    511  req_ctx_init(rctx, desc);  sha1_mb_update()
    513  nbytes = crypto_ahash_walk_first(req, &rctx->walk);  sha1_mb_update()
    520  if (crypto_ahash_walk_last(&rctx->walk))  sha1_mb_update()
    521  rctx->flag |= HASH_DONE;  sha1_mb_update()
    525  sha1_mb_add_list(rctx, cstate);  sha1_mb_update()
    527  sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, HASH_UPDATE);  sha1_mb_update()
    536  rctx = cast_hash_to_mcryptd_ctx(sha_ctx);  sha1_mb_update()
    540  rctx = cast_hash_to_mcryptd_ctx(sha_ctx);  sha1_mb_update()
    541  ret = sha_finish_walk(&rctx, cstate, false);  sha1_mb_update()
    543  if (!rctx)  sha1_mb_update()
    546  sha_complete_job(rctx, cstate, ret);  sha1_mb_update()
    553  struct mcryptd_hash_request_ctx *rctx =  sha1_mb_finup() local
    558  struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx);  sha1_mb_finup()
    563  if (rctx->tag.cpu != smp_processor_id()) {  sha1_mb_finup()
    569  req_ctx_init(rctx, desc);  sha1_mb_finup()
    571  nbytes = crypto_ahash_walk_first(req, &rctx->walk);  sha1_mb_finup()
    578  if (crypto_ahash_walk_last(&rctx->walk)) {  sha1_mb_finup()
    579  rctx->flag |= HASH_DONE;  sha1_mb_finup()
    582  rctx->out = out;  sha1_mb_finup()
    585  rctx->flag |= HASH_FINAL;  sha1_mb_finup()
    587  sha1_mb_add_list(rctx, cstate);  sha1_mb_finup()
    590  sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag);  sha1_mb_finup()
    602  rctx = cast_hash_to_mcryptd_ctx(sha_ctx);  sha1_mb_finup()
    603  ret = sha_finish_walk(&rctx, cstate, false);  sha1_mb_finup()
    604  if (!rctx)  sha1_mb_finup()
    607  sha_complete_job(rctx, cstate, ret);  sha1_mb_finup()
    613  struct mcryptd_hash_request_ctx *rctx =  sha1_mb_final() local
    623  if (rctx->tag.cpu != smp_processor_id()) {  sha1_mb_final()
    629  req_ctx_init(rctx, desc);  sha1_mb_final()
    631  rctx->out = out;  sha1_mb_final()
    632  rctx->flag |= HASH_DONE | HASH_FINAL;  sha1_mb_final()
    636  sha1_mb_add_list(rctx, cstate);  sha1_mb_final()
    647  rctx = cast_hash_to_mcryptd_ctx(sha_ctx);  sha1_mb_final()
    651  rctx = cast_hash_to_mcryptd_ctx(sha_ctx);  sha1_mb_final()
    652  ret = sha_finish_walk(&rctx, cstate, false);  sha1_mb_final()
    653  if (!rctx)  sha1_mb_final()
    656  sha_complete_job(rctx, cstate, ret);  sha1_mb_final()
    823  struct mcryptd_hash_request_ctx *rctx;  sha1_mb_flusher() local
    832  rctx = list_entry(cstate->work_list.next,  sha1_mb_flusher()
    834  if (time_before(cur_time, rctx->tag.expire))  sha1_mb_flusher()
    843  rctx = cast_hash_to_mcryptd_ctx(sha_ctx);  sha1_mb_flusher()
    844  sha_finish_walk(&rctx, cstate, true);  sha1_mb_flusher()
    845  sha_complete_job(rctx, cstate, 0);  sha1_mb_flusher()
    849  rctx = list_entry(cstate->work_list.next,  sha1_mb_flusher()
    852  next_flush = rctx->tag.expire;  sha1_mb_flusher()

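sha1_mb batches jobs for the multi-buffer engine and uses rctx->tag to bound latency: sha1_mb_add_list() stamps each request with an arrival time and an expiry (arrival + delay), and sha1_mb_flusher() force-completes everything whose deadline has passed, then sleeps until the next expiry. A small user-space model of that deadline walk, using a fixed-size array instead of the kernel's work_list; the names are mine:

    #include <stdint.h>
    #include <stdio.h>

    struct toy_job { uint64_t expire; int done; };

    /* Complete every stale job in FIFO order; return the expiry of the
     * first fresh one (the next wakeup time), or 0 if the queue drained. */
    static uint64_t flush_expired(struct toy_job *q, int n, uint64_t now)
    {
            for (int i = 0; i < n; i++) {
                    if (q[i].done)
                            continue;
                    if (now < q[i].expire)
                            return q[i].expire;  /* cf. time_before() check */
                    q[i].done = 1;               /* force-complete stale job */
            }
            return 0;
    }

    int main(void)
    {
            struct toy_job q[3] = { { 5, 0 }, { 9, 0 }, { 14, 0 } };

            printf("next flush at %llu\n",
                   (unsigned long long)flush_expired(q, 3, 10));  /* 14 */
            return 0;
    }
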
/linux-4.1.27/kernel/events/

callchain.c
    135  static struct perf_callchain_entry *get_callchain_entry(int *rctx)  get_callchain_entry() argument
    140  *rctx = get_recursion_context(this_cpu_ptr(callchain_recursion));  get_callchain_entry()
    141  if (*rctx == -1)  get_callchain_entry()
    150  return &entries->cpu_entries[cpu][*rctx];  get_callchain_entry()
    154  put_callchain_entry(int rctx)  put_callchain_entry() argument
    156  put_recursion_context(this_cpu_ptr(callchain_recursion), rctx);  put_callchain_entry()
    162  int rctx;  perf_callchain() local
    171  entry = get_callchain_entry(&rctx);  perf_callchain()
    172  if (rctx == -1)  perf_callchain()
    206  put_callchain_entry(rctx);  perf_callchain()

internal.h
    199  int rctx;  get_recursion_context() local
    202  rctx = 3;  get_recursion_context()
    204  rctx = 2;  get_recursion_context()
    206  rctx = 1;  get_recursion_context()
    208  rctx = 0;  get_recursion_context()
    210  if (recursion[rctx])  get_recursion_context()
    213  recursion[rctx]++;  get_recursion_context()
    216  return rctx;  get_recursion_context()
    219  static inline void put_recursion_context(int *recursion, int rctx)  put_recursion_context() argument
    222  recursion[rctx]--;  put_recursion_context()

core.c
   4818  int rctx;  perf_pending_event() local
   4820  rctx = perf_swevent_get_recursion_context();  perf_pending_event()
   4836  if (rctx >= 0)  perf_pending_event()
   4837  perf_swevent_put_recursion_context(rctx);  perf_pending_event()
   6389  inline void perf_swevent_put_recursion_context(int rctx)  perf_swevent_put_recursion_context() argument
   6393  put_recursion_context(swhash->recursion, rctx);  perf_swevent_put_recursion_context()
   6409  int rctx;  __perf_sw_event() local
   6412  rctx = perf_swevent_get_recursion_context();  __perf_sw_event()
   6413  if (unlikely(rctx < 0))  __perf_sw_event()
   6418  perf_swevent_put_recursion_context(rctx);  __perf_sw_event()
   6656  struct pt_regs *regs, struct hlist_head *head, int rctx,  perf_tp_event()
   6700  perf_swevent_put_recursion_context(rctx);
   6655  perf_tp_event(u64 addr, u64 count, void *record, int entry_size, struct pt_regs *regs, struct hlist_head *head, int rctx, struct task_struct *task)  perf_tp_event() argument

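The kernel/events listings are a different use of the name: here rctx is an index into a small per-CPU recursion array, one slot per context level, so a software event firing while another event at the same level is being handled on the same CPU is dropped instead of recursing. get_recursion_context() in internal.h derives the level from the interrupt state (NMI, hardirq, softirq, task); the user-space model below takes the level as a parameter instead, and is a stand-in rather than the kernel function:

    #include <stdio.h>

    static int recursion[4];   /* one counter per context level */

    static int get_recursion_context(int level)   /* 0..3, cf. rctx */
    {
            if (recursion[level])
                    return -1;       /* already inside: drop the event */
            recursion[level]++;
            return level;
    }

    static void put_recursion_context(int rctx)
    {
            recursion[rctx]--;
    }

    int main(void)
    {
            int rctx = get_recursion_context(1);

            printf("first:  %d\n", rctx);                     /* 1  */
            printf("nested: %d\n", get_recursion_context(1)); /* -1 */
            put_recursion_context(rctx);
            return 0;
    }
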
/linux-4.1.27/crypto/

rmd256.c
    237  struct rmd256_ctx *rctx = shash_desc_ctx(desc);  rmd256_init() local
    239  rctx->byte_count = 0;  rmd256_init()
    241  rctx->state[0] = RMD_H0;  rmd256_init()
    242  rctx->state[1] = RMD_H1;  rmd256_init()
    243  rctx->state[2] = RMD_H2;  rmd256_init()
    244  rctx->state[3] = RMD_H3;  rmd256_init()
    245  rctx->state[4] = RMD_H5;  rmd256_init()
    246  rctx->state[5] = RMD_H6;  rmd256_init()
    247  rctx->state[6] = RMD_H7;  rmd256_init()
    248  rctx->state[7] = RMD_H8;  rmd256_init()
    250  memset(rctx->buffer, 0, sizeof(rctx->buffer));  rmd256_init()
    258  struct rmd256_ctx *rctx = shash_desc_ctx(desc);  rmd256_update() local
    259  const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);  rmd256_update()
    261  rctx->byte_count += len;  rmd256_update()
    265  memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),  rmd256_update()
    270  memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),  rmd256_update()
    273  rmd256_transform(rctx->state, rctx->buffer);  rmd256_update()
    277  while (len >= sizeof(rctx->buffer)) {  rmd256_update()
    278  memcpy(rctx->buffer, data, sizeof(rctx->buffer));  rmd256_update()
    279  rmd256_transform(rctx->state, rctx->buffer);  rmd256_update()
    280  data += sizeof(rctx->buffer);  rmd256_update()
    281  len -= sizeof(rctx->buffer);  rmd256_update()
    284  memcpy(rctx->buffer, data, len);  rmd256_update()
    293  struct rmd256_ctx *rctx = shash_desc_ctx(desc);  rmd256_final() local
    299  bits = cpu_to_le64(rctx->byte_count << 3);  rmd256_final()
    302  index = rctx->byte_count & 0x3f;  rmd256_final()
    311  dst[i] = cpu_to_le32p(&rctx->state[i]);  rmd256_final()
    314  memset(rctx, 0, sizeof(*rctx));  rmd256_final()

rmd128.c
    222  struct rmd128_ctx *rctx = shash_desc_ctx(desc);  rmd128_init() local
    224  rctx->byte_count = 0;  rmd128_init()
    226  rctx->state[0] = RMD_H0;  rmd128_init()
    227  rctx->state[1] = RMD_H1;  rmd128_init()
    228  rctx->state[2] = RMD_H2;  rmd128_init()
    229  rctx->state[3] = RMD_H3;  rmd128_init()
    231  memset(rctx->buffer, 0, sizeof(rctx->buffer));  rmd128_init()
    239  struct rmd128_ctx *rctx = shash_desc_ctx(desc);  rmd128_update() local
    240  const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);  rmd128_update()
    242  rctx->byte_count += len;  rmd128_update()
    246  memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),  rmd128_update()
    251  memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),  rmd128_update()
    254  rmd128_transform(rctx->state, rctx->buffer);  rmd128_update()
    258  while (len >= sizeof(rctx->buffer)) {  rmd128_update()
    259  memcpy(rctx->buffer, data, sizeof(rctx->buffer));  rmd128_update()
    260  rmd128_transform(rctx->state, rctx->buffer);  rmd128_update()
    261  data += sizeof(rctx->buffer);  rmd128_update()
    262  len -= sizeof(rctx->buffer);  rmd128_update()
    265  memcpy(rctx->buffer, data, len);  rmd128_update()
    274  struct rmd128_ctx *rctx = shash_desc_ctx(desc);  rmd128_final() local
    280  bits = cpu_to_le64(rctx->byte_count << 3);  rmd128_final()
    283  index = rctx->byte_count & 0x3f;  rmd128_final()
    292  dst[i] = cpu_to_le32p(&rctx->state[i]);  rmd128_final()
    295  memset(rctx, 0, sizeof(*rctx));  rmd128_final()

mcryptd.c
    101  struct mcryptd_hash_request_ctx *rctx)  mcryptd_enqueue_request()
    108  rctx->tag.cpu = cpu;  mcryptd_enqueue_request()
    322  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_hash_enqueue() local
    327  rctx->complete = req->base.complete;  mcryptd_hash_enqueue()
    330  ret = mcryptd_enqueue_request(queue, &req->base, rctx);  mcryptd_hash_enqueue()
    340  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_hash_init() local
    341  struct shash_desc *desc = &rctx->desc;  mcryptd_hash_init()
    351  req->base.complete = rctx->complete;  mcryptd_hash_init()
    355  rctx->complete(&req->base, err);  mcryptd_hash_init()
    367  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_hash_update() local
    372  err = shash_ahash_mcryptd_update(req, &rctx->desc);  mcryptd_hash_update()
    374  req->base.complete = rctx->complete;  mcryptd_hash_update()
    381  rctx->complete(&req->base, err);  mcryptd_hash_update()
    393  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_hash_final() local
    398  err = shash_ahash_mcryptd_final(req, &rctx->desc);  mcryptd_hash_final()
    400  req->base.complete = rctx->complete;  mcryptd_hash_final()
    407  rctx->complete(&req->base, err);  mcryptd_hash_final()
    419  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_hash_finup() local
    424  err = shash_ahash_mcryptd_finup(req, &rctx->desc);  mcryptd_hash_finup()
    427  req->base.complete = rctx->complete;  mcryptd_hash_finup()
    434  rctx->complete(&req->base, err);  mcryptd_hash_finup()
    448  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_hash_digest() local
    449  struct shash_desc *desc = &rctx->desc;  mcryptd_hash_digest()
    460  req->base.complete = rctx->complete;  mcryptd_hash_digest()
    467  rctx->complete(&req->base, err);  mcryptd_hash_digest()
    478  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_hash_export() local
    480  return crypto_shash_export(&rctx->desc, out);  mcryptd_hash_export()
    485  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_hash_import() local
    487  return crypto_shash_import(&rctx->desc, in);  mcryptd_hash_import()
    676  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  mcryptd_shash_desc() local
    677  return &rctx->desc;  mcryptd_shash_desc()
     99  mcryptd_enqueue_request(struct mcryptd_queue *queue, struct crypto_async_request *request, struct mcryptd_hash_request_ctx *rctx)  mcryptd_enqueue_request() argument

cryptd.c
    209  struct cryptd_blkcipher_request_ctx *rctx;  cryptd_blkcipher_crypt() local
    212  rctx = ablkcipher_request_ctx(req);  cryptd_blkcipher_crypt()
    223  req->base.complete = rctx->complete;  cryptd_blkcipher_crypt()
    227  rctx->complete(&req->base, err);  cryptd_blkcipher_crypt()
    252  struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);  cryptd_blkcipher_enqueue() local
    257  rctx->complete = req->base.complete;  cryptd_blkcipher_enqueue()
    439  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  cryptd_hash_enqueue() local
    444  rctx->complete = req->base.complete;  cryptd_hash_enqueue()
    455  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  cryptd_hash_init() local
    456  struct shash_desc *desc = &rctx->desc;  cryptd_hash_init()
    466  req->base.complete = rctx->complete;  cryptd_hash_init()
    470  rctx->complete(&req->base, err);  cryptd_hash_init()
    482  struct cryptd_hash_request_ctx *rctx;  cryptd_hash_update() local
    484  rctx = ahash_request_ctx(req);  cryptd_hash_update()
    489  err = shash_ahash_update(req, &rctx->desc);  cryptd_hash_update()
    491  req->base.complete = rctx->complete;  cryptd_hash_update()
    495  rctx->complete(&req->base, err);  cryptd_hash_update()
    507  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  cryptd_hash_final() local
    512  err = crypto_shash_final(&rctx->desc, req->result);  cryptd_hash_final()
    514  req->base.complete = rctx->complete;  cryptd_hash_final()
    518  rctx->complete(&req->base, err);  cryptd_hash_final()
    530  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  cryptd_hash_finup() local
    535  err = shash_ahash_finup(req, &rctx->desc);  cryptd_hash_finup()
    537  req->base.complete = rctx->complete;  cryptd_hash_finup()
    541  rctx->complete(&req->base, err);  cryptd_hash_finup()
    555  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  cryptd_hash_digest() local
    556  struct shash_desc *desc = &rctx->desc;  cryptd_hash_digest()
    566  req->base.complete = rctx->complete;  cryptd_hash_digest()
    570  rctx->complete(&req->base, err);  cryptd_hash_digest()
    581  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  cryptd_hash_export() local
    583  return crypto_shash_export(&rctx->desc, out);  cryptd_hash_export()
    588  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  cryptd_hash_import() local
    590  return crypto_shash_import(&rctx->desc, in);  cryptd_hash_import()
    662  struct cryptd_aead_request_ctx *rctx;  cryptd_aead_crypt() local
    663  rctx = aead_request_ctx(req);  cryptd_aead_crypt()
    669  req->base.complete = rctx->complete;  cryptd_aead_crypt()
    672  rctx->complete(&req->base, err);  cryptd_aead_crypt()
    699  struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);  cryptd_aead_enqueue() local
    703  rctx->complete = req->base.complete;  cryptd_aead_enqueue()
    920  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  cryptd_shash_desc() local
    921  return &rctx->desc;  cryptd_shash_desc()

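cryptd and mcryptd juggle the completion callback the same way: the enqueue path parks req->base.complete in rctx->complete, and the worker restores it just before invoking it, so the caller's callback runs exactly once with the final error code. A stand-alone model of that hand-off, with toy types rather than the crypto API:

    #include <stdio.h>

    struct toy_base;
    typedef void (*toy_cb)(struct toy_base *, int);
    struct toy_base { toy_cb complete; };
    struct toy_rctx { toy_cb complete; };

    static void user_done(struct toy_base *base, int err)
    {
            (void)base;
            printf("request finished, err=%d\n", err);
    }

    /* Enqueue side: park the caller's callback in the request context
     * (cf. rctx->complete = req->base.complete in cryptd_hash_enqueue). */
    static void enqueue(struct toy_base *base, struct toy_rctx *rctx)
    {
            rctx->complete = base->complete;
    }

    /* Worker side: restore the callback, then invoke it with the result. */
    static void worker(struct toy_base *base, struct toy_rctx *rctx, int err)
    {
            base->complete = rctx->complete;
            rctx->complete(base, err);
    }

    int main(void)
    {
            struct toy_base base = { user_done };
            struct toy_rctx rctx;

            enqueue(&base, &rctx);
            worker(&base, &rctx, 0);   /* in the kernel this runs async */
            return 0;
    }
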
rmd320.c
    284  struct rmd320_ctx *rctx = shash_desc_ctx(desc);  rmd320_init() local
    286  rctx->byte_count = 0;  rmd320_init()
    288  rctx->state[0] = RMD_H0;  rmd320_init()
    289  rctx->state[1] = RMD_H1;  rmd320_init()
    290  rctx->state[2] = RMD_H2;  rmd320_init()
    291  rctx->state[3] = RMD_H3;  rmd320_init()
    292  rctx->state[4] = RMD_H4;  rmd320_init()
    293  rctx->state[5] = RMD_H5;  rmd320_init()
    294  rctx->state[6] = RMD_H6;  rmd320_init()
    295  rctx->state[7] = RMD_H7;  rmd320_init()
    296  rctx->state[8] = RMD_H8;  rmd320_init()
    297  rctx->state[9] = RMD_H9;  rmd320_init()
    299  memset(rctx->buffer, 0, sizeof(rctx->buffer));  rmd320_init()
    307  struct rmd320_ctx *rctx = shash_desc_ctx(desc);  rmd320_update() local
    308  const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);  rmd320_update()
    310  rctx->byte_count += len;  rmd320_update()
    314  memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),  rmd320_update()
    319  memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),  rmd320_update()
    322  rmd320_transform(rctx->state, rctx->buffer);  rmd320_update()
    326  while (len >= sizeof(rctx->buffer)) {  rmd320_update()
    327  memcpy(rctx->buffer, data, sizeof(rctx->buffer));  rmd320_update()
    328  rmd320_transform(rctx->state, rctx->buffer);  rmd320_update()
    329  data += sizeof(rctx->buffer);  rmd320_update()
    330  len -= sizeof(rctx->buffer);  rmd320_update()
    333  memcpy(rctx->buffer, data, len);  rmd320_update()
    342  struct rmd320_ctx *rctx = shash_desc_ctx(desc);  rmd320_final() local
    348  bits = cpu_to_le64(rctx->byte_count << 3);  rmd320_final()
    351  index = rctx->byte_count & 0x3f;  rmd320_final()
    360  dst[i] = cpu_to_le32p(&rctx->state[i]);  rmd320_final()
    363  memset(rctx, 0, sizeof(*rctx));  rmd320_final()

rmd160.c
    265  struct rmd160_ctx *rctx = shash_desc_ctx(desc);  rmd160_init() local
    267  rctx->byte_count = 0;  rmd160_init()
    269  rctx->state[0] = RMD_H0;  rmd160_init()
    270  rctx->state[1] = RMD_H1;  rmd160_init()
    271  rctx->state[2] = RMD_H2;  rmd160_init()
    272  rctx->state[3] = RMD_H3;  rmd160_init()
    273  rctx->state[4] = RMD_H4;  rmd160_init()
    275  memset(rctx->buffer, 0, sizeof(rctx->buffer));  rmd160_init()
    283  struct rmd160_ctx *rctx = shash_desc_ctx(desc);  rmd160_update() local
    284  const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);  rmd160_update()
    286  rctx->byte_count += len;  rmd160_update()
    290  memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),  rmd160_update()
    295  memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),  rmd160_update()
    298  rmd160_transform(rctx->state, rctx->buffer);  rmd160_update()
    302  while (len >= sizeof(rctx->buffer)) {  rmd160_update()
    303  memcpy(rctx->buffer, data, sizeof(rctx->buffer));  rmd160_update()
    304  rmd160_transform(rctx->state, rctx->buffer);  rmd160_update()
    305  data += sizeof(rctx->buffer);  rmd160_update()
    306  len -= sizeof(rctx->buffer);  rmd160_update()
    309  memcpy(rctx->buffer, data, len);  rmd160_update()
    318  struct rmd160_ctx *rctx = shash_desc_ctx(desc);  rmd160_final() local
    324  bits = cpu_to_le64(rctx->byte_count << 3);  rmd160_final()
    327  index = rctx->byte_count & 0x3f;  rmd160_final()
    336  dst[i] = cpu_to_le32p(&rctx->state[i]);  rmd160_final()
    339  memset(rctx, 0, sizeof(*rctx));  rmd160_final()

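rmd128/160/256/320 differ only in state width and compression function; their update paths are token-for-token the same buffering loop (note the repeated byte_count/avail arithmetic above). A stand-alone skeleton of that loop, with toy_transform() standing in for the rmdXXX_transform() compression step:

    #include <stdint.h>
    #include <string.h>

    #define BLKSZ 64u   /* all four RIPEMD variants use 64-byte blocks */

    struct toy_ctx { uint64_t byte_count; uint8_t buffer[BLKSZ]; };

    static void toy_transform(struct toy_ctx *ctx) { (void)ctx; /* compress */ }

    static void toy_update(struct toy_ctx *ctx, const uint8_t *data, size_t len)
    {
            size_t avail = BLKSZ - (ctx->byte_count & 0x3f);

            ctx->byte_count += len;
            if (avail > len) {                       /* tail fits in buffer */
                    memcpy(ctx->buffer + (BLKSZ - avail), data, len);
                    return;
            }
            memcpy(ctx->buffer + (BLKSZ - avail), data, avail);
            toy_transform(ctx);                      /* first full block */
            data += avail;
            len -= avail;
            while (len >= BLKSZ) {                   /* whole blocks */
                    memcpy(ctx->buffer, data, BLKSZ);
                    toy_transform(ctx);
                    data += BLKSZ;
                    len -= BLKSZ;
            }
            memcpy(ctx->buffer, data, len);          /* keep the remainder */
    }
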
gcm.c
   1107  struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req);  crypto_rfc4543_done() local
   1110  scatterwalk_map_and_copy(rctx->auth_tag, req->dst,  crypto_rfc4543_done()
   1123  struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req);  crypto_rfc4543_crypt() local
   1124  struct aead_request *subreq = &rctx->subreq;  crypto_rfc4543_crypt()
   1126  struct scatterlist *cipher = rctx->cipher;  crypto_rfc4543_crypt()
   1127  struct scatterlist *payload = rctx->payload;  crypto_rfc4543_crypt()
   1128  struct scatterlist *assoc = rctx->assoc;  crypto_rfc4543_crypt()
   1133  u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),  crypto_rfc4543_crypt()
   1141  memset(rctx->auth_tag, 0, authsize);  crypto_rfc4543_crypt()
   1143  scatterwalk_map_and_copy(rctx->auth_tag, src,  crypto_rfc4543_crypt()
   1147  sg_init_one(cipher, rctx->auth_tag, authsize);  crypto_rfc4543_crypt()
   1163  BUG_ON(req->assoclen > sizeof(rctx->assocbuf));  crypto_rfc4543_crypt()
   1165  scatterwalk_map_and_copy(rctx->assocbuf, req->assoc, 0,  crypto_rfc4543_crypt()
   1169  sg_set_buf(assoc, rctx->assocbuf, req->assoclen);  crypto_rfc4543_crypt()
   1198  struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req);  crypto_rfc4543_encrypt() local
   1213  scatterwalk_map_and_copy(rctx->auth_tag, req->dst, req->cryptlen,  crypto_rfc4543_encrypt()

ctr.c
    284  struct crypto_rfc3686_req_ctx *rctx =  crypto_rfc3686_crypt() local
    286  struct ablkcipher_request *subreq = &rctx->subreq;  crypto_rfc3686_crypt()
    287  u8 *iv = rctx->iv;  crypto_rfc3686_crypt()

/linux-4.1.27/drivers/crypto/

sahara.c
    581  struct sahara_aes_reqctx *rctx;  sahara_aes_process() local
    595  rctx = ablkcipher_request_ctx(req);  sahara_aes_process()
    597  rctx->mode &= FLAGS_MODE_MASK;  sahara_aes_process()
    598  dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;  sahara_aes_process()
    666  struct sahara_aes_reqctx *rctx = ablkcipher_request_ctx(req);  sahara_aes_crypt() local
    679  rctx->mode = mode;  sahara_aes_crypt()
    789  struct sahara_sha_reqctx *rctx)  sahara_sha_init_hdr()
    793  hdr = rctx->mode;  sahara_sha_init_hdr()
    795  if (rctx->first) {  sahara_sha_init_hdr()
    802  if (rctx->last)  sahara_sha_init_hdr()
    812  struct sahara_sha_reqctx *rctx,  sahara_sha_hw_links_create()
    819  dev->in_sg = rctx->in_sg;  sahara_sha_hw_links_create()
    821  dev->nb_in_sg = sahara_sg_length(dev->in_sg, rctx->total);  sahara_sha_hw_links_create()
    828  if (rctx->in_sg_chained) {  sahara_sha_hw_links_create()
    867  struct sahara_sha_reqctx *rctx,  sahara_sha_hw_data_descriptor_create()
    874  if (rctx->first)  sahara_sha_hw_data_descriptor_create()
    876  dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx);  sahara_sha_hw_data_descriptor_create()
    881  dev->hw_desc[index]->len1 = rctx->total;  sahara_sha_hw_data_descriptor_create()
    885  rctx->sg_in_idx = 0;  sahara_sha_hw_data_descriptor_create()
    889  i = sahara_sha_hw_links_create(dev, rctx, index);  sahara_sha_hw_data_descriptor_create()
    891  rctx->sg_in_idx = index;  sahara_sha_hw_data_descriptor_create()
    899  result_len = rctx->context_size;  sahara_sha_hw_data_descriptor_create()
    920  struct sahara_sha_reqctx *rctx,  sahara_sha_hw_context_descriptor_create()
    924  dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx);  sahara_sha_hw_context_descriptor_create()
    926  dev->hw_desc[index]->len1 = rctx->context_size;  sahara_sha_hw_context_descriptor_create()
    931  dev->hw_link[index]->len = rctx->context_size;  sahara_sha_hw_context_descriptor_create()
    959  struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);  sahara_sha_prepare_request() local
    967  len = rctx->buf_cnt + req->nbytes;  sahara_sha_prepare_request()
    970  if (!rctx->last && (len < block_size)) {  sahara_sha_prepare_request()
    972  scatterwalk_map_and_copy(rctx->buf + rctx->buf_cnt, req->src,  sahara_sha_prepare_request()
    974  rctx->buf_cnt += req->nbytes;  sahara_sha_prepare_request()
    980  if (rctx->buf_cnt)  sahara_sha_prepare_request()
    981  memcpy(rctx->rembuf, rctx->buf, rctx->buf_cnt);  sahara_sha_prepare_request()
    984  hash_later = rctx->last ? 0 : len & (block_size - 1);  sahara_sha_prepare_request()
    988  scatterwalk_map_and_copy(rctx->buf, req->src, offset,  sahara_sha_prepare_request()
    998  if (rctx->buf_cnt && req->nbytes) {  sahara_sha_prepare_request()
    999  sg_init_table(rctx->in_sg_chain, 2);  sahara_sha_prepare_request()
   1000  sg_set_buf(rctx->in_sg_chain, rctx->rembuf, rctx->buf_cnt);  sahara_sha_prepare_request()
   1002  scatterwalk_sg_chain(rctx->in_sg_chain, 2, req->src);  sahara_sha_prepare_request()
   1004  rctx->total = req->nbytes + rctx->buf_cnt;  sahara_sha_prepare_request()
   1005  rctx->in_sg = rctx->in_sg_chain;  sahara_sha_prepare_request()
   1007  rctx->in_sg_chained = true;  sahara_sha_prepare_request()
   1008  req->src = rctx->in_sg_chain;  sahara_sha_prepare_request()
   1010  } else if (rctx->buf_cnt) {  sahara_sha_prepare_request()
   1012  rctx->in_sg = req->src;  sahara_sha_prepare_request()
   1014  rctx->in_sg = rctx->in_sg_chain;  sahara_sha_prepare_request()
   1016  sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt);  sahara_sha_prepare_request()
   1017  rctx->total = rctx->buf_cnt;  sahara_sha_prepare_request()
   1018  rctx->in_sg_chained = false;  sahara_sha_prepare_request()
   1021  rctx->in_sg = req->src;  sahara_sha_prepare_request()
   1022  rctx->total = req->nbytes;  sahara_sha_prepare_request()
   1023  req->src = rctx->in_sg;  sahara_sha_prepare_request()
   1024  rctx->in_sg_chained = false;  sahara_sha_prepare_request()
   1028  rctx->buf_cnt = hash_later;  sahara_sha_prepare_request()
   1034  struct sahara_sha_reqctx *rctx)  sahara_sha_unmap_sg()
   1038  if (rctx->in_sg_chained) {  sahara_sha_unmap_sg()
   1053  struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);  sahara_sha_process() local
   1061  if (rctx->first) {  sahara_sha_process()
   1062  sahara_sha_hw_data_descriptor_create(dev, rctx, req, 0);  sahara_sha_process()
   1064  rctx->first = 0;  sahara_sha_process()
   1066  memcpy(dev->context_base, rctx->context, rctx->context_size);  sahara_sha_process()
   1068  sahara_sha_hw_context_descriptor_create(dev, rctx, req, 0);  sahara_sha_process()
   1070  sahara_sha_hw_data_descriptor_create(dev, rctx, req, 1);  sahara_sha_process()
   1088  if (rctx->sg_in_idx)  sahara_sha_process()
   1089  sahara_sha_unmap_sg(dev, rctx);  sahara_sha_process()
   1091  memcpy(rctx->context, dev->context_base, rctx->context_size);  sahara_sha_process()
   1094  memcpy(req->result, rctx->context, rctx->digest_size);  sahara_sha_process()
   1144  struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);  sahara_sha_enqueue() local
   1151  mutex_lock(&rctx->mutex);  sahara_sha_enqueue()
   1152  rctx->last = last;  sahara_sha_enqueue()
   1154  if (!rctx->active) {  sahara_sha_enqueue()
   1155  rctx->active = 1;  sahara_sha_enqueue()
   1156  rctx->first = 1;  sahara_sha_enqueue()
   1164  mutex_unlock(&rctx->mutex);  sahara_sha_enqueue()
   1172  struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);  sahara_sha_init() local
   1174  memset(rctx, 0, sizeof(*rctx));  sahara_sha_init()
   1178  rctx->mode |= SAHARA_HDR_MDHA_ALG_SHA1;  sahara_sha_init()
   1179  rctx->digest_size = SHA1_DIGEST_SIZE;  sahara_sha_init()
   1182  rctx->mode |= SAHARA_HDR_MDHA_ALG_SHA256;  sahara_sha_init()
   1183  rctx->digest_size = SHA256_DIGEST_SIZE;  sahara_sha_init()
   1189  rctx->context_size = rctx->digest_size + 4;  sahara_sha_init()
   1190  rctx->active = 0;  sahara_sha_init()
   1192  mutex_init(&rctx->mutex);  sahara_sha_init()
   1224  struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);  sahara_sha_export() local
   1227  memcpy(out + sizeof(struct sahara_sha_reqctx), rctx,  sahara_sha_export()
   1237  struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);  sahara_sha_import() local
   1240  memcpy(rctx, in + sizeof(struct sahara_sha_reqctx),  sahara_sha_import()
    788  sahara_sha_init_hdr(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx)  sahara_sha_init_hdr() argument
    811  sahara_sha_hw_links_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, int start)  sahara_sha_hw_links_create() argument
    866  sahara_sha_hw_data_descriptor_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, struct ahash_request *req, int index)  sahara_sha_hw_data_descriptor_create() argument
    919  sahara_sha_hw_context_descriptor_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, struct ahash_request *req, int index)  sahara_sha_hw_context_descriptor_create() argument
   1033  sahara_sha_unmap_sg(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx)  sahara_sha_unmap_sg() argument

n2_core.c
    305  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  n2_hash_async_init() local
    309  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  n2_hash_async_init()
    310  rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;  n2_hash_async_init()
    312  return crypto_ahash_init(&rctx->fallback_req);  n2_hash_async_init()
    317  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  n2_hash_async_update() local
    321  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  n2_hash_async_update()
    322  rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;  n2_hash_async_update()
    323  rctx->fallback_req.nbytes = req->nbytes;  n2_hash_async_update()
    324  rctx->fallback_req.src = req->src;  n2_hash_async_update()
    326  return crypto_ahash_update(&rctx->fallback_req);  n2_hash_async_update()
    331  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  n2_hash_async_final() local
    335  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  n2_hash_async_final()
    336  rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;  n2_hash_async_final()
    337  rctx->fallback_req.result = req->result;  n2_hash_async_final()
    339  return crypto_ahash_final(&rctx->fallback_req);  n2_hash_async_final()
    344  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  n2_hash_async_finup() local
    348  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  n2_hash_async_finup()
    349  rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;  n2_hash_async_finup()
    350  rctx->fallback_req.nbytes = req->nbytes;  n2_hash_async_finup()
    351  rctx->fallback_req.src = req->src;  n2_hash_async_finup()
    352  rctx->fallback_req.result = req->result;  n2_hash_async_finup()
    354  return crypto_ahash_finup(&rctx->fallback_req);  n2_hash_async_finup()
    523  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  n2_do_async_digest() local
    526  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  n2_do_async_digest()
    527  rctx->fallback_req.base.flags =  n2_do_async_digest()
    529  rctx->fallback_req.nbytes = req->nbytes;  n2_do_async_digest()
    530  rctx->fallback_req.src = req->src;  n2_do_async_digest()
    531  rctx->fallback_req.result = req->result;  n2_do_async_digest()
    533  return crypto_ahash_digest(&rctx->fallback_req);  n2_do_async_digest()
    598  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  n2_hash_async_digest() local
    606  memcpy(&rctx->u, n2alg->hash_init, n2alg->hw_op_hashsz);  n2_hash_async_digest()
    610  &rctx->u, 0UL, 0);  n2_hash_async_digest()
    616  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  n2_hmac_async_digest() local
    624  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  n2_hmac_async_digest() local
    627  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  n2_hmac_async_digest()
    628  rctx->fallback_req.base.flags =  n2_hmac_async_digest()
    630  rctx->fallback_req.nbytes = req->nbytes;  n2_hmac_async_digest()
    631  rctx->fallback_req.src = req->src;  n2_hmac_async_digest()
    632  rctx->fallback_req.result = req->result;  n2_hmac_async_digest()
    634  return crypto_ahash_digest(&rctx->fallback_req);  n2_hmac_async_digest()
    636  memcpy(&rctx->u, n2alg->derived.hash_init,  n2_hmac_async_digest()
    641  &rctx->u,  n2_hmac_async_digest()
    875  struct n2_request_context *rctx = ablkcipher_request_ctx(req);  n2_compute_chunks() local
    876  struct ablkcipher_walk *walk = &rctx->walk;  n2_compute_chunks()
    888  INIT_LIST_HEAD(&rctx->chunk_list);  n2_compute_chunks()
    890  chunk = &rctx->chunk;  n2_compute_chunks()
    921  &rctx->chunk_list);  n2_compute_chunks()
    948  list_add_tail(&chunk->entry, &rctx->chunk_list);  n2_compute_chunks()
    956  struct n2_request_context *rctx = ablkcipher_request_ctx(req);  n2_chunk_complete() local
    960  memcpy(rctx->walk.iv, final_iv, rctx->walk.blocksize);  n2_chunk_complete()
    962  ablkcipher_walk_complete(&rctx->walk);  n2_chunk_complete()
    963  list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {  n2_chunk_complete()
    965  if (unlikely(c != &rctx->chunk))  n2_chunk_complete()
    973  struct n2_request_context *rctx = ablkcipher_request_ctx(req);  n2_do_ecb() local
    990  list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {  n2_do_ecb()
    995  if (unlikely(c != &rctx->chunk))  n2_do_ecb()
   1025  struct n2_request_context *rctx = ablkcipher_request_ctx(req);  n2_do_chaining() local
   1046  iv_paddr = __pa(rctx->walk.iv);  n2_do_chaining()
   1047  list_for_each_entry_safe(c, tmp, &rctx->chunk_list,  n2_do_chaining()
   1053  iv_paddr = c->dest_final - rctx->walk.blocksize;  n2_do_chaining()
   1055  if (unlikely(c != &rctx->chunk))  n2_do_chaining()
   1060  list_for_each_entry_safe_reverse(c, tmp, &rctx->chunk_list,  n2_do_chaining()
   1062  if (c == &rctx->chunk) {  n2_do_chaining()
   1063  iv_paddr = __pa(rctx->walk.iv);  n2_do_chaining()
   1067  rctx->walk.blocksize);  n2_do_chaining()
   1074  rctx->walk.blocksize);  n2_do_chaining()
   1075  final_iv_addr = rctx->temp_iv;  n2_do_chaining()
   1076  memcpy(rctx->temp_iv, __va(pa),  n2_do_chaining()
   1077  rctx->walk.blocksize);  n2_do_chaining()
   1084  if (unlikely(c != &rctx->chunk))  n2_do_chaining()

mxs-dcp.c
    201  struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  mxs_dcp_run_aes() local
    220  if (rctx->enc)  mxs_dcp_run_aes()
    227  if (rctx->ecb)  mxs_dcp_run_aes()
    255  struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  mxs_dcp_aes_block_crypt() local
    280  if (!rctx->ecb) {  mxs_dcp_aes_block_crypt()
    400  struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  mxs_dcp_aes_enqueue() local
    406  rctx->enc = enc;  mxs_dcp_aes_enqueue()
    407  rctx->ecb = ecb;  mxs_dcp_aes_enqueue()
    516  struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req);  mxs_dcp_run_sha() local
    529  if (rctx->init)  mxs_dcp_run_sha()
    541  if (rctx->fini) {  mxs_dcp_run_sha()
    550  if (rctx->fini)  mxs_dcp_run_sha()
    566  struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req);  dcp_sha_req_to_buf() local
    579  int fin = rctx->fini;  dcp_sha_req_to_buf()
    581  rctx->fini = 0;  dcp_sha_req_to_buf()
    607  rctx->init = 0;  dcp_sha_req_to_buf()
    613  rctx->fini = 1;  dcp_sha_req_to_buf()
    643  struct dcp_sha_req_ctx *rctx;  dcp_chan_thread_sha() local
    661  rctx = ahash_request_ctx(req);  dcp_chan_thread_sha()
    664  fini = rctx->fini;  dcp_chan_thread_sha()
    707  struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req);  dcp_sha_update_fx() local
    722  rctx->fini = fini;  dcp_sha_update_fx()
    726  rctx->init = 1;  dcp_sha_update_fx()

img-hash.c
    484  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);  img_hash_init() local
    487  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  img_hash_init()
    488  rctx->fallback_req.base.flags = req->base.flags  img_hash_init()
    491  return crypto_ahash_init(&rctx->fallback_req);  img_hash_init()
    547  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);  img_hash_update() local
    551  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  img_hash_update()
    552  rctx->fallback_req.base.flags = req->base.flags  img_hash_update()
    554  rctx->fallback_req.nbytes = req->nbytes;  img_hash_update()
    555  rctx->fallback_req.src = req->src;  img_hash_update()
    557  return crypto_ahash_update(&rctx->fallback_req);  img_hash_update()
    562  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);  img_hash_final() local
    566  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  img_hash_final()
    567  rctx->fallback_req.base.flags = req->base.flags  img_hash_final()
    569  rctx->fallback_req.result = req->result;  img_hash_final()
    571  return crypto_ahash_final(&rctx->fallback_req);  img_hash_final()
    576  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);  img_hash_finup() local
    580  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  img_hash_finup()
    581  rctx->fallback_req.base.flags = req->base.flags  img_hash_finup()
    583  rctx->fallback_req.nbytes = req->nbytes;  img_hash_finup()
    584  rctx->fallback_req.src = req->src;  img_hash_finup()
    585  rctx->fallback_req.result = req->result;  img_hash_finup()
    587  return crypto_ahash_finup(&rctx->fallback_req);  img_hash_finup()

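n2_core.c and img-hash.c show the fallback idiom: the request context embeds an entire second request (rctx->fallback_req) aimed at a software transform, and each entry point copies the caller's fields into it before delegating, forwarding only the MAY_SLEEP flag. A compile-only sketch of the mirroring with toy types; the real code does this through ahash_request_set_tfm() and the crypto_ahash_*() calls on the embedded request:

    #include <stddef.h>

    /* Toy stand-ins: the point is the field mirroring, not the crypto API. */
    struct toy_ahash_req {
            unsigned int flags;
            const void *src;
            size_t nbytes;
            void *result;
    };
    struct toy_rctx { struct toy_ahash_req fallback_req; };

    #define TOY_REQ_MAY_SLEEP 0x1u

    /* Mirror the user's request into the embedded fallback request,
     * masking the flags down to MAY_SLEEP as the drivers above do. */
    static struct toy_ahash_req *prep_fallback(struct toy_ahash_req *req,
                                               struct toy_rctx *rctx)
    {
            struct toy_ahash_req *f = &rctx->fallback_req;

            f->flags = req->flags & TOY_REQ_MAY_SLEEP;
            f->src = req->src;
            f->nbytes = req->nbytes;
            f->result = req->result;
            return f;   /* caller then runs the software hash on f */
    }

Embedding the fallback request in the per-request context (rather than allocating one) is what lets these drivers delegate without any extra allocation on the fast path.
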
hifn_795x.c
   1166  struct hifn_context *ctx, struct hifn_request_context *rctx,  hifn_setup_cmd_desc()
   1178  switch (rctx->op) {  hifn_setup_cmd_desc()
   1195  if (rctx->op == ACRYPTO_OP_ENCRYPT || rctx->op == ACRYPTO_OP_DECRYPT) {  hifn_setup_cmd_desc()
   1200  if (rctx->iv && rctx->mode != ACRYPTO_MODE_ECB)  hifn_setup_cmd_desc()
   1203  switch (rctx->mode) {  hifn_setup_cmd_desc()
   1220  switch (rctx->type) {  hifn_setup_cmd_desc()
   1255  rctx->iv, rctx->ivsize, md);  hifn_setup_cmd_desc()
   1371  struct hifn_context *ctx, struct hifn_request_context *rctx,  hifn_setup_dma()
   1392  t = &rctx->walk.cache[0];  hifn_setup_dma()
   1395  if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  hifn_setup_dma()
   1415  hifn_setup_cmd_desc(dev, ctx, rctx, priv, nbytes);  hifn_setup_dma()
   1578  struct hifn_request_context *rctx = ablkcipher_request_ctx(req);  hifn_setup_session() local
   1585  if (rctx->iv && !rctx->ivsize && rctx->mode != ACRYPTO_MODE_ECB)  hifn_setup_session()
   1588  rctx->walk.flags = 0;  hifn_setup_session()
   1596  rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;  hifn_setup_session()
   1602  if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  hifn_setup_session()
   1603  err = hifn_cipher_walk_init(&rctx->walk, idx, GFP_ATOMIC);  hifn_setup_session()
   1608  sg_num = hifn_cipher_walk(req, &rctx->walk);  hifn_setup_session()
   1620  err = hifn_setup_dma(dev, ctx, rctx, req->src, req->dst, req->nbytes, req);  hifn_setup_session()
   1637  dev->name, rctx->iv, rctx->ivsize,  hifn_setup_session()
   1639  rctx->mode, rctx->op, rctx->type, err);  hifn_setup_session()
   1650  struct hifn_request_context rctx;  hifn_test() local
   1663  rctx.ivsize = 0;  hifn_test()
   1664  rctx.iv = NULL;  hifn_test()
   1665  rctx.op = (encdec)?ACRYPTO_OP_ENCRYPT:ACRYPTO_OP_DECRYPT;  hifn_test()
   1666  rctx.mode = ACRYPTO_MODE_ECB;  hifn_test()
   1667  rctx.type = ACRYPTO_TYPE_AES_128;  hifn_test()
   1668  rctx.walk.cache[0].length = 0;  hifn_test()
   1672  err = hifn_setup_dma(dev, &ctx, &rctx, &sg, &sg, sizeof(src), NULL);  hifn_test()
   1772  struct hifn_request_context *rctx = ablkcipher_request_ctx(req);  hifn_process_ready() local
   1774  if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  hifn_process_ready()
   1781  t = &rctx->walk.cache[idx];  hifn_process_ready()
   1809  hifn_cipher_walk_exit(&rctx->walk);  hifn_process_ready()
   2101  struct hifn_request_context *rctx = ablkcipher_request_ctx(req);  hifn_setup_crypto_req() local
   2122  rctx->op = op;  hifn_setup_crypto_req()
   2123  rctx->mode = mode;  hifn_setup_crypto_req()
   2124  rctx->type = type;  hifn_setup_crypto_req()
   2125  rctx->iv = req->info;  hifn_setup_crypto_req()
   2126  rctx->ivsize = ivsize;  hifn_setup_crypto_req()
   1165  hifn_setup_cmd_desc(struct hifn_device *dev, struct hifn_context *ctx, struct hifn_request_context *rctx, void *priv, unsigned int nbytes)  hifn_setup_cmd_desc() argument
   1370  hifn_setup_dma(struct hifn_device *dev, struct hifn_context *ctx, struct hifn_request_context *rctx, struct scatterlist *src, struct scatterlist *dst, unsigned int nbytes, void *priv)  hifn_setup_dma() argument

H A D | atmel-aes.c | 567 struct atmel_aes_reqctx *rctx; atmel_aes_handle_queue() local
600 rctx = ablkcipher_request_ctx(req); atmel_aes_handle_queue()
602 rctx->mode &= AES_FLAGS_MODE_MASK; atmel_aes_handle_queue()
603 dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode; atmel_aes_handle_queue()
710 struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req); atmel_aes_crypt() local
749 rctx->mode = mode; atmel_aes_crypt()
|
H A D | atmel-tdes.c | 593 struct atmel_tdes_reqctx *rctx; atmel_tdes_handle_queue() local
626 rctx = ablkcipher_request_ctx(req); atmel_tdes_handle_queue()
628 rctx->mode &= TDES_FLAGS_MODE_MASK; atmel_tdes_handle_queue()
629 dd->flags = (dd->flags & ~TDES_FLAGS_MODE_MASK) | rctx->mode; atmel_tdes_handle_queue()
675 struct atmel_tdes_reqctx *rctx = ablkcipher_request_ctx(req); atmel_tdes_crypt() local
703 rctx->mode = mode; atmel_tdes_crypt()
|
H A D | omap-aes.c | 612 struct omap_aes_reqctx *rctx; omap_aes_handle_queue() local
657 rctx = ablkcipher_request_ctx(req); omap_aes_handle_queue()
659 rctx->mode &= FLAGS_MODE_MASK; omap_aes_handle_queue()
660 dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode; omap_aes_handle_queue()
722 struct omap_aes_reqctx *rctx = ablkcipher_request_ctx(req); omap_aes_crypt() local
738 rctx->mode = mode; omap_aes_crypt()
|
H A D | omap-des.c | 594 struct omap_des_reqctx *rctx; omap_des_handle_queue() local
639 rctx = ablkcipher_request_ctx(req); omap_des_handle_queue()
641 rctx->mode &= FLAGS_MODE_MASK; omap_des_handle_queue()
642 dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode; omap_des_handle_queue()
704 struct omap_des_reqctx *rctx = ablkcipher_request_ctx(req); omap_des_crypt() local
720 rctx->mode = mode; omap_des_crypt()
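The four entries above (atmel-aes.c, atmel-tdes.c, omap-aes.c, omap-des.c) repeat one idiom: crypt() records the requested mode in the request context, and handle_queue() later merges just the mode bits into the shared device flags, leaving state bits untouched. A stand-alone, runnable sketch of that flag-merge; the mask values and struct names are illustrative, only the two masking statements are taken from the drivers.

/* Stand-alone sketch; FLAGS_* values and my_* names are assumptions. */
#include <stdio.h>

#define FLAGS_MODE_MASK	0x000fUL	/* low bits: cipher mode */
#define FLAGS_BUSY	0x0100UL	/* unrelated device state bit */

struct my_reqctx { unsigned long mode; };
struct my_device_data { unsigned long flags; };

static void my_handle_queue(struct my_device_data *dd,
			    struct my_reqctx *rctx)
{
	/* Drop any stray bits a caller may have left in rctx->mode ... */
	rctx->mode &= FLAGS_MODE_MASK;
	/* ... then replace only the mode bits in the device flags,
	 * preserving state bits such as FLAGS_BUSY. */
	dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;
}

int main(void)
{
	struct my_device_data dd = { .flags = FLAGS_BUSY | 0x3 };
	struct my_reqctx rctx = { .mode = 0x5 };

	my_handle_queue(&dd, &rctx);
	printf("flags = 0x%lx\n", dd.flags);	/* 0x105: busy bit kept */
	return 0;
}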
|
/linux-4.1.27/drivers/crypto/nx/ |
H A D | nx-aes-gcm.c | 333 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm_aes_nx_crypt() local
343 desc.info = rctx->iv; gcm_aes_nx_crypt()
438 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm_aes_nx_encrypt() local
439 char *iv = rctx->iv; gcm_aes_nx_encrypt()
448 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm_aes_nx_decrypt() local
449 char *iv = rctx->iv; gcm_aes_nx_decrypt()
459 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm4106_aes_nx_encrypt() local
460 char *iv = rctx->iv; gcm4106_aes_nx_encrypt()
472 struct nx_gcm_rctx *rctx = aead_request_ctx(req); gcm4106_aes_nx_decrypt() local
473 char *iv = rctx->iv; gcm4106_aes_nx_decrypt()
|
H A D | nx-aes-ccm.c | 497 struct nx_gcm_rctx *rctx = aead_request_ctx(req); ccm4309_aes_nx_encrypt() local
499 u8 *iv = rctx->iv; ccm4309_aes_nx_encrypt()
529 struct nx_gcm_rctx *rctx = aead_request_ctx(req); ccm4309_aes_nx_decrypt() local
531 u8 *iv = rctx->iv; ccm4309_aes_nx_decrypt()
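Both nx entries fetch rctx->iv because the RFC 4106/4309 wrappers assemble the on-the-wire nonce there: a salt fixed at setkey() time followed by the caller's 8-byte explicit IV, and the assembled buffer must stay valid for the whole asynchronous operation, so it lives in the request context rather than on the stack. A runnable sketch of the assembly, assuming the RFC 4106 layout (4-byte salt + 8-byte explicit IV = 12-byte GCM nonce); all names are illustrative.

/* Stand-alone sketch; sizes follow RFC 4106, names are assumptions. */
#include <stdio.h>
#include <string.h>

#define SALT_LEN	4	/* fixed at setkey() time */
#define EXPLICIT_IV_LEN	8	/* supplied with each request */

struct my_aead_rctx {
	unsigned char iv[SALT_LEN + EXPLICIT_IV_LEN];
};

static void assemble_rfc4106_iv(struct my_aead_rctx *rctx,
				const unsigned char *salt,
				const unsigned char *req_iv)
{
	/* rctx->iv outlives the caller's stack frame, which is why the
	 * drivers keep it in the request context. */
	memcpy(rctx->iv, salt, SALT_LEN);
	memcpy(rctx->iv + SALT_LEN, req_iv, EXPLICIT_IV_LEN);
}

int main(void)
{
	struct my_aead_rctx rctx;
	unsigned char salt[SALT_LEN] = { 0xca, 0xfe, 0xba, 0xbe };
	unsigned char req_iv[EXPLICIT_IV_LEN] = { 1, 2, 3, 4, 5, 6, 7, 8 };
	size_t i;

	assemble_rfc4106_iv(&rctx, salt, req_iv);
	for (i = 0; i < sizeof(rctx.iv); i++)
		printf("%02x", rctx.iv[i]);
	printf("\n");
	return 0;
}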
|
/linux-4.1.27/kernel/trace/ |
H A D | trace_syscalls.c | 554 int rctx; perf_syscall_enter() local
577 sys_data->enter_event->event.type, NULL, &rctx); perf_syscall_enter()
584 perf_trace_buf_submit(rec, size, rctx, 0, 1, regs, head, NULL); perf_syscall_enter()
628 int rctx; perf_syscall_exit() local
650 sys_data->exit_event->event.type, NULL, &rctx); perf_syscall_exit()
656 perf_trace_buf_submit(rec, size, rctx, 0, 1, regs, head, NULL); perf_syscall_exit()
|
H A D | trace_event_perf.c | 308 int rctx; perf_ftrace_function_call() local
321 entry = perf_trace_buf_prepare(ENTRY_SIZE, TRACE_FN, NULL, &rctx); perf_ftrace_function_call()
327 perf_trace_buf_submit(entry, ENTRY_SIZE, rctx, 0, perf_ftrace_function_call()
|
H A D | trace_kprobe.c | 1142 int rctx; kprobe_perf_func() local
1156 entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx); kprobe_perf_func()
1163 perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL); kprobe_perf_func()
1177 int rctx; kretprobe_perf_func() local
1191 entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx); kretprobe_perf_func()
1198 perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL); kretprobe_perf_func()
|
H A D | trace_uprobe.c | 1101 int rctx; __uprobe_perf_func() local 1115 entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx); __uprobe_perf_func() 1136 perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL); __uprobe_perf_func()
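In the four trace_*.c entries above, rctx is not a crypto request context but a perf recursion-context index: perf_trace_buf_prepare() hands out a per-CPU scratch buffer together with the index, and perf_trace_buf_submit() consumes both. A sketch of that prepare/fill/submit shape under the 4.1-era signatures; the probe body, entry layout and my_* names are assumptions.

/* Illustrative probe body; only the two perf_trace_buf_* calls are the
 * real 4.1-era interfaces (declared in linux/ftrace_event.h). */
#include <linux/ftrace_event.h>
#include <linux/perf_event.h>

struct my_trace_entry {
	struct trace_entry	ent;
	unsigned long		data;
};

static void my_perf_probe(u16 event_type, struct pt_regs *regs,
			  struct hlist_head *head)
{
	struct my_trace_entry *entry;
	int size = ALIGN(sizeof(*entry), sizeof(u64));	/* callers align */
	int rctx;

	/* Reserve a per-CPU buffer; on success, rctx names the
	 * recursion context that submit must release. */
	entry = perf_trace_buf_prepare(size, event_type, NULL, &rctx);
	if (!entry)
		return;

	entry->data = 42;	/* fill in the record */

	/* addr = 0, count = 1, no task filter, as in the hits above. */
	perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL);
}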
|
/linux-4.1.27/drivers/crypto/amcc/ |
H A D | crypto4xx_core.h | 172 struct crypto4xx_ctx *rctx);
173 extern void crypto4xx_free_sa_rctx(struct crypto4xx_ctx *rctx);
|
/linux-4.1.27/include/trace/ |
H A D | ftrace.h | 812 int rctx; \
826 event_call->event.type, &__regs, &rctx); \
836 perf_trace_buf_submit(entry, __entry_size, rctx, __addr, \
|
/linux-4.1.27/include/linux/ |
H A D | ftrace_event.h | 619 perf_trace_buf_submit(void *raw_data, int size, int rctx, u64 addr, perf_trace_buf_submit() argument
623 perf_tp_event(addr, count, raw_data, size, regs, head, rctx, task); perf_trace_buf_submit()
|
H A D | perf_event.h | 872 struct hlist_head *head, int rctx,
905 extern void perf_swevent_put_recursion_context(int rctx);
959 static inline void perf_swevent_put_recursion_context(int rctx) { } perf_swevent_set_period() argument
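perf_event.h is where the rctx passed around by the trace code is defined: it is the index returned by perf_swevent_get_recursion_context(), the counterpart of the perf_swevent_put_recursion_context() declared at line 905 above (with an empty stub when perf is configured out, as the line-959 hit shows). A minimal sketch of the pairing discipline, with the event body elided and the caller name assumed:

/* Illustrative caller; the two recursion-context calls are the real
 * <linux/perf_event.h> interfaces. */
#include <linux/perf_event.h>

static void my_swevent(void)
{
	int rctx;

	rctx = perf_swevent_get_recursion_context();
	if (rctx < 0)
		return;		/* already inside a same-level event: drop */

	/* ... build and emit the software event ... */

	perf_swevent_put_recursion_context(rctx);	/* always pair */
}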
|