/linux-4.4.14/drivers/crypto/ccp/

D | ccp-crypto-sha.c |
      30  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);  in ccp_sha_complete() local
      36  if (rctx->hash_rem) {  in ccp_sha_complete()
      38  unsigned int offset = rctx->nbytes - rctx->hash_rem;  in ccp_sha_complete()
      40  scatterwalk_map_and_copy(rctx->buf, rctx->src,  in ccp_sha_complete()
      41  offset, rctx->hash_rem, 0);  in ccp_sha_complete()
      42  rctx->buf_count = rctx->hash_rem;  in ccp_sha_complete()
      44  rctx->buf_count = 0;  in ccp_sha_complete()
      49  memcpy(req->result, rctx->ctx, digest_size);  in ccp_sha_complete()
      52  sg_free_table(&rctx->data_sg);  in ccp_sha_complete()
      62  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);  in ccp_do_sha_update() local
      [all …]

D | ccp-crypto-aes-cmac.c |
      31  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);  in ccp_aes_cmac_complete() local
      37  if (rctx->hash_rem) {  in ccp_aes_cmac_complete()
      39  unsigned int offset = rctx->nbytes - rctx->hash_rem;  in ccp_aes_cmac_complete()
      41  scatterwalk_map_and_copy(rctx->buf, rctx->src,  in ccp_aes_cmac_complete()
      42  offset, rctx->hash_rem, 0);  in ccp_aes_cmac_complete()
      43  rctx->buf_count = rctx->hash_rem;  in ccp_aes_cmac_complete()
      45  rctx->buf_count = 0;  in ccp_aes_cmac_complete()
      50  memcpy(req->result, rctx->iv, digest_size);  in ccp_aes_cmac_complete()
      53  sg_free_table(&rctx->data_sg);  in ccp_aes_cmac_complete()
      63  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);  in ccp_do_cmac_update() local
      [all …]

D | ccp-crypto-aes.c |
      29  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_aes_complete() local
      35  memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_complete()
      73  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_aes_crypt() local
      91  memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);  in ccp_aes_crypt()
      92  iv_sg = &rctx->iv_sg;  in ccp_aes_crypt()
      94  sg_init_one(iv_sg, rctx->iv, iv_len);  in ccp_aes_crypt()
      97  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_crypt()
      98  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_crypt()
      99  rctx->cmd.engine = CCP_ENGINE_AES;  in ccp_aes_crypt()
     100  rctx->cmd.u.aes.type = ctx->u.aes.type;  in ccp_aes_crypt()
      [all …]

D | ccp-crypto-aes-xts.c |
      87  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_aes_xts_complete() local
      92  memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_xts_complete()
     123  struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in ccp_aes_xts_crypt() local
     160  memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);  in ccp_aes_xts_crypt()
     161  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_xts_crypt()
     163  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_xts_crypt()
     164  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_xts_crypt()
     165  rctx->cmd.engine = CCP_ENGINE_XTS_AES_128;  in ccp_aes_xts_crypt()
     166  rctx->cmd.u.xts.action = (encrypt) ? CCP_AES_ACTION_ENCRYPT  in ccp_aes_xts_crypt()
     168  rctx->cmd.u.xts.unit_size = unit_size;  in ccp_aes_xts_crypt()
      [all …]
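All four ccp entries share one shape: the driver's per-request state
(rctx) is carved out of the request itself via ahash_request_ctx() /
ablkcipher_request_ctx(), and the completion callback buffers any
unhashed tail in rctx->buf before copying the digest out. A minimal
sketch of that completion path, reassembled from the fragments above
(the error path and the e_free label are assumptions, since the hits
elide the glue between the listed lines):

    static int ccp_sha_complete_sketch(struct crypto_async_request *async_req,
                                       int ret)
    {
            struct ahash_request *req = ahash_request_cast(async_req);
            struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);
            unsigned int digest_size =
                    crypto_ahash_digestsize(crypto_ahash_reqtfm(req));

            if (ret)
                    goto e_free;            /* assumed error exit */

            if (rctx->hash_rem) {
                    /* Buffer the unhashed tail for the next update. */
                    unsigned int offset = rctx->nbytes - rctx->hash_rem;

                    scatterwalk_map_and_copy(rctx->buf, rctx->src,
                                             offset, rctx->hash_rem, 0);
                    rctx->buf_count = rctx->hash_rem;
            } else {
                    rctx->buf_count = 0;
            }

            if (req->result)
                    memcpy(req->result, rctx->ctx, digest_size);

    e_free:
            sg_free_table(&rctx->data_sg);
            return ret;
    }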
/linux-4.4.14/drivers/crypto/qce/

D | sha.c |
      42  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  in qce_ahash_done() local
      54  dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);  in qce_ahash_done()
      55  dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);  in qce_ahash_done()
      57  memcpy(rctx->digest, result->auth_iv, digestsize);  in qce_ahash_done()
      61  rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);  in qce_ahash_done()
      62  rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);  in qce_ahash_done()
      68  req->src = rctx->src_orig;  in qce_ahash_done()
      69  req->nbytes = rctx->nbytes_orig;  in qce_ahash_done()
      70  rctx->last_blk = false;  in qce_ahash_done()
      71  rctx->first_blk = false;  in qce_ahash_done()
      [all …]

D | ablkcipher.c |
      29  struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);  in qce_ablkcipher_done() local
      47  dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);  in qce_ablkcipher_done()
      48  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);  in qce_ablkcipher_done()
      50  sg_free_table(&rctx->dst_tbl);  in qce_ablkcipher_done()
      63  struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);  in qce_ablkcipher_async_req_handle() local
      73  rctx->iv = req->info;  in qce_ablkcipher_async_req_handle()
      74  rctx->ivsize = crypto_ablkcipher_ivsize(ablkcipher);  in qce_ablkcipher_async_req_handle()
      75  rctx->cryptlen = req->nbytes;  in qce_ablkcipher_async_req_handle()
      81  rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);  in qce_ablkcipher_async_req_handle()
      83  rctx->dst_nents = sg_nents_for_len(req->dst, req->nbytes);  in qce_ablkcipher_async_req_handle()
      [all …]

D | common.c |
     235  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);  in qce_setup_regs_ahash() local
     246  if (!rctx->last_blk && req->nbytes % blocksize)  in qce_setup_regs_ahash()
     251  if (IS_CMAC(rctx->flags)) {  in qce_setup_regs_ahash()
     259  auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen);  in qce_setup_regs_ahash()
     262  if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {  in qce_setup_regs_ahash()
     263  u32 authkey_words = rctx->authklen / sizeof(u32);  in qce_setup_regs_ahash()
     265  qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);  in qce_setup_regs_ahash()
     270  if (IS_CMAC(rctx->flags))  in qce_setup_regs_ahash()
     273  if (rctx->first_blk)  in qce_setup_regs_ahash()
     274  memcpy(auth, rctx->digest, digestsize);  in qce_setup_regs_ahash()
      [all …]
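The sha.c hits outline the driver's done-callback in full: unmap the
DMA scatterlists, pull the intermediate digest and byte counters out
of the device's result descriptor, then restore the request fields the
driver rewrote while walking block-aligned chunks. Condensed from the
fragments (qce, result and digestsize are the driver's own locals):

            struct qce_sha_reqctx *rctx = ahash_request_ctx(req);

            dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
            dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);

            /* Carry the partial digest and byte count into the next pass. */
            memcpy(rctx->digest, result->auth_iv, digestsize);
            rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);
            rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);

            /* Undo the temporary rewrite of the caller's request. */
            req->src = rctx->src_orig;
            req->nbytes = rctx->nbytes_orig;
            rctx->last_blk = false;
            rctx->first_blk = false;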
/linux-4.4.14/arch/x86/crypto/sha-mb/

D | sha1_mb.c |
      93  static void req_ctx_init(struct mcryptd_hash_request_ctx *rctx,  in req_ctx_init() argument
      96  rctx->flag = HASH_UPDATE;  in req_ctx_init()
     363  static int sha1_mb_set_results(struct mcryptd_hash_request_ctx *rctx)  in sha1_mb_set_results() argument
     366  struct sha1_hash_ctx *sctx = shash_desc_ctx(&rctx->desc);  in sha1_mb_set_results()
     367  __be32 *dst = (__be32 *) rctx->out;  in sha1_mb_set_results()
     380  struct mcryptd_hash_request_ctx *rctx = *ret_rctx;  in sha_finish_walk() local
     384  while (!(rctx->flag & HASH_DONE)) {  in sha_finish_walk()
     385  nbytes = crypto_ahash_walk_done(&rctx->walk, 0);  in sha_finish_walk()
     391  if (crypto_ahash_walk_last(&rctx->walk)) {  in sha_finish_walk()
     392  rctx->flag |= HASH_DONE;  in sha_finish_walk()
      [all …]
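sha_finish_walk() drives the multibuffer state machine: it keeps
consuming the request's data until the ahash walk reports the last
chunk, then flags the request context done. Skeleton of the loop as
the fragments suggest (the per-chunk submission body between the
listed lines is elided in the hits and omitted here too):

            while (!(rctx->flag & HASH_DONE)) {
                    nbytes = crypto_ahash_walk_done(&rctx->walk, 0);
                    if (nbytes < 0)
                            break;          /* assumed error exit */

                    if (crypto_ahash_walk_last(&rctx->walk))
                            rctx->flag |= HASH_DONE;

                    /* ... feed the chunk to the SHA-1 job manager ... */
            }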
/linux-4.4.14/crypto/

D | chacha20poly1305.c |
      99  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  in poly_verify_tag() local
     100  u8 tag[sizeof(rctx->tag)];  in poly_verify_tag()
     103  req->assoclen + rctx->cryptlen,  in poly_verify_tag()
     105  if (crypto_memneq(tag, rctx->tag, sizeof(tag)))  in poly_verify_tag()
     112  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  in poly_copy_tag() local
     114  scatterwalk_map_and_copy(rctx->tag, req->dst,  in poly_copy_tag()
     115  req->assoclen + rctx->cryptlen,  in poly_copy_tag()
     116  sizeof(rctx->tag), 1);  in poly_copy_tag()
     128  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);  in chacha_decrypt() local
     129  struct chacha_req *creq = &rctx->u.chacha;  in chacha_decrypt()
      [all …]
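poly_verify_tag() and poly_copy_tag() are mirror images: both locate
the tag immediately after AAD plus ciphertext, one reading it from
req->src for comparison, the other writing rctx->tag out to req->dst.
The verify side, reassembled almost verbatim from the fragments:

    static int poly_verify_tag_sketch(struct aead_request *req)
    {
            struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
            u8 tag[sizeof(rctx->tag)];

            /* The tag sits right after AAD + ciphertext in req->src. */
            scatterwalk_map_and_copy(tag, req->src,
                                     req->assoclen + rctx->cryptlen,
                                     sizeof(tag), 0);
            if (crypto_memneq(tag, rctx->tag, sizeof(tag)))
                    return -EBADMSG;
            return 0;
    }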
D | rmd256.c |
     237  struct rmd256_ctx *rctx = shash_desc_ctx(desc);  in rmd256_init() local
     239  rctx->byte_count = 0;  in rmd256_init()
     241  rctx->state[0] = RMD_H0;  in rmd256_init()
     242  rctx->state[1] = RMD_H1;  in rmd256_init()
     243  rctx->state[2] = RMD_H2;  in rmd256_init()
     244  rctx->state[3] = RMD_H3;  in rmd256_init()
     245  rctx->state[4] = RMD_H5;  in rmd256_init()
     246  rctx->state[5] = RMD_H6;  in rmd256_init()
     247  rctx->state[6] = RMD_H7;  in rmd256_init()
     248  rctx->state[7] = RMD_H8;  in rmd256_init()
      [all …]
D | mcryptd.c |
     101  struct mcryptd_hash_request_ctx *rctx)  in mcryptd_enqueue_request() argument
     108  rctx->tag.cpu = cpu;  in mcryptd_enqueue_request()
     322  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  in mcryptd_hash_enqueue() local
     327  rctx->complete = req->base.complete;  in mcryptd_hash_enqueue()
     330  ret = mcryptd_enqueue_request(queue, &req->base, rctx);  in mcryptd_hash_enqueue()
     340  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  in mcryptd_hash_init() local
     341  struct shash_desc *desc = &rctx->desc;  in mcryptd_hash_init()
     351  req->base.complete = rctx->complete;  in mcryptd_hash_init()
     355  rctx->complete(&req->base, err);  in mcryptd_hash_init()
     367  struct mcryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  in mcryptd_hash_update() local
      [all …]
D | rmd128.c |
     222  struct rmd128_ctx *rctx = shash_desc_ctx(desc);  in rmd128_init() local
     224  rctx->byte_count = 0;  in rmd128_init()
     226  rctx->state[0] = RMD_H0;  in rmd128_init()
     227  rctx->state[1] = RMD_H1;  in rmd128_init()
     228  rctx->state[2] = RMD_H2;  in rmd128_init()
     229  rctx->state[3] = RMD_H3;  in rmd128_init()
     231  memset(rctx->buffer, 0, sizeof(rctx->buffer));  in rmd128_init()
     239  struct rmd128_ctx *rctx = shash_desc_ctx(desc);  in rmd128_update() local
     240  const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);  in rmd128_update()
     242  rctx->byte_count += len;  in rmd128_update()
      [all …]
D | cryptd.c |
     208  struct cryptd_blkcipher_request_ctx *rctx;  in cryptd_blkcipher_crypt() local
     211  rctx = ablkcipher_request_ctx(req);  in cryptd_blkcipher_crypt()
     222  req->base.complete = rctx->complete;  in cryptd_blkcipher_crypt()
     226  rctx->complete(&req->base, err);  in cryptd_blkcipher_crypt()
     251  struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);  in cryptd_blkcipher_enqueue() local
     256  rctx->complete = req->base.complete;  in cryptd_blkcipher_enqueue()
     448  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  in cryptd_hash_enqueue() local
     453  rctx->complete = req->base.complete;  in cryptd_hash_enqueue()
     464  struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);  in cryptd_hash_init() local
     465  struct shash_desc *desc = &rctx->desc;  in cryptd_hash_init()
      [all …]
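cryptd.c and mcryptd.c show the two halves of the same trick: at
enqueue time the caller's completion is stashed in the request context
and the daemon's own callback substituted; when the worker finishes it
restores the original completion and invokes it. Skeleton from the
fragments (my_done and the queue hand-off stand in for the
per-algorithm plumbing, which the hits elide):

            /* enqueue side */
            rctx->complete = req->base.complete;
            req->base.complete = my_done;           /* hypothetical callback */
            /* ... hand &req->base to the cryptd queue ... */

            /* worker side, once the real operation is done */
            req->base.complete = rctx->complete;
            rctx->complete(&req->base, err);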
D | rmd320.c |
     284  struct rmd320_ctx *rctx = shash_desc_ctx(desc);  in rmd320_init() local
     286  rctx->byte_count = 0;  in rmd320_init()
     288  rctx->state[0] = RMD_H0;  in rmd320_init()
     289  rctx->state[1] = RMD_H1;  in rmd320_init()
     290  rctx->state[2] = RMD_H2;  in rmd320_init()
     291  rctx->state[3] = RMD_H3;  in rmd320_init()
     292  rctx->state[4] = RMD_H4;  in rmd320_init()
     293  rctx->state[5] = RMD_H5;  in rmd320_init()
     294  rctx->state[6] = RMD_H6;  in rmd320_init()
     295  rctx->state[7] = RMD_H7;  in rmd320_init()
      [all …]
D | rmd160.c |
     265  struct rmd160_ctx *rctx = shash_desc_ctx(desc);  in rmd160_init() local
     267  rctx->byte_count = 0;  in rmd160_init()
     269  rctx->state[0] = RMD_H0;  in rmd160_init()
     270  rctx->state[1] = RMD_H1;  in rmd160_init()
     271  rctx->state[2] = RMD_H2;  in rmd160_init()
     272  rctx->state[3] = RMD_H3;  in rmd160_init()
     273  rctx->state[4] = RMD_H4;  in rmd160_init()
     275  memset(rctx->buffer, 0, sizeof(rctx->buffer));  in rmd160_init()
     283  struct rmd160_ctx *rctx = shash_desc_ctx(desc);  in rmd160_update() local
     284  const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);  in rmd160_update()
      [all …]
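The four RIPEMD variants all use the classic shash buffering scheme:
byte_count tracks the running total, its low six bits locate the fill
point in the 64-byte block buffer, and avail is what remains of the
current block. The hits stop right after the avail computation; the
usual continuation (an assumption here, since it is elided above) is:

            rctx->byte_count += len;

            /* Not enough for a full block yet: just buffer the input. */
            if (avail > len) {
                    memcpy((char *)rctx->buffer +
                           (sizeof(rctx->buffer) - avail), data, len);
                    return 0;
            }
            /* Otherwise top up the block, run the transform, and loop
             * over any remaining full blocks. */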
D | ccm.c |
     707  struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);  in crypto_rfc4309_crypt() local
     708  struct aead_request *subreq = &rctx->subreq;  in crypto_rfc4309_crypt()
     724  sg_init_table(rctx->src, 3);  in crypto_rfc4309_crypt()
     725  sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);  in crypto_rfc4309_crypt()
     726  sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);  in crypto_rfc4309_crypt()
     727  if (sg != rctx->src + 1)  in crypto_rfc4309_crypt()
     728  sg_chain(rctx->src, 2, sg);  in crypto_rfc4309_crypt()
     731  sg_init_table(rctx->dst, 3);  in crypto_rfc4309_crypt()
     732  sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);  in crypto_rfc4309_crypt()
     733  sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);  in crypto_rfc4309_crypt()
      [all …]
D | gcm.c |
     825  struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);  in crypto_rfc4106_crypt() local
     828  struct aead_request *subreq = &rctx->subreq;  in crypto_rfc4106_crypt()
     839  sg_init_table(rctx->src, 3);  in crypto_rfc4106_crypt()
     840  sg_set_buf(rctx->src, iv + 12, req->assoclen - 8);  in crypto_rfc4106_crypt()
     841  sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);  in crypto_rfc4106_crypt()
     842  if (sg != rctx->src + 1)  in crypto_rfc4106_crypt()
     843  sg_chain(rctx->src, 2, sg);  in crypto_rfc4106_crypt()
     846  sg_init_table(rctx->dst, 3);  in crypto_rfc4106_crypt()
     847  sg_set_buf(rctx->dst, iv + 12, req->assoclen - 8);  in crypto_rfc4106_crypt()
     848  sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);  in crypto_rfc4106_crypt()
      [all …]
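ccm.c and gcm.c perform the same scatterlist splice for their RFC
wrappers, differing only in where the 8 bytes folded into the IV sit
(offset 16 for rfc4309/CCM, 12 for rfc4106/GCM): a three-slot table is
built whose first slot points at a driver-side copy of the remaining
assoc data staged right after the reconstructed IV, scatterwalk_ffwd()
skips past the original assoclen, and a fresh chain head gets linked
in. From the gcm.c fragments:

            sg_init_table(rctx->src, 3);
            sg_set_buf(rctx->src, iv + 12, req->assoclen - 8);
            sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
            if (sg != rctx->src + 1)
                    sg_chain(rctx->src, 2, sg);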
D | ctr.c |
     284  struct crypto_rfc3686_req_ctx *rctx =  in crypto_rfc3686_crypt() local
     286  struct ablkcipher_request *subreq = &rctx->subreq;  in crypto_rfc3686_crypt()
     287  u8 *iv = rctx->iv;  in crypto_rfc3686_crypt()
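crypto_rfc3686_crypt() assembles the 16-byte CTR counter block in
rctx->iv before handing the subrequest to the inner ctr(aes): 4-byte
nonce, 8-byte per-request IV, then a 32-bit big-endian counter that
starts at 1. The hits cut off before the assembly, so the lines below
are the standard RFC 3686 construction rather than a quote:

            u8 *iv = rctx->iv;

            /* nonce (4) | explicit IV (8) | counter = 1 (big endian) */
            memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
            memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->info,
                   CTR_RFC3686_IV_SIZE);
            *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE +
                        CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);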
/linux-4.4.14/drivers/crypto/

D | sahara.c |
     555  struct sahara_aes_reqctx *rctx;  in sahara_aes_process() local
     569  rctx = ablkcipher_request_ctx(req);  in sahara_aes_process()
     571  rctx->mode &= FLAGS_MODE_MASK;  in sahara_aes_process()
     572  dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;  in sahara_aes_process()
     640  struct sahara_aes_reqctx *rctx = ablkcipher_request_ctx(req);  in sahara_aes_crypt() local
     653  rctx->mode = mode;  in sahara_aes_crypt()
     763  struct sahara_sha_reqctx *rctx)  in sahara_sha_init_hdr() argument
     767  hdr = rctx->mode;  in sahara_sha_init_hdr()
     769  if (rctx->first) {  in sahara_sha_init_hdr()
     776  if (rctx->last)  in sahara_sha_init_hdr()
      [all …]
D | n2_core.c |
     305  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  in n2_hash_async_init() local
     309  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  in n2_hash_async_init()
     310  rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;  in n2_hash_async_init()
     312  return crypto_ahash_init(&rctx->fallback_req);  in n2_hash_async_init()
     317  struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);  in n2_hash_async_update() local
     321  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);  in n2_hash_async_update()
     322  rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;  in n2_hash_async_update()
     323  rctx->fallback_req.nbytes = req->nbytes;  in n2_hash_async_update()
     324  rctx->fallback_req.src = req->src;  in n2_hash_async_update()
     326  return crypto_ahash_update(&rctx->fallback_req);  in n2_hash_async_update()
      [all …]
D | mxs-dcp.c |
     201  struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in mxs_dcp_run_aes() local
     220  if (rctx->enc)  in mxs_dcp_run_aes()
     227  if (rctx->ecb)  in mxs_dcp_run_aes()
     255  struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in mxs_dcp_aes_block_crypt() local
     280  if (!rctx->ecb) {  in mxs_dcp_aes_block_crypt()
     400  struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);  in mxs_dcp_aes_enqueue() local
     406  rctx->enc = enc;  in mxs_dcp_aes_enqueue()
     407  rctx->ecb = ecb;  in mxs_dcp_aes_enqueue()
     516  struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req);  in mxs_dcp_run_sha() local
     529  if (rctx->init)  in mxs_dcp_run_sha()
      [all …]
D | img-hash.c |
     484  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);  in img_hash_init() local
     487  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in img_hash_init()
     488  rctx->fallback_req.base.flags = req->base.flags  in img_hash_init()
     491  return crypto_ahash_init(&rctx->fallback_req);  in img_hash_init()
     547  struct img_hash_request_ctx *rctx = ahash_request_ctx(req);  in img_hash_update() local
     551  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);  in img_hash_update()
     552  rctx->fallback_req.base.flags = req->base.flags  in img_hash_update()
     554  rctx->fallback_req.nbytes = req->nbytes;  in img_hash_update()
     555  rctx->fallback_req.src = req->src;  in img_hash_update()
     557  return crypto_ahash_update(&rctx->fallback_req);  in img_hash_update()
      [all …]
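n2_core.c and img-hash.c keep a complete fallback ahash request inside
their request context and simply forward init/update to it, carrying
over only the MAY_SLEEP flag and the data pointers. Condensed from the
n2_core.c fragments:

            struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);

            ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
            rctx->fallback_req.base.flags =
                    req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
            rctx->fallback_req.nbytes = req->nbytes;
            rctx->fallback_req.src = req->src;

            return crypto_ahash_update(&rctx->fallback_req);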
D | hifn_795x.c |
    1164  struct hifn_context *ctx, struct hifn_request_context *rctx,  in hifn_setup_cmd_desc() argument
    1176  switch (rctx->op) {  in hifn_setup_cmd_desc()
    1193  if (rctx->op == ACRYPTO_OP_ENCRYPT || rctx->op == ACRYPTO_OP_DECRYPT) {  in hifn_setup_cmd_desc()
    1198  if (rctx->iv && rctx->mode != ACRYPTO_MODE_ECB)  in hifn_setup_cmd_desc()
    1201  switch (rctx->mode) {  in hifn_setup_cmd_desc()
    1218  switch (rctx->type) {  in hifn_setup_cmd_desc()
    1253  rctx->iv, rctx->ivsize, md);  in hifn_setup_cmd_desc()
    1369  struct hifn_context *ctx, struct hifn_request_context *rctx,  in hifn_setup_dma() argument
    1390  t = &rctx->walk.cache[0];  in hifn_setup_dma()
    1393  if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  in hifn_setup_dma()
      [all …]
D | omap-des.c |
     594  struct omap_des_reqctx *rctx;  in omap_des_handle_queue() local
     639  rctx = ablkcipher_request_ctx(req);  in omap_des_handle_queue()
     641  rctx->mode &= FLAGS_MODE_MASK;  in omap_des_handle_queue()
     642  dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;  in omap_des_handle_queue()
     704  struct omap_des_reqctx *rctx = ablkcipher_request_ctx(req);  in omap_des_crypt() local
     720  rctx->mode = mode;  in omap_des_crypt()
D | omap-aes.c |
     613  struct omap_aes_reqctx *rctx;  in omap_aes_handle_queue() local
     659  rctx = ablkcipher_request_ctx(req);  in omap_aes_handle_queue()
     661  rctx->mode &= FLAGS_MODE_MASK;  in omap_aes_handle_queue()
     662  dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;  in omap_aes_handle_queue()
     725  struct omap_aes_reqctx *rctx = ablkcipher_request_ctx(req);  in omap_aes_crypt() local
     736  rctx->mode = mode;  in omap_aes_crypt()
D | atmel-aes.c |
     567  struct atmel_aes_reqctx *rctx;  in atmel_aes_handle_queue() local
     600  rctx = ablkcipher_request_ctx(req);  in atmel_aes_handle_queue()
     602  rctx->mode &= AES_FLAGS_MODE_MASK;  in atmel_aes_handle_queue()
     603  dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode;  in atmel_aes_handle_queue()
     710  struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);  in atmel_aes_crypt() local
     749  rctx->mode = mode;  in atmel_aes_crypt()
D | atmel-tdes.c |
     593  struct atmel_tdes_reqctx *rctx;  in atmel_tdes_handle_queue() local
     626  rctx = ablkcipher_request_ctx(req);  in atmel_tdes_handle_queue()
     628  rctx->mode &= TDES_FLAGS_MODE_MASK;  in atmel_tdes_handle_queue()
     629  dd->flags = (dd->flags & ~TDES_FLAGS_MODE_MASK) | rctx->mode;  in atmel_tdes_handle_queue()
     675  struct atmel_tdes_reqctx *rctx = ablkcipher_request_ctx(req);  in atmel_tdes_crypt() local
     703  rctx->mode = mode;  in atmel_tdes_crypt()
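sahara, omap-des/aes and atmel-aes/tdes all split the work the same
way: the crypt() entry point only records the requested direction and
mode in the request context and queues the request; the engine's queue
handler later folds those bits into the device flags under the
device's mask. The recurring pair, as it appears across these drivers
(mask and flags names vary per driver):

            /* crypt() path: record the mode, defer the work */
            rctx->mode = mode;

            /* queue handler: merge the request's mode into device state */
            rctx->mode &= FLAGS_MODE_MASK;
            dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;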
/linux-4.4.14/kernel/events/

D | callchain.c |
     135  static struct perf_callchain_entry *get_callchain_entry(int *rctx)  in get_callchain_entry() argument
     140  *rctx = get_recursion_context(this_cpu_ptr(callchain_recursion));  in get_callchain_entry()
     141  if (*rctx == -1)  in get_callchain_entry()
     150  return &entries->cpu_entries[cpu][*rctx];  in get_callchain_entry()
     154  put_callchain_entry(int rctx)  in put_callchain_entry() argument
     156  put_recursion_context(this_cpu_ptr(callchain_recursion), rctx);  in put_callchain_entry()
     162  int rctx;  in perf_callchain() local
     171  entry = get_callchain_entry(&rctx);  in perf_callchain()
     172  if (rctx == -1)  in perf_callchain()
     206  put_callchain_entry(rctx);  in perf_callchain()
D | internal.h |
     190  int rctx;  in get_recursion_context() local
     193  rctx = 3;  in get_recursion_context()
     195  rctx = 2;  in get_recursion_context()
     197  rctx = 1;  in get_recursion_context()
     199  rctx = 0;  in get_recursion_context()
     201  if (recursion[rctx])  in get_recursion_context()
     204  recursion[rctx]++;  in get_recursion_context()
     207  return rctx;  in get_recursion_context()
     210  static inline void put_recursion_context(int *recursion, int rctx)  in put_recursion_context() argument
     213  recursion[rctx]--;  in put_recursion_context()
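internal.h implements the per-CPU recursion guard with one counter per
context level. The selector conditions fall between the listed lines;
in mainline kernels of this era they are the in_nmi()/in_irq()/
in_softirq() tests, so the reconstruction below fills them in on that
assumption:

    static inline int get_recursion_context(int *recursion)
    {
            int rctx;

            if (in_nmi())                   /* assumed: elided in the hits */
                    rctx = 3;
            else if (in_irq())
                    rctx = 2;
            else if (in_softirq())
                    rctx = 1;
            else
                    rctx = 0;

            if (recursion[rctx])
                    return -1;              /* already inside this level */

            recursion[rctx]++;
            barrier();

            return rctx;
    }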
D | core.c |
    4984  int rctx;  in perf_pending_event() local
    4986  rctx = perf_swevent_get_recursion_context();  in perf_pending_event()
    5002  if (rctx >= 0)  in perf_pending_event()
    5003  perf_swevent_put_recursion_context(rctx);  in perf_pending_event()
    6687  inline void perf_swevent_put_recursion_context(int rctx)  in perf_swevent_put_recursion_context() argument
    6691  put_recursion_context(swhash->recursion, rctx);  in perf_swevent_put_recursion_context()
    6707  int rctx;  in __perf_sw_event() local
    6710  rctx = perf_swevent_get_recursion_context();  in __perf_sw_event()
    6711  if (unlikely(rctx < 0))  in __perf_sw_event()
    6716  perf_swevent_put_recursion_context(rctx);  in __perf_sw_event()
      [all …]
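core.c shows the guard in use: each software-event entry point
brackets its work with get/put of the recursion context and silently
drops the event when it is already running on that level. Skeleton
from the __perf_sw_event fragments:

            int rctx;

            rctx = perf_swevent_get_recursion_context();
            if (unlikely(rctx < 0))
                    return;                 /* recursing: drop the event */

            /* ... emit the software event (elided in the hits) ... */

            perf_swevent_put_recursion_context(rctx);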
/linux-4.4.14/drivers/crypto/sunxi-ss/

D | sun4i-ss-cipher.c |
     323  struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq);  in sun4i_ss_cbc_aes_encrypt() local
     325  rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |  in sun4i_ss_cbc_aes_encrypt()
     334  struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq);  in sun4i_ss_cbc_aes_decrypt() local
     336  rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_DECRYPTION |  in sun4i_ss_cbc_aes_decrypt()
     346  struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq);  in sun4i_ss_ecb_aes_encrypt() local
     348  rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_ENCRYPTION |  in sun4i_ss_ecb_aes_encrypt()
     357  struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq);  in sun4i_ss_ecb_aes_decrypt() local
     359  rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_DECRYPTION |  in sun4i_ss_ecb_aes_decrypt()
     369  struct sun4i_cipher_req_ctx *rctx = ablkcipher_request_ctx(areq);  in sun4i_ss_cbc_des_encrypt() local
     371  rctx->mode = SS_OP_DES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |  in sun4i_ss_cbc_des_encrypt()
      [all …]
/linux-4.4.14/drivers/crypto/nx/

D | nx-aes-gcm.c |
     325  struct nx_gcm_rctx *rctx = aead_request_ctx(req);  in gcm_aes_nx_crypt() local
     335  desc.info = rctx->iv;  in gcm_aes_nx_crypt()
     433  struct nx_gcm_rctx *rctx = aead_request_ctx(req);  in gcm_aes_nx_encrypt() local
     434  char *iv = rctx->iv;  in gcm_aes_nx_encrypt()
     443  struct nx_gcm_rctx *rctx = aead_request_ctx(req);  in gcm_aes_nx_decrypt() local
     444  char *iv = rctx->iv;  in gcm_aes_nx_decrypt()
     455  struct nx_gcm_rctx *rctx = aead_request_ctx(req);  in gcm4106_aes_nx_encrypt() local
     456  char *iv = rctx->iv;  in gcm4106_aes_nx_encrypt()
     472  struct nx_gcm_rctx *rctx = aead_request_ctx(req);  in gcm4106_aes_nx_decrypt() local
     473  char *iv = rctx->iv;  in gcm4106_aes_nx_decrypt()
D | nx-aes-ccm.c |
     495  struct nx_gcm_rctx *rctx = aead_request_ctx(req);  in ccm4309_aes_nx_encrypt() local
     497  u8 *iv = rctx->iv;  in ccm4309_aes_nx_encrypt()
     525  struct nx_gcm_rctx *rctx = aead_request_ctx(req);  in ccm4309_aes_nx_decrypt() local
     527  u8 *iv = rctx->iv;  in ccm4309_aes_nx_decrypt()
/linux-4.4.14/include/trace/

D | perf.h |
      46  int rctx; \
      60  event_call->event.type, &__regs, &rctx); \
      70  perf_trace_buf_submit(entry, __entry_size, rctx, __addr, \
/linux-4.4.14/kernel/trace/

D | trace_syscalls.c |
     555  int rctx;  in perf_syscall_enter() local
     578  sys_data->enter_event->event.type, NULL, &rctx);  in perf_syscall_enter()
     585  perf_trace_buf_submit(rec, size, rctx, 0, 1, regs, head, NULL);  in perf_syscall_enter()
     629  int rctx;  in perf_syscall_exit() local
     651  sys_data->exit_event->event.type, NULL, &rctx);  in perf_syscall_exit()
     657  perf_trace_buf_submit(rec, size, rctx, 0, 1, regs, head, NULL);  in perf_syscall_exit()
D | trace_event_perf.c |
     308  int rctx;  in perf_ftrace_function_call() local
     321  entry = perf_trace_buf_prepare(ENTRY_SIZE, TRACE_FN, NULL, &rctx);  in perf_ftrace_function_call()
     327  perf_trace_buf_submit(entry, ENTRY_SIZE, rctx, 0,  in perf_ftrace_function_call()
D | trace_kprobe.c |
    1130  int rctx;  in kprobe_perf_func() local
    1144  entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx);  in kprobe_perf_func()
    1151  perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL);  in kprobe_perf_func()
    1165  int rctx;  in kretprobe_perf_func() local
    1179  entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx);  in kretprobe_perf_func()
    1186  perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL);  in kretprobe_perf_func()
D | trace_uprobe.c |
    1117  int rctx;  in __uprobe_perf_func() local
    1134  entry = perf_trace_buf_prepare(size, call->event.type, NULL, &rctx);  in __uprobe_perf_func()
    1155  perf_trace_buf_submit(entry, size, rctx, 0, 1, regs, head, NULL);  in __uprobe_perf_func()
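All of the trace users above follow the same rctx protocol:
perf_trace_buf_prepare() returns a per-context buffer and reports
which recursion slot it claimed through &rctx, the caller fills the
record, and perf_trace_buf_submit() both emits it and releases the
slot. The skeleton, as it recurs in the syscall/kprobe/uprobe hits
(record layout elided):

            int rctx;
            void *entry;

            entry = perf_trace_buf_prepare(size, call->event.type,
                                           NULL, &rctx);
            if (!entry)
                    return;

            /* ... fill the record ... */

            perf_trace_buf_submit(entry, size, rctx, 0, 1, regs,
                                  head, NULL);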
/linux-4.4.14/drivers/crypto/amcc/

D | crypto4xx_core.h |
     172  struct crypto4xx_ctx *rctx);
     173  extern void crypto4xx_free_sa_rctx(struct crypto4xx_ctx *rctx);
/linux-4.4.14/include/linux/

D | trace_events.h |
     621  perf_trace_buf_submit(void *raw_data, int size, int rctx, u64 addr,  in perf_trace_buf_submit() argument
     625  perf_tp_event(addr, count, raw_data, size, regs, head, rctx, task);  in perf_trace_buf_submit()
D | perf_event.h |
    1010  struct hlist_head *head, int rctx,
    1043  extern void perf_swevent_put_recursion_context(int rctx);
    1105  static inline void perf_swevent_put_recursion_context(int rctx) { }  in perf_swevent_put_recursion_context() argument