actx 2328 drivers/crypto/chelsio/chcr_algo.c struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
actx 2414 drivers/crypto/chelsio/chcr_algo.c actx->auth_mode, aeadctx->hmac_ctrl,
actx 2426 drivers/crypto/chelsio/chcr_algo.c memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
actx 2430 drivers/crypto/chelsio/chcr_algo.c actx->h_iopad, kctx_len - roundup(aeadctx->enckey_len, 16));
actx 3407 drivers/crypto/chelsio/chcr_algo.c struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
actx 3468 drivers/crypto/chelsio/chcr_algo.c get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
actx 3484 drivers/crypto/chelsio/chcr_algo.c o_ptr = actx->h_iopad + param.result_size + align;
actx 3504 drivers/crypto/chelsio/chcr_algo.c if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
actx 3517 drivers/crypto/chelsio/chcr_algo.c chcr_change_order(actx->h_iopad, param.result_size);
actx 3524 drivers/crypto/chelsio/chcr_algo.c actx->auth_mode = param.auth_mode;
actx 3542 drivers/crypto/chelsio/chcr_algo.c struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
actx 3587 drivers/crypto/chelsio/chcr_algo.c get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
actx 3594 drivers/crypto/chelsio/chcr_algo.c actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
actx 169 drivers/crypto/mxs-dcp.c static int mxs_dcp_start_dma(struct dcp_async_ctx *actx)
actx 172 drivers/crypto/mxs-dcp.c const int chan = actx->chan;
actx 175 drivers/crypto/mxs-dcp.c struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan];
actx 214 drivers/crypto/mxs-dcp.c static int mxs_dcp_run_aes(struct dcp_async_ctx *actx,
actx 218 drivers/crypto/mxs-dcp.c struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan];
actx 230 drivers/crypto/mxs-dcp.c if (actx->fill % AES_BLOCK_SIZE) {
actx 259 drivers/crypto/mxs-dcp.c desc->size = actx->fill;
actx 263 drivers/crypto/mxs-dcp.c ret = mxs_dcp_start_dma(actx);
actx 279 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_tfm_ctx(arq->tfm);
actx 302 drivers/crypto/mxs-dcp.c actx->fill = 0;
actx 305 drivers/crypto/mxs-dcp.c memcpy(key, actx->key, actx->key_len);
actx 326 drivers/crypto/mxs-dcp.c if (actx->fill + len > out_off)
actx 327 drivers/crypto/mxs-dcp.c clen = out_off - actx->fill;
actx 331 drivers/crypto/mxs-dcp.c memcpy(in_buf + actx->fill, src_buf, clen);
actx 334 drivers/crypto/mxs-dcp.c actx->fill += clen;
actx 340 drivers/crypto/mxs-dcp.c if (actx->fill == out_off || sg_is_last(src) ||
actx 342 drivers/crypto/mxs-dcp.c ret = mxs_dcp_run_aes(actx, req, init);
actx 348 drivers/crypto/mxs-dcp.c last_out_len = actx->fill;
actx 349 drivers/crypto/mxs-dcp.c while (dst && actx->fill) {
actx 355 drivers/crypto/mxs-dcp.c actx->fill);
actx 360 drivers/crypto/mxs-dcp.c actx->fill -= rem;
actx 452 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_tfm_ctx(arq->tfm);
actx 456 drivers/crypto/mxs-dcp.c if (unlikely(actx->key_len != AES_KEYSIZE_128))
actx 461 drivers/crypto/mxs-dcp.c actx->chan = DCP_CHAN_CRYPTO;
actx 463 drivers/crypto/mxs-dcp.c spin_lock(&sdcp->lock[actx->chan]);
actx 464 drivers/crypto/mxs-dcp.c ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base);
actx 465 drivers/crypto/mxs-dcp.c spin_unlock(&sdcp->lock[actx->chan]);
actx 467 drivers/crypto/mxs-dcp.c wake_up_process(sdcp->thread[actx->chan]);
actx 495 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_ablkcipher_ctx(tfm);
actx 503 drivers/crypto/mxs-dcp.c actx->key_len = len;
actx 505 drivers/crypto/mxs-dcp.c memcpy(actx->key, key, len);
actx 514 drivers/crypto/mxs-dcp.c crypto_sync_skcipher_clear_flags(actx->fallback, CRYPTO_TFM_REQ_MASK);
actx 515 drivers/crypto/mxs-dcp.c crypto_sync_skcipher_set_flags(actx->fallback,
actx 518 drivers/crypto/mxs-dcp.c ret = crypto_sync_skcipher_setkey(actx->fallback, key, len);
actx 523 drivers/crypto/mxs-dcp.c tfm->base.crt_flags |= crypto_sync_skcipher_get_flags(actx->fallback) &
actx 532 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_tfm_ctx(tfm);
actx 539 drivers/crypto/mxs-dcp.c actx->fallback = blk;
actx 546 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_tfm_ctx(tfm);
actx 548 drivers/crypto/mxs-dcp.c crypto_free_sync_skcipher(actx->fallback);
actx 560 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm);
actx 562 drivers/crypto/mxs-dcp.c struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan];
actx 575 drivers/crypto/mxs-dcp.c desc->control1 = actx->alg;
actx 579 drivers/crypto/mxs-dcp.c desc->size = actx->fill;
actx 589 drivers/crypto/mxs-dcp.c (actx->alg == MXS_DCP_CONTROL1_HASH_SELECT_SHA1) ?
actx 604 drivers/crypto/mxs-dcp.c ret = mxs_dcp_start_dma(actx);
actx 622 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm);
actx 642 drivers/crypto/mxs-dcp.c if (actx->fill + len > DCP_BUF_SZ)
actx 643 drivers/crypto/mxs-dcp.c clen = DCP_BUF_SZ - actx->fill;
actx 647 drivers/crypto/mxs-dcp.c scatterwalk_map_and_copy(in_buf + actx->fill, src, oft, clen,
actx 652 drivers/crypto/mxs-dcp.c actx->fill += clen;
actx 658 drivers/crypto/mxs-dcp.c if (len && actx->fill == DCP_BUF_SZ) {
actx 662 drivers/crypto/mxs-dcp.c actx->fill = 0;
actx 678 drivers/crypto/mxs-dcp.c actx->fill = 0;
actx 727 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm);
actx 735 drivers/crypto/mxs-dcp.c memset(actx, 0, sizeof(*actx));
actx 738 drivers/crypto/mxs-dcp.c actx->alg = MXS_DCP_CONTROL1_HASH_SELECT_SHA1;
actx 740 drivers/crypto/mxs-dcp.c actx->alg = MXS_DCP_CONTROL1_HASH_SELECT_SHA256;
actx 742 drivers/crypto/mxs-dcp.c actx->fill = 0;
actx 743 drivers/crypto/mxs-dcp.c actx->hot = 0;
actx 744 drivers/crypto/mxs-dcp.c actx->chan = DCP_CHAN_HASH_SHA;
actx 746 drivers/crypto/mxs-dcp.c mutex_init(&actx->mutex);
actx 757 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm);
actx 768 drivers/crypto/mxs-dcp.c mutex_lock(&actx->mutex);
actx 772 drivers/crypto/mxs-dcp.c if (!actx->hot) {
actx 773 drivers/crypto/mxs-dcp.c actx->hot = 1;
actx 777 drivers/crypto/mxs-dcp.c spin_lock(&sdcp->lock[actx->chan]);
actx 778 drivers/crypto/mxs-dcp.c ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base);
actx 779 drivers/crypto/mxs-dcp.c spin_unlock(&sdcp->lock[actx->chan]);
actx 781 drivers/crypto/mxs-dcp.c wake_up_process(sdcp->thread[actx->chan]);
actx 782 drivers/crypto/mxs-dcp.c mutex_unlock(&actx->mutex);
actx 819 drivers/crypto/mxs-dcp.c struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm);
actx 823 drivers/crypto/mxs-dcp.c memset(actx, 0, sizeof(struct dcp_async_ctx));
actx 825 drivers/crypto/mxs-dcp.c memcpy(actx, &export->async_ctx, sizeof(struct dcp_async_ctx));
actx 47 fs/ntfs/index.c if (ictx->actx)
actx 48 fs/ntfs/index.c ntfs_attr_put_search_ctx(ictx->actx);
actx 118 fs/ntfs/index.c ntfs_attr_search_ctx *actx;
actx 144 fs/ntfs/index.c actx = ntfs_attr_get_search_ctx(base_ni, m);
actx 145 fs/ntfs/index.c if (unlikely(!actx)) {
actx 151 fs/ntfs/index.c CASE_SENSITIVE, 0, NULL, 0, actx);
actx 161 fs/ntfs/index.c ir = (INDEX_ROOT*)((u8*)actx->attr +
actx 162 fs/ntfs/index.c le16_to_cpu(actx->attr->data.resident.value_offset));
actx 173 fs/ntfs/index.c if ((u8*)ie < (u8*)actx->mrec || (u8*)ie +
actx 197 fs/ntfs/index.c ictx->actx = actx;
actx 254 fs/ntfs/index.c ntfs_attr_put_search_ctx(actx);
actx 257 fs/ntfs/index.c actx = NULL;
actx 366 fs/ntfs/index.c ictx->actx = NULL;
actx 432 fs/ntfs/index.c if (actx)
actx 433 fs/ntfs/index.c ntfs_attr_put_search_ctx(actx);
actx 68 fs/ntfs/index.h ntfs_attr_search_ctx *actx;
actx 100 fs/ntfs/index.h flush_dcache_mft_record_page(ictx->actx->ntfs_ino);
actx 126 fs/ntfs/index.h mark_mft_record_dirty(ictx->actx->ntfs_ino);