cryptd_tfm         45 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm;
cryptd_tfm        209 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
cryptd_tfm        211 arch/arm/crypto/ghash-ce-glue.c 	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
cryptd_tfm        222 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
cryptd_tfm        225 arch/arm/crypto/ghash-ce-glue.c 	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
cryptd_tfm        227 arch/arm/crypto/ghash-ce-glue.c 		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
cryptd_tfm        240 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
cryptd_tfm        243 arch/arm/crypto/ghash-ce-glue.c 	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
cryptd_tfm        245 arch/arm/crypto/ghash-ce-glue.c 		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
cryptd_tfm        258 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
cryptd_tfm        261 arch/arm/crypto/ghash-ce-glue.c 	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
cryptd_tfm        263 arch/arm/crypto/ghash-ce-glue.c 		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
cryptd_tfm        267 arch/arm/crypto/ghash-ce-glue.c 		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
cryptd_tfm        281 arch/arm/crypto/ghash-ce-glue.c 	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);
cryptd_tfm        298 arch/arm/crypto/ghash-ce-glue.c 	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
cryptd_tfm        313 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm;
cryptd_tfm        316 arch/arm/crypto/ghash-ce-glue.c 	cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0);
cryptd_tfm        317 arch/arm/crypto/ghash-ce-glue.c 	if (IS_ERR(cryptd_tfm))
cryptd_tfm        318 arch/arm/crypto/ghash-ce-glue.c 		return PTR_ERR(cryptd_tfm);
cryptd_tfm        319 arch/arm/crypto/ghash-ce-glue.c 	ctx->cryptd_tfm = cryptd_tfm;
cryptd_tfm        322 arch/arm/crypto/ghash-ce-glue.c 				 crypto_ahash_reqsize(&cryptd_tfm->base));
cryptd_tfm        331 arch/arm/crypto/ghash-ce-glue.c 	cryptd_free_ahash(ctx->cryptd_tfm);
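
The ghash-ce-glue.c hits at 222-227 (and the matching ones at 240-245 and 258-263) are the per-request dispatch of the async GHASH wrapper: if NEON cannot be used in the current context, or the request must stay ordered behind work already sitting on the cryptd queue, the request is re-targeted at &cryptd_tfm->base; otherwise the child shash handles it synchronously. A minimal sketch of the update path around those lines; the struct ghash_async_ctx name and the shash fallback helpers are taken from the mainline file and should be treated as assumptions, not part of this index:

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);	/* assumed ctx type */
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		/* NEON unusable here, or ordering against already-queued
		 * requests matters: bounce the request to the cryptd queue. */
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		/* Fast path: drive the child shash synchronously. */
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

		return shash_ahash_update(req, desc);
	}
}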
cryptd_tfm         32 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm;
cryptd_tfm        167 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
cryptd_tfm        169 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
cryptd_tfm        180 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
cryptd_tfm        183 arch/x86/crypto/ghash-clmulni-intel_glue.c 	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
cryptd_tfm        185 arch/x86/crypto/ghash-clmulni-intel_glue.c 		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
cryptd_tfm        198 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
cryptd_tfm        201 arch/x86/crypto/ghash-clmulni-intel_glue.c 	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
cryptd_tfm        203 arch/x86/crypto/ghash-clmulni-intel_glue.c 		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
cryptd_tfm        239 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
cryptd_tfm        242 arch/x86/crypto/ghash-clmulni-intel_glue.c 	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
cryptd_tfm        244 arch/x86/crypto/ghash-clmulni-intel_glue.c 		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
cryptd_tfm        248 arch/x86/crypto/ghash-clmulni-intel_glue.c 		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
cryptd_tfm        259 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
cryptd_tfm        274 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm;
cryptd_tfm        277 arch/x86/crypto/ghash-clmulni-intel_glue.c 	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
cryptd_tfm        280 arch/x86/crypto/ghash-clmulni-intel_glue.c 	if (IS_ERR(cryptd_tfm))
cryptd_tfm        281 arch/x86/crypto/ghash-clmulni-intel_glue.c 		return PTR_ERR(cryptd_tfm);
cryptd_tfm        282 arch/x86/crypto/ghash-clmulni-intel_glue.c 	ctx->cryptd_tfm = cryptd_tfm;
cryptd_tfm        285 arch/x86/crypto/ghash-clmulni-intel_glue.c 				 crypto_ahash_reqsize(&cryptd_tfm->base));
cryptd_tfm        294 arch/x86/crypto/ghash-clmulni-intel_glue.c 	cryptd_free_ahash(ctx->cryptd_tfm);
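
The 274-294 hits in ghash-clmulni-intel_glue.c cover the transform lifecycle: allocate the internal "__ghash-pclmulqdqni" instance through cryptd at init, reserve request-context space for a nested cryptd request, and release the handle at exit. A sketch of that pair, with the context type and the CRYPTO_ALG_INTERNAL flag arguments filled in from the mainline file as assumptions:

static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);	/* assumed ctx type */

	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	/* Leave room for one nested ahash_request aimed at the cryptd queue. */
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}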
cryptd_tfm         47 crypto/simd.c  	struct cryptd_skcipher *cryptd_tfm;
cryptd_tfm         54 crypto/simd.c  	struct crypto_skcipher *child = &ctx->cryptd_tfm->base;
cryptd_tfm         77 crypto/simd.c  	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
cryptd_tfm         78 crypto/simd.c  		child = &ctx->cryptd_tfm->base;
cryptd_tfm         80 crypto/simd.c  		child = cryptd_skcipher_child(ctx->cryptd_tfm);
cryptd_tfm         98 crypto/simd.c  	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
cryptd_tfm         99 crypto/simd.c  		child = &ctx->cryptd_tfm->base;
cryptd_tfm        101 crypto/simd.c  		child = cryptd_skcipher_child(ctx->cryptd_tfm);
cryptd_tfm        112 crypto/simd.c  	cryptd_free_skcipher(ctx->cryptd_tfm);
cryptd_tfm        118 crypto/simd.c  	struct cryptd_skcipher *cryptd_tfm;
cryptd_tfm        126 crypto/simd.c  	cryptd_tfm = cryptd_alloc_skcipher(salg->ialg_name,
cryptd_tfm        129 crypto/simd.c  	if (IS_ERR(cryptd_tfm))
cryptd_tfm        130 crypto/simd.c  		return PTR_ERR(cryptd_tfm);
cryptd_tfm        132 crypto/simd.c  	ctx->cryptd_tfm = cryptd_tfm;
cryptd_tfm        134 crypto/simd.c  	reqsize = crypto_skcipher_reqsize(cryptd_skcipher_child(cryptd_tfm));
cryptd_tfm        135 crypto/simd.c  	reqsize = max(reqsize, crypto_skcipher_reqsize(&cryptd_tfm->base));
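
The crypto/simd.c skcipher hits at 77-101 apply the same two-way dispatch to skciphers, and 126-135 show why the wrapper sizes its request context with max(): the subrequest must fit whichever backend ends up running it. A sketch of the encrypt path reconstructed around lines 77-80 (simd_skcipher_ctx is the assumed context type from the mainline file):

static int simd_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);	/* assumed */
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;		/* defer to cryptd */
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);	/* run inline */

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_encrypt(subreq);
}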
cryptd_tfm        290 crypto/simd.c  	struct cryptd_aead *cryptd_tfm;
cryptd_tfm        297 crypto/simd.c  	struct crypto_aead *child = &ctx->cryptd_tfm->base;
cryptd_tfm        312 crypto/simd.c  	struct crypto_aead *child = &ctx->cryptd_tfm->base;
cryptd_tfm        328 crypto/simd.c  	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
cryptd_tfm        329 crypto/simd.c  		child = &ctx->cryptd_tfm->base;
cryptd_tfm        331 crypto/simd.c  		child = cryptd_aead_child(ctx->cryptd_tfm);
cryptd_tfm        349 crypto/simd.c  	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
cryptd_tfm        350 crypto/simd.c  		child = &ctx->cryptd_tfm->base;
cryptd_tfm        352 crypto/simd.c  		child = cryptd_aead_child(ctx->cryptd_tfm);
cryptd_tfm        363 crypto/simd.c  	cryptd_free_aead(ctx->cryptd_tfm);
cryptd_tfm        369 crypto/simd.c  	struct cryptd_aead *cryptd_tfm;
cryptd_tfm        377 crypto/simd.c  	cryptd_tfm = cryptd_alloc_aead(salg->ialg_name, CRYPTO_ALG_INTERNAL,
cryptd_tfm        379 crypto/simd.c  	if (IS_ERR(cryptd_tfm))
cryptd_tfm        380 crypto/simd.c  		return PTR_ERR(cryptd_tfm);
cryptd_tfm        382 crypto/simd.c  	ctx->cryptd_tfm = cryptd_tfm;
cryptd_tfm        384 crypto/simd.c  	reqsize = crypto_aead_reqsize(cryptd_aead_child(cryptd_tfm));
cryptd_tfm        385 crypto/simd.c  	reqsize = max(reqsize, crypto_aead_reqsize(&cryptd_tfm->base));
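
The AEAD hits at 369-385 mirror the skcipher init: allocate the internal cryptd-backed algorithm named by salg->ialg_name, then size the request context to the larger of the child's and cryptd's needs, since either may service a given request. A sketch of that init, with the container_of() bookkeeping and the trailing aead_request reservation assumed from the mainline file:

static int simd_aead_init(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);	/* assumed ctx type */
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct simd_aead_alg *salg = container_of(alg, struct simd_aead_alg, alg);
	struct cryptd_aead *cryptd_tfm;
	unsigned int reqsize;

	cryptd_tfm = cryptd_alloc_aead(salg->ialg_name, CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

	/* The subrequest may target either the child or cryptd itself, so
	 * reserve space for the larger of the two, plus our own header. */
	reqsize = crypto_aead_reqsize(cryptd_aead_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_aead_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct aead_request);

	crypto_aead_set_reqsize(tfm, reqsize);

	return 0;
}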