aes_algs  598 arch/arm/crypto/aes-ce-glue.c      static struct skcipher_alg aes_algs[] = { {
aes_algs  692 arch/arm/crypto/aes-ce-glue.c      static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
aes_algs  701 arch/arm/crypto/aes-ce-glue.c      crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs  713 arch/arm/crypto/aes-ce-glue.c      err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs  717 arch/arm/crypto/aes-ce-glue.c      for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
aes_algs  718 arch/arm/crypto/aes-ce-glue.c      if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
aes_algs  721 arch/arm/crypto/aes-ce-glue.c      algname = aes_algs[i].base.cra_name + 2;
aes_algs  722 arch/arm/crypto/aes-ce-glue.c      drvname = aes_algs[i].base.cra_driver_name + 2;
aes_algs  723 arch/arm/crypto/aes-ce-glue.c      basename = aes_algs[i].base.cra_driver_name;
aes_algs  411 arch/arm/crypto/aes-neonbs-glue.c  static struct skcipher_alg aes_algs[] = { {
aes_algs  497 arch/arm/crypto/aes-neonbs-glue.c  static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
aes_algs  507 arch/arm/crypto/aes-neonbs-glue.c  crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs  522 arch/arm/crypto/aes-neonbs-glue.c  err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs  526 arch/arm/crypto/aes-neonbs-glue.c  for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
aes_algs  527 arch/arm/crypto/aes-neonbs-glue.c  if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
aes_algs  530 arch/arm/crypto/aes-neonbs-glue.c  algname = aes_algs[i].base.cra_name + 2;
aes_algs  531 arch/arm/crypto/aes-neonbs-glue.c  drvname = aes_algs[i].base.cra_driver_name + 2;
aes_algs  532 arch/arm/crypto/aes-neonbs-glue.c  basename = aes_algs[i].base.cra_driver_name;
aes_algs  670 arch/arm64/crypto/aes-glue.c       static struct skcipher_alg aes_algs[] = { {
aes_algs 1012 arch/arm64/crypto/aes-glue.c       static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
aes_algs 1023 arch/arm64/crypto/aes-glue.c       crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs 1035 arch/arm64/crypto/aes-glue.c       err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs 1043 arch/arm64/crypto/aes-glue.c       for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
aes_algs 1044 arch/arm64/crypto/aes-glue.c       if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
aes_algs 1047 arch/arm64/crypto/aes-glue.c       algname = aes_algs[i].base.cra_name + 2;
aes_algs 1048 arch/arm64/crypto/aes-glue.c       drvname = aes_algs[i].base.cra_driver_name + 2;
aes_algs 1049 arch/arm64/crypto/aes-glue.c       basename = aes_algs[i].base.cra_driver_name;
aes_algs 1064 arch/arm64/crypto/aes-glue.c       crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs  432 arch/arm64/crypto/aes-neonbs-glue.c static struct skcipher_alg aes_algs[] = { {
aes_algs  514 arch/arm64/crypto/aes-neonbs-glue.c static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
aes_algs  524 arch/arm64/crypto/aes-neonbs-glue.c crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs  539 arch/arm64/crypto/aes-neonbs-glue.c err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs  543 arch/arm64/crypto/aes-neonbs-glue.c for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
aes_algs  544 arch/arm64/crypto/aes-neonbs-glue.c if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
aes_algs  547 arch/arm64/crypto/aes-neonbs-glue.c algname = aes_algs[i].base.cra_name + 2;
aes_algs  548 arch/arm64/crypto/aes-neonbs-glue.c drvname = aes_algs[i].base.cra_driver_name + 2;
aes_algs  549 arch/arm64/crypto/aes-neonbs-glue.c basename = aes_algs[i].base.cra_driver_name;
aes_algs  393 arch/powerpc/crypto/aes-spe-glue.c static struct crypto_alg aes_algs[] = { {
aes_algs  495 arch/powerpc/crypto/aes-spe-glue.c return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs  500 arch/powerpc/crypto/aes-spe-glue.c crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
aes_algs 1262 drivers/crypto/atmel-aes.c         static struct crypto_alg aes_algs[] = {
aes_algs 2475 drivers/crypto/atmel-aes.c         for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
aes_algs 2476 drivers/crypto/atmel-aes.c         crypto_unregister_alg(&aes_algs[i]);
aes_algs 2483 drivers/crypto/atmel-aes.c         for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
aes_algs 2484 drivers/crypto/atmel-aes.c         err = crypto_register_alg(&aes_algs[i]);
aes_algs 2531 drivers/crypto/atmel-aes.c         i = ARRAY_SIZE(aes_algs);
aes_algs 2534 drivers/crypto/atmel-aes.c         crypto_unregister_alg(&aes_algs[j]);
aes_algs  266 drivers/crypto/ccp/ccp-crypto-aes.c static struct ccp_aes_def aes_algs[] = {
aes_algs  365 drivers/crypto/ccp/ccp-crypto-aes.c for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
aes_algs  366 drivers/crypto/ccp/ccp-crypto-aes.c if (aes_algs[i].version > ccpversion)
aes_algs  368 drivers/crypto/ccp/ccp-crypto-aes.c ret = ccp_register_aes_alg(head, &aes_algs[i]);
aes_algs  750 drivers/crypto/mediatek/mtk-aes.c  static struct crypto_alg aes_algs[] = {
aes_algs 1261 drivers/crypto/mediatek/mtk-aes.c  for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
aes_algs 1262 drivers/crypto/mediatek/mtk-aes.c  crypto_unregister_alg(&aes_algs[i]);
aes_algs 1269 drivers/crypto/mediatek/mtk-aes.c  for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
aes_algs 1270 drivers/crypto/mediatek/mtk-aes.c  err = crypto_register_alg(&aes_algs[i]);
aes_algs 1283 drivers/crypto/mediatek/mtk-aes.c  crypto_unregister_alg(&aes_algs[i]);
aes_algs 1192 drivers/crypto/sahara.c            static struct crypto_alg aes_algs[] = {
aes_algs 1320 drivers/crypto/sahara.c            for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
aes_algs 1321 drivers/crypto/sahara.c            err = crypto_register_alg(&aes_algs[i]);
aes_algs 1351 drivers/crypto/sahara.c            crypto_unregister_alg(&aes_algs[j]);
aes_algs 1360 drivers/crypto/sahara.c            for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
aes_algs 1361 drivers/crypto/sahara.c            crypto_unregister_alg(&aes_algs[i]);
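
The hits above all follow the same pattern: a driver defines a static aes_algs[] table and registers or unregisters every entry with ARRAY_SIZE(aes_algs), either in bulk (crypto_register_skciphers / crypto_register_algs) or one element at a time in a loop. The sketch below is not taken from any of the listed files; it is a minimal, hedged illustration of the skcipher variant of that pattern. The driver name "cbc-aes-example" and the handlers my_aes_setkey, my_cbc_encrypt and my_cbc_decrypt are hypothetical placeholders.

/* Minimal sketch of the aes_algs[] registration pattern (assumed example). */
#include <linux/module.h>
#include <linux/errno.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

/* Hypothetical handlers; a real driver would program hardware or call
 * an asm implementation here. */
static int my_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	return 0;
}

static int my_cbc_encrypt(struct skcipher_request *req)
{
	return -ENOSYS;
}

static int my_cbc_decrypt(struct skcipher_request *req)
{
	return -ENOSYS;
}

/* One-entry table; the kernel drivers above list several modes here. */
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "cbc-aes-example",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= my_aes_setkey,
	.encrypt		= my_cbc_encrypt,
	.decrypt		= my_cbc_decrypt,
} };

static int __init aes_example_init(void)
{
	/* Register the whole table in one call, as the glue files above do. */
	return crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_example_exit(void)
{
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_example_init);
module_exit(aes_example_exit);
MODULE_LICENSE("GPL");

The older drivers in the listing (atmel-aes.c, mtk-aes.c, sahara.c) use struct crypto_alg with crypto_register_alg()/crypto_unregister_alg() per entry instead of the bulk skcipher helpers, but the table-plus-ARRAY_SIZE structure is the same.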