CAST5_PARALLEL_BLOCKS   38 arch/x86/crypto/cast5_avx_glue.c 	return glue_fpu_begin(CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS,
CAST5_PARALLEL_BLOCKS   67 arch/x86/crypto/cast5_avx_glue.c 		if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
CAST5_PARALLEL_BLOCKS   72 arch/x86/crypto/cast5_avx_glue.c 				wsrc += bsize * CAST5_PARALLEL_BLOCKS;
CAST5_PARALLEL_BLOCKS   73 arch/x86/crypto/cast5_avx_glue.c 				wdst += bsize * CAST5_PARALLEL_BLOCKS;
CAST5_PARALLEL_BLOCKS   74 arch/x86/crypto/cast5_avx_glue.c 				nbytes -= bsize * CAST5_PARALLEL_BLOCKS;
CAST5_PARALLEL_BLOCKS   75 arch/x86/crypto/cast5_avx_glue.c 			} while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS);
CAST5_PARALLEL_BLOCKS  158 arch/x86/crypto/cast5_avx_glue.c 	if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
CAST5_PARALLEL_BLOCKS  160 arch/x86/crypto/cast5_avx_glue.c 			nbytes -= bsize * (CAST5_PARALLEL_BLOCKS - 1);
CAST5_PARALLEL_BLOCKS  161 arch/x86/crypto/cast5_avx_glue.c 			src -= CAST5_PARALLEL_BLOCKS - 1;
CAST5_PARALLEL_BLOCKS  162 arch/x86/crypto/cast5_avx_glue.c 			dst -= CAST5_PARALLEL_BLOCKS - 1;
CAST5_PARALLEL_BLOCKS  173 arch/x86/crypto/cast5_avx_glue.c 		} while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS);
CAST5_PARALLEL_BLOCKS  240 arch/x86/crypto/cast5_avx_glue.c 	if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
CAST5_PARALLEL_BLOCKS  245 arch/x86/crypto/cast5_avx_glue.c 			src += CAST5_PARALLEL_BLOCKS;
CAST5_PARALLEL_BLOCKS  246 arch/x86/crypto/cast5_avx_glue.c 			dst += CAST5_PARALLEL_BLOCKS;
CAST5_PARALLEL_BLOCKS  247 arch/x86/crypto/cast5_avx_glue.c 			nbytes -= bsize * CAST5_PARALLEL_BLOCKS;
CAST5_PARALLEL_BLOCKS  248 arch/x86/crypto/cast5_avx_glue.c 		} while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS);
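The hits above all follow one pattern in the CAST5 AVX glue code: while at least CAST5_PARALLEL_BLOCKS (16) cipher blocks of input remain, whole batches are handed to the 16-way AVX routine in a do/while loop, and only the leftover tail is processed one block at a time (the CBC-decrypt hits step src/dst backwards by CAST5_PARALLEL_BLOCKS - 1 u64 words for the same reason). Below is a minimal user-space sketch of that batching loop, assuming stand-in helpers cast5_16way() and cast5_1way() in place of the kernel's real assembly entry points; it only illustrates the pointer/byte-count bookkeeping, not the cipher itself.

/*
 * Sketch of the multi-block batching pattern from
 * arch/x86/crypto/cast5_avx_glue.c.  cast5_16way() and cast5_1way()
 * are hypothetical stand-ins, not the kernel's functions.
 */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define CAST5_BLOCK_SIZE      8   /* CAST5 is a 64-bit block cipher */
#define CAST5_PARALLEL_BLOCKS 16  /* blocks handled per wide call   */

/* Stand-in "wide" helper: pretends to process 16 blocks per call. */
static void cast5_16way(uint8_t *dst, const uint8_t *src)
{
	memmove(dst, src, CAST5_BLOCK_SIZE * CAST5_PARALLEL_BLOCKS);
}

/* Stand-in scalar helper: pretends to process a single block. */
static void cast5_1way(uint8_t *dst, const uint8_t *src)
{
	memmove(dst, src, CAST5_BLOCK_SIZE);
}

/* Returns the number of leftover bytes that do not fill a block. */
static size_t ecb_crypt_sketch(uint8_t *dst, const uint8_t *src, size_t nbytes)
{
	const size_t bsize = CAST5_BLOCK_SIZE;

	/* Process multi-block batches first, as in the loops above. */
	if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
		do {
			cast5_16way(dst, src);
			src    += bsize * CAST5_PARALLEL_BLOCKS;
			dst    += bsize * CAST5_PARALLEL_BLOCKS;
			nbytes -= bsize * CAST5_PARALLEL_BLOCKS;
		} while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS);
	}

	/* Handle the remaining whole blocks one at a time. */
	while (nbytes >= bsize) {
		cast5_1way(dst, src);
		src    += bsize;
		dst    += bsize;
		nbytes -= bsize;
	}

	return nbytes;
}

The point of the shape is that the FPU/AVX state (set up via glue_fpu_begin() in the hit at file line 38) is only worth enabling when a full 16-block batch is available; anything smaller falls through to the scalar path.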