fpu_enabled  408  arch/mips/include/asm/kvm_host.h  u8 fpu_enabled;
fpu_enabled  750  arch/mips/include/asm/kvm_host.h  vcpu->fpu_enabled;
fpu_enabled  883  arch/mips/kvm/mips.c  vcpu->arch.fpu_enabled = true;
fpu_enabled   35  arch/x86/crypto/cast5_avx_glue.c  static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk,
fpu_enabled   39  arch/x86/crypto/cast5_avx_glue.c  walk, fpu_enabled, nbytes);
fpu_enabled   42  arch/x86/crypto/cast5_avx_glue.c  static inline void cast5_fpu_end(bool fpu_enabled)
fpu_enabled   44  arch/x86/crypto/cast5_avx_glue.c  return glue_fpu_end(fpu_enabled);
fpu_enabled   49  arch/x86/crypto/cast5_avx_glue.c  bool fpu_enabled = false;
fpu_enabled   64  arch/x86/crypto/cast5_avx_glue.c  fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
fpu_enabled   96  arch/x86/crypto/cast5_avx_glue.c  cast5_fpu_end(fpu_enabled);
fpu_enabled  200  arch/x86/crypto/cast5_avx_glue.c  bool fpu_enabled = false;
fpu_enabled  208  arch/x86/crypto/cast5_avx_glue.c  fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
fpu_enabled  213  arch/x86/crypto/cast5_avx_glue.c  cast5_fpu_end(fpu_enabled);
fpu_enabled  280  arch/x86/crypto/cast5_avx_glue.c  bool fpu_enabled = false;
fpu_enabled  288  arch/x86/crypto/cast5_avx_glue.c  fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
fpu_enabled  293  arch/x86/crypto/cast5_avx_glue.c  cast5_fpu_end(fpu_enabled);
fpu_enabled   27  arch/x86/crypto/glue_helper.c  bool fpu_enabled = false;
fpu_enabled   39  arch/x86/crypto/glue_helper.c  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
fpu_enabled   40  arch/x86/crypto/glue_helper.c  &walk, fpu_enabled, nbytes);
fpu_enabled   61  arch/x86/crypto/glue_helper.c  glue_fpu_end(fpu_enabled);
fpu_enabled  104  arch/x86/crypto/glue_helper.c  bool fpu_enabled = false;
fpu_enabled  117  arch/x86/crypto/glue_helper.c  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
fpu_enabled  118  arch/x86/crypto/glue_helper.c  &walk, fpu_enabled, nbytes);
fpu_enabled  153  arch/x86/crypto/glue_helper.c  glue_fpu_end(fpu_enabled);
fpu_enabled  164  arch/x86/crypto/glue_helper.c  bool fpu_enabled = false;
fpu_enabled  177  arch/x86/crypto/glue_helper.c  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
fpu_enabled  178  arch/x86/crypto/glue_helper.c  &walk, fpu_enabled, nbytes);
fpu_enabled  205  arch/x86/crypto/glue_helper.c  glue_fpu_end(fpu_enabled);
fpu_enabled  269  arch/x86/crypto/glue_helper.c  bool fpu_enabled = false;
fpu_enabled  296  arch/x86/crypto/glue_helper.c  fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
fpu_enabled  297  arch/x86/crypto/glue_helper.c  &walk, fpu_enabled,
fpu_enabled  351  arch/x86/crypto/glue_helper.c  glue_fpu_end(fpu_enabled);
fpu_enabled   49  arch/x86/include/asm/crypto/glue_helper.h  bool fpu_enabled, unsigned int nbytes)
fpu_enabled   54  arch/x86/include/asm/crypto/glue_helper.h  if (fpu_enabled)
fpu_enabled   71  arch/x86/include/asm/crypto/glue_helper.h  static inline void glue_fpu_end(bool fpu_enabled)
fpu_enabled   73  arch/x86/include/asm/crypto/glue_helper.h  if (fpu_enabled)
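The MIPS KVM hits are a separate per-vCPU flag; the x86 crypto call sites above all share one lazy-FPU pattern: fpu_enabled starts out false, glue_fpu_begin() (or the cast5_fpu_begin() wrapper) enables the FPU only once the walk has enough data to justify it and returns the updated state, and a single glue_fpu_end() after the loop releases it if it was ever enabled. The fragment below is a minimal sketch assembled from the glue_helper.c call sites listed above, not code from the tree; example_ecb_encrypt() and the per-chunk processing comment are hypothetical placeholders.

    /*
     * Illustrative sketch only, modeled on the glue_helper.c call sites
     * in the listing above. example_ecb_encrypt() is a hypothetical name.
     */
    #include <crypto/internal/skcipher.h>
    #include <asm/crypto/glue_helper.h>

    static int example_ecb_encrypt(const struct common_glue_ctx *gctx,
                                   struct skcipher_request *req)
    {
            const unsigned int bsize = 128 / 8;     /* cipher block size */
            struct skcipher_walk walk;
            bool fpu_enabled = false;               /* FPU not yet claimed */
            unsigned int nbytes;
            int err;

            err = skcipher_walk_virt(&walk, req, false);

            while ((nbytes = walk.nbytes)) {
                    /*
                     * Lazily enable the FPU: glue_fpu_begin() keeps it off
                     * for chunks below gctx->fpu_blocks_limit blocks and
                     * returns the (possibly updated) enabled state.
                     */
                    fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
                                                 &walk, fpu_enabled, nbytes);

                    /* ... encrypt full blocks from walk.src to walk.dst ... */

                    err = skcipher_walk_done(&walk, nbytes % bsize);
            }

            /* Release the FPU only if glue_fpu_begin() actually enabled it. */
            glue_fpu_end(fpu_enabled);
            return err;
    }

The point of threading fpu_enabled through every iteration is that kernel_fpu_begin()/kernel_fpu_end() are comparatively expensive, so the helpers enable the FPU at most once per request and skip it entirely for short inputs.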