kvm_read_cr0     3388 arch/x86/kvm/svm.c 	nested_vmcb->save.cr0    = kvm_read_cr0(&svm->vcpu);
kvm_read_cr0     3704 arch/x86/kvm/svm.c 	hsave->save.cr0    = kvm_read_cr0(&svm->vcpu);
kvm_read_cr0     4076 arch/x86/kvm/svm.c 			val = kvm_read_cr0(&svm->vcpu);
kvm_read_cr0     4849 arch/x86/kvm/vmx/vmx.c 		trace_kvm_cr_write(0, kvm_read_cr0(vcpu));
kvm_read_cr0     4868 arch/x86/kvm/vmx/vmx.c 		trace_kvm_cr_write(0, (kvm_read_cr0(vcpu) & ~0xful) | val);
kvm_read_cr0     6890 arch/x86/kvm/vmx/vmx.c 	if (kvm_read_cr0(vcpu) & X86_CR0_CD) {
kvm_read_cr0      763 arch/x86/kvm/x86.c 	unsigned long old_cr0 = kvm_read_cr0(vcpu);
kvm_read_cr0     6037 arch/x86/kvm/x86.c 		value = kvm_read_cr0(vcpu);
kvm_read_cr0     6066 arch/x86/kvm/x86.c 		res = kvm_set_cr0(vcpu, mk_cr_64(kvm_read_cr0(vcpu), val));
kvm_read_cr0     7758 arch/x86/kvm/x86.c 	put_smstate(u32, buf, 0x7ffc, kvm_read_cr0(vcpu));
kvm_read_cr0     7820 arch/x86/kvm/x86.c 	put_smstate(u64, buf, 0x7f58, kvm_read_cr0(vcpu));
kvm_read_cr0     8743 arch/x86/kvm/x86.c 	sregs->cr0 = kvm_read_cr0(vcpu);
kvm_read_cr0     8898 arch/x86/kvm/x86.c 	mmu_reset_needed |= kvm_read_cr0(vcpu) != sregs->cr0;
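
All of the call sites above read guest CR0 through the accessor in arch/x86/kvm/kvm_cache_regs.h rather than pulling the value out of the VMCB/VMCS directly. The sketch below shows roughly how that accessor works in kernels of this vintage; the KVM_POSSIBLE_CR0_GUEST_BITS value and the name of the decache callback differ between kernel versions, so treat the exact identifiers as approximate rather than a verbatim copy of the tree this listing was generated from.

	/* Approximate sketch of the CR0 read path (arch/x86/kvm/kvm_cache_regs.h). */

	/* CR0 bits the guest may be allowed to own; vendor code can trim this. */
	#define KVM_POSSIBLE_CR0_GUEST_BITS	X86_CR0_TS

	static inline ulong kvm_read_cr0_bits(struct kvm_vcpu *vcpu, ulong mask)
	{
		ulong tmask = mask & KVM_POSSIBLE_CR0_GUEST_BITS;

		/*
		 * If the caller wants a bit the guest currently owns (e.g.
		 * CR0.TS under VMX), refresh vcpu->arch.cr0 from hardware
		 * before using the cached value.
		 */
		if (tmask & vcpu->arch.cr0_guest_owned_bits)
			kvm_x86_ops->decache_cr0_guest_bits(vcpu);

		return vcpu->arch.cr0 & mask;
	}

	static inline ulong kvm_read_cr0(struct kvm_vcpu *vcpu)
	{
		/* Full CR0: any guest-owned bits are decached before returning. */
		return kvm_read_cr0_bits(vcpu, ~0UL);
	}

This is why the SVM nested/hsave saves, the VMX CR-exit tracing, the SMM state saves, and the KVM_GET/SET_SREGS paths listed above all call kvm_read_cr0() instead of touching vcpu->arch.cr0 directly: the accessor guarantees the cached CR0 is coherent with whatever bits the guest owns in hardware at that moment.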