Searched for refs:gva (results 1 – 17 of 17), sorted by relevance

/linux-4.1.27/include/trace/events/
kvm.h
275 TP_PROTO(u64 gva, u64 gfn),
277 TP_ARGS(gva, gfn),
280 __field(__u64, gva)
285 __entry->gva = gva;
289 TP_printk("gva = %#llx, gfn = %#llx", __entry->gva, __entry->gfn)
294 TP_PROTO(u64 gva, u64 gfn),
296 TP_ARGS(gva, gfn)
301 TP_PROTO(u64 gva, u64 gfn),
303 TP_ARGS(gva, gfn)
308 TP_PROTO(u64 token, u64 gva),
[all …]
/linux-4.1.27/arch/x86/kvm/
x86.h
86 gva_t gva, gfn_t gfn, unsigned access) in vcpu_cache_mmio_info() argument
88 vcpu->arch.mmio_gva = gva & PAGE_MASK; in vcpu_cache_mmio_info()
105 static inline void vcpu_clear_mmio_info(struct kvm_vcpu *vcpu, gva_t gva) in vcpu_clear_mmio_info() argument
107 if (gva != MMIO_GVA_ANY && vcpu->arch.mmio_gva != (gva & PAGE_MASK)) in vcpu_clear_mmio_info()
113 static inline bool vcpu_match_mmio_gva(struct kvm_vcpu *vcpu, unsigned long gva) in vcpu_match_mmio_gva() argument
116 vcpu->arch.mmio_gva == (gva & PAGE_MASK)) in vcpu_match_mmio_gva()
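
The x86.h hits above implement a small page-granular cache of the last MMIO guest virtual address. As a minimal standalone sketch (assumed constants, not the kernel code), the page-masked cache/match/clear pattern looks like this:

    /* Minimal standalone sketch (not the kernel code) of the page-masked gva
     * caching pattern seen in the x86.h hits above: only the page-aligned part
     * of the guest virtual address is cached, so any access within the same
     * guest page matches. PAGE_SIZE and MMIO_GVA_ANY here are assumptions. */
    #include <stdbool.h>
    #include <stdio.h>

    #define PAGE_SIZE    4096UL
    #define PAGE_MASK    (~(PAGE_SIZE - 1))
    #define MMIO_GVA_ANY (~0UL)

    static unsigned long cached_mmio_gva;

    static void cache_mmio_info(unsigned long gva)
    {
            cached_mmio_gva = gva & PAGE_MASK;      /* keep page-aligned bits only */
    }

    static void clear_mmio_info(unsigned long gva)
    {
            /* clear only for the wildcard or for the page that is cached */
            if (gva != MMIO_GVA_ANY && cached_mmio_gva != (gva & PAGE_MASK))
                    return;
            cached_mmio_gva = 0;
    }

    static bool match_mmio_gva(unsigned long gva)
    {
            return cached_mmio_gva && cached_mmio_gva == (gva & PAGE_MASK);
    }

    int main(void)
    {
            cache_mmio_info(0xfee003f8UL);                             /* some MMIO page */
            printf("same page:   %d\n", match_mmio_gva(0xfee00010UL)); /* 1 */
            printf("other page:  %d\n", match_mmio_gva(0xfee01000UL)); /* 0 */
            clear_mmio_info(MMIO_GVA_ANY);
            printf("after clear: %d\n", match_mmio_gva(0xfee00010UL)); /* 0 */
            return 0;
    }
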
mmutrace.h
249 TP_PROTO(struct kvm_vcpu *vcpu, gva_t gva, u32 error_code,
251 TP_ARGS(vcpu, gva, error_code, sptep, old_spte, retry),
255 __field(gva_t, gva)
265 __entry->gva = gva;
275 __entry->gva, __print_flags(__entry->error_code, "|",
trace.h
747 TP_PROTO(gva_t gva, gpa_t gpa, bool write, bool gpa_match),
748 TP_ARGS(gva, gpa, write, gpa_match),
751 __field(gva_t, gva)
758 __entry->gva = gva;
764 TP_printk("gva %#lx gpa %#llx %s %s", __entry->gva, __entry->gpa,
mmu.c
1701 static void nonpaging_invlpg(struct kvm_vcpu *vcpu, gva_t gva) in nonpaging_invlpg() argument
2846 static bool handle_abnormal_pfn(struct kvm_vcpu *vcpu, gva_t gva, gfn_t gfn, in handle_abnormal_pfn() argument
2858 vcpu_cache_mmio_info(vcpu, gva, gfn, access); in handle_abnormal_pfn()
2923 static bool fast_page_fault(struct kvm_vcpu *vcpu, gva_t gva, int level, in fast_page_fault() argument
2938 for_each_shadow_entry_lockless(vcpu, gva, iterator, spte) in fast_page_fault()
2993 trace_fast_page_fault(vcpu, gva, error_code, iterator.sptep, in fast_page_fault()
3001 gva_t gva, pfn_t *pfn, bool write, bool *writable);
3382 static int nonpaging_page_fault(struct kvm_vcpu *vcpu, gva_t gva, in nonpaging_page_fault() argument
3388 pgprintk("%s: gva %lx error %x\n", __func__, gva, error_code); in nonpaging_page_fault()
3391 r = handle_mmio_page_fault(vcpu, gva, error_code, true); in nonpaging_page_fault()
[all …]
paging_tmpl.h
830 static void FNAME(invlpg)(struct kvm_vcpu *vcpu, gva_t gva) in FNAME()
837 vcpu_clear_mmio_info(vcpu, gva); in FNAME()
851 for_each_shadow_entry(vcpu, gva, iterator) { in FNAME()
x86.c
4212 gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva, in kvm_mmu_gva_to_gpa_read() argument
4216 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_read()
4219 gpa_t kvm_mmu_gva_to_gpa_fetch(struct kvm_vcpu *vcpu, gva_t gva, in kvm_mmu_gva_to_gpa_fetch() argument
4224 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_fetch()
4227 gpa_t kvm_mmu_gva_to_gpa_write(struct kvm_vcpu *vcpu, gva_t gva, in kvm_mmu_gva_to_gpa_write() argument
4232 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_write()
4236 gpa_t kvm_mmu_gva_to_gpa_system(struct kvm_vcpu *vcpu, gva_t gva, in kvm_mmu_gva_to_gpa_system() argument
4239 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, 0, exception); in kvm_mmu_gva_to_gpa_system()
4354 static int vcpu_mmio_gva_to_gpa(struct kvm_vcpu *vcpu, unsigned long gva, in vcpu_mmio_gva_to_gpa() argument
4361 if (vcpu_match_mmio_gva(vcpu, gva) in vcpu_mmio_gva_to_gpa()
[all …]
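
The x86.c hits show four thin wrappers that all funnel into the walk_mmu->gva_to_gpa callback and differ only in the access bits they pass. A hedged standalone sketch of that wrapper pattern (flag names and values are illustrative, not KVM's):

    /* Illustrative standalone sketch (not the kernel code) of the pattern in
     * the x86.c hits above: the kvm_mmu_gva_to_gpa_* helpers differ only in
     * the access bits handed to one gva_to_gpa callback. Flags are assumed. */
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t gva_t;
    typedef uint64_t gpa_t;

    #define ACC_WRITE 0x1                     /* illustrative permission bits */
    #define ACC_FETCH 0x2
    #define ACC_USER  0x4

    /* stand-in for walk_mmu->gva_to_gpa: identity map, just report the bits */
    static gpa_t walk_gva_to_gpa(gva_t gva, uint32_t access)
    {
            printf("translate gva %#llx, access %#x\n",
                   (unsigned long long)gva, access);
            return gva;
    }

    static gpa_t gva_to_gpa_read(gva_t gva)   { return walk_gva_to_gpa(gva, ACC_USER); }
    static gpa_t gva_to_gpa_fetch(gva_t gva)  { return walk_gva_to_gpa(gva, ACC_USER | ACC_FETCH); }
    static gpa_t gva_to_gpa_write(gva_t gva)  { return walk_gva_to_gpa(gva, ACC_USER | ACC_WRITE); }
    static gpa_t gva_to_gpa_system(gva_t gva) { return walk_gva_to_gpa(gva, 0); } /* no user bit */

    int main(void)
    {
            gva_to_gpa_read(0x1000);
            gva_to_gpa_fetch(0x2000);
            gva_to_gpa_write(0x3000);
            gva_to_gpa_system(0x4000);
            return 0;
    }
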
vmx.c
1198 static inline void __invvpid(int ext, u16 vpid, gva_t gva) in __invvpid() argument
1203 u64 gva; in __invvpid() member
1204 } operand = { vpid, 0, gva }; in __invvpid()
6499 gva_t gva; in nested_vmx_check_vmptr() local
6507 vmcs_read32(VMX_INSTRUCTION_INFO), &gva)) in nested_vmx_check_vmptr()
6510 if (kvm_read_guest_virt(&vcpu->arch.emulate_ctxt, gva, &vmptr, in nested_vmx_check_vmptr()
7011 gva_t gva = 0; in handle_vmread() local
7035 vmx_instruction_info, &gva)) in handle_vmread()
7038 kvm_write_guest_virt_system(&vcpu->arch.emulate_ctxt, gva, in handle_vmread()
7051 gva_t gva; in handle_vmwrite() local
[all …]
/linux-4.1.27/virt/kvm/
async_pf.c
79 gva_t gva = apf->gva; in async_pf_execute() local
95 trace_kvm_async_pf_completed(addr, gva); in async_pf_execute()
158 int kvm_setup_async_pf(struct kvm_vcpu *vcpu, gva_t gva, unsigned long hva, in kvm_setup_async_pf() argument
178 work->gva = gva; in kvm_setup_async_pf()
/linux-4.1.27/arch/s390/kvm/
gaccess.c
539 static unsigned long guest_translate(struct kvm_vcpu *vcpu, unsigned long gva, in guest_translate() argument
543 union vaddress vaddr = {.addr = gva}; in guest_translate()
544 union raddress raddr = {.addr = gva}; in guest_translate()
827 int guest_translate_address(struct kvm_vcpu *vcpu, unsigned long gva, ar_t ar, in guest_translate_address() argument
836 gva = kvm_s390_logical_to_effective(vcpu, gva); in guest_translate_address()
839 tec->addr = gva >> PAGE_SHIFT; in guest_translate_address()
842 if (is_low_address(gva) && low_address_protection_enabled(vcpu, asce)) { in guest_translate_address()
850 rc = guest_translate(vcpu, gva, gpa, asce, write); in guest_translate_address()
858 *gpa = kvm_s390_real_to_abs(vcpu, gva); in guest_translate_address()
869 int check_gva_range(struct kvm_vcpu *vcpu, unsigned long gva, ar_t ar, in check_gva_range() argument
[all …]
gaccess.h
158 int guest_translate_address(struct kvm_vcpu *vcpu, unsigned long gva,
160 int check_gva_range(struct kvm_vcpu *vcpu, unsigned long gva, ar_t ar,
/linux-4.1.27/arch/x86/include/asm/
kvm_host.h
266 int (*page_fault)(struct kvm_vcpu *vcpu, gva_t gva, u32 err,
270 gpa_t (*gva_to_gpa)(struct kvm_vcpu *vcpu, gva_t gva, u32 access,
276 void (*invlpg)(struct kvm_vcpu *vcpu, gva_t gva);
1010 int kvm_mmu_unprotect_page_virt(struct kvm_vcpu *vcpu, gva_t gva);
1017 gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva,
1019 gpa_t kvm_mmu_gva_to_gpa_fetch(struct kvm_vcpu *vcpu, gva_t gva,
1021 gpa_t kvm_mmu_gva_to_gpa_write(struct kvm_vcpu *vcpu, gva_t gva,
1023 gpa_t kvm_mmu_gva_to_gpa_system(struct kvm_vcpu *vcpu, gva_t gva,
1028 int kvm_mmu_page_fault(struct kvm_vcpu *vcpu, gva_t gva, u32 error_code,
1030 void kvm_mmu_invlpg(struct kvm_vcpu *vcpu, gva_t gva);
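
The kvm_host.h hits above belong to the per-MMU callback table: each MMU mode plugs its own page_fault, gva_to_gpa and invlpg handlers into function pointers, and callers go through the table. A toy sketch of the same pattern (struct and function names here are assumptions, not the KVM definitions):

    /* Toy sketch (assumed names, not the KVM structs) of the per-MMU callback
     * table visible in the kvm_host.h hits above: each MMU mode supplies its
     * own page_fault/gva_to_gpa/invlpg handlers behind function pointers. */
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t gva_t;
    typedef uint64_t gpa_t;

    struct mmu_ops {
            int   (*page_fault)(gva_t gva, uint32_t err);
            gpa_t (*gva_to_gpa)(gva_t gva, uint32_t access);
            void  (*invlpg)(gva_t gva);
    };

    /* a trivial "nonpaging"-style mode where gva and gpa are identical */
    static int   flat_page_fault(gva_t gva, uint32_t err) { (void)gva; (void)err; return 0; }
    static gpa_t flat_gva_to_gpa(gva_t gva, uint32_t access) { (void)access; return gva; }
    static void  flat_invlpg(gva_t gva) { printf("invlpg %#llx\n", (unsigned long long)gva); }

    static const struct mmu_ops flat_mmu = {
            .page_fault = flat_page_fault,
            .gva_to_gpa = flat_gva_to_gpa,
            .invlpg     = flat_invlpg,
    };

    int main(void)
    {
            const struct mmu_ops *mmu = &flat_mmu;   /* mode chosen at setup time */
            printf("gpa %#llx\n", (unsigned long long)mmu->gva_to_gpa(0x1000, 0));
            mmu->invlpg(0x1000);
            return 0;
    }
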
/linux-4.1.27/arch/mips/kvm/
tlb.c
169 unsigned long gva) in kvm_mips_translate_guest_kseg0_to_hpa() argument
172 uint32_t offset = gva & ~PAGE_MASK; in kvm_mips_translate_guest_kseg0_to_hpa()
175 if (KVM_GUEST_KSEGX(gva) != KVM_GUEST_KSEG0) { in kvm_mips_translate_guest_kseg0_to_hpa()
177 __builtin_return_address(0), gva); in kvm_mips_translate_guest_kseg0_to_hpa()
181 gfn = (KVM_GUEST_CPHYSADDR(gva) >> PAGE_SHIFT); in kvm_mips_translate_guest_kseg0_to_hpa()
185 gva); in kvm_mips_translate_guest_kseg0_to_hpa()
trap_emul.c
22 static gpa_t kvm_trap_emul_gva_to_gpa_cb(gva_t gva) in kvm_trap_emul_gva_to_gpa_cb() argument
25 uint32_t kseg = KSEGX(gva); in kvm_trap_emul_gva_to_gpa_cb()
28 gpa = CPHYSADDR(gva); in kvm_trap_emul_gva_to_gpa_cb()
30 kvm_err("%s: cannot find GPA for GVA: %#lx\n", __func__, gva); in kvm_trap_emul_gva_to_gpa_cb()
35 kvm_debug("%s: gva %#lx, gpa: %#llx\n", __func__, gva, gpa); in kvm_trap_emul_gva_to_gpa_cb()
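
The trap_emul.c hit above converts unmapped MIPS kernel-segment addresses by stripping the segment bits with CPHYSADDR(). A standalone sketch of that KSEG0/KSEG1 conversion (constants mirror the usual MIPS32 layout and are stated as assumptions):

    /* Standalone sketch (not the kernel code) of the KSEG0/KSEG1 gva->gpa
     * conversion pattern from trap_emul.c: unmapped kernel segments translate
     * to a physical address by masking off the segment bits. */
    #include <stdint.h>
    #include <stdio.h>

    #define KSEG0        0x80000000UL               /* cached, unmapped */
    #define KSEG1        0xa0000000UL               /* uncached, unmapped */
    #define KSEGX(a)     ((a) & 0xe0000000UL)       /* segment selector bits */
    #define CPHYSADDR(a) ((a) & 0x1fffffffUL)       /* physical part of a kseg address */

    static int gva_to_gpa(unsigned long gva, uint64_t *gpa)
    {
            unsigned long kseg = KSEGX(gva);

            if (kseg == KSEG0 || kseg == KSEG1) {
                    *gpa = CPHYSADDR(gva);          /* direct-mapped segments */
                    return 0;
            }
            return -1;                              /* mapped segments need a TLB walk */
    }

    int main(void)
    {
            uint64_t gpa;

            if (gva_to_gpa(0x80001234UL, &gpa) == 0)
                    printf("gva 0x80001234 -> gpa %#llx\n", (unsigned long long)gpa); /* 0x1234 */
            if (gva_to_gpa(0x00401000UL, &gpa) != 0)
                    printf("useg address: not a direct-mapped kseg\n");
            return 0;
    }
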
/linux-4.1.27/Documentation/virtual/kvm/
mmu.txt
36 gva guest virtual address
65 guest physical addresses, to host physical addresses (gva->gpa->hpa)
122 paging: gva->gpa->hpa
123 paging, tdp: (gva->)gpa->hpa
294 (gva->gpa or ngpa->gpa)
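
The mmu.txt excerpts describe the two-stage chain gva->gpa->hpa: the guest page tables map gva to gpa, and the host (or TDP) level maps gpa to hpa. A toy sketch of chaining the two translations, using tiny illustrative tables rather than KVM data structures:

    /* Toy sketch of the gva->gpa->hpa chain described in mmu.txt: one lookup
     * per stage, with the page offset carried through unchanged. The two
     * arrays below are illustrative stand-ins for real page tables. */
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define NPAGES     4

    /* guest page table: guest virtual page -> guest physical page */
    static const uint64_t guest_pt[NPAGES] = { 2, 3, 0, 1 };
    /* host mapping: guest physical page -> host physical page */
    static const uint64_t host_pt[NPAGES]  = { 7, 5, 6, 4 };

    static uint64_t gva_to_gpa(uint64_t gva)
    {
            uint64_t gfn = gva >> PAGE_SHIFT;
            return (guest_pt[gfn % NPAGES] << PAGE_SHIFT) | (gva & ((1 << PAGE_SHIFT) - 1));
    }

    static uint64_t gpa_to_hpa(uint64_t gpa)
    {
            uint64_t pfn = gpa >> PAGE_SHIFT;
            return (host_pt[pfn % NPAGES] << PAGE_SHIFT) | (gpa & ((1 << PAGE_SHIFT) - 1));
    }

    int main(void)
    {
            uint64_t gva = 0x1234;                  /* guest page 1, offset 0x234 */
            uint64_t gpa = gva_to_gpa(gva);         /* -> guest physical page 3 */
            uint64_t hpa = gpa_to_hpa(gpa);         /* -> host physical page 4 */
            printf("gva %#llx -> gpa %#llx -> hpa %#llx\n",
                   (unsigned long long)gva, (unsigned long long)gpa,
                   (unsigned long long)hpa);
            return 0;
    }
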
/linux-4.1.27/arch/mips/include/asm/
kvm_host.h
615 gpa_t (*gva_to_gpa)(gva_t gva);
693 unsigned long gva);
/linux-4.1.27/include/linux/
kvm_host.h
186 gva_t gva; member
194 int kvm_setup_async_pf(struct kvm_vcpu *vcpu, gva_t gva, unsigned long hva,