Lines Matching refs:arch

70 if (vcpu->arch.hflags & BOOK3S_HFLAG_SPLIT_HACK) { in kvmppc_unfixup_split_real()
74 vcpu->arch.hflags &= ~BOOK3S_HFLAG_SPLIT_HACK; in kvmppc_unfixup_split_real()
129 vcpu->arch.mmu.reset_msr(vcpu); in kvmppc_inject_interrupt()
163 unsigned long old_pending = vcpu->arch.pending_exceptions; in kvmppc_book3s_dequeue_irqprio()
166 &vcpu->arch.pending_exceptions); in kvmppc_book3s_dequeue_irqprio()
168 kvmppc_update_int_pending(vcpu, vcpu->arch.pending_exceptions, in kvmppc_book3s_dequeue_irqprio()
177 &vcpu->arch.pending_exceptions); in kvmppc_book3s_queue_irqprio()
199 return test_bit(BOOK3S_IRQPRIO_DECREMENTER, &vcpu->arch.pending_exceptions); in kvmppc_core_pending_dec()
339 unsigned long *pending = &vcpu->arch.pending_exceptions; in kvmppc_core_prepare_to_enter()
340 unsigned long old_pending = vcpu->arch.pending_exceptions; in kvmppc_core_prepare_to_enter()
344 if (vcpu->arch.pending_exceptions) in kvmppc_core_prepare_to_enter()
345 printk(KERN_EMERG "KVM: Check pending: %lx\n", vcpu->arch.pending_exceptions); in kvmppc_core_prepare_to_enter()
351 clear_bit(priority, &vcpu->arch.pending_exceptions); in kvmppc_core_prepare_to_enter()
370 ulong mp_pa = vcpu->arch.magic_page_pa & KVM_PAM; in kvmppc_gpa_to_pfn()
379 ulong shared_page = ((ulong)vcpu->arch.shared) & PAGE_MASK; in kvmppc_gpa_to_pfn()
402 r = vcpu->arch.mmu.xlate(vcpu, eaddr, pte, data, iswrite); in kvmppc_xlate()
414 if ((vcpu->arch.hflags & BOOK3S_HFLAG_SPLIT_HACK) && in kvmppc_xlate()
457 return vcpu->kvm->arch.kvm_ops->get_sregs(vcpu, sregs); in kvm_arch_vcpu_ioctl_get_sregs()
463 return vcpu->kvm->arch.kvm_ops->set_sregs(vcpu, sregs); in kvm_arch_vcpu_ioctl_set_sregs()
478 regs->pid = vcpu->arch.pid; in kvm_arch_vcpu_ioctl_get_regs()
537 r = vcpu->kvm->arch.kvm_ops->get_one_reg(vcpu, id, val); in kvmppc_get_one_reg()
552 *val = get_reg_val(id, vcpu->arch.fp.fpscr); in kvmppc_get_one_reg()
558 val->vsxval[0] = vcpu->arch.fp.fpr[i][0]; in kvmppc_get_one_reg()
559 val->vsxval[1] = vcpu->arch.fp.fpr[i][1]; in kvmppc_get_one_reg()
570 if (!vcpu->arch.icp) { in kvmppc_get_one_reg()
578 *val = get_reg_val(id, vcpu->arch.fscr); in kvmppc_get_one_reg()
581 *val = get_reg_val(id, vcpu->arch.tar); in kvmppc_get_one_reg()
584 *val = get_reg_val(id, vcpu->arch.ebbhr); in kvmppc_get_one_reg()
587 *val = get_reg_val(id, vcpu->arch.ebbrr); in kvmppc_get_one_reg()
590 *val = get_reg_val(id, vcpu->arch.bescr); in kvmppc_get_one_reg()
593 *val = get_reg_val(id, vcpu->arch.vtb); in kvmppc_get_one_reg()
596 *val = get_reg_val(id, vcpu->arch.ic); in kvmppc_get_one_reg()
613 r = vcpu->kvm->arch.kvm_ops->set_one_reg(vcpu, id, val); in kvmppc_set_one_reg()
628 vcpu->arch.fp.fpscr = set_reg_val(id, *val); in kvmppc_set_one_reg()
634 vcpu->arch.fp.fpr[i][0] = val->vsxval[0]; in kvmppc_set_one_reg()
635 vcpu->arch.fp.fpr[i][1] = val->vsxval[1]; in kvmppc_set_one_reg()
643 if (!vcpu->arch.icp) { in kvmppc_set_one_reg()
652 vcpu->arch.fscr = set_reg_val(id, *val); in kvmppc_set_one_reg()
655 vcpu->arch.tar = set_reg_val(id, *val); in kvmppc_set_one_reg()
658 vcpu->arch.ebbhr = set_reg_val(id, *val); in kvmppc_set_one_reg()
661 vcpu->arch.ebbrr = set_reg_val(id, *val); in kvmppc_set_one_reg()
664 vcpu->arch.bescr = set_reg_val(id, *val); in kvmppc_set_one_reg()
667 vcpu->arch.vtb = set_reg_val(id, *val); in kvmppc_set_one_reg()
670 vcpu->arch.ic = set_reg_val(id, *val); in kvmppc_set_one_reg()
683 vcpu->kvm->arch.kvm_ops->vcpu_load(vcpu, cpu); in kvmppc_core_vcpu_load()
688 vcpu->kvm->arch.kvm_ops->vcpu_put(vcpu); in kvmppc_core_vcpu_put()
693 vcpu->kvm->arch.kvm_ops->set_msr(vcpu, msr); in kvmppc_set_msr()
699 return vcpu->kvm->arch.kvm_ops->vcpu_run(kvm_run, vcpu); in kvmppc_vcpu_run()
723 return kvm->arch.kvm_ops->vcpu_create(kvm, id); in kvmppc_core_vcpu_create()
728 vcpu->kvm->arch.kvm_ops->vcpu_free(vcpu); in kvmppc_core_vcpu_free()
733 return vcpu->kvm->arch.kvm_ops->check_requests(vcpu); in kvmppc_core_check_requests()
738 return kvm->arch.kvm_ops->get_dirty_log(kvm, log); in kvm_vm_ioctl_get_dirty_log()
744 kvm->arch.kvm_ops->free_memslot(free, dont); in kvmppc_core_free_memslot()
750 return kvm->arch.kvm_ops->create_memslot(slot, npages); in kvmppc_core_create_memslot()
755 kvm->arch.kvm_ops->flush_memslot(kvm, memslot); in kvmppc_core_flush_memslot()
762 return kvm->arch.kvm_ops->prepare_memory_region(kvm, memslot, mem); in kvmppc_core_prepare_memory_region()
769 kvm->arch.kvm_ops->commit_memory_region(kvm, mem, old); in kvmppc_core_commit_memory_region()
774 return kvm->arch.kvm_ops->unmap_hva(kvm, hva); in kvm_unmap_hva()
780 return kvm->arch.kvm_ops->unmap_hva_range(kvm, start, end); in kvm_unmap_hva_range()
785 return kvm->arch.kvm_ops->age_hva(kvm, start, end); in kvm_age_hva()
790 return kvm->arch.kvm_ops->test_age_hva(kvm, hva); in kvm_test_age_hva()
795 kvm->arch.kvm_ops->set_spte_hva(kvm, hva, pte); in kvm_set_spte_hva()
800 vcpu->kvm->arch.kvm_ops->mmu_destroy(vcpu); in kvmppc_mmu_destroy()
807 INIT_LIST_HEAD(&kvm->arch.spapr_tce_tables); in kvmppc_core_init_vm()
808 INIT_LIST_HEAD(&kvm->arch.rtas_tokens); in kvmppc_core_init_vm()
811 return kvm->arch.kvm_ops->init_vm(kvm); in kvmppc_core_init_vm()
816 kvm->arch.kvm_ops->destroy_vm(kvm); in kvmppc_core_destroy_vm()
820 WARN_ON(!list_empty(&kvm->arch.spapr_tce_tables)); in kvmppc_core_destroy_vm()
918 return kvm->arch.kvm_ops->hcall_implemented(hcall); in kvmppc_book3s_hcall_implemented()
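
Most of the matches from kvm_arch_vcpu_ioctl_get_sregs() onward follow one shape: book3s.c acts as a frontend that forwards each operation through a per-VM function table, kvm->arch.kvm_ops, so the PR and HV backends can each plug in their own implementation. The sketch below is a minimal, self-contained illustration of that indirection; all type names and signatures are simplified stand-ins, not the kernel's actual struct kvmppc_ops.

/*
 * Minimal, self-contained sketch (not kernel code) of the dispatch pattern
 * visible in the matches above: the Book3S frontend forwards operations
 * through a per-VM ops table so that different backends can supply their
 * own implementations.  All names and signatures here are illustrative.
 */
#include <stdio.h>

struct vcpu_sketch;                          /* stands in for struct kvm_vcpu */

struct ops_sketch {                          /* stands in for the kvm_ops table */
	void (*vcpu_load)(struct vcpu_sketch *vcpu, int cpu);
	void (*vcpu_put)(struct vcpu_sketch *vcpu);
	int  (*hcall_implemented)(unsigned long hcall);
};

struct arch_sketch {                         /* stands in for kvm->arch */
	struct ops_sketch *kvm_ops;          /* chosen once, when the VM is set up */
};

struct kvm_sketch {
	struct arch_sketch arch;
};

struct vcpu_sketch {
	struct kvm_sketch *kvm;
};

/* Frontend helper, mirroring e.g. kvmppc_core_vcpu_load() in the listing. */
static void core_vcpu_load(struct vcpu_sketch *vcpu, int cpu)
{
	vcpu->kvm->arch.kvm_ops->vcpu_load(vcpu, cpu);
}

/* Toy backend standing in for a real (PR or HV) implementation. */
static void toy_vcpu_load(struct vcpu_sketch *vcpu, int cpu)
{
	(void)vcpu;
	printf("backend vcpu_load on cpu %d\n", cpu);
}

static void toy_vcpu_put(struct vcpu_sketch *vcpu) { (void)vcpu; }
static int  toy_hcall_implemented(unsigned long hcall) { (void)hcall; return 0; }

static struct ops_sketch toy_ops = {
	.vcpu_load         = toy_vcpu_load,
	.vcpu_put          = toy_vcpu_put,
	.hcall_implemented = toy_hcall_implemented,
};

int main(void)
{
	struct kvm_sketch kvm   = { .arch = { .kvm_ops = &toy_ops } };
	struct vcpu_sketch vcpu = { .kvm = &kvm };

	core_vcpu_load(&vcpu, 3);            /* dispatches through arch.kvm_ops */
	return 0;
}

Because every frontend helper in the listing dereferences the same table, the shared Book3S code stays identical regardless of which backend filled it in.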