vgic_cpu 181 arch/arm/include/asm/kvm_host.h struct vgic_cpu vgic_cpu;
vgic_cpu 275 arch/arm/kvm/coproc.c *vcpu_reg(vcpu, p->Rt1) = vcpu->arch.vgic_cpu.vgic_v3.vgic_sre;
vgic_cpu 293 arch/arm64/include/asm/kvm_host.h struct vgic_cpu vgic_cpu;
vgic_cpu 292 arch/arm64/kvm/sys_regs.c p->regval = vcpu->arch.vgic_cpu.vgic_v3.vgic_sre;
vgic_cpu 17 arch/arm64/kvm/vgic-sys-reg-v3.c struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 186 arch/arm64/kvm/vgic-sys-reg-v3.c struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 233 arch/arm64/kvm/vgic-sys-reg-v3.c struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 199 virt/kvm/arm/hyp/vgic-v3-sr.c struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 200 virt/kvm/arm/hyp/vgic-v3-sr.c u64 used_lrs = vcpu->arch.vgic_cpu.used_lrs;
vgic_cpu 235 virt/kvm/arm/hyp/vgic-v3-sr.c struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 236 virt/kvm/arm/hyp/vgic-v3-sr.c u64 used_lrs = vcpu->arch.vgic_cpu.used_lrs;
vgic_cpu 262 virt/kvm/arm/hyp/vgic-v3-sr.c struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 311 virt/kvm/arm/hyp/vgic-v3-sr.c struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 343 virt/kvm/arm/hyp/vgic-v3-sr.c cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 380 virt/kvm/arm/hyp/vgic-v3-sr.c cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 456 virt/kvm/arm/hyp/vgic-v3-sr.c unsigned int used_lrs = vcpu->arch.vgic_cpu.used_lrs;
vgic_cpu 495 virt/kvm/arm/hyp/vgic-v3-sr.c unsigned int used_lrs = vcpu->arch.vgic_cpu.used_lrs;
vgic_cpu 197 virt/kvm/arm/vgic/vgic-init.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 202 virt/kvm/arm/vgic/vgic-init.c vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;
vgic_cpu 204 virt/kvm/arm/vgic/vgic-init.c INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
vgic_cpu 205 virt/kvm/arm/vgic/vgic-init.c raw_spin_lock_init(&vgic_cpu->ap_list_lock);
vgic_cpu 212 virt/kvm/arm/vgic/vgic-init.c struct vgic_irq *irq = &vgic_cpu->private_irqs[i];
vgic_cpu 286 virt/kvm/arm/vgic/vgic-init.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 289 virt/kvm/arm/vgic/vgic-init.c struct vgic_irq *irq = &vgic_cpu->private_irqs[i];
vgic_cpu 358 virt/kvm/arm/vgic/vgic-init.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 360 virt/kvm/arm/vgic/vgic-init.c INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
vgic_cpu 363 virt/kvm/arm/vgic/vgic-its.c map.vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
vgic_cpu 420 virt/kvm/arm/vgic/vgic-its.c gpa_t pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);
vgic_cpu 676 virt/kvm/arm/vgic/vgic-its.c if (!vcpu->arch.vgic_cpu.lpis_enabled)
vgic_cpu 1324 virt/kvm/arm/vgic/vgic-its.c if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.its_vm)
vgic_cpu 1325 virt/kvm/arm/vgic/vgic-its.c its_invall_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe);
vgic_cpu 1792 virt/kvm/arm/vgic/vgic-its.c if (!(vcpu->arch.vgic_cpu.pendbaser & GICR_PENDBASER_PTZ))
vgic_cpu 362 virt/kvm/arm/vgic/vgic-mmio-v2.c return vcpu->arch.vgic_cpu.vgic_v2.vgic_apr;
vgic_cpu 364 virt/kvm/arm/vgic/vgic-mmio-v2.c struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 388 virt/kvm/arm/vgic/vgic-mmio-v2.c vcpu->arch.vgic_cpu.vgic_v2.vgic_apr = val;
vgic_cpu 390 virt/kvm/arm/vgic/vgic-mmio-v2.c struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 177 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 179 virt/kvm/arm/vgic/vgic-mmio-v3.c return vgic_cpu->lpis_enabled ? GICR_CTLR_ENABLE_LPIS : 0;
vgic_cpu 187 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 188 virt/kvm/arm/vgic/vgic-mmio-v3.c bool was_enabled = vgic_cpu->lpis_enabled;
vgic_cpu 193 virt/kvm/arm/vgic/vgic-mmio-v3.c vgic_cpu->lpis_enabled = val & GICR_CTLR_ENABLE_LPIS;
vgic_cpu 195 virt/kvm/arm/vgic/vgic-mmio-v3.c if (was_enabled && !vgic_cpu->lpis_enabled) {
vgic_cpu 200 virt/kvm/arm/vgic/vgic-mmio-v3.c if (!was_enabled && vgic_cpu->lpis_enabled)
vgic_cpu 208 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 209 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_redist_region *rdreg = vgic_cpu->rdreg;
vgic_cpu 397 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 401 virt/kvm/arm/vgic/vgic-mmio-v3.c if (vgic_cpu->lpis_enabled)
vgic_cpu 416 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 418 virt/kvm/arm/vgic/vgic-mmio-v3.c return extract_bytes(vgic_cpu->pendbaser, addr & 7, len);
vgic_cpu 425 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 429 virt/kvm/arm/vgic/vgic-mmio-v3.c if (vgic_cpu->lpis_enabled)
vgic_cpu 433 virt/kvm/arm/vgic/vgic-mmio-v3.c old_pendbaser = READ_ONCE(vgic_cpu->pendbaser);
vgic_cpu 437 virt/kvm/arm/vgic/vgic-mmio-v3.c } while (cmpxchg64(&vgic_cpu->pendbaser, old_pendbaser,
vgic_cpu 609 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 610 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_io_device *rd_dev = &vcpu->arch.vgic_cpu.rd_iodev;
vgic_cpu 615 virt/kvm/arm/vgic/vgic-mmio-v3.c if (!IS_VGIC_ADDR_UNDEF(vgic_cpu->rd_iodev.base_addr))
vgic_cpu 631 virt/kvm/arm/vgic/vgic-mmio-v3.c vgic_cpu->rdreg = rdreg;
vgic_cpu 656 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_io_device *rd_dev = &vcpu->arch.vgic_cpu.rd_iodev;
vgic_cpu 31 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2;
vgic_cpu 51 virt/kvm/arm/vgic/vgic-v2.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 52 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2;
vgic_cpu 59 virt/kvm/arm/vgic/vgic-v2.c for (lr = 0; lr < vgic_cpu->used_lrs; lr++) {
vgic_cpu 123 virt/kvm/arm/vgic/vgic-v2.c vgic_cpu->used_lrs = 0;
vgic_cpu 212 virt/kvm/arm/vgic/vgic-v2.c vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val;
vgic_cpu 217 virt/kvm/arm/vgic/vgic-v2.c vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0;
vgic_cpu 222 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
vgic_cpu 249 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
vgic_cpu 282 virt/kvm/arm/vgic/vgic-v2.c vcpu->arch.vgic_cpu.vgic_v2.vgic_vmcr = 0;
vgic_cpu 285 virt/kvm/arm/vgic/vgic-v2.c vcpu->arch.vgic_cpu.vgic_v2.vgic_hcr = GICH_HCR_EN;
vgic_cpu 429 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
vgic_cpu 430 virt/kvm/arm/vgic/vgic-v2.c u64 used_lrs = vcpu->arch.vgic_cpu.used_lrs;
vgic_cpu 451 virt/kvm/arm/vgic/vgic-v2.c u64 used_lrs = vcpu->arch.vgic_cpu.used_lrs;
vgic_cpu 464 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
vgic_cpu 466 virt/kvm/arm/vgic/vgic-v2.c u64 used_lrs = vcpu->arch.vgic_cpu.used_lrs;
vgic_cpu 483 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
vgic_cpu 493 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
vgic_cpu 500 virt/kvm/arm/vgic/vgic-v2.c struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
vgic_cpu 20 virt/kvm/arm/vgic/vgic-v3.c struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 33 virt/kvm/arm/vgic/vgic-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 34 virt/kvm/arm/vgic/vgic-v3.c struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3;
vgic_cpu 42 virt/kvm/arm/vgic/vgic-v3.c for (lr = 0; lr < vgic_cpu->used_lrs; lr++) {
vgic_cpu 114 virt/kvm/arm/vgic/vgic-v3.c vgic_cpu->used_lrs = 0;
vgic_cpu 197 virt/kvm/arm/vgic/vgic-v3.c vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val;
vgic_cpu 202 virt/kvm/arm/vgic/vgic-v3.c vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = 0;
vgic_cpu 207 virt/kvm/arm/vgic/vgic-v3.c struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 237 virt/kvm/arm/vgic/vgic-v3.c struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 273 virt/kvm/arm/vgic/vgic-v3.c struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 292 virt/kvm/arm/vgic/vgic-v3.c vcpu->arch.vgic_cpu.pendbaser = INITIAL_PENDBASER_VALUE;
vgic_cpu 297 virt/kvm/arm/vgic/vgic-v3.c vcpu->arch.vgic_cpu.num_id_bits = (kvm_vgic_global_state.ich_vtr_el2 &
vgic_cpu 300 virt/kvm/arm/vgic/vgic-v3.c vcpu->arch.vgic_cpu.num_pri_bits = ((kvm_vgic_global_state.ich_vtr_el2 &
vgic_cpu 329 virt/kvm/arm/vgic/vgic-v3.c pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);
vgic_cpu 381 virt/kvm/arm/vgic/vgic-v3.c pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);
vgic_cpu 507 virt/kvm/arm/vgic/vgic-v3.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 509 virt/kvm/arm/vgic/vgic-v3.c if (IS_VGIC_ADDR_UNDEF(vgic_cpu->rd_iodev.base_addr)) {
vgic_cpu 653 virt/kvm/arm/vgic/vgic-v3.c struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 671 virt/kvm/arm/vgic/vgic-v3.c struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
vgic_cpu 88 virt/kvm/arm/vgic/vgic-v4.c vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true;
vgic_cpu 126 virt/kvm/arm/vgic/vgic-v4.c dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
vgic_cpu 200 virt/kvm/arm/vgic/vgic-v4.c return its_schedule_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe, false);
vgic_cpu 205 virt/kvm/arm/vgic/vgic-v4.c int irq = vcpu->arch.vgic_cpu.vgic_v3.its_vpe.irq;
vgic_cpu 221 virt/kvm/arm/vgic/vgic-v4.c err = its_schedule_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe, true);
vgic_cpu 283 virt/kvm/arm/vgic/vgic-v4.c .vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe,
vgic_cpu 342 virt/kvm/arm/vgic/vgic-v4.c int irq = vcpu->arch.vgic_cpu.vgic_v3.its_vpe.irq;
vgic_cpu 351 virt/kvm/arm/vgic/vgic-v4.c int irq = vcpu->arch.vgic_cpu.vgic_v3.its_vpe.irq;
vgic_cpu 96 virt/kvm/arm/vgic/vgic.c return &vcpu->arch.vgic_cpu.private_irqs[intid];
vgic_cpu 153 virt/kvm/arm/vgic/vgic.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 157 virt/kvm/arm/vgic/vgic.c raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
vgic_cpu 159 virt/kvm/arm/vgic/vgic.c list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
vgic_cpu 169 virt/kvm/arm/vgic/vgic.c raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
vgic_cpu 299 virt/kvm/arm/vgic/vgic.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 301 virt/kvm/arm/vgic/vgic.c lockdep_assert_held(&vgic_cpu->ap_list_lock);
vgic_cpu 303 virt/kvm/arm/vgic/vgic.c list_sort(NULL, &vgic_cpu->ap_list_head, vgic_irq_cmp);
vgic_cpu 379 virt/kvm/arm/vgic/vgic.c raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
vgic_cpu 396 virt/kvm/arm/vgic/vgic.c raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock,
vgic_cpu 408 virt/kvm/arm/vgic/vgic.c list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head);
vgic_cpu 412 virt/kvm/arm/vgic/vgic.c raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
vgic_cpu 620 virt/kvm/arm/vgic/vgic.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 626 virt/kvm/arm/vgic/vgic.c raw_spin_lock(&vgic_cpu->ap_list_lock);
vgic_cpu 628 virt/kvm/arm/vgic/vgic.c list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
vgic_cpu 667 virt/kvm/arm/vgic/vgic.c raw_spin_unlock(&vgic_cpu->ap_list_lock);
vgic_cpu 681 virt/kvm/arm/vgic/vgic.c raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock);
vgic_cpu 682 virt/kvm/arm/vgic/vgic.c raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock,
vgic_cpu 696 virt/kvm/arm/vgic/vgic.c struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu;
vgic_cpu 705 virt/kvm/arm/vgic/vgic.c raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock);
vgic_cpu 706 virt/kvm/arm/vgic/vgic.c raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock);
vgic_cpu 716 virt/kvm/arm/vgic/vgic.c raw_spin_unlock(&vgic_cpu->ap_list_lock);
vgic_cpu 759 virt/kvm/arm/vgic/vgic.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 765 virt/kvm/arm/vgic/vgic.c lockdep_assert_held(&vgic_cpu->ap_list_lock);
vgic_cpu 767 virt/kvm/arm/vgic/vgic.c list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
vgic_cpu 784 virt/kvm/arm/vgic/vgic.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 790 virt/kvm/arm/vgic/vgic.c lockdep_assert_held(&vgic_cpu->ap_list_lock);
vgic_cpu 798 virt/kvm/arm/vgic/vgic.c list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
vgic_cpu 824 virt/kvm/arm/vgic/vgic.c &vgic_cpu->ap_list_head))
vgic_cpu 830 virt/kvm/arm/vgic/vgic.c vcpu->arch.vgic_cpu.used_lrs = count;
vgic_cpu 858 virt/kvm/arm/vgic/vgic.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 863 virt/kvm/arm/vgic/vgic.c if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head))
vgic_cpu 869 virt/kvm/arm/vgic/vgic.c if (vgic_cpu->used_lrs)
vgic_cpu 899 virt/kvm/arm/vgic/vgic.c if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head) &&
vgic_cpu 905 virt/kvm/arm/vgic/vgic.c if (!list_empty(&vcpu->arch.vgic_cpu.ap_list_head)) {
vgic_cpu 906 virt/kvm/arm/vgic/vgic.c raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock);
vgic_cpu 908 virt/kvm/arm/vgic/vgic.c raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock);
vgic_cpu 950 virt/kvm/arm/vgic/vgic.c struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
vgic_cpu 959 virt/kvm/arm/vgic/vgic.c if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last)
vgic_cpu 964 virt/kvm/arm/vgic/vgic.c raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
vgic_cpu 966 virt/kvm/arm/vgic/vgic.c list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
vgic_cpu 977 virt/kvm/arm/vgic/vgic.c raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
vgic_cpu 260 virt/kvm/arm/vgic/vgic.h struct vgic_cpu *cpu_if = &vcpu->arch.vgic_cpu;
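
The references above repeat one access pattern: the per-vCPU VGIC state is reached through vcpu->arch.vgic_cpu, and its ap_list is only walked while holding ap_list_lock. The fragment below is an illustrative sketch of that pattern, not part of the cross-reference output; walk_ap_list() is a hypothetical function name, while the field names (ap_list_lock, ap_list_head, ap_list) and helpers are taken from the virt/kvm/arm/vgic/vgic.c entries listed around lines 153-169.

#include <linux/kvm_host.h>
#include <kvm/arm_vgic.h>

/* Hypothetical helper mirroring the locking/iteration seen in vgic.c above. */
static void walk_ap_list(struct kvm_vcpu *vcpu)
{
	struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
	struct vgic_irq *irq, *tmp;
	unsigned long flags;

	/* The active/pending list is always traversed under ap_list_lock. */
	raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
	list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
		/* per-IRQ processing would go here */
	}
	raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
}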