Lines Matching refs:dist

276 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_irq_is_edge() local
279 irq_val = vgic_bitmap_get_irq_val(&dist->irq_cfg, vcpu->vcpu_id, irq); in vgic_irq_is_edge()
285 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_irq_is_enabled() local
287 return vgic_bitmap_get_irq_val(&dist->irq_enabled, vcpu->vcpu_id, irq); in vgic_irq_is_enabled()
292 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_irq_is_queued() local
294 return vgic_bitmap_get_irq_val(&dist->irq_queued, vcpu->vcpu_id, irq); in vgic_irq_is_queued()
299 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_irq_is_active() local
301 return vgic_bitmap_get_irq_val(&dist->irq_active, vcpu->vcpu_id, irq); in vgic_irq_is_active()
306 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_irq_set_queued() local
308 vgic_bitmap_set_irq_val(&dist->irq_queued, vcpu->vcpu_id, irq, 1); in vgic_irq_set_queued()
313 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_irq_clear_queued() local
315 vgic_bitmap_set_irq_val(&dist->irq_queued, vcpu->vcpu_id, irq, 0); in vgic_irq_clear_queued()
320 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_irq_set_active() local
322 vgic_bitmap_set_irq_val(&dist->irq_active, vcpu->vcpu_id, irq, 1); in vgic_irq_set_active()
327 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_irq_clear_active() local
329 vgic_bitmap_set_irq_val(&dist->irq_active, vcpu->vcpu_id, irq, 0); in vgic_irq_clear_active()
334 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_dist_irq_get_level() local
336 return vgic_bitmap_get_irq_val(&dist->irq_level, vcpu->vcpu_id, irq); in vgic_dist_irq_get_level()
341 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_dist_irq_set_level() local
343 vgic_bitmap_set_irq_val(&dist->irq_level, vcpu->vcpu_id, irq, 1); in vgic_dist_irq_set_level()
348 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_dist_irq_clear_level() local
350 vgic_bitmap_set_irq_val(&dist->irq_level, vcpu->vcpu_id, irq, 0); in vgic_dist_irq_clear_level()
355 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_dist_irq_soft_pend() local
357 return vgic_bitmap_get_irq_val(&dist->irq_soft_pend, vcpu->vcpu_id, irq); in vgic_dist_irq_soft_pend()
362 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_dist_irq_clear_soft_pend() local
364 vgic_bitmap_set_irq_val(&dist->irq_soft_pend, vcpu->vcpu_id, irq, 0); in vgic_dist_irq_clear_soft_pend()
368 clear_bit(vcpu->vcpu_id, dist->irq_pending_on_cpu); in vgic_dist_irq_clear_soft_pend()
374 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_dist_irq_is_pending() local
376 return vgic_bitmap_get_irq_val(&dist->irq_pending, vcpu->vcpu_id, irq); in vgic_dist_irq_is_pending()
381 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_dist_irq_set_pending() local
383 vgic_bitmap_set_irq_val(&dist->irq_pending, vcpu->vcpu_id, irq, 1); in vgic_dist_irq_set_pending()
388 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_dist_irq_clear_pending() local
390 vgic_bitmap_set_irq_val(&dist->irq_pending, vcpu->vcpu_id, irq, 0); in vgic_dist_irq_clear_pending()
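
The hits above are the per-IRQ state accessors: each helper resolves the distributor from the VCPU (struct vgic_dist *dist = &vcpu->kvm->arch.vgic) and then reads or writes a single bit in one of the per-IRQ bitmaps (irq_enabled, irq_queued, irq_active, irq_level, irq_soft_pend, irq_pending), keyed by (vcpu_id, irq). Below is a minimal userspace model of that get/set pattern; the toy_* names are hypothetical, and the real vgic_bitmap_{get,set}_irq_val() additionally splits private (per-CPU) and shared IRQ ranges, which this sketch ignores.

    #include <stdbool.h>
    #include <stdint.h>

    #define TOY_NR_CPUS 4
    #define TOY_NR_IRQS 256

    struct toy_vgic_bitmaps {
        uint64_t irq_enabled[TOY_NR_CPUS][TOY_NR_IRQS / 64];
        uint64_t irq_pending[TOY_NR_CPUS][TOY_NR_IRQS / 64];
    };

    static bool toy_bitmap_get(uint64_t (*map)[TOY_NR_IRQS / 64], int cpuid, int irq)
    {
        return (map[cpuid][irq / 64] >> (irq % 64)) & 1;
    }

    static void toy_bitmap_set(uint64_t (*map)[TOY_NR_IRQS / 64], int cpuid, int irq,
                               bool val)
    {
        if (val)
            map[cpuid][irq / 64] |= 1ULL << (irq % 64);
        else
            map[cpuid][irq / 64] &= ~(1ULL << (irq % 64));
    }

    /* toy equivalents of vgic_irq_is_enabled() / vgic_dist_irq_set_pending() */
    static bool toy_irq_is_enabled(struct toy_vgic_bitmaps *d, int cpuid, int irq)
    {
        return toy_bitmap_get(d->irq_enabled, cpuid, irq);
    }

    static void toy_irq_set_pending(struct toy_vgic_bitmaps *d, int cpuid, int irq)
    {
        toy_bitmap_set(d->irq_pending, cpuid, irq, true);
    }
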
514 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_handle_set_pending_reg() local
516 reg = vgic_bitmap_get_reg(&dist->irq_cfg, vcpu_id, offset); in vgic_handle_set_pending_reg()
520 reg = vgic_bitmap_get_reg(&dist->irq_pending, vcpu_id, offset); in vgic_handle_set_pending_reg()
526 reg = vgic_bitmap_get_reg(&dist->irq_soft_pend, in vgic_handle_set_pending_reg()
551 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_handle_clear_pending_reg() local
553 reg = vgic_bitmap_get_reg(&dist->irq_pending, vcpu_id, offset); in vgic_handle_clear_pending_reg()
558 level_active = vgic_bitmap_get_reg(&dist->irq_level, in vgic_handle_clear_pending_reg()
560 reg = vgic_bitmap_get_reg(&dist->irq_pending, vcpu_id, offset); in vgic_handle_clear_pending_reg()
570 reg = vgic_bitmap_get_reg(&dist->irq_soft_pend, in vgic_handle_clear_pending_reg()
585 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_handle_set_active_reg() local
587 reg = vgic_bitmap_get_reg(&dist->irq_active, vcpu_id, offset); in vgic_handle_set_active_reg()
604 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_handle_clear_active_reg() local
606 reg = vgic_bitmap_get_reg(&dist->irq_active, vcpu_id, offset); in vgic_handle_clear_active_reg()
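
vgic_handle_set_pending_reg() and vgic_handle_clear_pending_reg() above implement the GICD_ISPENDRn/ICPENDRn semantics: a set-pending write is latched into irq_soft_pend for level-triggered IRQs (selected via irq_cfg), and a clear-pending write must not clear bits whose input line (irq_level) is still asserted. A condensed, hedged model of that interaction, using one 32-bit register and hypothetical toy_* names:

    #include <stdint.h>

    struct toy_pend_state {
        uint32_t is_edge;    /* config: 1 = edge-triggered, 0 = level-triggered */
        uint32_t level;      /* current input line level (level IRQs only) */
        uint32_t soft_pend;  /* pending state latched by MMIO set-pending writes */
        uint32_t pending;    /* effective pending state */
    };

    static void toy_set_pending_write(struct toy_pend_state *s, uint32_t val)
    {
        s->soft_pend |= val & ~s->is_edge;   /* latch only for level IRQs */
        s->pending   |= val;
    }

    static void toy_clear_pending_write(struct toy_pend_state *s, uint32_t val)
    {
        s->soft_pend &= ~val;
        /* a level IRQ stays pending while its input line is still high */
        s->pending &= ~val | (s->level & ~s->is_edge);
    }
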
748 static bool vgic_validate_access(const struct vgic_dist *dist, in vgic_validate_access() argument
758 if (irq >= dist->nr_irqs) in vgic_validate_access()
821 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_handle_mmio_access() local
843 spin_lock(&dist->lock); in vgic_handle_mmio_access()
845 if (vgic_validate_access(dist, range, offset)) { in vgic_handle_mmio_access()
852 spin_unlock(&dist->lock); in vgic_handle_mmio_access()
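
vgic_handle_mmio_access() brackets the register handlers with dist->lock and rejects offsets whose IRQ number is at or beyond dist->nr_irqs (vgic_validate_access()). The shape of that lock/validate/dispatch pattern, modelled with pthreads and hypothetical toy_* names:

    #include <pthread.h>
    #include <stdbool.h>

    struct toy_mmio_dist {
        pthread_mutex_t lock;
        int nr_irqs;
    };

    static bool toy_validate_access(const struct toy_mmio_dist *d, int irq)
    {
        return irq < d->nr_irqs;            /* reject out-of-range registers */
    }

    static bool toy_handle_mmio(struct toy_mmio_dist *d, int irq)
    {
        bool handled = false;

        pthread_mutex_lock(&d->lock);
        if (toy_validate_access(d, irq)) {
            /* the register-specific handler would run here */
            handled = true;
        }
        pthread_mutex_unlock(&d->lock);
        return handled;
    }
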
933 static int vgic_nr_shared_irqs(struct vgic_dist *dist) in vgic_nr_shared_irqs() argument
935 return dist->nr_irqs - VGIC_NR_PRIVATE_IRQS; in vgic_nr_shared_irqs()
940 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in compute_active_for_cpu() local
943 int nr_shared = vgic_nr_shared_irqs(dist); in compute_active_for_cpu()
950 active = vgic_bitmap_get_cpu_map(&dist->irq_active, vcpu_id); in compute_active_for_cpu()
951 enabled = vgic_bitmap_get_cpu_map(&dist->irq_enabled, vcpu_id); in compute_active_for_cpu()
954 active = vgic_bitmap_get_shared_map(&dist->irq_active); in compute_active_for_cpu()
955 enabled = vgic_bitmap_get_shared_map(&dist->irq_enabled); in compute_active_for_cpu()
958 vgic_bitmap_get_shared_map(&dist->irq_spi_target[vcpu_id]), in compute_active_for_cpu()
970 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in compute_pending_for_cpu() local
973 int nr_shared = vgic_nr_shared_irqs(dist); in compute_pending_for_cpu()
980 if (!dist->enabled) { in compute_pending_for_cpu()
986 pending = vgic_bitmap_get_cpu_map(&dist->irq_pending, vcpu_id); in compute_pending_for_cpu()
987 enabled = vgic_bitmap_get_cpu_map(&dist->irq_enabled, vcpu_id); in compute_pending_for_cpu()
990 pending = vgic_bitmap_get_shared_map(&dist->irq_pending); in compute_pending_for_cpu()
991 enabled = vgic_bitmap_get_shared_map(&dist->irq_enabled); in compute_pending_for_cpu()
994 vgic_bitmap_get_shared_map(&dist->irq_spi_target[vcpu_id]), in compute_pending_for_cpu()
1000 pending_shared < vgic_nr_shared_irqs(dist)); in compute_pending_for_cpu()
1009 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_update_state() local
1015 set_bit(c, dist->irq_pending_on_cpu); in vgic_update_state()
1018 set_bit(c, dist->irq_active_on_cpu); in vgic_update_state()
1020 clear_bit(c, dist->irq_active_on_cpu); in vgic_update_state()
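
compute_pending_for_cpu() and compute_active_for_cpu() above follow the same shape: private IRQs (SGIs/PPIs) come straight from the per-CPU pending/enabled maps, shared IRQs (SPIs) are additionally masked with the CPU's irq_spi_target bitmap, and vgic_update_state() folds the result into the per-VM irq_pending_on_cpu / irq_active_on_cpu summaries. A simplified single-word model, with hypothetical toy_* names and the distributor-enable check included as in compute_pending_for_cpu():

    #include <stdbool.h>
    #include <stdint.h>

    struct toy_cpu_state {
        uint64_t pending_private, enabled_private;   /* IRQs 0..31, per CPU */
        uint64_t spi_target;                         /* SPIs routed to this CPU */
    };

    struct toy_vm_state {
        bool dist_enabled;                           /* distributor enable bit */
        uint64_t pending_shared, enabled_shared;     /* SPIs, shared by all CPUs */
        uint64_t pending_on_cpu;                     /* summary: one bit per VCPU */
    };

    static bool toy_compute_pending_for_cpu(struct toy_vm_state *vm,
                                            struct toy_cpu_state *cpu, int vcpu_id)
    {
        uint64_t pend_priv, pend_shared;

        if (!vm->dist_enabled)           /* distributor off: nothing is delivered */
            return false;

        pend_priv   = cpu->pending_private & cpu->enabled_private;
        pend_shared = vm->pending_shared & vm->enabled_shared & cpu->spi_target;

        if (pend_priv || pend_shared) {
            vm->pending_on_cpu |= 1ULL << vcpu_id;   /* what vgic_update_state() does */
            return true;
        }
        return false;
    }
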
1101 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in dist_active_irq() local
1103 return test_bit(vcpu->vcpu_id, dist->irq_active_on_cpu); in dist_active_irq()
1188 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_queue_irq() local
1197 BUG_ON(irq >= dist->nr_irqs); in vgic_queue_irq()
1252 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in __kvm_vgic_flush_hwstate() local
1256 int nr_shared = vgic_nr_shared_irqs(dist); in __kvm_vgic_flush_hwstate()
1307 clear_bit(vcpu_id, dist->irq_pending_on_cpu); in __kvm_vgic_flush_hwstate()
1362 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_process_maintenance() local
1392 spin_lock(&dist->lock); in vgic_process_maintenance()
1394 spin_unlock(&dist->lock); in vgic_process_maintenance()
1419 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in vgic_sync_hwirq() local
1428 spin_lock(&dist->lock); in vgic_sync_hwirq()
1430 spin_unlock(&dist->lock); in vgic_sync_hwirq()
1437 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in __kvm_vgic_sync_hwstate() local
1450 BUG_ON(vlr.irq >= dist->nr_irqs); in __kvm_vgic_sync_hwstate()
1458 set_bit(vcpu->vcpu_id, dist->irq_pending_on_cpu); in __kvm_vgic_sync_hwstate()
1463 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in kvm_vgic_flush_hwstate() local
1468 spin_lock(&dist->lock); in kvm_vgic_flush_hwstate()
1470 spin_unlock(&dist->lock); in kvm_vgic_flush_hwstate()
1483 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in kvm_vgic_vcpu_pending_irq() local
1488 return test_bit(vcpu->vcpu_id, dist->irq_pending_on_cpu); in kvm_vgic_vcpu_pending_irq()
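
kvm_vgic_vcpu_pending_irq() shows why the summary bitmaps exist: once the compute_*_for_cpu() results are cached in irq_pending_on_cpu, asking "does this VCPU have anything deliverable?" is a single bit test rather than a scan of every IRQ bitmap. Trivial illustration with a hypothetical name:

    #include <stdbool.h>
    #include <stdint.h>

    static bool toy_vcpu_pending_irq(const uint64_t *pending_on_cpu, int vcpu_id)
    {
        return (pending_on_cpu[vcpu_id / 64] >> (vcpu_id % 64)) & 1;
    }
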
1528 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_update_irq_pending() local
1539 spin_lock(&dist->lock); in vgic_update_irq_pending()
1551 cpuid = dist->irq_spi_cpu[irq_num - VGIC_NR_PRIVATE_IRQS]; in vgic_update_irq_pending()
1573 clear_bit(cpuid, dist->irq_pending_on_cpu); in vgic_update_irq_pending()
1599 set_bit(cpuid, dist->irq_pending_on_cpu); in vgic_update_irq_pending()
1603 spin_unlock(&dist->lock); in vgic_update_irq_pending()
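
vgic_update_irq_pending() is the injection path: under dist->lock it resolves the target CPU (the injecting CPU for private IRQs, dist->irq_spi_cpu[] for SPIs), updates the level/pending state, and sets the target's bit in irq_pending_on_cpu so the caller can kick that VCPU. A heavily simplified sketch with hypothetical toy_* names; SGI source tracking, the queued check, and the clear-on-lowering bookkeeping are omitted.

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define TOY_NR_PRIVATE_IRQS 32
    #define TOY_NR_IRQS         256

    struct toy_inject_dist {
        pthread_mutex_t lock;
        uint8_t  irq_spi_cpu[TOY_NR_IRQS - TOY_NR_PRIVATE_IRQS]; /* target per SPI */
        uint64_t pending_on_cpu;                                 /* one bit per VCPU */
    };

    /* returns the VCPU to kick, or -1 when no kick is needed */
    static int toy_update_irq_pending(struct toy_inject_dist *d, int injecting_cpu,
                                      int irq_num, bool level)
    {
        int target;

        pthread_mutex_lock(&d->lock);

        if (irq_num < TOY_NR_PRIVATE_IRQS)
            target = injecting_cpu;                                    /* SGI/PPI */
        else
            target = d->irq_spi_cpu[irq_num - TOY_NR_PRIVATE_IRQS];    /* SPI */

        if (level)
            d->pending_on_cpu |= 1ULL << target;
        else
            target = -1;    /* lowering the line never needs a kick; the real code
                               also re-evaluates whether the summary bit can be
                               cleared, which this sketch omits */

        pthread_mutex_unlock(&d->lock);
        return target;
    }
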
1729 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in kvm_vgic_map_phys_irq() local
1754 spin_lock(&dist->irq_phys_map_lock); in kvm_vgic_map_phys_irq()
1776 spin_unlock(&dist->irq_phys_map_lock); in kvm_vgic_map_phys_irq()
1823 struct vgic_dist *dist = &vcpu->kvm->arch.vgic; in kvm_vgic_unmap_phys_irq() local
1832 spin_lock(&dist->irq_phys_map_lock); in kvm_vgic_unmap_phys_irq()
1842 spin_unlock(&dist->irq_phys_map_lock); in kvm_vgic_unmap_phys_irq()
1849 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_destroy_irq_phys_map() local
1852 spin_lock(&dist->irq_phys_map_lock); in vgic_destroy_irq_phys_map()
1859 spin_unlock(&dist->irq_phys_map_lock); in vgic_destroy_irq_phys_map()
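
kvm_vgic_map_phys_irq() / kvm_vgic_unmap_phys_irq() keep the virtual-to-physical IRQ mappings on a list guarded by its own spinlock, dist->irq_phys_map_lock, separate from the main dist->lock. A minimal userspace model of locked map/unmap on such a list; the toy_* names are hypothetical:

    #include <pthread.h>
    #include <stdlib.h>

    struct toy_irq_phys_map {
        int virt_irq, phys_irq;
        struct toy_irq_phys_map *next;
    };

    struct toy_map_list {
        pthread_mutex_t lock;
        struct toy_irq_phys_map *head;
    };

    static int toy_map_phys_irq(struct toy_map_list *l, int virt_irq, int phys_irq)
    {
        struct toy_irq_phys_map *e = malloc(sizeof(*e));

        if (!e)
            return -1;
        e->virt_irq = virt_irq;
        e->phys_irq = phys_irq;

        pthread_mutex_lock(&l->lock);
        e->next = l->head;               /* insert at head under the map lock */
        l->head = e;
        pthread_mutex_unlock(&l->lock);
        return 0;
    }

    static void toy_unmap_phys_irq(struct toy_map_list *l, int virt_irq)
    {
        struct toy_irq_phys_map **pp, *e;

        pthread_mutex_lock(&l->lock);
        for (pp = &l->head; (e = *pp) != NULL; pp = &e->next) {
            if (e->virt_irq == virt_irq) {
                *pp = e->next;           /* unlink and free the matching entry */
                free(e);
                break;
            }
        }
        pthread_mutex_unlock(&l->lock);
    }
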
1925 struct vgic_dist *dist = &kvm->arch.vgic; in kvm_vgic_destroy() local
1932 vgic_free_bitmap(&dist->irq_enabled); in kvm_vgic_destroy()
1933 vgic_free_bitmap(&dist->irq_level); in kvm_vgic_destroy()
1934 vgic_free_bitmap(&dist->irq_pending); in kvm_vgic_destroy()
1935 vgic_free_bitmap(&dist->irq_soft_pend); in kvm_vgic_destroy()
1936 vgic_free_bitmap(&dist->irq_queued); in kvm_vgic_destroy()
1937 vgic_free_bitmap(&dist->irq_cfg); in kvm_vgic_destroy()
1938 vgic_free_bytemap(&dist->irq_priority); in kvm_vgic_destroy()
1939 if (dist->irq_spi_target) { in kvm_vgic_destroy()
1940 for (i = 0; i < dist->nr_cpus; i++) in kvm_vgic_destroy()
1941 vgic_free_bitmap(&dist->irq_spi_target[i]); in kvm_vgic_destroy()
1943 kfree(dist->irq_sgi_sources); in kvm_vgic_destroy()
1944 kfree(dist->irq_spi_cpu); in kvm_vgic_destroy()
1945 kfree(dist->irq_spi_mpidr); in kvm_vgic_destroy()
1946 kfree(dist->irq_spi_target); in kvm_vgic_destroy()
1947 kfree(dist->irq_pending_on_cpu); in kvm_vgic_destroy()
1948 kfree(dist->irq_active_on_cpu); in kvm_vgic_destroy()
1949 vgic_destroy_irq_phys_map(kvm, &dist->irq_phys_map_list); in kvm_vgic_destroy()
1950 dist->irq_sgi_sources = NULL; in kvm_vgic_destroy()
1951 dist->irq_spi_cpu = NULL; in kvm_vgic_destroy()
1952 dist->irq_spi_target = NULL; in kvm_vgic_destroy()
1953 dist->irq_pending_on_cpu = NULL; in kvm_vgic_destroy()
1954 dist->irq_active_on_cpu = NULL; in kvm_vgic_destroy()
1955 dist->nr_cpus = 0; in kvm_vgic_destroy()
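
kvm_vgic_destroy() frees every bitmap and array and then resets the pointers (and nr_cpus) to their initial state, so the teardown is also safe to reach from a partially failed init. The free-then-NULL idiom in isolation, with hypothetical toy_* fields (kfree(), like free(), ignores NULL):

    #include <stdint.h>
    #include <stdlib.h>

    struct toy_alloc_state {
        uint8_t  *irq_spi_cpu;
        uint64_t *irq_pending_on_cpu;
        int       nr_cpus;
    };

    static void toy_state_destroy(struct toy_alloc_state *s)
    {
        free(s->irq_spi_cpu);            /* free(NULL) is a no-op, so this is  */
        free(s->irq_pending_on_cpu);     /* safe even after a partial init     */
        s->irq_spi_cpu = NULL;
        s->irq_pending_on_cpu = NULL;
        s->nr_cpus = 0;
    }
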
1964 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_init() local
1972 nr_cpus = dist->nr_cpus = atomic_read(&kvm->online_vcpus); in vgic_init()
1980 if (!dist->nr_irqs) in vgic_init()
1981 dist->nr_irqs = VGIC_NR_IRQS_LEGACY; in vgic_init()
1983 nr_irqs = dist->nr_irqs; in vgic_init()
1985 ret = vgic_init_bitmap(&dist->irq_enabled, nr_cpus, nr_irqs); in vgic_init()
1986 ret |= vgic_init_bitmap(&dist->irq_level, nr_cpus, nr_irqs); in vgic_init()
1987 ret |= vgic_init_bitmap(&dist->irq_pending, nr_cpus, nr_irqs); in vgic_init()
1988 ret |= vgic_init_bitmap(&dist->irq_soft_pend, nr_cpus, nr_irqs); in vgic_init()
1989 ret |= vgic_init_bitmap(&dist->irq_queued, nr_cpus, nr_irqs); in vgic_init()
1990 ret |= vgic_init_bitmap(&dist->irq_active, nr_cpus, nr_irqs); in vgic_init()
1991 ret |= vgic_init_bitmap(&dist->irq_cfg, nr_cpus, nr_irqs); in vgic_init()
1992 ret |= vgic_init_bytemap(&dist->irq_priority, nr_cpus, nr_irqs); in vgic_init()
1997 dist->irq_sgi_sources = kzalloc(nr_cpus * VGIC_NR_SGIS, GFP_KERNEL); in vgic_init()
1998 dist->irq_spi_cpu = kzalloc(nr_irqs - VGIC_NR_PRIVATE_IRQS, GFP_KERNEL); in vgic_init()
1999 dist->irq_spi_target = kzalloc(sizeof(*dist->irq_spi_target) * nr_cpus, in vgic_init()
2001 dist->irq_pending_on_cpu = kzalloc(BITS_TO_LONGS(nr_cpus) * sizeof(long), in vgic_init()
2003 dist->irq_active_on_cpu = kzalloc(BITS_TO_LONGS(nr_cpus) * sizeof(long), in vgic_init()
2005 if (!dist->irq_sgi_sources || in vgic_init()
2006 !dist->irq_spi_cpu || in vgic_init()
2007 !dist->irq_spi_target || in vgic_init()
2008 !dist->irq_pending_on_cpu || in vgic_init()
2009 !dist->irq_active_on_cpu) { in vgic_init()
2015 ret |= vgic_init_bitmap(&dist->irq_spi_target[i], in vgic_init()
2039 vgic_bitmap_set_irq_val(&dist->irq_enabled, in vgic_init()
2041 vgic_bitmap_set_irq_val(&dist->irq_cfg, in vgic_init()
2046 vgic_bitmap_set_irq_val(&dist->irq_cfg, in vgic_init()
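
vgic_init() sizes everything from the online VCPU count and dist->nr_irqs (defaulting to VGIC_NR_IRQS_LEGACY), OR-accumulates the bitmap allocation results, and falls through to the teardown path if any allocation failed. A condensed model of that sizing/allocation flow; the toy_* names are hypothetical, only two of the arrays are shown, and the cleanup is inlined:

    #include <stdint.h>
    #include <stdlib.h>

    #define TOY_NR_IRQS_LEGACY   256
    #define TOY_NR_PRIVATE_IRQS  32

    struct toy_init_state {
        int nr_cpus, nr_irqs;
        uint8_t  *irq_spi_cpu;          /* one target-CPU byte per SPI */
        uint64_t *irq_pending_on_cpu;   /* one summary bit per VCPU */
    };

    static int toy_state_init(struct toy_init_state *s, int nr_cpus)
    {
        s->nr_cpus = nr_cpus;
        if (!s->nr_irqs)
            s->nr_irqs = TOY_NR_IRQS_LEGACY;      /* legacy default */

        s->irq_spi_cpu = calloc(s->nr_irqs - TOY_NR_PRIVATE_IRQS, sizeof(uint8_t));
        s->irq_pending_on_cpu = calloc((nr_cpus + 63) / 64, sizeof(uint64_t));

        if (!s->irq_spi_cpu || !s->irq_pending_on_cpu) {
            free(s->irq_spi_cpu);                 /* inline cleanup; the kernel  */
            free(s->irq_pending_on_cpu);          /* goes to its destroy path    */
            s->irq_spi_cpu = NULL;
            s->irq_pending_on_cpu = NULL;
            return -1;
        }
        return 0;
    }
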
2160 phys_addr_t dist = kvm->arch.vgic.vgic_dist_base; in vgic_ioaddr_overlap() local
2163 if (IS_VGIC_ADDR_UNDEF(dist) || IS_VGIC_ADDR_UNDEF(cpu)) in vgic_ioaddr_overlap()
2165 if ((dist <= cpu && dist + KVM_VGIC_V2_DIST_SIZE > cpu) || in vgic_ioaddr_overlap()
2166 (cpu <= dist && cpu + KVM_VGIC_V2_CPU_SIZE > dist)) in vgic_ioaddr_overlap()
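
vgic_ioaddr_overlap() rejects distributor and CPU-interface windows that intersect, using the usual test for two half-open ranges. In generic form, with hypothetical names and sizes:

    #include <stdbool.h>
    #include <stdint.h>

    static bool toy_ranges_overlap(uint64_t a, uint64_t a_size,
                                   uint64_t b, uint64_t b_size)
    {
        return (a <= b && a + a_size > b) ||
               (b <= a && b + b_size > a);
    }
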