vm 51 arch/arm/mm/ioremap.c struct vm_struct *vm;
vm 54 arch/arm/mm/ioremap.c vm = &svm->vm;
vm 55 arch/arm/mm/ioremap.c if (!(vm->flags & VM_ARM_STATIC_MAPPING))
vm 57 arch/arm/mm/ioremap.c if ((vm->flags & VM_ARM_MTYPE_MASK) != VM_ARM_MTYPE(mtype))
vm 60 arch/arm/mm/ioremap.c if (vm->phys_addr > paddr ||
vm 61 arch/arm/mm/ioremap.c paddr + size - 1 > vm->phys_addr + vm->size - 1)
vm 73 arch/arm/mm/ioremap.c struct vm_struct *vm;
vm 76 arch/arm/mm/ioremap.c vm = &svm->vm;
vm 79 arch/arm/mm/ioremap.c if (vm->addr > vaddr)
vm 82 arch/arm/mm/ioremap.c if (vm->addr <= vaddr && vm->addr + vm->size > vaddr)
vm 92 arch/arm/mm/ioremap.c struct vm_struct *vm;
vm 95 arch/arm/mm/ioremap.c vm = &svm->vm;
vm 96 arch/arm/mm/ioremap.c vm_area_add_early(vm);
vm 97 arch/arm/mm/ioremap.c vaddr = vm->addr;
vm 100 arch/arm/mm/ioremap.c vm = &curr_svm->vm;
vm 102 arch/arm/mm/ioremap.c if (vm->addr > vaddr)
vm 294 arch/arm/mm/ioremap.c addr = (unsigned long)svm->vm.addr;
vm 295 arch/arm/mm/ioremap.c addr += paddr - svm->vm.phys_addr;
vm 442 arch/arm/mm/ioremap.c struct vm_struct *vm;
vm 444 arch/arm/mm/ioremap.c vm = find_vm_area(addr);
vm 451 arch/arm/mm/ioremap.c if (vm && (vm->flags & VM_ARM_SECTION_MAPPING))
vm 452 arch/arm/mm/ioremap.c unmap_area_sections((unsigned long)vm->addr, vm->size);
vm 75 arch/arm/mm/mm.h struct vm_struct vm;
vm 993 arch/arm/mm/mmu.c struct vm_struct *vm;
vm 1007 arch/arm/mm/mmu.c vm = &svm->vm;
vm 1008 arch/arm/mm/mmu.c vm->addr = (void *)(md->virtual & PAGE_MASK);
vm 1009 arch/arm/mm/mmu.c vm->size = PAGE_ALIGN(md->length + (md->virtual & ~PAGE_MASK));
vm 1010 arch/arm/mm/mmu.c vm->phys_addr = __pfn_to_phys(md->pfn);
vm 1011 arch/arm/mm/mmu.c vm->flags = VM_IOREMAP | VM_ARM_STATIC_MAPPING;
vm 1012 arch/arm/mm/mmu.c vm->flags |= VM_ARM_MTYPE(md->type);
vm 1013 arch/arm/mm/mmu.c vm->caller = iotable_init;
vm 1021 arch/arm/mm/mmu.c struct vm_struct *vm;
vm 1029 arch/arm/mm/mmu.c vm = &svm->vm;
vm 1030 arch/arm/mm/mmu.c vm->addr = (void *)addr;
vm 1031 arch/arm/mm/mmu.c vm->size = size;
vm 1032 arch/arm/mm/mmu.c vm->flags = VM_IOREMAP | VM_ARM_EMPTY_MAPPING;
vm 1033 arch/arm/mm/mmu.c vm->caller = caller;
vm 1060 arch/arm/mm/mmu.c struct vm_struct *vm;
vm 1065 arch/arm/mm/mmu.c vm = &svm->vm;
vm 1066 arch/arm/mm/mmu.c addr = (unsigned long)vm->addr;
vm 1086 arch/arm/mm/mmu.c addr += vm->size;
vm 53 arch/mips/math-emu/ieee754int.h #define EXPLODESP(v, vc, vs, ve, vm) \
vm 57 arch/mips/math-emu/ieee754int.h vm = SPMANT(v); \
vm 59 arch/mips/math-emu/ieee754int.h if (vm == 0) \
vm 61 arch/mips/math-emu/ieee754int.h else if (ieee754_csr.nan2008 ^ !(vm & SP_MBIT(SP_FBITS - 1))) \
vm 66 arch/mips/math-emu/ieee754int.h if (vm) { \
vm 73 arch/mips/math-emu/ieee754int.h vm |= SP_HIDDEN_BIT; \
vm 91 arch/mips/math-emu/ieee754int.h #define EXPLODEDP(v, vc, vs, ve, vm) \
vm 93 arch/mips/math-emu/ieee754int.h vm = DPMANT(v); \
vm 97 arch/mips/math-emu/ieee754int.h if (vm == 0) \
vm 99 arch/mips/math-emu/ieee754int.h else if (ieee754_csr.nan2008 ^ !(vm & DP_MBIT(DP_FBITS - 1))) \
vm 104 arch/mips/math-emu/ieee754int.h if (vm) { \
vm 111 arch/mips/math-emu/ieee754int.h vm |= DP_HIDDEN_BIT; \
vm 119 arch/mips/math-emu/ieee754int.h #define FLUSHDP(v, vc, vs, ve, vm) \
vm 125 arch/mips/math-emu/ieee754int.h vm = 0; \
vm 130 arch/mips/math-emu/ieee754int.h #define FLUSHSP(v, vc, vs, ve, vm) \
vm 136 arch/mips/math-emu/ieee754int.h vm = 0; \
vm 136 arch/s390/include/asm/sysinfo.h } vm[8];
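The EXPLODESP()/EXPLODEDP() entries from arch/mips/math-emu/ieee754int.h above unpack an IEEE-754 value into sign, exponent and mantissa, restoring the implicit hidden bit for normal numbers. As a hedged illustration of the same decomposition, here is a self-contained user-space sketch; the names are local to this sketch, not taken from ieee754int.h, and only the standard single-precision bit layout (1 sign, 8 exponent, 23 mantissa bits) is assumed:

/* Illustrative analog of the EXPLODESP() decomposition above. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void explode_sp(float f, int *vs, int *ve, uint32_t *vm)
{
	uint32_t v;

	memcpy(&v, &f, sizeof(v));		/* reinterpret the bits */
	*vs = v >> 31;				/* sign bit */
	*ve = (int)((v >> 23) & 0xff) - 127;	/* remove the exponent bias */
	*vm = v & 0x7fffff;			/* 23-bit mantissa field */
	if (((v >> 23) & 0xff) != 0)		/* normal number: restore the */
		*vm |= 1u << 23;		/* hidden integer bit */
}

int main(void)
{
	int vs, ve;
	uint32_t vm;

	explode_sp(-1.5f, &vs, &ve, &vm);
	printf("sign=%d exp=%d mant=0x%06x\n", vs, ve, vm);
	return 0;
}

For -1.5f this prints sign=1 exp=0 mant=0xc00000, i.e. -1.1b * 2^0, which is the shape the kernel macros hand to the soft-float emulation code.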
vm 82 arch/s390/kernel/early.c if (!memcmp(vmms->vm[0].cpi, "\xd2\xe5\xd4", 3))
vm 84 arch/s390/kernel/early.c else if (!memcmp(vmms->vm[0].cpi, "\xa9\x61\xe5\xd4", 4))
vm 109 arch/s390/kernel/early.c struct sysinfo_3_2_2 *vm = (struct sysinfo_3_2_2 *)&sysinfo_page;
vm 122 arch/s390/kernel/early.c if (stsi(vm, 3, 2, 2) == 0 && vm->count) {
vm 123 arch/s390/kernel/early.c EBCASC(vm->vm[0].cpi, sizeof(vm->vm[0].cpi));
vm 124 arch/s390/kernel/early.c sprintf(hvstr, "%-16.16s", vm->vm[0].cpi);
vm 44 arch/s390/kernel/lgr.c } vm[VM_LEVEL_MAX];
vm 106 arch/s390/kernel/lgr.c cpascii(lgr_info->vm[i].name, si->vm[i].name,
vm 107 arch/s390/kernel/lgr.c sizeof(si->vm[i].name));
vm 108 arch/s390/kernel/lgr.c cpascii(lgr_info->vm[i].cpi, si->vm[i].cpi,
vm 109 arch/s390/kernel/lgr.c sizeof(si->vm[i].cpi));
vm 1016 arch/s390/kernel/setup.c add_device_randomness(&vmms->vm, sizeof(vmms->vm[0]) * vmms->count);
vm 242 arch/s390/kernel/sysinfo.c if (!convert_ext_name(info->vm[lvl].evmne, info->ext_names[lvl], len))
vm 250 arch/s390/kernel/sysinfo.c if (uuid_is_null(&info->vm[i].uuid))
vm 252 arch/s390/kernel/sysinfo.c seq_printf(m, "VM%02d UUID: %pUb\n", i, &info->vm[i].uuid);
vm 262 arch/s390/kernel/sysinfo.c EBCASC(info->vm[i].name, sizeof(info->vm[i].name));
vm 263 arch/s390/kernel/sysinfo.c EBCASC(info->vm[i].cpi, sizeof(info->vm[i].cpi));
vm 265 arch/s390/kernel/sysinfo.c seq_printf(m, "VM%02d Name: %-8.8s\n", i, info->vm[i].name);
vm 266 arch/s390/kernel/sysinfo.c seq_printf(m, "VM%02d Control Program: %-16.16s\n", i, info->vm[i].cpi);
vm 267 arch/s390/kernel/sysinfo.c seq_printf(m, "VM%02d Adjustment: %d\n", i, info->vm[i].caf);
vm 268 arch/s390/kernel/sysinfo.c seq_printf(m, "VM%02d CPUs Total: %d\n", i, info->vm[i].cpus_total);
vm 269 arch/s390/kernel/sysinfo.c seq_printf(m, "VM%02d CPUs Configured: %d\n", i, info->vm[i].cpus_configured);
vm 270 arch/s390/kernel/sysinfo.c seq_printf(m, "VM%02d CPUs Standby: %d\n", i, info->vm[i].cpus_standby);
vm 271 arch/s390/kernel/sysinfo.c seq_printf(m, "VM%02d CPUs Reserved: %d\n", i, info->vm[i].cpus_reserved);
vm 819 arch/s390/kvm/priv.c memcpy(&mem->vm[n], &mem->vm[n - 1], sizeof(mem->vm[0]));
vm 821 arch/s390/kvm/priv.c memset(&mem->vm[0], 0, sizeof(mem->vm[0]));
vm 822 arch/s390/kvm/priv.c mem->vm[0].cpus_total = cpus;
vm 823 arch/s390/kvm/priv.c mem->vm[0].cpus_configured = cpus;
vm 824 arch/s390/kvm/priv.c mem->vm[0].cpus_standby = 0;
vm 825 arch/s390/kvm/priv.c mem->vm[0].cpus_reserved = 0;
vm 826 arch/s390/kvm/priv.c mem->vm[0].caf = 1000;
vm 827 arch/s390/kvm/priv.c memcpy(mem->vm[0].name, "KVMguest", 8);
vm 828 arch/s390/kvm/priv.c ASCEBC(mem->vm[0].name, 8);
vm 829 arch/s390/kvm/priv.c memcpy(mem->vm[0].cpi, "KVM/Linux       ", 16);
vm 830 arch/s390/kvm/priv.c ASCEBC(mem->vm[0].cpi, 16);
vm 234 arch/unicore32/mm/ioremap.c struct vm_struct *vm;
vm 243 arch/unicore32/mm/ioremap.c vm = find_vm_area(addr);
vm 244 arch/unicore32/mm/ioremap.c if (vm && (vm->flags & VM_IOREMAP) &&
vm 245 arch/unicore32/mm/ioremap.c (vm->flags & VM_UNICORE_SECTION_MAPPING))
vm 246 arch/unicore32/mm/ioremap.c unmap_area_sections((unsigned long)vm->addr, vm->size);
vm 485 arch/x86/events/perf_event.h #define EVENT_EXTRA_REG(e, ms, m, vm, i) { \
vm 489 arch/x86/events/perf_event.h .valid_mask = (vm), \
vm 494 arch/x86/events/perf_event.h #define INTEL_EVENT_EXTRA_REG(event, msr, vm, idx) \
vm 495 arch/x86/events/perf_event.h EVENT_EXTRA_REG(event, msr, ARCH_PERFMON_EVENTSEL_EVENT, vm, idx)
vm 497 arch/x86/events/perf_event.h #define INTEL_UEVENT_EXTRA_REG(event, msr, vm, idx) \
vm 499 arch/x86/events/perf_event.h EVENT_EXTRA_REG(event, msr, ARCH_PERFMON_EVENTSEL_UMASK, vm, idx)
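The EVENT_EXTRA_REG() entries above wrap a designated initializer in a macro so that static tables of extra registers can be written one line per entry; .valid_mask = (vm) is the only field visible in this listing. A minimal self-contained analog of the pattern follows; the struct, its fields beyond valid_mask, and the numeric values are invented for the sketch:

/* User-space analog of the EVENT_EXTRA_REG() table-initializer pattern. */
#include <stdio.h>
#include <stdint.h>

struct extra_reg_demo {
	uint64_t event;
	uint64_t msr;
	uint64_t valid_mask;
	int	 idx;
};

#define DEMO_EXTRA_REG(e, ms, vm, i) {	\
	.event	    = (e),		\
	.msr	    = (ms),		\
	.valid_mask = (vm),		\
	.idx	    = (i),		\
}

static const struct extra_reg_demo demo_regs[] = {
	DEMO_EXTRA_REG(0x01b7, 0x1a6, 0xffffULL, 0),	/* demo values only */
	DEMO_EXTRA_REG(0x01bb, 0x1a7, 0xffffULL, 1),
};

int main(void)
{
	for (unsigned int i = 0; i < sizeof(demo_regs) / sizeof(demo_regs[0]); i++)
		printf("event=0x%llx mask=0x%llx\n",
		       (unsigned long long)demo_regs[i].event,
		       (unsigned long long)demo_regs[i].valid_mask);
	return 0;
}

The point of the macro is that adding a table row cannot forget a field: every entry goes through the same initializer, and unnamed fields default to zero.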
vm 417 arch/x86/xen/p2m.c static struct vm_struct vm;
vm 423 arch/x86/xen/p2m.c vm.flags = VM_ALLOC;
vm 424 arch/x86/xen/p2m.c vm.size = ALIGN(sizeof(unsigned long) * max(xen_max_p2m_pfn, p2m_limit),
vm 426 arch/x86/xen/p2m.c vm_area_register_early(&vm, PMD_SIZE * PMDS_PER_MID_PAGE);
vm 427 arch/x86/xen/p2m.c pr_notice("p2m virtual area at %p, size is %lx\n", vm.addr, vm.size);
vm 429 arch/x86/xen/p2m.c xen_max_p2m_pfn = vm.size / sizeof(unsigned long);
vm 431 arch/x86/xen/p2m.c xen_rebuild_p2m_list(vm.addr);
vm 433 arch/x86/xen/p2m.c xen_p2m_addr = vm.addr;
vm 172 block/partitions/ldm.c static bool ldm_parse_vmdb (const u8 *data, struct vmdb *vm)
vm 174 block/partitions/ldm.c BUG_ON (!data || !vm);
vm 181 block/partitions/ldm.c vm->ver_major = get_unaligned_be16(data + 0x12);
vm 182 block/partitions/ldm.c vm->ver_minor = get_unaligned_be16(data + 0x14);
vm 183 block/partitions/ldm.c if ((vm->ver_major != 4) || (vm->ver_minor != 10)) {
vm 185 block/partitions/ldm.c "Aborting.", 4, 10, vm->ver_major, vm->ver_minor);
vm 189 block/partitions/ldm.c vm->vblk_size = get_unaligned_be32(data + 0x08);
vm 190 block/partitions/ldm.c if (vm->vblk_size == 0) {
vm 195 block/partitions/ldm.c vm->vblk_offset = get_unaligned_be32(data + 0x0C);
vm 196 block/partitions/ldm.c vm->last_vblk_seq = get_unaligned_be32(data + 0x04);
vm 433 block/partitions/ldm.c struct vmdb *vm;
vm 438 block/partitions/ldm.c vm = &ldb->vm;
vm 447 block/partitions/ldm.c if (!ldm_parse_vmdb (data, vm))
vm 456 block/partitions/ldm.c if (vm->vblk_offset != 512)
vm 457 block/partitions/ldm.c ldm_info ("VBLKs start at offset 0x%04x.", vm->vblk_offset);
vm 463 block/partitions/ldm.c if ((vm->vblk_size * vm->last_vblk_seq) > (toc->bitmap1_size << 9)) {
vm 1334 block/partitions/ldm.c if (!ldm_ldmdb_add (f->data, f->num*ldb->vm.vblk_size, ldb))
vm 1363 block/partitions/ldm.c size = ldb->vm.vblk_size;
vm 1365 block/partitions/ldm.c skip = ldb->vm.vblk_offset >> 9; /* Bytes to sectors */
vm 1366 block/partitions/ldm.c finish = (size * ldb->vm.last_vblk_seq) >> 9;
vm 188 block/partitions/ldm.h struct vmdb vm;
vm 419 drivers/gpu/drm/amd/amdgpu/amdgpu.h struct amdgpu_vm vm;
vm 430 drivers/gpu/drm/amd/amdgpu/amdgpu.h int amdgpu_ib_get(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 698 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c struct amdgpu_vm *vm)
vm 199 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void **vm, void **process_info,
vm 203 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void **vm, void **process_info,
vm 206 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h struct amdgpu_vm *vm);
vm 207 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void amdgpu_amdkfd_gpuvm_destroy_process_vm(struct kgd_dev *kgd, void *vm);
vm 208 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void amdgpu_amdkfd_gpuvm_release_process_vm(struct kgd_dev *kgd, void *vm);
vm 209 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h uint64_t amdgpu_amdkfd_gpuvm_get_process_page_dir(void *vm);
vm 212 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void *vm, struct kgd_mem **mem,
vm 217 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h struct kgd_dev *kgd, struct kgd_mem *mem, void *vm);
vm 219 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h struct kgd_dev *kgd, struct kgd_mem *mem, void *vm);
vm 232 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h uint64_t va, void *vm,
vm 89 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (entry->bo_va->base.vm == avm)
vm 310 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int vm_validate_pt_pd_bos(struct amdgpu_vm *vm)
vm 312 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_bo *pd = vm->root.base.bo;
vm 320 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = amdgpu_vm_validate_pt_bos(adev, vm, amdgpu_amdkfd_validate,
vm 333 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c vm->pd_phys_addr = amdgpu_gmc_pd_addr(vm->root.base.bo);
vm 335 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (vm->use_cpu_for_update) {
vm 346 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int vm_update_pds(struct amdgpu_vm *vm, struct amdgpu_sync *sync)
vm 348 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_bo *pd = vm->root.base.bo;
vm 352 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = amdgpu_vm_update_directories(adev, vm);
vm 356 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c return amdgpu_sync_fence(NULL, sync, vm->last_update, false);
vm 372 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *vm, bool is_aql,
vm 395 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c va + bo_size, vm);
vm 398 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c bo_va_entry->bo_va = amdgpu_vm_bo_add(adev, vm, bo);
vm 416 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = vm_validate_pt_pd_bos(vm);
vm 562 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *vm,
vm 568 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c WARN_ON(!vm);
vm 586 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c amdgpu_vm_get_pd_bo(vm, &ctx->list, &ctx->vm_pd[0]);
vm 612 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *vm, enum bo_vm_match map_type,
vm 629 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if ((vm && vm != entry->bo_va->base.vm) ||
vm 651 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if ((vm && vm != entry->bo_va->base.vm) ||
vm 656 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c amdgpu_vm_get_pd_bo(entry->bo_va->base.vm, &ctx->list,
vm 711 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *vm = bo_va->base.vm;
vm 715 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c amdgpu_vm_clear_freed(adev, vm, &bo_va->last_pt_update);
vm 840 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int init_kfd_vm(struct amdgpu_vm *vm, void **process_info,
vm 875 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c vm->process_info = *process_info;
vm 878 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = amdgpu_bo_reserve(vm->root.base.bo, true);
vm 881 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = vm_validate_pt_pd_bos(vm);
vm 886 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = amdgpu_bo_sync_wait(vm->root.base.bo,
vm 890 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = dma_resv_reserve_shared(vm->root.base.bo->tbo.base.resv, 1);
vm 893 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c amdgpu_bo_fence(vm->root.base.bo,
vm 894 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c &vm->process_info->eviction_fence->base, true);
vm 895 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c amdgpu_bo_unreserve(vm->root.base.bo);
vm 898 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mutex_lock(&vm->process_info->lock);
vm 899 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c list_add_tail(&vm->vm_list_node,
vm 900 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c &(vm->process_info->vm_list_head));
vm 901 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c vm->process_info->n_vms++;
vm 902 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mutex_unlock(&vm->process_info->lock);
vm 909 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c amdgpu_bo_unreserve(vm->root.base.bo);
vm 911 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c vm->process_info = NULL;
vm 927 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c void **vm, void **process_info,
vm 950 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c *vm = (void *) new_vm;
vm 963 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c void **vm, void **process_info,
vm 969 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *avm = &drv_priv->vm;
vm 986 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c *vm = (void *)avm;
vm 992 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *vm)
vm 994 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdkfd_process_info *process_info = vm->process_info;
vm 995 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_bo *pd = vm->root.base.bo;
vm 1008 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c list_del(&vm->vm_list_node);
vm 1025 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c void amdgpu_amdkfd_gpuvm_destroy_process_vm(struct kgd_dev *kgd, void *vm)
vm 1028 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *avm = (struct amdgpu_vm *)vm;
vm 1030 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (WARN_ON(!kgd || !vm))
vm 1033 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c pr_debug("Destroying process vm %p\n", vm);
vm 1037 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c kfree(vm);
vm 1040 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c void amdgpu_amdkfd_gpuvm_release_process_vm(struct kgd_dev *kgd, void *vm)
vm 1043 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *avm = (struct amdgpu_vm *)vm;
vm 1045 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (WARN_ON(!kgd || !vm))
vm 1048 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c pr_debug("Releasing process vm %p\n", vm);
vm 1059 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c uint64_t amdgpu_amdkfd_gpuvm_get_process_page_dir(void *vm)
vm 1061 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *avm = (struct amdgpu_vm *)vm;
vm 1072 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c void *vm, struct kgd_mem **mem,
vm 1076 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *avm = (struct amdgpu_vm *)vm;
vm 1299 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct kgd_dev *kgd, struct kgd_mem *mem, void *vm)
vm 1302 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *avm = (struct amdgpu_vm *)vm;
vm 1343 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c vm, domain_string(domain));
vm 1345 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = reserve_bo_and_vm(mem, vm, &ctx);
vm 1389 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (entry->bo_va->base.vm == vm && !entry->is_mapped) {
vm 1401 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = vm_update_pds(vm, ctx.sync);
vm 1437 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct kgd_dev *kgd, struct kgd_mem *mem, void *vm)
vm 1441 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ((struct amdgpu_vm *)vm)->process_info;
vm 1449 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = reserve_bo_and_cond_vms(mem, vm, BO_VM_MAPPED, &ctx);
vm 1458 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = vm_validate_pt_pd_bos((struct amdgpu_vm *)vm);
vm 1465 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c vm);
vm 1468 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (entry->bo_va->base.vm == vm && entry->is_mapped) {
vm 1593 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c uint64_t va, void *vm,
vm 1600 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_vm *avm = (struct amdgpu_vm *)vm;
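Throughout the amdgpu_amdkfd_gpuvm entries above, the per-process VM crosses the KFD interface as an opaque void * and each implementation function casts it back to struct amdgpu_vm *. A hedged stand-alone sketch of that opaque-handle idiom follows; every name here is invented, and it only mirrors the create/get/destroy shape visible in the listing:

/* Stand-alone sketch of the opaque-handle idiom: callers hold a void *,
 * the implementation casts back to its private type. */
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

struct demo_vm {
	uint64_t pd_addr;	/* stand-in for the page-directory address */
};

static int demo_create_process_vm(void **vm)
{
	struct demo_vm *new_vm = calloc(1, sizeof(*new_vm));

	if (!new_vm)
		return -1;
	new_vm->pd_addr = 0x1000;	/* arbitrary demo value */
	*vm = (void *)new_vm;		/* hand back an opaque handle */
	return 0;
}

static uint64_t demo_get_process_page_dir(void *vm)
{
	struct demo_vm *avm = (struct demo_vm *)vm;	/* cast back */

	return avm->pd_addr;
}

static void demo_destroy_process_vm(void *vm)
{
	free(vm);
}

int main(void)
{
	void *vm;

	if (demo_create_process_vm(&vm))
		return 1;
	printf("pd=0x%llx\n", (unsigned long long)demo_get_process_page_dir(vm));
	demo_destroy_process_vm(vm);
	return 0;
}

The void * keeps struct amdgpu_vm out of the KFD headers, at the cost of losing type checking at the boundary, which is why the implementation side re-establishes the type with an explicit cast on entry.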
vm 108 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c struct amdgpu_vm *vm = &fpriv->vm;
vm 229 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c ret = amdgpu_job_alloc(p->adev, num_ibs, &p->job, vm);
vm 243 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c amdgpu_vm_set_task_info(vm);
vm 574 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c struct amdgpu_vm *vm = &fpriv->vm;
vm 610 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c amdgpu_vm_get_pd_bo(&fpriv->vm, &p->validated, &p->vm_pd);
vm 664 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c r = amdgpu_vm_validate_pt_bos(p->adev, &fpriv->vm,
vm 692 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c e->bo_va = amdgpu_vm_bo_find(vm, bo);
vm 789 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c struct amdgpu_vm *vm = &fpriv->vm;
vm 856 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c if (!p->job->vm)
vm 860 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c r = amdgpu_vm_clear_freed(adev, vm, NULL);
vm 910 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c r = amdgpu_vm_handle_moved(adev, vm);
vm 914 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c r = amdgpu_vm_update_directories(adev, vm);
vm 918 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c r = amdgpu_sync_fence(adev, &p->job->sync, vm->last_update, false);
vm 922 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c p->job->vm_pd_addr = amdgpu_gmc_pd_addr(vm->root.base.bo);
vm 944 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c struct amdgpu_vm *vm = &fpriv->vm;
vm 992 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c r = amdgpu_ib_get(adev, vm, ring->funcs->parse_cs ?
vm 1327 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c amdgpu_vm_bo_trace_cs(&fpriv->vm, &p->ticket);
vm 1334 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c amdgpu_vm_move_to_lru_tail(p->adev, &fpriv->vm);
vm 1717 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c struct amdgpu_vm *vm = &fpriv->vm;
vm 1723 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c mapping = amdgpu_vm_bo_lookup_mapping(vm, addr);
vm 65 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c int amdgpu_map_static_csa(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 81 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c amdgpu_vm_get_pd_bo(vm, &list, &pd);
vm 89 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c *bo_va = amdgpu_vm_bo_add(adev, vm, bo);
vm 34 drivers/gpu/drm/amd/amdgpu/amdgpu_csa.h int amdgpu_map_static_csa(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 1317 drivers/gpu/drm/amd/amdgpu/amdgpu_drv.c timeout = amdgpu_vm_wait_idle(&fpriv->vm, timeout);
vm 127 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct amdgpu_vm *vm = &fpriv->vm;
vm 137 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c abo->tbo.base.resv != vm->root.base.bo->tbo.base.resv)
vm 144 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c bo_va = amdgpu_vm_bo_find(vm, abo);
vm 146 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c bo_va = amdgpu_vm_bo_add(adev, vm, abo);
vm 160 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct amdgpu_vm *vm = &fpriv->vm;
vm 176 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c amdgpu_vm_get_pd_bo(vm, &list, &vm_pd);
vm 184 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c bo_va = amdgpu_vm_bo_find(vm, bo);
vm 188 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c if (amdgpu_vm_ready(vm)) {
vm 191 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c r = amdgpu_vm_clear_freed(adev, vm, &fence);
vm 214 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct amdgpu_vm *vm = &fpriv->vm;
vm 251 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c r = amdgpu_bo_reserve(vm->root.base.bo, false);
vm 255 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c resv = vm->root.base.bo->tbo.base.resv;
vm 265 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c abo->parent = amdgpu_bo_ref(vm->root.base.bo);
vm 267 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c amdgpu_bo_unreserve(vm->root.base.bo);
vm 510 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct amdgpu_vm *vm,
vm 516 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c if (!amdgpu_vm_ready(vm))
vm 519 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c r = amdgpu_vm_clear_freed(adev, vm, NULL);
vm 530 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c r = amdgpu_vm_update_directories(adev, vm);
vm 614 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c amdgpu_vm_get_pd_bo(&fpriv->vm, &list, &vm_pd);
vm 621 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c bo_va = amdgpu_vm_bo_find(&fpriv->vm, abo);
vm 644 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c r = amdgpu_vm_bo_clear_mappings(adev, &fpriv->vm,
vm 658 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c amdgpu_gem_va_update_vm(adev, &fpriv->vm, bo_va,
vm 716 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c amdgpu_ttm_adev(base->vm->root.base.bo->tbo.bdev))) {
vm 64 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c int amdgpu_ib_get(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 79 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c if (!vm)
vm 131 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c struct amdgpu_vm *vm;
vm 145 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c vm = job->vm;
vm 149 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c vm = NULL;
vm 158 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c if (vm && !job->vmid) {
vm 267 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c if (vm && ring->funcs->emit_switch_buffer)
vm 196 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c static int amdgpu_vmid_grab_idle(struct amdgpu_vm *vm,
vm 265 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c static int amdgpu_vmid_grab_reserved(struct amdgpu_vm *vm,
vm 276 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c bool needs_flush = vm->use_cpu_for_update;
vm 279 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c *id = vm->reserved_vmid[vmhub];
vm 285 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c if ((*id)->owner != vm->entity.fence_context ||
vm 330 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c static int amdgpu_vmid_grab_used(struct amdgpu_vm *vm,
vm 344 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c job->vm_needs_flush = vm->use_cpu_for_update;
vm 348 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c bool needs_flush = vm->use_cpu_for_update;
vm 352 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c if ((*id)->owner != vm->entity.fence_context)
vm 406 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c int amdgpu_vmid_grab(struct amdgpu_vm *vm, struct amdgpu_ring *ring,
vm 418 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c r = amdgpu_vmid_grab_idle(vm, ring, sync, &idle);
vm 422 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c if (vm->reserved_vmid[vmhub]) {
vm 423 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c r = amdgpu_vmid_grab_reserved(vm, ring, sync, fence, job, &id);
vm 427 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c r = amdgpu_vmid_grab_used(vm, ring, sync, fence, job, &id);
vm 452 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c id->owner = vm->entity.fence_context;
vm 459 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c job->pasid = vm->pasid;
vm 460 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c trace_amdgpu_vm_grab_id(vm, ring, job);
vm 468 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c struct amdgpu_vm *vm,
vm 477 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c if (vm->reserved_vmid[vmhub])
vm 489 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c vm->reserved_vmid[vmhub] = idle;
vm 499 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c struct amdgpu_vm *vm,
vm 505 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c if (vm->reserved_vmid[vmhub]) {
vm 506 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c list_add(&vm->reserved_vmid[vmhub]->list,
vm 508 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c vm->reserved_vmid[vmhub] = NULL;
vm 81 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.h struct amdgpu_vm *vm,
vm 84 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.h struct amdgpu_vm *vm,
vm 86 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.h int amdgpu_vmid_grab(struct amdgpu_vm *vm, struct amdgpu_ring *ring,
vm 59 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c struct amdgpu_job **job, struct amdgpu_vm *vm)
vm 77 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c (*job)->vm = vm;
vm 188 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c struct amdgpu_vm *vm = job->vm;
vm 203 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c while (fence == NULL && vm && !job->vmid) {
vm 204 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c r = amdgpu_vmid_grab(vm, ring, &job->sync,
vm 44 drivers/gpu/drm/amd/amdgpu/amdgpu_job.h struct amdgpu_vm *vm;
vm 69 drivers/gpu/drm/amd/amdgpu/amdgpu_job.h struct amdgpu_job **job, struct amdgpu_vm *vm);
vm 991 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c r = amdgpu_vm_init(adev, &fpriv->vm, AMDGPU_VM_CONTEXT_GFX, pasid);
vm 995 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c fpriv->prt_va = amdgpu_vm_bo_add(adev, &fpriv->vm, NULL);
vm 1004 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c r = amdgpu_map_static_csa(adev, &fpriv->vm, adev->virt.csa_obj,
vm 1019 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c amdgpu_vm_fini(adev, &fpriv->vm);
vm 1072 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c pasid = fpriv->vm.pasid;
vm 1073 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c pd = amdgpu_bo_ref(fpriv->vm.root.base.bo);
vm 1076 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c amdgpu_vm_fini(adev, &fpriv->vm);
vm 217 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h TP_PROTO(struct amdgpu_vm *vm, struct amdgpu_ring *ring,
vm 219 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h TP_ARGS(vm, ring, job),
vm 231 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->pasid = vm->pasid;
vm 985 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c parser->job->vm = NULL;
vm 722 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c p->job->vm = NULL;
vm 202 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm = vm_bo->vm;
vm 207 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_move(&vm_bo->vm_status, &vm->evicted);
vm 209 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_move_tail(&vm_bo->vm_status, &vm->evicted);
vm 221 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_move(&vm_bo->vm_status, &vm_bo->vm->relocated);
vm 234 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_move(&vm_bo->vm_status, &vm_bo->vm->moved);
vm 247 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_move(&vm_bo->vm_status, &vm_bo->vm->idle);
vm 261 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_lock(&vm_bo->vm->invalidated_lock);
vm 262 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_move(&vm_bo->vm_status, &vm_bo->vm->invalidated);
vm 263 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_unlock(&vm_bo->vm->invalidated_lock);
vm 276 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_lock(&vm_bo->vm->invalidated_lock);
vm 278 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_unlock(&vm_bo->vm->invalidated_lock);
vm 292 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 295 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c base->vm = vm;
vm 305 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (bo->tbo.base.resv != vm->root.base.bo->tbo.base.resv)
vm 308 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->bulk_moveable = false;
vm 365 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm, uint64_t start,
vm 370 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c cursor->entry = &vm->root;
vm 490 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 497 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_pt_start(adev, vm, 0, cursor);
vm 541 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c #define for_each_amdgpu_vm_pt_dfs_safe(adev, vm, start, cursor, entry) \
vm 542 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c for (amdgpu_vm_pt_first_dfs((adev), (vm), (start), &(cursor)), \
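The for_each_amdgpu_vm_pt_dfs_safe() entries at lines 541-542 above define an iteration macro whose cursor is advanced before the loop body runs, so the body may free the current element, the same idea as the kernel's list_for_each_entry_safe(). A small self-contained analog over a singly linked list, with all names invented for the sketch:

/* Analog of a "safe" iteration macro: the next pointer is saved before
 * the body executes, so the body may free the current node. */
#include <stdio.h>
#include <stdlib.h>

struct node {
	int val;
	struct node *next;
};

#define for_each_node_safe(pos, n, head)			\
	for ((pos) = (head), (n) = (pos) ? (pos)->next : NULL;	\
	     (pos);						\
	     (pos) = (n), (n) = (pos) ? (pos)->next : NULL)

int main(void)
{
	struct node *head = NULL, *pos, *n;

	for (int i = 0; i < 3; i++) {	/* build the list 2 -> 1 -> 0 */
		struct node *nd = malloc(sizeof(*nd));

		if (!nd)
			return 1;
		nd->val = i;
		nd->next = head;
		head = nd;
	}
	for_each_node_safe(pos, n, head) {
		printf("freeing %d\n", pos->val);
		free(pos);		/* safe: n was saved already */
	}
	return 0;
}

The amdgpu macro applies the same trick to a depth-first page-table walk, which is what lets amdgpu_vm_free_pts() (line 933 below) tear down subtrees while iterating over them.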
vm 557 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c void amdgpu_vm_get_pd_bo(struct amdgpu_vm *vm,
vm 562 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c entry->tv.bo = &vm->root.base.bo->tbo;
vm 584 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm = bo_base->vm;
vm 586 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (abo->tbo.base.resv == vm->root.base.bo->tbo.base.resv)
vm 587 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->bulk_moveable = false;
vm 601 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm)
vm 606 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->bulk_moveable) {
vm 608 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c ttm_bo_bulk_move_lru_tail(&vm->lru_bulk_move);
vm 613 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c memset(&vm->lru_bulk_move, 0, sizeof(vm->lru_bulk_move));
vm 616 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_for_each_entry(bo_base, &vm->idle, vm_status) {
vm 622 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c ttm_bo_move_to_lru_tail(&bo->tbo, &vm->lru_bulk_move);
vm 625 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c &vm->lru_bulk_move);
vm 629 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->bulk_moveable = true;
vm 645 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c int amdgpu_vm_validate_pt_bos(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 652 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->bulk_moveable &= list_empty(&vm->evicted);
vm 654 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_for_each_entry_safe(bo_base, tmp, &vm->evicted, vm_status) {
vm 664 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->update_funcs->map_table(bo);
vm 685 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c bool amdgpu_vm_ready(struct amdgpu_vm *vm)
vm 687 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return list_empty(&vm->evicted);
vm 703 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 724 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (!vm->pte_support_ats) {
vm 737 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if ((pt - vm->root.entries) >= ats_entries) {
vm 756 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = vm->update_funcs->map_table(bo);
vm 762 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c params.vm = vm;
vm 764 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = vm->update_funcs->prepare(&params, AMDGPU_FENCE_OWNER_KFD, NULL);
vm 779 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = vm->update_funcs->update(&params, bo, addr, 0, ats_entries,
vm 802 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = vm->update_funcs->update(&params, bo, addr, 0, entries,
vm 808 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return vm->update_funcs->commit(&params, NULL);
vm 818 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c static void amdgpu_vm_bo_param(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 829 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->use_cpu_for_update)
vm 831 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c else if (!vm->root.base.bo || vm->root.base.bo->shadow)
vm 834 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->root.base.bo)
vm 835 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c bp->resv = vm->root.base.bo->tbo.base.resv;
vm 852 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 874 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_bo_param(adev, vm, cursor->level, &bp);
vm 884 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_bo_base_init(&entry->base, vm, pt);
vm 886 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_clear_bo(adev, vm, pt);
vm 925 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 931 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->bulk_moveable = false;
vm 933 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c for_each_amdgpu_vm_pt_dfs_safe(adev, vm, start, cursor, entry)
vm 1140 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_bo_va *amdgpu_vm_bo_find(struct amdgpu_vm *vm,
vm 1146 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (base->vm != vm)
vm 1191 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 1205 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return vm->update_funcs->update(params, bo, pde, pt, 1, 0, flags);
vm 1217 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm)
vm 1222 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c for_each_amdgpu_vm_pt_dfs_safe(adev, vm, NULL, cursor, entry)
vm 1239 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm)
vm 1244 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (list_empty(&vm->relocated))
vm 1249 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c params.vm = vm;
vm 1251 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = vm->update_funcs->prepare(&params, AMDGPU_FENCE_OWNER_VM, NULL);
vm 1255 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c while (!list_empty(&vm->relocated)) {
vm 1258 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c entry = list_first_entry(&vm->relocated, struct amdgpu_vm_pt,
vm 1262 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_update_pde(&params, vm, entry);
vm 1267 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = vm->update_funcs->commit(&params, &vm->last_update);
vm 1273 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_invalidate_pds(adev, vm);
vm 1301 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c params->vm->update_funcs->update(params, bo, pe, addr, count, incr,
vm 1394 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_pt_start(adev, params->vm, start, &cursor);
vm 1400 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_alloc_pts(params->adev, params->vm, &cursor);
vm 1474 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_free_pts(adev, params->vm, &cursor);
vm 1508 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 1519 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c params.vm = vm;
vm 1526 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = vm->update_funcs->prepare(&params, owner, exclusive);
vm 1534 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return vm->update_funcs->commit(&params, fence);
vm 1559 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 1651 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_bo_update_mapping(adev, exclusive, dma_addr, vm,
vm 1686 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm = bo_va->base.vm;
vm 1719 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (clear || (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv))
vm 1720 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c last_update = &vm->last_update;
vm 1733 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_bo_split_mapping(adev, exclusive, pages_addr, vm,
vm 1740 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->use_cpu_for_update) {
vm 1750 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) {
vm 1867 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 1884 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c static void amdgpu_vm_prt_fini(struct amdgpu_device *adev, struct amdgpu_vm *vm)
vm 1886 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_resv *resv = vm->root.base.bo->tbo.base.resv;
vm 1930 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 1938 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c while (!list_empty(&vm->freed)) {
vm 1939 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c mapping = list_first_entry(&vm->freed,
vm 1943 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->pte_support_ats &&
vm 1947 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_bo_update_mapping(adev, NULL, NULL, vm,
vm 1950 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_free_mapping(adev, vm, mapping, f);
vm 1982 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm)
vm 1989 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) {
vm 1996 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_lock(&vm->invalidated_lock);
vm 1997 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c while (!list_empty(&vm->invalidated)) {
vm 1998 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va,
vm 2001 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_unlock(&vm->invalidated_lock);
vm 2016 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_lock(&vm->invalidated_lock);
vm 2018 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_unlock(&vm->invalidated_lock);
vm 2039 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 2048 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);
vm 2081 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm = bo_va->base.vm;
vm 2086 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_insert(mapping, &vm->va);
vm 2091 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv &&
vm 2093 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_move(&bo_va->base.vm_status, &vm->moved);
vm 2122 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm = bo_va->base.vm;
vm 2139 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c tmp = amdgpu_vm_it_iter_first(&vm->va, saddr, eaddr);
vm 2206 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size);
vm 2244 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm = bo_va->base.vm;
vm 2267 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_remove(mapping, &vm->va);
vm 2272 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_add(&mapping->list, &vm->freed);
vm 2274 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_free_mapping(adev, vm, mapping,
vm 2294 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm,
vm 2319 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c tmp = amdgpu_vm_it_iter_first(&vm->va, saddr, eaddr);
vm 2350 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_remove(tmp, &vm->va);
vm 2359 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_add(&tmp->list, &vm->freed);
vm 2365 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_insert(before, &vm->va);
vm 2374 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_insert(after, &vm->va);
vm 2396 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_bo_va_mapping *amdgpu_vm_bo_lookup_mapping(struct amdgpu_vm *vm,
vm 2399 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return amdgpu_vm_it_iter_first(&vm->va, addr, addr);
vm 2410 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c void amdgpu_vm_bo_trace_cs(struct amdgpu_vm *vm, struct ww_acquire_ctx *ticket)
vm 2417 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c for (mapping = amdgpu_vm_it_iter_first(&vm->va, 0, U64_MAX); mapping;
vm 2447 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm = bo_va->base.vm;
vm 2451 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv)
vm 2452 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->bulk_moveable = false;
vm 2464 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_lock(&vm->invalidated_lock);
vm 2466 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_unlock(&vm->invalidated_lock);
vm 2470 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_remove(mapping, &vm->va);
vm 2473 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_add(&mapping->list, &vm->freed);
vm 2477 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_remove(mapping, &vm->va);
vm 2478 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_free_mapping(adev, vm, mapping,
vm 2513 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm = bo_base->vm;
vm 2515 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (evicted && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) {
vm 2526 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c else if (bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv)
vm 2654 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c long amdgpu_vm_wait_idle(struct amdgpu_vm *vm, long timeout)
vm 2656 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return dma_resv_wait_timeout_rcu(vm->root.base.bo->tbo.base.resv,
vm 2673 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c int amdgpu_vm_init(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 2680 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->va = RB_ROOT_CACHED;
vm 2682 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->reserved_vmid[i] = NULL;
vm 2683 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c INIT_LIST_HEAD(&vm->evicted);
vm 2684 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c INIT_LIST_HEAD(&vm->relocated);
vm 2685 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c INIT_LIST_HEAD(&vm->moved);
vm 2686 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c INIT_LIST_HEAD(&vm->idle);
vm 2687 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c INIT_LIST_HEAD(&vm->invalidated);
vm 2688 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c spin_lock_init(&vm->invalidated_lock);
vm 2689 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c INIT_LIST_HEAD(&vm->freed);
vm 2692 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = drm_sched_entity_init(&vm->entity, adev->vm_manager.vm_pte_rqs,
vm 2697 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->pte_support_ats = false;
vm 2700 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->use_cpu_for_update = !!(adev->vm_manager.vm_update_mode &
vm 2704 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->pte_support_ats = true;
vm 2706 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->use_cpu_for_update = !!(adev->vm_manager.vm_update_mode &
vm 2710 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->use_cpu_for_update ? "CPU" : "SDMA");
"CPU" : "SDMA"); vm 2711 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c WARN_ONCE((vm->use_cpu_for_update && !amdgpu_gmc_vram_full_visible(&adev->gmc)), vm 2714 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->use_cpu_for_update) vm 2715 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->update_funcs = &amdgpu_vm_cpu_funcs; vm 2717 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->update_funcs = &amdgpu_vm_sdma_funcs; vm 2718 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->last_update = NULL; vm 2720 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_bo_param(adev, vm, adev->vm_manager.root_level, &bp); vm 2735 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_bo_base_init(&vm->root.base, vm, root); vm 2737 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_clear_bo(adev, vm, root); vm 2741 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_bo_unreserve(vm->root.base.bo); vm 2747 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = idr_alloc(&adev->vm_manager.pasid_idr, vm, pasid, pasid + 1, vm 2753 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->pasid = pasid; vm 2756 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c INIT_KFIFO(vm->faults); vm 2761 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_bo_unreserve(vm->root.base.bo); vm 2764 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_bo_unref(&vm->root.base.bo->shadow); vm 2765 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_bo_unref(&vm->root.base.bo); vm 2766 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->root.base.bo = NULL; vm 2769 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c drm_sched_entity_destroy(&vm->entity); vm 2788 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm) vm 2794 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (!(vm->root.entries)) vm 2798 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->root.entries[i].base.bo) vm 2825 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c int amdgpu_vm_make_compute(struct amdgpu_device *adev, struct amdgpu_vm *vm, unsigned int pasid) vm 2830 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_bo_reserve(vm->root.base.bo, true); vm 2835 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_check_clean_reserved(adev, vm); vm 2843 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = idr_alloc(&adev->vm_manager.pasid_idr, vm, pasid, pasid + 1, vm 2855 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (pte_support_ats != vm->pte_support_ats) { vm 2856 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->pte_support_ats = pte_support_ats; vm 2857 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_clear_bo(adev, vm, vm->root.base.bo); vm 2863 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->use_cpu_for_update = !!(adev->vm_manager.vm_update_mode & vm 2866 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->use_cpu_for_update ? 
"CPU" : "SDMA"); vm 2867 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c WARN_ONCE((vm->use_cpu_for_update && !amdgpu_gmc_vram_full_visible(&adev->gmc)), vm 2870 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->use_cpu_for_update) vm 2871 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->update_funcs = &amdgpu_vm_cpu_funcs; vm 2873 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->update_funcs = &amdgpu_vm_sdma_funcs; vm 2874 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c dma_fence_put(vm->last_update); vm 2875 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->last_update = NULL; vm 2877 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->pasid) { vm 2881 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c idr_remove(&adev->vm_manager.pasid_idr, vm->pasid); vm 2887 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_pasid_free(vm->pasid); vm 2888 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->pasid = 0; vm 2892 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_bo_unref(&vm->root.base.bo->shadow); vm 2895 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->pasid = pasid; vm 2908 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_bo_unreserve(vm->root.base.bo); vm 2920 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c void amdgpu_vm_release_compute(struct amdgpu_device *adev, struct amdgpu_vm *vm) vm 2922 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->pasid) { vm 2926 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c idr_remove(&adev->vm_manager.pasid_idr, vm->pasid); vm 2929 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->pasid = 0; vm 2941 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c void amdgpu_vm_fini(struct amdgpu_device *adev, struct amdgpu_vm *vm) vm 2948 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_amdkfd_gpuvm_destroy_cb(adev, vm); vm 2950 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm->pasid) { vm 2954 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c idr_remove(&adev->vm_manager.pasid_idr, vm->pasid); vm 2958 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c drm_sched_entity_destroy(&vm->entity); vm 2960 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (!RB_EMPTY_ROOT(&vm->va.rb_root)) { vm 2964 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c &vm->va.rb_root, rb) { vm 2971 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c list_for_each_entry_safe(mapping, tmp, &vm->freed, list) { vm 2973 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_prt_fini(adev, vm); vm 2978 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_free_mapping(adev, vm, mapping, NULL); vm 2981 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c root = amdgpu_bo_ref(vm->root.base.bo); vm 2986 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_free_pts(adev, vm, NULL); vm 2990 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c WARN_ON(vm->root.base.bo); vm 2991 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c dma_fence_put(vm->last_update); vm 2993 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vmid_free_reserved(adev, vm, i); vm 3075 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vmid_alloc_reserved(adev, &fpriv->vm, AMDGPU_GFXHUB_0); vm 3080 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vmid_free_reserved(adev, &fpriv->vm, AMDGPU_GFXHUB_0); vm 3099 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm *vm; vm 3104 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm = idr_find(&adev->vm_manager.pasid_idr, pasid); vm 3105 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (vm) vm 3106 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c *task_info = vm->task_info; vm 3116 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c void amdgpu_vm_set_task_info(struct amdgpu_vm *vm) vm 3118 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (!vm->task_info.pid) { vm 3119 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->task_info.pid = current->pid; vm 3120 
vm 3123 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->task_info.tgid = current->group_leader->pid;
vm 3124 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c get_task_comm(vm->task_info.process_name, current->group_leader);
vm 134 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_vm *vm;
vm 199 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_vm *vm;
vm 346 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h long amdgpu_vm_wait_idle(struct amdgpu_vm *vm, long timeout);
vm 347 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h int amdgpu_vm_init(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 349 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h int amdgpu_vm_make_compute(struct amdgpu_device *adev, struct amdgpu_vm *vm, unsigned int pasid);
vm 350 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h void amdgpu_vm_release_compute(struct amdgpu_device *adev, struct amdgpu_vm *vm);
vm 351 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h void amdgpu_vm_fini(struct amdgpu_device *adev, struct amdgpu_vm *vm);
vm 352 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h void amdgpu_vm_get_pd_bo(struct amdgpu_vm *vm,
vm 355 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h bool amdgpu_vm_ready(struct amdgpu_vm *vm);
vm 356 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h int amdgpu_vm_validate_pt_bos(struct amdgpu_device *adev, struct amdgpu_vm *vm,
vm 361 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_vm *vm);
vm 363 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_vm *vm,
vm 366 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_vm *vm);
vm 373 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_bo_va *amdgpu_vm_bo_find(struct amdgpu_vm *vm,
vm 376 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_vm *vm,
vm 390 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_vm *vm,
vm 392 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_bo_va_mapping *amdgpu_vm_bo_lookup_mapping(struct amdgpu_vm *vm,
vm 394 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h void amdgpu_vm_bo_trace_cs(struct amdgpu_vm *vm, struct ww_acquire_ctx *ticket);
vm 408 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h void amdgpu_vm_set_task_info(struct amdgpu_vm *vm);
vm 411 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct amdgpu_vm *vm);
vm 55 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_cpu.c r = amdgpu_bo_sync_wait(p->vm->root.base.bo, owner, true);
vm 63 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c struct amdgpu_bo *root = p->vm->root.base.bo;
vm 96 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c struct amdgpu_bo *root = p->vm->root.base.bo;
vm 102 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ring = container_of(p->vm->entity.rq->sched, struct amdgpu_ring, sched);
vm 107 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c r = amdgpu_job_submit(p->job, &p->vm->entity,
vm 1179 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c u32 me, u32 pipe, u32 q, u32 vm)
vm 1181 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c nv_grbm_select(adev, me, pipe, q, vm);
vm 3046 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c u32 me, u32 pipe, u32 q, u32 vm)
vm 4200 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c u32 me, u32 pipe, u32 q, u32 vm)
vm 4202 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c cik_srbm_select(adev, me, pipe, q, vm);
vm 3472 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c u32 me, u32 pipe, u32 q, u32 vm)
vm 3474 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c vi_srbm_select(adev, me, pipe, q, vm);
vm 1841 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c u32 me, u32 pipe, u32 q, u32 vm)
vm 1843 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c soc15_grbm_select(adev, me, pipe, q, vm);
vm 1295 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c pdd->vm, (struct kgd_mem **) &mem, &offset,
vm 1440 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c peer->kgd, (struct kgd_mem *)mem, peer_pdd->vm);
vm 1547 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c peer->kgd, (struct kgd_mem *)mem, peer_pdd->vm);
vm 1654 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c args->va_addr, pdd->vm,
vm 660 drivers/gpu/drm/amd/amdkfd/kfd_device_queue_manager.c pd_base = amdgpu_amdkfd_gpuvm_get_process_page_dir(pdd->vm);
vm 732 drivers/gpu/drm/amd/amdkfd/kfd_device_queue_manager.c pd_base = amdgpu_amdkfd_gpuvm_get_process_page_dir(pdd->vm);
vm 782 drivers/gpu/drm/amd/amdkfd/kfd_device_queue_manager.c pd_base = amdgpu_amdkfd_gpuvm_get_process_page_dir(pdd->vm);
vm 640 drivers/gpu/drm/amd/amdkfd/kfd_priv.h void *vm;
vm 166 drivers/gpu/drm/amd/amdkfd/kfd_process.c amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu(dev->kgd, mem, pdd->vm);
vm 186 drivers/gpu/drm/amd/amdkfd/kfd_process.c pdd->vm, &mem, NULL, flags);
vm 190 drivers/gpu/drm/amd/amdkfd/kfd_process.c err = amdgpu_amdkfd_gpuvm_map_memory_to_gpu(kdev->kgd, mem, pdd->vm);
vm 394 drivers/gpu/drm/amd/amdkfd/kfd_process.c if (!peer_pdd->vm)
vm 397 drivers/gpu/drm/amd/amdkfd/kfd_process.c peer_pdd->dev->kgd, mem, peer_pdd->vm);
vm 424 drivers/gpu/drm/amd/amdkfd/kfd_process.c pdd->dev->kgd, pdd->vm);
vm 427 drivers/gpu/drm/amd/amdkfd/kfd_process.c else if (pdd->vm)
vm 429 drivers/gpu/drm/amd/amdkfd/kfd_process.c pdd->dev->kgd, pdd->vm);
vm 780 drivers/gpu/drm/amd/amdkfd/kfd_process.c if (pdd->vm)
vm 789 drivers/gpu/drm/amd/amdkfd/kfd_process.c &pdd->vm, &p->kgd_process_info, &p->ef);
vm 792 drivers/gpu/drm/amd/amdkfd/kfd_process.c &pdd->vm, &p->kgd_process_info, &p->ef);
vm 798 drivers/gpu/drm/amd/amdkfd/kfd_process.c amdgpu_vm_set_task_info(pdd->vm);
vm 815 drivers/gpu/drm/amd/amdkfd/kfd_process.c amdgpu_amdkfd_gpuvm_destroy_process_vm(dev->kgd, pdd->vm);
vm 816 drivers/gpu/drm/amd/amdkfd/kfd_process.c pdd->vm = NULL;
vm 218 drivers/gpu/drm/amd/display/dc/dml/display_mode_structs.h unsigned char vm;
vm 649 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.GPUVMEnable = mode_lib->vba.GPUVMEnable || !!pipes[k].pipe.src.gpuvm || !!pipes[k].pipe.src.vm;
vm 657 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.HostVMEnable = mode_lib->vba.HostVMEnable || !!pipes[k].pipe.src.hostvm || !!pipes[k].pipe.src.vm;
vm 132 drivers/gpu/drm/arm/hdlcd_crtc.c struct videomode vm;
vm 135 drivers/gpu/drm/arm/hdlcd_crtc.c vm.vfront_porch = m->crtc_vsync_start - m->crtc_vdisplay;
vm 136 drivers/gpu/drm/arm/hdlcd_crtc.c vm.vback_porch = m->crtc_vtotal - m->crtc_vsync_end;
vm 137 drivers/gpu/drm/arm/hdlcd_crtc.c vm.vsync_len = m->crtc_vsync_end - m->crtc_vsync_start;
vm 138 drivers/gpu/drm/arm/hdlcd_crtc.c vm.hfront_porch = m->crtc_hsync_start - m->crtc_hdisplay;
vm 139 drivers/gpu/drm/arm/hdlcd_crtc.c vm.hback_porch = m->crtc_htotal - m->crtc_hsync_end;
vm 140 drivers/gpu/drm/arm/hdlcd_crtc.c vm.hsync_len = m->crtc_hsync_end - m->crtc_hsync_start;
vm 154 drivers/gpu/drm/arm/hdlcd_crtc.c hdlcd_write(hdlcd, HDLCD_REG_V_BACK_PORCH, vm.vback_porch - 1);
vm 155 drivers/gpu/drm/arm/hdlcd_crtc.c hdlcd_write(hdlcd, HDLCD_REG_V_FRONT_PORCH, vm.vfront_porch - 1);
vm 156 drivers/gpu/drm/arm/hdlcd_crtc.c hdlcd_write(hdlcd, HDLCD_REG_V_SYNC, vm.vsync_len - 1);
vm 158 drivers/gpu/drm/arm/hdlcd_crtc.c hdlcd_write(hdlcd, HDLCD_REG_H_BACK_PORCH, vm.hback_porch - 1);
vm 159 drivers/gpu/drm/arm/hdlcd_crtc.c hdlcd_write(hdlcd, HDLCD_REG_H_FRONT_PORCH, vm.hfront_porch - 1);
vm 160 drivers/gpu/drm/arm/hdlcd_crtc.c hdlcd_write(hdlcd, HDLCD_REG_H_SYNC, vm.hsync_len - 1);
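The hdlcd_crtc.c entries above derive front porch, back porch and sync lengths from the crtc_* timing fields of a DRM mode. The arithmetic is self-contained enough to demonstrate in user space; the struct below is a pared-down stand-in for struct drm_display_mode, and the timing numbers are the common CEA 1080p60 values chosen as example input, not taken from the listing:

/* Porch/sync arithmetic as in lines 135-140 of hdlcd_crtc.c above. */
#include <stdio.h>

struct demo_mode {
	int crtc_hdisplay, crtc_hsync_start, crtc_hsync_end, crtc_htotal;
	int crtc_vdisplay, crtc_vsync_start, crtc_vsync_end, crtc_vtotal;
};

int main(void)
{
	struct demo_mode m = {
		.crtc_hdisplay = 1920, .crtc_hsync_start = 2008,
		.crtc_hsync_end = 2052, .crtc_htotal = 2200,
		.crtc_vdisplay = 1080, .crtc_vsync_start = 1084,
		.crtc_vsync_end = 1089, .crtc_vtotal = 1125,
	};

	int hfront_porch = m.crtc_hsync_start - m.crtc_hdisplay;  /* 88 */
	int hsync_len    = m.crtc_hsync_end - m.crtc_hsync_start; /* 44 */
	int hback_porch  = m.crtc_htotal - m.crtc_hsync_end;      /* 148 */
	int vfront_porch = m.crtc_vsync_start - m.crtc_vdisplay;  /* 4 */
	int vsync_len    = m.crtc_vsync_end - m.crtc_vsync_start; /* 5 */
	int vback_porch  = m.crtc_vtotal - m.crtc_vsync_end;      /* 36 */

	printf("h: fp=%d sync=%d bp=%d\n", hfront_porch, hsync_len, hback_porch);
	printf("v: fp=%d sync=%d bp=%d\n", vfront_porch, vsync_len, vback_porch);
	return 0;
}

The same decomposition underlies drm_display_mode_to_videomode() in the drm_modes.c entries further below; hdlcd simply open-codes it before programming each value (minus one) into its timing registers.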
vm 53 drivers/gpu/drm/arm/malidp_crtc.c struct videomode vm;
vm 61 drivers/gpu/drm/arm/malidp_crtc.c drm_display_mode_to_videomode(&crtc->state->adjusted_mode, &vm);
vm 67 drivers/gpu/drm/arm/malidp_crtc.c hwdev->hw->modeset(hwdev, &vm);
vm 255 drivers/gpu/drm/arm/malidp_crtc.c struct videomode vm;
vm 330 drivers/gpu/drm/arm/malidp_crtc.c drm_display_mode_to_videomode(&state->adjusted_mode, &vm);
vm 331 drivers/gpu/drm/arm/malidp_crtc.c ret = hwdev->hw->se_calc_mclk(hwdev, s, &vm);
vm 472 drivers/gpu/drm/arm/malidp_hw.c struct videomode *vm)
vm 475 drivers/gpu/drm/arm/malidp_hw.c unsigned long pxlclk = vm->pixelclock; /* Hz */
vm 476 drivers/gpu/drm/arm/malidp_hw.c unsigned long htotal = vm->hactive + vm->hfront_porch +
vm 477 drivers/gpu/drm/arm/malidp_hw.c vm->hback_porch + vm->hsync_len;
vm 813 drivers/gpu/drm/arm/malidp_hw.c struct videomode *vm)
vm 816 drivers/gpu/drm/arm/malidp_hw.c unsigned long pxlclk = vm->pixelclock;
vm 817 drivers/gpu/drm/arm/malidp_hw.c unsigned long htotal = vm->hactive + vm->hfront_porch +
vm 818 drivers/gpu/drm/arm/malidp_hw.c vm->hback_porch + vm->hsync_len;
vm 192 drivers/gpu/drm/arm/malidp_hw.h struct videomode *vm);
vm 72 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c struct videomode vm;
vm 82 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c vm.vfront_porch = adj->crtc_vsync_start - adj->crtc_vdisplay;
vm 83 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c vm.vback_porch = adj->crtc_vtotal - adj->crtc_vsync_end;
vm 84 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c vm.vsync_len = adj->crtc_vsync_end - adj->crtc_vsync_start;
vm 85 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c vm.hfront_porch = adj->crtc_hsync_start - adj->crtc_hdisplay;
vm 86 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c vm.hback_porch = adj->crtc_htotal - adj->crtc_hsync_end;
vm 87 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c vm.hsync_len = adj->crtc_hsync_end - adj->crtc_hsync_start;
vm 90 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c (vm.hsync_len - 1) | ((vm.vsync_len - 1) << 16));
vm 93 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c (vm.vfront_porch - 1) | (vm.vback_porch << 16));
vm 96 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_crtc.c (vm.hfront_porch - 1) | ((vm.hback_porch - 1) << 16));
vm 587 drivers/gpu/drm/drm_modes.c void drm_display_mode_from_videomode(const struct videomode *vm,
vm 590 drivers/gpu/drm/drm_modes.c dmode->hdisplay = vm->hactive;
vm 591 drivers/gpu/drm/drm_modes.c dmode->hsync_start = dmode->hdisplay + vm->hfront_porch;
vm 592 drivers/gpu/drm/drm_modes.c dmode->hsync_end = dmode->hsync_start + vm->hsync_len;
vm 593 drivers/gpu/drm/drm_modes.c dmode->htotal = dmode->hsync_end + vm->hback_porch;
vm 595 drivers/gpu/drm/drm_modes.c dmode->vdisplay = vm->vactive;
vm 596 drivers/gpu/drm/drm_modes.c dmode->vsync_start = dmode->vdisplay + vm->vfront_porch;
vm 597 drivers/gpu/drm/drm_modes.c dmode->vsync_end = dmode->vsync_start + vm->vsync_len;
vm 598 drivers/gpu/drm/drm_modes.c dmode->vtotal = dmode->vsync_end + vm->vback_porch;
vm 600 drivers/gpu/drm/drm_modes.c dmode->clock = vm->pixelclock / 1000;
vm 603 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_HSYNC_HIGH)
vm 605 drivers/gpu/drm/drm_modes.c else if (vm->flags & DISPLAY_FLAGS_HSYNC_LOW)
vm 607 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_VSYNC_HIGH)
vm 609 drivers/gpu/drm/drm_modes.c else if (vm->flags & DISPLAY_FLAGS_VSYNC_LOW)
vm 611 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_INTERLACED)
vm 613 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_DOUBLESCAN)
vm 629 drivers/gpu/drm/drm_modes.c struct videomode *vm)
vm 631 drivers/gpu/drm/drm_modes.c vm->hactive = dmode->hdisplay;
vm 632 drivers/gpu/drm/drm_modes.c vm->hfront_porch = dmode->hsync_start - dmode->hdisplay;
vm 633 drivers/gpu/drm/drm_modes.c vm->hsync_len = dmode->hsync_end - dmode->hsync_start;
vm 634 drivers/gpu/drm/drm_modes.c vm->hback_porch = dmode->htotal - dmode->hsync_end;
vm 636 drivers/gpu/drm/drm_modes.c vm->vactive = dmode->vdisplay;
vm 637 drivers/gpu/drm/drm_modes.c vm->vfront_porch = dmode->vsync_start - dmode->vdisplay;
vm 638 drivers/gpu/drm/drm_modes.c vm->vsync_len = dmode->vsync_end - dmode->vsync_start;
vm 639 drivers/gpu/drm/drm_modes.c vm->vback_porch = dmode->vtotal - dmode->vsync_end;
vm 641 drivers/gpu/drm/drm_modes.c vm->pixelclock = dmode->clock * 1000;
vm 643 drivers/gpu/drm/drm_modes.c vm->flags = 0;
vm 645 drivers/gpu/drm/drm_modes.c vm->flags |= DISPLAY_FLAGS_HSYNC_HIGH;
vm 647 drivers/gpu/drm/drm_modes.c vm->flags |= DISPLAY_FLAGS_HSYNC_LOW;
vm 649 drivers/gpu/drm/drm_modes.c vm->flags |= DISPLAY_FLAGS_VSYNC_HIGH;
vm 651 drivers/gpu/drm/drm_modes.c vm->flags |= DISPLAY_FLAGS_VSYNC_LOW;
vm 653 drivers/gpu/drm/drm_modes.c vm->flags |= DISPLAY_FLAGS_INTERLACED;
vm 655 drivers/gpu/drm/drm_modes.c vm->flags |= DISPLAY_FLAGS_DOUBLESCAN;
vm 657 drivers/gpu/drm/drm_modes.c vm->flags |= DISPLAY_FLAGS_DOUBLECLK;
vm 672 drivers/gpu/drm/drm_modes.c void drm_bus_flags_from_videomode(const struct videomode *vm, u32 *bus_flags)
vm 675 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_PIXDATA_POSEDGE)
vm 677 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_PIXDATA_NEGEDGE)
vm 680 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_SYNC_POSEDGE)
vm 682 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_SYNC_NEGEDGE)
vm 685 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_DE_LOW)
vm 687 drivers/gpu/drm/drm_modes.c if (vm->flags & DISPLAY_FLAGS_DE_HIGH)
vm 711 drivers/gpu/drm/drm_modes.c struct videomode vm;
vm 714 drivers/gpu/drm/drm_modes.c ret = of_get_videomode(np, &vm, index);
vm 718 drivers/gpu/drm/drm_modes.c drm_display_mode_from_videomode(&vm, dmode);
vm 720 drivers/gpu/drm/drm_modes.c drm_bus_flags_from_videomode(&vm, bus_flags);
vm 723 drivers/gpu/drm/drm_modes.c np, vm.hactive, vm.vactive);
vm 40 drivers/gpu/drm/exynos/exynos_dp.c struct videomode vm;
vm 86 drivers/gpu/drm/exynos/exynos_dp.c drm_display_mode_from_videomode(&dp->vm, mode);
vm 145 drivers/gpu/drm/exynos/exynos_dp.c ret = of_get_videomode(dp->dev->of_node, &dp->vm, OF_USE_NATIVE_MODE);
vm 31 drivers/gpu/drm/exynos/exynos_drm_dpi.c struct videomode *vm;
vm 72 drivers/gpu/drm/exynos/exynos_drm_dpi.c if (ctx->vm) {
vm 81 drivers/gpu/drm/exynos/exynos_drm_dpi.c drm_display_mode_from_videomode(ctx->vm, mode);
vm 174 drivers/gpu/drm/exynos/exynos_drm_dpi.c struct videomode *vm;
vm 179 drivers/gpu/drm/exynos/exynos_drm_dpi.c vm = devm_kzalloc(dev, sizeof(*ctx->vm), GFP_KERNEL);
vm 180 drivers/gpu/drm/exynos/exynos_drm_dpi.c if (!vm)
vm 183 drivers/gpu/drm/exynos/exynos_drm_dpi.c ret = of_get_videomode(dn, vm, 0);
vm 185 drivers/gpu/drm/exynos/exynos_drm_dpi.c devm_kfree(dev, vm);
vm 189 drivers/gpu/drm/exynos/exynos_drm_dpi.c ctx->vm = vm;
vm 101 drivers/gpu/drm/exynos/exynos_drm_mic.c struct videomode vm;
vm 156 drivers/gpu/drm/exynos/exynos_drm_mic.c struct videomode vm = mic->vm;
vm 159 drivers/gpu/drm/exynos/exynos_drm_mic.c reg = MIC_V_PULSE_WIDTH(vm.vsync_len) +
vm 160 drivers/gpu/drm/exynos/exynos_drm_mic.c MIC_V_PERIOD_LINE(vm.vsync_len + vm.vactive +
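The drm_modes.c entries above cover both conversion directions between struct drm_display_mode and struct videomode. A condensed, hedged sketch of the horizontal half of the to-videomode direction (field names taken from the listing; the vertical fields follow the same pattern) makes the porch arithmetic explicit:

#include <drm/drm_modes.h>
#include <video/videomode.h>

/* Porches are the gaps between active video and the sync pulse;
 * mirrors the drm_modes.c lines 631-641 listed above. */
static void example_mode_to_videomode(const struct drm_display_mode *dmode,
				      struct videomode *vm)
{
	vm->hactive = dmode->hdisplay;
	vm->hfront_porch = dmode->hsync_start - dmode->hdisplay;
	vm->hsync_len = dmode->hsync_end - dmode->hsync_start;
	vm->hback_porch = dmode->htotal - dmode->hsync_end;
	vm->pixelclock = dmode->clock * 1000;	/* drm clock is in kHz */
}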
vm 161 drivers/gpu/drm/exynos/exynos_drm_mic.c vm.vback_porch + vm.vfront_porch);
vm 164 drivers/gpu/drm/exynos/exynos_drm_mic.c reg = MIC_VBP_SIZE(vm.vback_porch) +
vm 165 drivers/gpu/drm/exynos/exynos_drm_mic.c MIC_VFP_SIZE(vm.vfront_porch);
vm 168 drivers/gpu/drm/exynos/exynos_drm_mic.c reg = MIC_V_PULSE_WIDTH(vm.hsync_len) +
vm 169 drivers/gpu/drm/exynos/exynos_drm_mic.c MIC_V_PERIOD_LINE(vm.hsync_len + vm.hactive +
vm 170 drivers/gpu/drm/exynos/exynos_drm_mic.c vm.hback_porch + vm.hfront_porch);
vm 173 drivers/gpu/drm/exynos/exynos_drm_mic.c reg = MIC_VBP_SIZE(vm.hback_porch) +
vm 174 drivers/gpu/drm/exynos/exynos_drm_mic.c MIC_VFP_SIZE(vm.hfront_porch);
vm 180 drivers/gpu/drm/exynos/exynos_drm_mic.c struct videomode *vm = &mic->vm;
vm 183 drivers/gpu/drm/exynos/exynos_drm_mic.c reg = MIC_IMG_H_SIZE(vm->hactive) +
vm 184 drivers/gpu/drm/exynos/exynos_drm_mic.c MIC_IMG_V_SIZE(vm->vactive);
vm 191 drivers/gpu/drm/exynos/exynos_drm_mic.c struct videomode vm = mic->vm;
vm 194 drivers/gpu/drm/exynos/exynos_drm_mic.c DRM_DEV_DEBUG(mic->dev, "w: %u, h: %u\n", vm.hactive, vm.vactive);
vm 195 drivers/gpu/drm/exynos/exynos_drm_mic.c bs_size_2d = ((vm.hactive >> 2) << 1) + (vm.vactive % 4);
vm 200 drivers/gpu/drm/exynos/exynos_drm_mic.c reg = MIC_H_PULSE_WIDTH_2D(vm.hsync_len) +
vm 201 drivers/gpu/drm/exynos/exynos_drm_mic.c MIC_H_PERIOD_PIXEL_2D(vm.hsync_len + bs_size_2d +
vm 202 drivers/gpu/drm/exynos/exynos_drm_mic.c vm.hback_porch + vm.hfront_porch);
vm 205 drivers/gpu/drm/exynos/exynos_drm_mic.c reg = MIC_HBP_SIZE_2D(vm.hback_porch) +
vm 206 drivers/gpu/drm/exynos/exynos_drm_mic.c MIC_H_PERIOD_PIXEL_2D(vm.hfront_porch);
vm 256 drivers/gpu/drm/exynos/exynos_drm_mic.c drm_display_mode_to_videomode(mode, &mic->vm);
vm 87 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c struct videomode vm;
vm 91 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c drm_display_mode_to_videomode(mode, &vm);
vm 97 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c if (vm.flags & DISPLAY_FLAGS_HSYNC_LOW)
vm 100 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c if (vm.flags & DISPLAY_FLAGS_VSYNC_LOW)
vm 104 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c DCU_HSYN_PARA_BP(vm.hback_porch) |
vm 105 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c DCU_HSYN_PARA_PW(vm.hsync_len) |
vm 106 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c DCU_HSYN_PARA_FP(vm.hfront_porch));
vm 108 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c DCU_VSYN_PARA_BP(vm.vback_porch) |
vm 109 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c DCU_VSYN_PARA_PW(vm.vsync_len) |
vm 110 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c DCU_VSYN_PARA_FP(vm.vfront_porch));
vm 112 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c DCU_DISP_SIZE_DELTA_Y(vm.vactive) |
vm 113 drivers/gpu/drm/fsl-dcu/fsl_dcu_drm_crtc.c DCU_DISP_SIZE_DELTA_X(vm.hactive));
vm 2164 drivers/gpu/drm/i915/display/intel_display.c lockdep_assert_held(&vma->vm->i915->drm.struct_mutex);
vm 40 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c return vma->vm->vma_ops.bind_vma(vma, cache_level, flags);
vm 45 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c vma->vm->vma_ops.unbind_vma(vma);
vm 55 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c static struct i915_sleeve *create_sleeve(struct i915_address_space *vm,
vm 68 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c vma = i915_vma_instance(obj, vm, NULL);
vm 274 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c sleeve = create_sleeve(ce->vm, obj, pages, page_sizes);
vm 316 drivers/gpu/drm/i915/gem/i915_gem_context.c if (ctx->vm)
vm 317 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(ctx->vm);
vm 469 drivers/gpu/drm/i915/gem/i915_gem_context.c static void __apply_ppgtt(struct intel_context *ce, void *vm)
vm 471 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(ce->vm);
vm 472 drivers/gpu/drm/i915/gem/i915_gem_context.c ce->vm = i915_vm_get(vm);
vm 476 drivers/gpu/drm/i915/gem/i915_gem_context.c __set_ppgtt(struct i915_gem_context *ctx, struct i915_address_space *vm)
vm 478 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_address_space *old = ctx->vm;
vm 480 drivers/gpu/drm/i915/gem/i915_gem_context.c GEM_BUG_ON(old && i915_vm_is_4lvl(vm) != i915_vm_is_4lvl(old));
vm 482 drivers/gpu/drm/i915/gem/i915_gem_context.c ctx->vm = i915_vm_get(vm);
vm 483 drivers/gpu/drm/i915/gem/i915_gem_context.c context_apply_all(ctx, __apply_ppgtt, vm);
vm 489 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_address_space *vm)
vm 491 drivers/gpu/drm/i915/gem/i915_gem_context.c if (vm == ctx->vm)
vm 494 drivers/gpu/drm/i915/gem/i915_gem_context.c vm = __set_ppgtt(ctx, vm);
vm 495 drivers/gpu/drm/i915/gem/i915_gem_context.c if (vm)
vm 496 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(vm);
vm 551 drivers/gpu/drm/i915/gem/i915_gem_context.c __assign_ppgtt(ctx, &ppgtt->vm);
vm 552 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(&ppgtt->vm);
vm 686 drivers/gpu/drm/i915/gem/i915_gem_context.c if (ctx->vm)
vm 687 drivers/gpu/drm/i915/gem/i915_gem_context.c ctx->vm->file = fpriv;
vm 783 drivers/gpu/drm/i915/gem/i915_gem_context.c ppgtt->vm.file = file_priv;
vm 797 drivers/gpu/drm/i915/gem/i915_gem_context.c err = idr_alloc(&file_priv->vm_idr, &ppgtt->vm, 0, 0, GFP_KERNEL);
vm 811 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(&ppgtt->vm);
vm 820 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_address_space *vm;
vm 838 drivers/gpu/drm/i915/gem/i915_gem_context.c vm = idr_remove(&file_priv->vm_idr, id);
vm 841 drivers/gpu/drm/i915/gem/i915_gem_context.c if (!vm)
vm 844 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(vm);
vm 938 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_address_space *vm;
vm 941 drivers/gpu/drm/i915/gem/i915_gem_context.c if (!ctx->vm)
vm 949 drivers/gpu/drm/i915/gem/i915_gem_context.c vm = i915_vm_get(ctx->vm);
vm 956 drivers/gpu/drm/i915/gem/i915_gem_context.c ret = idr_alloc(&file_priv->vm_idr, vm, 0, 0, GFP_KERNEL);
vm 961 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_get(vm);
vm 970 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(vm);
vm 986 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_address_space *vm = rq->hw_context->vm;
vm 992 drivers/gpu/drm/i915/gem/i915_gem_context.c if (i915_vm_is_4lvl(vm)) {
vm 993 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);
vm 1010 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);
vm 1029 drivers/gpu/drm/i915/gem/i915_gem_context.c gen6_ppgtt_pin(i915_vm_to_ppgtt(vm));
vm 1047 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_address_space *vm, *old;
vm 1053 drivers/gpu/drm/i915/gem/i915_gem_context.c if (!ctx->vm)
vm 1063 drivers/gpu/drm/i915/gem/i915_gem_context.c vm = idr_find(&file_priv->vm_idr, args->value);
vm 1064 drivers/gpu/drm/i915/gem/i915_gem_context.c if (vm)
vm 1065 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_get(vm);
vm 1067 drivers/gpu/drm/i915/gem/i915_gem_context.c if (!vm)
vm 1074 drivers/gpu/drm/i915/gem/i915_gem_context.c if (vm == ctx->vm)
vm 1082 drivers/gpu/drm/i915/gem/i915_gem_context.c old = __set_ppgtt(ctx, vm);
vm 1103 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(vm);
vm 1974 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_address_space *vm;
vm 1978 drivers/gpu/drm/i915/gem/i915_gem_context.c vm = READ_ONCE(src->vm);
vm 1979 drivers/gpu/drm/i915/gem/i915_gem_context.c if (!vm)
vm 1982 drivers/gpu/drm/i915/gem/i915_gem_context.c if (!kref_get_unless_zero(&vm->ref))
vm 2000 drivers/gpu/drm/i915/gem/i915_gem_context.c if (vm == READ_ONCE(src->vm))
vm 2003 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(vm);
vm 2007 drivers/gpu/drm/i915/gem/i915_gem_context.c if (vm) {
vm 2008 drivers/gpu/drm/i915/gem/i915_gem_context.c __assign_ppgtt(dst, vm);
vm 2009 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_vm_put(vm);
vm 2233 drivers/gpu/drm/i915/gem/i915_gem_context.c if (ctx->vm)
vm 2234 drivers/gpu/drm/i915/gem/i915_gem_context.c args->value = ctx->vm->total;
vm 2236 drivers/gpu/drm/i915/gem/i915_gem_context.c args->value = to_i915(dev)->ggtt.alias->vm.total;
vm 2238 drivers/gpu/drm/i915/gem/i915_gem_context.c args->value = to_i915(dev)->ggtt.vm.total;
vm 91 drivers/gpu/drm/i915/gem/i915_gem_context_types.h struct i915_address_space *vm;
vm 249 drivers/gpu/drm/i915/gem/i915_gem_domain.c ret = mutex_lock_interruptible(&i915->ggtt.vm.mutex);
vm 272 drivers/gpu/drm/i915/gem/i915_gem_domain.c mutex_unlock(&i915->ggtt.vm.mutex);
vm 489 drivers/gpu/drm/i915/gem/i915_gem_domain.c mutex_lock(&i915->ggtt.vm.mutex);
vm 494 drivers/gpu/drm/i915/gem/i915_gem_domain.c list_move_tail(&vma->vm_link, &vma->vm->bound_list);
vm 496 drivers/gpu/drm/i915/gem/i915_gem_domain.c mutex_unlock(&i915->ggtt.vm.mutex);
vm 703 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_gem_evict_vm(eb->context->vm);
vm 731 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (ctx->vm)
vm 777 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vma = i915_vma_instance(obj, eb->context->vm, NULL);
vm 968 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c intel_gt_flush_ggtt_writes(ggtt->vm.gt);
vm 972 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c ggtt->vm.clear_range(&ggtt->vm,
vm 1026 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c intel_gt_flush_ggtt_writes(ggtt->vm.gt);
vm 1051 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c (&ggtt->vm.mm, &cache->node,
vm 1065 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c ggtt->vm.insert_page(&ggtt->vm,
vm 1166 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c batch = i915_vma_instance(pool->obj, vma->vm, NULL);
vm 1968 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct i915_address_space *vm;
vm 1977 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vm = &dev_priv->ggtt.vm;
vm 1978 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c } else if (vma->vm->has_read_only) {
vm 1980 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vm = vma->vm;
vm 1987 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c return i915_gem_object_pin(obj, vm, NULL, 0, 0, flags);
vm 248 drivers/gpu/drm/i915/gem/i915_gem_mman.c ret = intel_gt_reset_trylock(ggtt->vm.gt, &srcu);
vm 310 drivers/gpu/drm/i915/gem/i915_gem_mman.c mutex_lock(&i915->ggtt.vm.mutex);
vm 313 drivers/gpu/drm/i915/gem/i915_gem_mman.c mutex_unlock(&i915->ggtt.vm.mutex);
vm 332 drivers/gpu/drm/i915/gem/i915_gem_mman.c intel_gt_reset_unlock(ggtt->vm.gt, srcu);
vm 345 drivers/gpu/drm/i915/gem/i915_gem_mman.c if (!intel_gt_is_wedged(ggtt->vm.gt))
vm 418 drivers/gpu/drm/i915/gem/i915_gem_mman.c mutex_lock(&i915->ggtt.vm.mutex);
vm 435 drivers/gpu/drm/i915/gem/i915_gem_mman.c mutex_unlock(&i915->ggtt.vm.mutex);
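One pattern worth calling out in the i915_gem_context.c entries above: __set_ppgtt() installs the new address space under a fresh reference and hands the old one back, so the caller decides when to drop it. A hedged sketch of just that shape (the upstream function additionally reconfigures every engine context via context_apply_all()):

/* Reference-swap pattern from the i915_gem_context.c lines 476-496 above;
 * example_set_vm is an illustrative name, not an i915 symbol. */
static struct i915_address_space *
example_set_vm(struct i915_gem_context *ctx, struct i915_address_space *vm)
{
	struct i915_address_space *old = ctx->vm;

	ctx->vm = i915_vm_get(vm);	/* ctx now owns a reference */
	return old;			/* caller i915_vm_put()s it */
}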
vm 271 drivers/gpu/drm/i915/gem/i915_gem_object.c intel_gt_flush_ggtt_writes(vma->vm->gt);
vm 18 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct drm_i915_private *i915 = ce->vm->i915;
vm 80 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c intel_gt_chipset_flush(ce->vm->gt);
vm 84 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c batch = i915_vma_instance(pool->obj, ce->vm, NULL);
vm 135 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c vma = i915_vma_instance(obj, ce->vm, NULL);
vm 202 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct drm_i915_private *i915 = ce->vm->i915;
vm 279 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c intel_gt_chipset_flush(ce->vm->gt);
vm 283 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c batch = i915_vma_instance(pool->obj, ce->vm, NULL);
vm 318 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct i915_address_space *vm = ce->vm;
vm 324 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c vma[0] = i915_vma_instance(src, vm, NULL);
vm 332 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c vma[1] = i915_vma_instance(dst, vm, NULL);
vm 441 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c mutex_lock(&i915->ggtt.vm.mutex);
vm 443 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c &i915->ggtt.vm.bound_list, vm_link) {
vm 449 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c mutex_unlock(&i915->ggtt.vm.mutex);
vm 452 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c mutex_lock(&i915->ggtt.vm.mutex);
vm 454 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c mutex_unlock(&i915->ggtt.vm.mutex);
vm 666 drivers/gpu/drm/i915/gem/i915_gem_stolen.c vma = i915_vma_instance(obj, &ggtt->vm, NULL);
vm 677 drivers/gpu/drm/i915/gem/i915_gem_stolen.c ret = i915_gem_gtt_reserve(&ggtt->vm, &vma->node,
vm 691 drivers/gpu/drm/i915/gem/i915_gem_stolen.c mutex_lock(&ggtt->vm.mutex);
vm 692 drivers/gpu/drm/i915/gem/i915_gem_stolen.c list_move_tail(&vma->vm_link, &ggtt->vm.bound_list);
vm 693 drivers/gpu/drm/i915/gem/i915_gem_stolen.c mutex_unlock(&ggtt->vm.mutex);
vm 162 drivers/gpu/drm/i915/gem/i915_gem_tiling.c struct drm_i915_private *i915 = vma->vm->i915;
vm 809 drivers/gpu/drm/i915/gem/i915_gem_userptr.c struct i915_address_space *vm;
vm 815 drivers/gpu/drm/i915/gem/i915_gem_userptr.c vm = dev_priv->kernel_context->vm;
vm 816 drivers/gpu/drm/i915/gem/i915_gem_userptr.c if (!vm || !vm->has_read_only)
vm 333 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_private *i915 = vma->vm->i915;
vm 374 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_private *i915 = ppgtt->vm.i915;
vm 410 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL);
vm 453 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_private *i915 = ppgtt->vm.i915;
vm 495 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL);
vm 587 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL);
vm 601 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_private *i915 = ppgtt->vm.i915;
vm 602 drivers/gpu/drm/i915/gem/selftests/huge_pages.c unsigned long max_pages = ppgtt->vm.total >> PAGE_SHIFT;
vm 638 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL);
vm 722 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct drm_i915_private *i915 = ppgtt->vm.i915;
vm 816 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, &ppgtt->vm, NULL);
vm 938 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct i915_address_space *vm = ctx->vm ?: &engine->gt->ggtt->vm;
vm 943 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL);
vm 957 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (err == -ENOSPC && i915_is_ggtt(vm))
vm 991 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm;
vm 1012 drivers/gpu/drm/i915/gem/selftests/huge_pages.c max = div_u64((vm->total - size), max_page_size);
vm 1317 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct i915_address_space *vm = ctx->vm;
vm 1335 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (!vm || !i915_vm_is_4lvl(vm)) {
vm 1350 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL);
vm 1404 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL);
vm 1451 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm;
vm 1481 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL);
vm 1508 drivers/gpu/drm/i915/gem/selftests/huge_pages.c struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm;
vm 1531 drivers/gpu/drm/i915/gem/selftests/huge_pages.c vma = i915_vma_instance(obj, vm, NULL);
vm 1627 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (!i915_vm_is_4lvl(&ppgtt->vm)) {
vm 1634 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (!i915_vm_has_scratch_64K(&ppgtt->vm)) {
vm 1643 drivers/gpu/drm/i915/gem/selftests/huge_pages.c i915_vm_put(&ppgtt->vm);
vm 1688 drivers/gpu/drm/i915/gem/selftests/huge_pages.c if (ctx->vm)
vm 1689 drivers/gpu/drm/i915/gem/selftests/huge_pages.c ctx->vm->scrub_64K = true;
vm 29 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c u32 sz = min_t(u64, ce->vm->total >> 4, prandom_u32_state(&prng));
vm 174 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct i915_address_space *vm = ctx->vm ?: &engine->gt->ggtt->vm;
vm 178 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c GEM_BUG_ON(obj->base.size > vm->total);
vm 181 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, vm, NULL);
vm 313 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct i915_address_space *vm = ctx->vm ?: &ctx->i915->ggtt.vm;
vm 320 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c size = min(vm->total / 2, 1024ull * DW_PER_PAGE * PAGE_SIZE);
vm 416 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c yesno(!!ctx->vm), err);
vm 489 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (!parent->vm) { /* not full-ppgtt; nothing to share */
vm 519 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c __assign_ppgtt(ctx, parent->vm);
vm 535 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c yesno(!!ctx->vm), err);
vm 585 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (INTEL_GEN(vma->vm->i915) < 8)
vm 588 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c obj = i915_gem_object_create_internal(vma->vm->i915, PAGE_SIZE);
vm 607 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, vma->vm, NULL);
vm 636 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, ce->vm, NULL);
vm 1044 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c struct i915_address_space *vm;
vm 1076 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vm = ctx->vm ?: &i915->ggtt.alias->vm;
vm 1077 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (!vm || !vm->has_read_only) {
vm 1108 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c yesno(!!ctx->vm), err);
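The selftests above repeatedly pick an address space with the same idiom: prefer the context's private ppGTT and fall back to the device's global GTT when there is none. Written out, using the GCC ?: extension with an omitted middle operand:

/* Equivalent to: vm = ctx->vm ? ctx->vm : &i915->ggtt.vm; */
struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm;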
vm 1152 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c __drm_mm_interval_first(&ctx->vm->mm,
vm 1200 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, ctx->vm, NULL);
vm 1299 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vma = i915_vma_instance(obj, ctx->vm, NULL);
vm 1409 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c if (ctx_a->vm == ctx_b->vm)
vm 1412 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c vm_total = ctx_a->vm->total;
vm 1413 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c GEM_BUG_ON(ctx_b->vm->total != vm_total);
vm 197 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c (1 + next_prime_number(i915->ggtt.vm.total >> PAGE_SHIFT)) << PAGE_SHIFT);
vm 336 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL);
vm 46 drivers/gpu/drm/i915/gem/selftests/i915_gem_object.c i915->ggtt.vm.total + PAGE_SIZE);
vm 35 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c u32 sz = min_t(u64, ce->vm->total >> 4, prandom_u32_state(&prng));
vm 120 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c u32 sz = min_t(u64, ce->vm->total >> 4, prandom_u32_state(&prng));
vm 45 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c const int gen = INTEL_GEN(vma->vm->i915);
vm 52 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c obj = i915_gem_object_create_internal(vma->vm->i915, size);
vm 87 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c vma = i915_vma_instance(obj, vma->vm, NULL);
vm 111 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c struct i915_address_space *vm = ctx->vm ?: &engine->gt->ggtt->vm;
vm 117 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c GEM_BUG_ON(vma->size > vm->total);
vm 132 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c if (INTEL_GEN(vm->i915) <= 5)
vm 51 drivers/gpu/drm/i915/gem/selftests/mock_context.c __set_ppgtt(ctx, &ppgtt->vm);
vm 52 drivers/gpu/drm/i915/gem/selftests/mock_context.c i915_vm_put(&ppgtt->vm);
vm 227 drivers/gpu/drm/i915/gt/intel_context.c ce->vm = i915_vm_get(ctx->vm ?: &engine->gt->ggtt->vm);
vm 249 drivers/gpu/drm/i915/gt/intel_context.c i915_vm_put(ce->vm);
vm 46 drivers/gpu/drm/i915/gt/intel_context_types.h struct i915_address_space *vm;
vm 557 drivers/gpu/drm/i915/gt/intel_engine_cs.c vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
vm 240 drivers/gpu/drm/i915/gt/intel_gt.c vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
vm 439 drivers/gpu/drm/i915/gt/intel_lrc.c if (i915_vm_is_4lvl(ce->vm))
vm 1905 drivers/gpu/drm/i915/gt/intel_lrc.c struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(rq->hw_context->vm);
vm 1978 drivers/gpu/drm/i915/gt/intel_lrc.c if (i915_vm_is_4lvl(request->hw_context->vm))
vm 2224 drivers/gpu/drm/i915/gt/intel_lrc.c vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
vm 3193 drivers/gpu/drm/i915/gt/intel_lrc.c struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ce->vm);
vm 3268 drivers/gpu/drm/i915/gt/intel_lrc.c if (i915_vm_is_4lvl(&ppgtt->vm)) {
vm 3371 drivers/gpu/drm/i915/gt/intel_lrc.c vma = i915_vma_instance(ctx_obj, &engine->gt->ggtt->vm, NULL);
vm 196 drivers/gpu/drm/i915/gt/intel_renderstate.c so.vma = i915_vma_instance(so.obj, &engine->gt->ggtt->vm, NULL);
vm 1215 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_coherent_map_type(vma->vm->i915));
vm 1269 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct i915_address_space *vm = &ggtt->vm;
vm 1270 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct drm_i915_private *i915 = vm->i915;
vm 1284 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (vm->has_read_only)
vm 1287 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vma = i915_vma_instance(obj, vm, NULL);
vm 1367 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct i915_address_space *vm;
vm 1369 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vm = ce->vm;
vm 1370 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (i915_is_ggtt(vm))
vm 1371 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vm = &i915_vm_to_ggtt(vm)->alias->vm;
vm 1373 drivers/gpu/drm/i915/gt/intel_ringbuffer.c return vm;
vm 1378 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct i915_address_space *vm;
vm 1381 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vm = vm_alias(ce);
vm 1382 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (vm)
vm 1383 drivers/gpu/drm/i915/gt/intel_ringbuffer.c err = gen6_ppgtt_pin(i915_vm_to_ppgtt((vm)));
vm 1390 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct i915_address_space *vm;
vm 1392 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vm = vm_alias(ce);
vm 1393 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (vm)
vm 1394 drivers/gpu/drm/i915/gt/intel_ringbuffer.c gen6_ppgtt_unpin(i915_vm_to_ppgtt(vm));
vm 1455 drivers/gpu/drm/i915/gt/intel_ringbuffer.c vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);
vm 1747 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct i915_address_space *vm = vm_alias(rq->hw_context);
vm 1754 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (vm) {
vm 1755 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);
vm 1802 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (vm) {
vm 1836 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_vm_to_ppgtt(vm)->pd_dirty_engines |= unwind_mm;
vm 46 drivers/gpu/drm/i915/gt/intel_timeline.c vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
vm 1416 drivers/gpu/drm/i915/gt/intel_workarounds.c create_scratch(struct i915_address_space *vm, int count)
vm 1424 drivers/gpu/drm/i915/gt/intel_workarounds.c obj = i915_gem_object_create_internal(vm->i915, size);
vm 1430 drivers/gpu/drm/i915/gt/intel_workarounds.c vma = i915_vma_instance(obj, vm, NULL);
vm 1514 drivers/gpu/drm/i915/gt/intel_workarounds.c vma = create_scratch(&ce->engine->gt->ggtt->vm, wal->count);
vm 134 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct i915_address_space *vm = h->ctx->vm ?: &engine->gt->ggtt->vm;
vm 159 drivers/gpu/drm/i915/gt/selftest_hangcheck.c vma = i915_vma_instance(h->obj, vm, NULL);
vm 163 drivers/gpu/drm/i915/gt/selftest_hangcheck.c hws = i915_vma_instance(h->hws, vm, NULL);
vm 1129 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct i915_address_space *vm = arg->vma->vm;
vm 1130 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct drm_i915_private *i915 = vm->i915;
vm 1137 drivers/gpu/drm/i915/gt/selftest_hangcheck.c err = i915_gem_evict_for_node(vm, &evict, 0);
vm 1146 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct drm_i915_private *i915 = arg->vma->vm->i915;
vm 1182 drivers/gpu/drm/i915/gt/selftest_hangcheck.c struct i915_address_space *vm,
vm 1218 drivers/gpu/drm/i915/gt/selftest_hangcheck.c arg.vma = i915_vma_instance(obj, vm, NULL);
vm 1335 drivers/gpu/drm/i915/gt/selftest_hangcheck.c return __igt_reset_evict_vma(gt, &gt->ggtt->vm,
vm 1359 drivers/gpu/drm/i915/gt/selftest_hangcheck.c if (ctx->vm) /* aliasing == global gtt locking, covered above */
vm 1360 drivers/gpu/drm/i915/gt/selftest_hangcheck.c err = __igt_reset_evict_vma(gt, ctx->vm,
vm 1372 drivers/gpu/drm/i915/gt/selftest_hangcheck.c return __igt_reset_evict_vma(gt, &gt->ggtt->vm,
vm 252 drivers/gpu/drm/i915/gt/selftest_lrc.c vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL);
vm 346 drivers/gpu/drm/i915/gt/selftest_lrc.c vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL);
vm 1443 drivers/gpu/drm/i915/gt/selftest_lrc.c vma = i915_vma_instance(batch, ctx->vm, NULL);
vm 99 drivers/gpu/drm/i915/gt/selftest_workarounds.c vma = i915_vma_instance(result, &engine->gt->ggtt->vm, NULL);
vm 365 drivers/gpu/drm/i915/gt/selftest_workarounds.c vma = i915_vma_instance(obj, ctx->vm, NULL);
vm 471 drivers/gpu/drm/i915/gt/selftest_workarounds.c scratch = create_scratch(ctx->vm, 2 * ARRAY_SIZE(values) + 1);
vm 993 drivers/gpu/drm/i915/gt/selftest_workarounds.c if (!i915->kernel_context->vm)
vm 1005 drivers/gpu/drm/i915/gt/selftest_workarounds.c client[i].scratch[0] = create_scratch(c->vm, 1024);
vm 1012 drivers/gpu/drm/i915/gt/selftest_workarounds.c client[i].scratch[1] = create_scratch(c->vm, 1024);
vm 603 drivers/gpu/drm/i915/gt/uc/intel_guc.c vma = i915_vma_instance(obj, &gt->ggtt->vm, NULL);
vm 484 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c struct drm_i915_private *i915 = vma->vm->i915;
vm 419 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c .vm = &ggtt->vm,
vm 428 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c ggtt->vm.insert_entries(&ggtt->vm, &dummy, I915_CACHE_NONE, 0);
vm 438 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c ggtt->vm.clear_range(&ggtt->vm, start, obj->base.size);
vm 66 drivers/gpu/drm/i915/gvt/aperture_gm.c ret = i915_gem_gtt_insert(&dev_priv->ggtt.vm, node,
vm 175 drivers/gpu/drm/i915/gvt/aperture_gm.c mutex_lock(&dev_priv->ggtt.vm.mutex);
vm 182 drivers/gpu/drm/i915/gvt/aperture_gm.c mutex_unlock(&dev_priv->ggtt.vm.mutex);
vm 198 drivers/gpu/drm/i915/gvt/aperture_gm.c mutex_lock(&dev_priv->ggtt.vm.mutex);
vm 210 drivers/gpu/drm/i915/gvt/aperture_gm.c mutex_unlock(&dev_priv->ggtt.vm.mutex);
vm 223 drivers/gpu/drm/i915/gvt/aperture_gm.c mutex_unlock(&dev_priv->ggtt.vm.mutex);
vm 383 drivers/gpu/drm/i915/gvt/gvt.h #define gvt_ggtt_gm_sz(gvt) (gvt->dev_priv->ggtt.vm.total)
vm 385 drivers/gpu/drm/i915/gvt/gvt.h ((gvt->dev_priv->ggtt.vm.total >> PAGE_SHIFT) << 3)
vm 368 drivers/gpu/drm/i915/gvt/scheduler.c struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ctx->vm);
vm 1151 drivers/gpu/drm/i915/gvt/scheduler.c if (i915_vm_is_4lvl(&ppgtt->vm)) {
vm 1178 drivers/gpu/drm/i915/gvt/scheduler.c i915_context_ppgtt_root_restore(s, i915_vm_to_ppgtt(s->shadow[0]->vm));
vm 1212 drivers/gpu/drm/i915/gvt/scheduler.c if (i915_vm_is_4lvl(&ppgtt->vm)) {
vm 1253 drivers/gpu/drm/i915/gvt/scheduler.c i915_context_ppgtt_root_save(s, i915_vm_to_ppgtt(ctx->vm));
vm 1303 drivers/gpu/drm/i915/gvt/scheduler.c i915_context_ppgtt_root_restore(s, i915_vm_to_ppgtt(ctx->vm));
vm 233 drivers/gpu/drm/i915/i915_debugfs.c struct i915_address_space *vm;
vm 252 drivers/gpu/drm/i915/i915_debugfs.c if (!stats->vm) {
vm 272 drivers/gpu/drm/i915/i915_debugfs.c cmp = i915_vma_compare(vma, stats->vm, NULL);
vm 332 drivers/gpu/drm/i915/i915_debugfs.c struct file_stats stats = { .vm = ctx->vm, };
vm 2300 drivers/gpu/drm/i915/i915_drv.h struct i915_address_space *vm,
vm 2380 drivers/gpu/drm/i915/i915_drv.h int __must_check i915_gem_evict_something(struct i915_address_space *vm,
vm 2385 drivers/gpu/drm/i915/i915_drv.h int __must_check i915_gem_evict_for_node(struct i915_address_space *vm,
vm 2388 drivers/gpu/drm/i915/i915_drv.h int i915_gem_evict_vm(struct i915_address_space *vm);
vm 69 drivers/gpu/drm/i915/i915_gem.c return drm_mm_insert_node_in_range(&ggtt->vm.mm, node,
vm 90 drivers/gpu/drm/i915/i915_gem.c mutex_lock(&ggtt->vm.mutex);
vm 92 drivers/gpu/drm/i915/i915_gem.c pinned = ggtt->vm.reserved;
vm 93 drivers/gpu/drm/i915/i915_gem.c list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link)
vm 97 drivers/gpu/drm/i915/i915_gem.c mutex_unlock(&ggtt->vm.mutex);
vm 99 drivers/gpu/drm/i915/i915_gem.c args->aper_size = ggtt->vm.total;
vm 397 drivers/gpu/drm/i915/i915_gem.c ggtt->vm.insert_page(&ggtt->vm,
vm 419 drivers/gpu/drm/i915/i915_gem.c ggtt->vm.clear_range(&ggtt->vm, node.start, node.size);
vm 609 drivers/gpu/drm/i915/i915_gem.c intel_gt_flush_ggtt_writes(ggtt->vm.gt);
vm 610 drivers/gpu/drm/i915/i915_gem.c ggtt->vm.insert_page(&ggtt->vm,
vm 638 drivers/gpu/drm/i915/i915_gem.c intel_gt_flush_ggtt_writes(ggtt->vm.gt);
vm 640 drivers/gpu/drm/i915/i915_gem.c ggtt->vm.clear_range(&ggtt->vm, node.start, node.size);
vm 966 drivers/gpu/drm/i915/i915_gem.c struct i915_address_space *vm = &dev_priv->ggtt.vm;
vm 968 drivers/gpu/drm/i915/i915_gem.c return i915_gem_object_pin(obj, vm, view, size, alignment,
vm 974 drivers/gpu/drm/i915/i915_gem.c struct i915_address_space *vm,
vm 1021 drivers/gpu/drm/i915/i915_gem.c vma = i915_vma_instance(obj, vm, view);
vm 1048 drivers/gpu/drm/i915/i915_gem.c mutex_lock(&vma->vm->mutex);
vm 1050 drivers/gpu/drm/i915/i915_gem.c mutex_unlock(&vma->vm->mutex);
vm 92 drivers/gpu/drm/i915/i915_gem_evict.c i915_gem_evict_something(struct i915_address_space *vm,
vm 98 drivers/gpu/drm/i915/i915_gem_evict.c struct drm_i915_private *dev_priv = vm->i915;
vm 107 drivers/gpu/drm/i915/i915_gem_evict.c lockdep_assert_held(&vm->i915->drm.struct_mutex);
vm 108 drivers/gpu/drm/i915/i915_gem_evict.c trace_i915_gem_evict(vm, min_size, alignment, flags);
vm 126 drivers/gpu/drm/i915/i915_gem_evict.c drm_mm_scan_init_with_range(&scan, &vm->mm,
vm 142 drivers/gpu/drm/i915/i915_gem_evict.c list_for_each_entry_safe(vma, next, &vm->bound_list, vm_link) {
vm 170 drivers/gpu/drm/i915/i915_gem_evict.c list_move_tail(&vma->vm_link, &vm->bound_list);
vm 191 drivers/gpu/drm/i915/i915_gem_evict.c if (!i915_is_ggtt(vm) || flags & PIN_NONBLOCK)
vm 260 drivers/gpu/drm/i915/i915_gem_evict.c int i915_gem_evict_for_node(struct i915_address_space *vm,
vm 272 drivers/gpu/drm/i915/i915_gem_evict.c lockdep_assert_held(&vm->i915->drm.struct_mutex);
vm 276 drivers/gpu/drm/i915/i915_gem_evict.c trace_i915_gem_evict_node(vm, target, flags);
vm 284 drivers/gpu/drm/i915/i915_gem_evict.c i915_retire_requests(vm->i915);
vm 286 drivers/gpu/drm/i915/i915_gem_evict.c check_color = vm->mm.color_adjust;
vm 297 drivers/gpu/drm/i915/i915_gem_evict.c drm_mm_for_each_node_in_range(node, &vm->mm, start, end) {
vm 372 drivers/gpu/drm/i915/i915_gem_evict.c int i915_gem_evict_vm(struct i915_address_space *vm)
vm 378 drivers/gpu/drm/i915/i915_gem_evict.c lockdep_assert_held(&vm->i915->drm.struct_mutex);
vm 379 drivers/gpu/drm/i915/i915_gem_evict.c trace_i915_gem_evict_vm(vm);
vm 386 drivers/gpu/drm/i915/i915_gem_evict.c if (i915_is_ggtt(vm)) {
vm 387 drivers/gpu/drm/i915/i915_gem_evict.c ret = ggtt_flush(vm->i915);
vm 393 drivers/gpu/drm/i915/i915_gem_evict.c mutex_lock(&vm->mutex);
vm 394 drivers/gpu/drm/i915/i915_gem_evict.c list_for_each_entry(vma, &vm->bound_list, vm_link) {
vm 401 drivers/gpu/drm/i915/i915_gem_evict.c mutex_unlock(&vm->mutex);
vm 304 drivers/gpu/drm/i915/i915_gem_fence_reg.c lockdep_assert_held(&vma->vm->mutex);
vm 336 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm);
vm 351 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence = fence_find(vma->vm->i915);
vm 402 drivers/gpu/drm/i915/i915_gem_fence_reg.c assert_rpm_wakelock_held(&vma->vm->i915->runtime_pm);
vm 406 drivers/gpu/drm/i915/i915_gem_fence_reg.c err = mutex_lock_interruptible(&vma->vm->mutex);
vm 411 drivers/gpu/drm/i915/i915_gem_fence_reg.c mutex_unlock(&vma->vm->mutex);
vm 430 drivers/gpu/drm/i915/i915_gem_fence_reg.c lockdep_assert_held(&ggtt->vm.mutex);
vm 465 drivers/gpu/drm/i915/i915_gem_fence_reg.c lockdep_assert_held(&ggtt->vm.mutex);
vm 821 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct drm_i915_private *i915 = ggtt->vm.i915;
vm 123 drivers/gpu/drm/i915/i915_gem_gtt.c struct intel_uncore *uncore = ggtt->vm.gt->uncore;
vm 134 drivers/gpu/drm/i915/i915_gem_gtt.c struct intel_uncore *uncore = ggtt->vm.gt->uncore;
vm 153 drivers/gpu/drm/i915/i915_gem_gtt.c err = vma->vm->allocate_va_range(vma->vm,
vm 164 drivers/gpu/drm/i915/i915_gem_gtt.c vma->vm->insert_entries(vma->vm, vma, cache_level, pte_flags);
vm 171 drivers/gpu/drm/i915/i915_gem_gtt.c vma->vm->clear_range(vma->vm, vma->node.start, vma->size);
vm 365 drivers/gpu/drm/i915/i915_gem_gtt.c static struct page *vm_alloc_page(struct i915_address_space *vm, gfp_t gfp)
vm 370 drivers/gpu/drm/i915/i915_gem_gtt.c if (I915_SELFTEST_ONLY(should_fail(&vm->fault_attr, 1)))
vm 371 drivers/gpu/drm/i915/i915_gem_gtt.c i915_gem_shrink_all(vm->i915);
vm 373 drivers/gpu/drm/i915/i915_gem_gtt.c page = stash_pop_page(&vm->free_pages);
vm 377 drivers/gpu/drm/i915/i915_gem_gtt.c if (!vm->pt_kmap_wc)
vm 381 drivers/gpu/drm/i915/i915_gem_gtt.c page = stash_pop_page(&vm->i915->mm.wc_stash);
vm 409 drivers/gpu/drm/i915/i915_gem_gtt.c stash_push_pagevec(&vm->i915->mm.wc_stash, &stack);
vm 413 drivers/gpu/drm/i915/i915_gem_gtt.c stash_push_pagevec(&vm->free_pages, &stack);
vm 425 drivers/gpu/drm/i915/i915_gem_gtt.c static void vm_free_pages_release(struct i915_address_space *vm,
vm 428 drivers/gpu/drm/i915/i915_gem_gtt.c struct pagevec *pvec = &vm->free_pages.pvec;
vm 431 drivers/gpu/drm/i915/i915_gem_gtt.c lockdep_assert_held(&vm->free_pages.lock);
vm 434 drivers/gpu/drm/i915/i915_gem_gtt.c if (vm->pt_kmap_wc) {
vm 439 drivers/gpu/drm/i915/i915_gem_gtt.c stash_push_pagevec(&vm->i915->mm.wc_stash, pvec);
vm 457 drivers/gpu/drm/i915/i915_gem_gtt.c spin_unlock(&vm->free_pages.lock);
vm 462 drivers/gpu/drm/i915/i915_gem_gtt.c spin_lock(&vm->free_pages.lock);
vm 468 drivers/gpu/drm/i915/i915_gem_gtt.c static void vm_free_page(struct i915_address_space *vm, struct page *page)
vm 478 drivers/gpu/drm/i915/i915_gem_gtt.c spin_lock(&vm->free_pages.lock);
vm 479 drivers/gpu/drm/i915/i915_gem_gtt.c while (!pagevec_space(&vm->free_pages.pvec))
vm 480 drivers/gpu/drm/i915/i915_gem_gtt.c vm_free_pages_release(vm, false);
vm 481 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(pagevec_count(&vm->free_pages.pvec) >= PAGEVEC_SIZE);
vm 482 drivers/gpu/drm/i915/i915_gem_gtt.c pagevec_add(&vm->free_pages.pvec, page);
vm 483 drivers/gpu/drm/i915/i915_gem_gtt.c spin_unlock(&vm->free_pages.lock);
vm 486 drivers/gpu/drm/i915/i915_gem_gtt.c static void i915_address_space_fini(struct i915_address_space *vm)
vm 488 drivers/gpu/drm/i915/i915_gem_gtt.c spin_lock(&vm->free_pages.lock);
vm 489 drivers/gpu/drm/i915/i915_gem_gtt.c if (pagevec_count(&vm->free_pages.pvec))
vm 490 drivers/gpu/drm/i915/i915_gem_gtt.c vm_free_pages_release(vm, true);
vm 491 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(pagevec_count(&vm->free_pages.pvec));
vm 492 drivers/gpu/drm/i915/i915_gem_gtt.c spin_unlock(&vm->free_pages.lock);
vm 494 drivers/gpu/drm/i915/i915_gem_gtt.c drm_mm_takedown(&vm->mm);
vm 496 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_destroy(&vm->mutex);
vm 499 drivers/gpu/drm/i915/i915_gem_gtt.c static void ppgtt_destroy_vma(struct i915_address_space *vm)
vm 502 drivers/gpu/drm/i915/i915_gem_gtt.c &vm->bound_list,
vm 503 drivers/gpu/drm/i915/i915_gem_gtt.c &vm->unbound_list,
vm 507 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_lock(&vm->i915->drm.struct_mutex);
vm 514 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_unlock(&vm->i915->drm.struct_mutex);
vm 519 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *vm =
vm 522 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt_destroy_vma(vm);
vm 524 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(!list_empty(&vm->bound_list));
vm 525 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(!list_empty(&vm->unbound_list));
vm 527 drivers/gpu/drm/i915/i915_gem_gtt.c vm->cleanup(vm);
vm 528 drivers/gpu/drm/i915/i915_gem_gtt.c i915_address_space_fini(vm);
vm 530 drivers/gpu/drm/i915/i915_gem_gtt.c kfree(vm);
vm 535 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *vm =
vm 538 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(i915_is_ggtt(vm));
vm 539 drivers/gpu/drm/i915/i915_gem_gtt.c trace_i915_ppgtt_release(vm);
vm 541 drivers/gpu/drm/i915/i915_gem_gtt.c vm->closed = true;
vm 542 drivers/gpu/drm/i915/i915_gem_gtt.c queue_rcu_work(vm->i915->wq, &vm->rcu);
vm 545 drivers/gpu/drm/i915/i915_gem_gtt.c static void i915_address_space_init(struct i915_address_space *vm, int subclass)
vm 547 drivers/gpu/drm/i915/i915_gem_gtt.c kref_init(&vm->ref);
vm 548 drivers/gpu/drm/i915/i915_gem_gtt.c INIT_RCU_WORK(&vm->rcu, __i915_vm_release);
vm 555 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_init(&vm->mutex);
vm 556 drivers/gpu/drm/i915/i915_gem_gtt.c lockdep_set_subclass(&vm->mutex, subclass);
vm 557 drivers/gpu/drm/i915/i915_gem_gtt.c i915_gem_shrinker_taints_mutex(vm->i915, &vm->mutex);
vm 559 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(!vm->total);
vm 560 drivers/gpu/drm/i915/i915_gem_gtt.c drm_mm_init(&vm->mm, 0, vm->total);
vm 561 drivers/gpu/drm/i915/i915_gem_gtt.c vm->mm.head_node.color = I915_COLOR_UNEVICTABLE;
vm 563 drivers/gpu/drm/i915/i915_gem_gtt.c stash_init(&vm->free_pages);
vm 565 drivers/gpu/drm/i915/i915_gem_gtt.c INIT_LIST_HEAD(&vm->unbound_list);
vm 566 drivers/gpu/drm/i915/i915_gem_gtt.c INIT_LIST_HEAD(&vm->bound_list);
vm 569 drivers/gpu/drm/i915/i915_gem_gtt.c static int __setup_page_dma(struct i915_address_space *vm,
vm 573 drivers/gpu/drm/i915/i915_gem_gtt.c p->page = vm_alloc_page(vm, gfp | I915_GFP_ALLOW_FAIL);
vm 577 drivers/gpu/drm/i915/i915_gem_gtt.c p->daddr = dma_map_page_attrs(vm->dma,
vm 582 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(dma_mapping_error(vm->dma, p->daddr))) {
vm 583 drivers/gpu/drm/i915/i915_gem_gtt.c vm_free_page(vm, p->page);
vm 590 drivers/gpu/drm/i915/i915_gem_gtt.c static int setup_page_dma(struct i915_address_space *vm,
vm 593 drivers/gpu/drm/i915/i915_gem_gtt.c return __setup_page_dma(vm, p, __GFP_HIGHMEM);
vm 596 drivers/gpu/drm/i915/i915_gem_gtt.c static void cleanup_page_dma(struct i915_address_space *vm,
vm 599 drivers/gpu/drm/i915/i915_gem_gtt.c dma_unmap_page(vm->dma, p->daddr, PAGE_SIZE, PCI_DMA_BIDIRECTIONAL);
vm 600 drivers/gpu/drm/i915/i915_gem_gtt.c vm_free_page(vm, p->page);
vm 618 drivers/gpu/drm/i915/i915_gem_gtt.c setup_scratch_page(struct i915_address_space *vm, gfp_t gfp)
vm 634 drivers/gpu/drm/i915/i915_gem_gtt.c if (i915_vm_is_4lvl(vm) &&
vm 635 drivers/gpu/drm/i915/i915_gem_gtt.c HAS_PAGE_SIZES(vm->i915, I915_GTT_PAGE_SIZE_64K)) {
vm 650 drivers/gpu/drm/i915/i915_gem_gtt.c addr = dma_map_page_attrs(vm->dma,
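Read together, the i915_address_space_init() entries above (lines 545-566) amount to a checklist for bringing up an address space. Condensed into one block for orientation (not a drop-in replacement; vm->total must already be set, as the GEM_BUG_ON() in the listing enforces):

kref_init(&vm->ref);
mutex_init(&vm->mutex);
lockdep_set_subclass(&vm->mutex, subclass);
drm_mm_init(&vm->mm, 0, vm->total);		/* VA range [0, total) */
vm->mm.head_node.color = I915_COLOR_UNEVICTABLE;
stash_init(&vm->free_pages);
INIT_LIST_HEAD(&vm->unbound_list);
INIT_LIST_HEAD(&vm->bound_list);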
vm 655 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(dma_mapping_error(vm->dma, addr)))
vm 661 drivers/gpu/drm/i915/i915_gem_gtt.c vm->scratch[0].base.page = page;
vm 662 drivers/gpu/drm/i915/i915_gem_gtt.c vm->scratch[0].base.daddr = addr;
vm 663 drivers/gpu/drm/i915/i915_gem_gtt.c vm->scratch_order = order;
vm 667 drivers/gpu/drm/i915/i915_gem_gtt.c dma_unmap_page(vm->dma, addr, size, PCI_DMA_BIDIRECTIONAL);
vm 679 drivers/gpu/drm/i915/i915_gem_gtt.c static void cleanup_scratch_page(struct i915_address_space *vm)
vm 681 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_dma *p = px_base(&vm->scratch[0]);
vm 682 drivers/gpu/drm/i915/i915_gem_gtt.c unsigned int order = vm->scratch_order;
vm 684 drivers/gpu/drm/i915/i915_gem_gtt.c dma_unmap_page(vm->dma, p->daddr, BIT(order) << PAGE_SHIFT,
vm 689 drivers/gpu/drm/i915/i915_gem_gtt.c static void free_scratch(struct i915_address_space *vm)
vm 693 drivers/gpu/drm/i915/i915_gem_gtt.c if (!px_dma(&vm->scratch[0])) /* set to 0 on clones */
vm 696 drivers/gpu/drm/i915/i915_gem_gtt.c for (i = 1; i <= vm->top; i++) {
vm 697 drivers/gpu/drm/i915/i915_gem_gtt.c if (!px_dma(&vm->scratch[i]))
vm 699 drivers/gpu/drm/i915/i915_gem_gtt.c cleanup_page_dma(vm, px_base(&vm->scratch[i]));
vm 702 drivers/gpu/drm/i915/i915_gem_gtt.c cleanup_scratch_page(vm);
vm 705 drivers/gpu/drm/i915/i915_gem_gtt.c static struct i915_page_table *alloc_pt(struct i915_address_space *vm)
vm 713 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(setup_page_dma(vm, &pt->base))) {
vm 734 drivers/gpu/drm/i915/i915_gem_gtt.c static struct i915_page_directory *alloc_pd(struct i915_address_space *vm)
vm 742 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(setup_page_dma(vm, px_base(pd)))) {
vm 750 drivers/gpu/drm/i915/i915_gem_gtt.c static void free_pd(struct i915_address_space *vm, struct i915_page_dma *pd)
vm 752 drivers/gpu/drm/i915/i915_gem_gtt.c cleanup_page_dma(vm, pd);
vm 756 drivers/gpu/drm/i915/i915_gem_gtt.c #define free_px(vm, px) free_pd(vm, px_base(px))
vm 832 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *dev_priv = ppgtt->vm.i915;
vm 843 drivers/gpu/drm/i915/i915_gem_gtt.c if (i915_vm_is_4lvl(&ppgtt->vm)) {
vm 911 drivers/gpu/drm/i915/i915_gem_gtt.c static inline unsigned int gen8_pd_top_count(const struct i915_address_space *vm)
vm 913 drivers/gpu/drm/i915/i915_gem_gtt.c unsigned int shift = __gen8_pte_shift(vm->top);
vm 914 drivers/gpu/drm/i915/i915_gem_gtt.c return (vm->total + (1ull << shift) - 1) >> shift;
vm 918 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pdp_for_page_index(struct i915_address_space * const vm, const u64 idx)
vm 920 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm);
vm 922 drivers/gpu/drm/i915/i915_gem_gtt.c if (vm->top == 2)
vm 925 drivers/gpu/drm/i915/i915_gem_gtt.c return i915_pd_entry(ppgtt->pd, gen8_pd_index(idx, vm->top));
vm 929 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pdp_for_page_address(struct i915_address_space * const vm, const u64 addr)
vm 931 drivers/gpu/drm/i915/i915_gem_gtt.c return gen8_pdp_for_page_index(vm, addr >> GEN8_PTE_SHIFT);
vm 934 drivers/gpu/drm/i915/i915_gem_gtt.c static void __gen8_ppgtt_cleanup(struct i915_address_space *vm,
vm 945 drivers/gpu/drm/i915/i915_gem_gtt.c __gen8_ppgtt_cleanup(vm, *pde, GEN8_PDES, lvl - 1);
vm 949 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(vm, pd);
vm 952 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen8_ppgtt_cleanup(struct i915_address_space *vm)
vm 954 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);
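gen8_pd_top_count() above rounds vm->total up to whole top-level entries. Assuming the usual GEN8 layout (12-bit pages, 9 address bits per level), the two interesting cases work out as follows; the helper name here is illustrative:

/* DIV_ROUND_UP of the VA size by the reach of one top-level entry.
 * 4-level, 48-bit vm: shift = 12 + 3*9 = 39, count = 2^48 >> 39 = 512.
 * 3-level, 32-bit vm: shift = 12 + 2*9 = 30, count = 2^32 >> 30 = 4,
 * which is the GEN8_3LVL_PDPES value checked later in the listing. */
static unsigned int example_top_count(u64 total, unsigned int shift)
{
	return (total + (1ull << shift) - 1) >> shift;
}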
vm 956 drivers/gpu/drm/i915/i915_gem_gtt.c if (intel_vgpu_active(vm->i915))
vm 959 drivers/gpu/drm/i915/i915_gem_gtt.c __gen8_ppgtt_cleanup(vm, ppgtt->pd, gen8_pd_top_count(vm), vm->top);
vm 960 drivers/gpu/drm/i915/i915_gem_gtt.c free_scratch(vm);
vm 963 drivers/gpu/drm/i915/i915_gem_gtt.c static u64 __gen8_ppgtt_clear(struct i915_address_space * const vm,
vm 967 drivers/gpu/drm/i915/i915_gem_gtt.c const struct i915_page_scratch * const scratch = &vm->scratch[lvl];
vm 970 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(end > vm->total >> GEN8_PTE_SHIFT);
vm 974 drivers/gpu/drm/i915/i915_gem_gtt.c __func__, vm, lvl + 1, start, end,
vm 984 drivers/gpu/drm/i915/i915_gem_gtt.c __func__, vm, lvl + 1, idx, start, end);
vm 986 drivers/gpu/drm/i915/i915_gem_gtt.c __gen8_ppgtt_cleanup(vm, as_pd(pt), I915_PDES, lvl);
vm 992 drivers/gpu/drm/i915/i915_gem_gtt.c start = __gen8_ppgtt_clear(vm, as_pd(pt),
vm 1000 drivers/gpu/drm/i915/i915_gem_gtt.c __func__, vm, lvl, start, end,
vm 1007 drivers/gpu/drm/i915/i915_gem_gtt.c vm->scratch[0].encode,
vm 1016 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(vm, pt);
vm 1022 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen8_ppgtt_clear(struct i915_address_space *vm,
vm 1027 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(range_overflows(start, length, vm->total));
vm 1033 drivers/gpu/drm/i915/i915_gem_gtt.c __gen8_ppgtt_clear(vm, i915_vm_to_ppgtt(vm)->pd,
vm 1034 drivers/gpu/drm/i915/i915_gem_gtt.c start, start + length, vm->top);
vm 1037 drivers/gpu/drm/i915/i915_gem_gtt.c static int __gen8_ppgtt_alloc(struct i915_address_space * const vm,
vm 1041 drivers/gpu/drm/i915/i915_gem_gtt.c const struct i915_page_scratch * const scratch = &vm->scratch[lvl];
vm 1046 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(end > vm->total >> GEN8_PTE_SHIFT);
vm 1050 drivers/gpu/drm/i915/i915_gem_gtt.c __func__, vm, lvl + 1, *start, end,
vm 1063 drivers/gpu/drm/i915/i915_gem_gtt.c __func__, vm, lvl + 1, idx);
vm 1068 drivers/gpu/drm/i915/i915_gem_gtt.c pt = &alloc_pd(vm)->pt;
vm 1075 drivers/gpu/drm/i915/i915_gem_gtt.c fill_px(pt, vm->scratch[lvl].encode);
vm 1078 drivers/gpu/drm/i915/i915_gem_gtt.c pt = alloc_pt(vm);
vm 1085 drivers/gpu/drm/i915/i915_gem_gtt.c if (intel_vgpu_active(vm->i915) ||
vm 1087 drivers/gpu/drm/i915/i915_gem_gtt.c fill_px(pt, vm->scratch[lvl].encode);
vm 1101 drivers/gpu/drm/i915/i915_gem_gtt.c ret = __gen8_ppgtt_alloc(vm, as_pd(pt),
vm 1105 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(vm, pt);
vm 1116 drivers/gpu/drm/i915/i915_gem_gtt.c __func__, vm, lvl, *start, end,
vm 1129 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(vm, alloc);
vm 1133 drivers/gpu/drm/i915/i915_gem_gtt.c static int gen8_ppgtt_alloc(struct i915_address_space *vm,
vm 1141 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(range_overflows(start, length, vm->total));
vm 1148 drivers/gpu/drm/i915/i915_gem_gtt.c err = __gen8_ppgtt_alloc(vm, i915_vm_to_ppgtt(vm)->pd,
vm 1149 drivers/gpu/drm/i915/i915_gem_gtt.c &start, start + length, vm->top);
vm 1151 drivers/gpu/drm/i915/i915_gem_gtt.c __gen8_ppgtt_clear(vm, i915_vm_to_ppgtt(vm)->pd,
vm 1152 drivers/gpu/drm/i915/i915_gem_gtt.c from, start, vm->top);
vm 1223 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(!i915_vm_is_4lvl(vma->vm));
vm 1227 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pdp_for_page_address(vma->vm, start);
vm 1299 drivers/gpu/drm/i915/i915_gem_gtt.c (i915_vm_has_scratch_64K(vma->vm) &&
vm 1317 drivers/gpu/drm/i915/i915_gem_gtt.c if (I915_SELFTEST_ONLY(vma->vm->scrub_64K)) {
vm 1320 drivers/gpu/drm/i915/i915_gem_gtt.c encode = vma->vm->scratch[0].encode;
vm 1334 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen8_ppgtt_insert(struct i915_address_space *vm,
vm 1339 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm);
vm 1349 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pdp_for_page_index(vm, idx);
vm 1359 drivers/gpu/drm/i915/i915_gem_gtt.c static int gen8_init_scratch(struct i915_address_space *vm)
vm 1368 drivers/gpu/drm/i915/i915_gem_gtt.c if (vm->has_read_only &&
vm 1369 drivers/gpu/drm/i915/i915_gem_gtt.c vm->i915->kernel_context &&
vm 1370 drivers/gpu/drm/i915/i915_gem_gtt.c vm->i915->kernel_context->vm) {
vm 1371 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *clone = vm->i915->kernel_context->vm;
vm 1375 drivers/gpu/drm/i915/i915_gem_gtt.c vm->scratch_order = clone->scratch_order;
vm 1376 drivers/gpu/drm/i915/i915_gem_gtt.c memcpy(vm->scratch, clone->scratch, sizeof(vm->scratch));
vm 1377 drivers/gpu/drm/i915/i915_gem_gtt.c px_dma(&vm->scratch[0]) = 0; /* no xfer of ownership */
vm 1381 drivers/gpu/drm/i915/i915_gem_gtt.c ret = setup_scratch_page(vm, __GFP_HIGHMEM);
vm 1385 drivers/gpu/drm/i915/i915_gem_gtt.c vm->scratch[0].encode =
vm 1386 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pte_encode(px_dma(&vm->scratch[0]),
vm 1387 drivers/gpu/drm/i915/i915_gem_gtt.c I915_CACHE_LLC, vm->has_read_only);
vm 1389 drivers/gpu/drm/i915/i915_gem_gtt.c for (i = 1; i <= vm->top; i++) {
vm 1390 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(setup_page_dma(vm, px_base(&vm->scratch[i]))))
vm 1393 drivers/gpu/drm/i915/i915_gem_gtt.c fill_px(&vm->scratch[i], vm->scratch[i - 1].encode);
vm 1394 drivers/gpu/drm/i915/i915_gem_gtt.c vm->scratch[i].encode =
vm 1395 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pde_encode(px_dma(&vm->scratch[i]),
vm 1402 drivers/gpu/drm/i915/i915_gem_gtt.c free_scratch(vm);
vm 1408 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *vm = &ppgtt->vm;
vm 1412 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(vm->top != 2);
vm 1413 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(gen8_pd_top_count(vm) != GEN8_3LVL_PDPES);
vm 1418 drivers/gpu/drm/i915/i915_gem_gtt.c pde = alloc_pd(vm);
vm 1422 drivers/gpu/drm/i915/i915_gem_gtt.c fill_px(pde, vm->scratch[1].encode);
vm 1434 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.gt = gt;
vm 1435 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.i915 = i915;
vm 1436 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.dma = &i915->drm.pdev->dev;
vm 1437 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.total = BIT_ULL(INTEL_INFO(i915)->ppgtt_size);
vm 1439 drivers/gpu/drm/i915/i915_gem_gtt.c i915_address_space_init(&ppgtt->vm, VM_CLASS_PPGTT);
vm 1441 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.vma_ops.bind_vma = ppgtt_bind_vma;
vm 1442 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.vma_ops.unbind_vma = ppgtt_unbind_vma;
vm 1443 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.vma_ops.set_pages = ppgtt_set_pages;
vm 1444 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.vma_ops.clear_pages = clear_pages;
vm 1448 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_alloc_top_pd(struct i915_address_space *vm)
vm 1450 drivers/gpu/drm/i915/i915_gem_gtt.c const unsigned int count = gen8_pd_top_count(vm);
vm 1459 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(setup_page_dma(vm, px_base(pd)))) {
vm 1464 drivers/gpu/drm/i915/i915_gem_gtt.c fill_page_dma(px_base(pd), vm->scratch[vm->top].encode, count);
vm 1486 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.top = i915_vm_is_4lvl(&ppgtt->vm) ? 3 : 2;
vm 1494 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.has_read_only = INTEL_GEN(i915) != 11;
vm 1500 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.pt_kmap_wc = true;
vm 1502 drivers/gpu/drm/i915/i915_gem_gtt.c err = gen8_init_scratch(&ppgtt->vm);
vm 1506 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->pd = gen8_alloc_top_pd(&ppgtt->vm);
vm 1512 drivers/gpu/drm/i915/i915_gem_gtt.c if (!i915_vm_is_4lvl(&ppgtt->vm)) {
vm 1520 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.insert_entries = gen8_ppgtt_insert;
vm 1521 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.allocate_va_range = gen8_ppgtt_alloc;
vm 1522 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.clear_range = gen8_ppgtt_clear;
vm 1527 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->vm.cleanup = gen8_ppgtt_cleanup;
vm 1532 drivers/gpu/drm/i915/i915_gem_gtt.c __gen8_ppgtt_cleanup(&ppgtt->vm, ppgtt->pd,
vm 1533 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pd_top_count(&ppgtt->vm), ppgtt->vm.top);
vm 1535 drivers/gpu/drm/i915/i915_gem_gtt.c free_scratch(&ppgtt->vm);
vm 1604 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen6_ppgtt_clear_range(struct i915_address_space *vm,
vm 1607 drivers/gpu/drm/i915/i915_gem_gtt.c struct gen6_ppgtt * const ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));
vm 1609 drivers/gpu/drm/i915/i915_gem_gtt.c const gen6_pte_t scratch_pte = vm->scratch[0].encode;
vm 1620 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(px_base(pt) == px_base(&vm->scratch[1]));
vm 1643 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen6_ppgtt_insert_entries(struct i915_address_space *vm,
vm 1648 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);
vm 1653 drivers/gpu/drm/i915/i915_gem_gtt.c const u32 pte_encode = vm->pte_encode(0, cache_level, flags);
vm 1657 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(pd->entry[act_pt] == &vm->scratch[1]);
vm 1685 drivers/gpu/drm/i915/i915_gem_gtt.c static int gen6_alloc_va_range(struct i915_address_space *vm,
vm 1688 drivers/gpu/drm/i915/i915_gem_gtt.c struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));
vm 1697 drivers/gpu/drm/i915/i915_gem_gtt.c wakeref = intel_runtime_pm_get(&vm->i915->runtime_pm);
vm 1703 drivers/gpu/drm/i915/i915_gem_gtt.c if (px_base(pt) == px_base(&vm->scratch[1])) {
vm 1708 drivers/gpu/drm/i915/i915_gem_gtt.c pt = alloc_pt(vm);
vm 1714 drivers/gpu/drm/i915/i915_gem_gtt.c fill32_px(pt, vm->scratch[0].encode);
vm 1717 drivers/gpu/drm/i915/i915_gem_gtt.c if (pd->entry[pde] == &vm->scratch[1]) {
vm 1736 drivers/gpu/drm/i915/i915_gem_gtt.c gen6_ggtt_invalidate(vm->gt->ggtt);
vm 1742 drivers/gpu/drm/i915/i915_gem_gtt.c gen6_ppgtt_clear_range(vm, from, start - from);
vm 1745 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(vm, alloc);
vm 1746 drivers/gpu/drm/i915/i915_gem_gtt.c intel_runtime_pm_put(&vm->i915->runtime_pm, wakeref);
vm 1752 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space * const vm = &ppgtt->base.vm;
vm 1756 drivers/gpu/drm/i915/i915_gem_gtt.c ret = setup_scratch_page(vm, __GFP_HIGHMEM);
vm 1760 drivers/gpu/drm/i915/i915_gem_gtt.c vm->scratch[0].encode =
vm 1761 drivers/gpu/drm/i915/i915_gem_gtt.c vm->pte_encode(px_dma(&vm->scratch[0]),
vm 1764 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(setup_page_dma(vm, px_base(&vm->scratch[1])))) {
vm 1765 drivers/gpu/drm/i915/i915_gem_gtt.c cleanup_scratch_page(vm);
vm 1769 drivers/gpu/drm/i915/i915_gem_gtt.c fill32_px(&vm->scratch[1], vm->scratch[0].encode);
vm 1770 drivers/gpu/drm/i915/i915_gem_gtt.c memset_p(pd->entry, &vm->scratch[1], I915_PDES);
vm 1779 drivers/gpu/drm/i915/i915_gem_gtt.c px_base(&ppgtt->base.vm.scratch[1]);
vm 1785 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(&ppgtt->base.vm, pt);
vm 1788 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen6_ppgtt_cleanup(struct i915_address_space *vm)
vm 1790 drivers/gpu/drm/i915/i915_gem_gtt.c struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));
vm 1791 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = vm->i915;
vm 1799 drivers/gpu/drm/i915/i915_gem_gtt.c free_scratch(vm);
vm 1820 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm);
vm 1843 drivers/gpu/drm/i915/i915_gem_gtt.c px_base(&ppgtt->base.vm.scratch[1]);
vm 1855 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(&ppgtt->base.vm, pt);
vm 1871 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = ppgtt->base.vm.i915;
vm 1872 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = ppgtt->base.vm.gt->ggtt;
vm 1876 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(size > ggtt->vm.total);
vm 1884 drivers/gpu/drm/i915/i915_gem_gtt.c vma->vm = &ggtt->vm;
vm 1896 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_lock(&vma->vm->mutex);
vm 1897 drivers/gpu/drm/i915/i915_gem_gtt.c list_add(&vma->vm_link, &vma->vm->unbound_list);
vm 1898 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_unlock(&vma->vm->mutex);
vm 1908 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(ppgtt->base.vm.closed);
vm 1970 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->base.vm.top = 1;
vm 1972 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->base.vm.allocate_va_range = gen6_alloc_va_range;
vm 1973 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->base.vm.clear_range = gen6_ppgtt_clear_range;
vm 1974 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->base.vm.insert_entries = gen6_ppgtt_insert_entries;
vm 1975 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->base.vm.cleanup = gen6_ppgtt_cleanup;
vm 1977 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt->base.vm.pte_encode = ggtt->vm.pte_encode;
vm 1998 drivers/gpu/drm/i915/i915_gem_gtt.c free_scratch(&ppgtt->base.vm);
vm 2105 drivers/gpu/drm/i915/i915_gem_gtt.c trace_i915_ppgtt_create(&ppgtt->vm);
vm 2123 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = ggtt->vm.i915;
vm 2131 drivers/gpu/drm/i915/i915_gem_gtt.c intel_gt_check_and_clear_faults(ggtt->vm.gt);
vm 2133 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range(&ggtt->vm, 0, ggtt->vm.total);
vm 2174 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen8_ggtt_insert_page(struct i915_address_space *vm,
vm 2180 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
vm 2189 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen8_ggtt_insert_entries(struct i915_address_space *vm,
vm 2194 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
vm 2217 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen6_ggtt_insert_page(struct i915_address_space *vm,
vm 2223 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
vm 2227 drivers/gpu/drm/i915/i915_gem_gtt.c iowrite32(vm->pte_encode(addr, level, flags), pte);
vm 2238 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen6_ggtt_insert_entries(struct i915_address_space *vm,
vm 2243 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);
vm 2249 drivers/gpu/drm/i915/i915_gem_gtt.c iowrite32(vm->pte_encode(addr, level, flags), &entries[i++]);
vm 2258 drivers/gpu/drm/i915/i915_gem_gtt.c static void nop_clear_range(struct i915_address_space *vm,
vm 2263 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen8_ggtt_clear_range(struct i915_address_space *vm,
void gen8_ggtt_clear_range(struct i915_address_space *vm, vm 2266 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); vm 2269 drivers/gpu/drm/i915/i915_gem_gtt.c const gen8_pte_t scratch_pte = vm->scratch[0].encode; vm 2284 drivers/gpu/drm/i915/i915_gem_gtt.c static void bxt_vtd_ggtt_wa(struct i915_address_space *vm) vm 2286 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *dev_priv = vm->i915; vm 2299 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *vm; vm 2309 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_ggtt_insert_page(arg->vm, arg->addr, arg->offset, arg->level, 0); vm 2310 drivers/gpu/drm/i915/i915_gem_gtt.c bxt_vtd_ggtt_wa(arg->vm); vm 2315 drivers/gpu/drm/i915/i915_gem_gtt.c static void bxt_vtd_ggtt_insert_page__BKL(struct i915_address_space *vm, vm 2321 drivers/gpu/drm/i915/i915_gem_gtt.c struct insert_page arg = { vm, addr, offset, level }; vm 2327 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *vm; vm 2337 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_ggtt_insert_entries(arg->vm, arg->vma, arg->level, arg->flags); vm 2338 drivers/gpu/drm/i915/i915_gem_gtt.c bxt_vtd_ggtt_wa(arg->vm); vm 2343 drivers/gpu/drm/i915/i915_gem_gtt.c static void bxt_vtd_ggtt_insert_entries__BKL(struct i915_address_space *vm, vm 2348 drivers/gpu/drm/i915/i915_gem_gtt.c struct insert_entries arg = { vm, vma, level, flags }; vm 2354 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *vm; vm 2363 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_ggtt_clear_range(arg->vm, arg->start, arg->length); vm 2364 drivers/gpu/drm/i915/i915_gem_gtt.c bxt_vtd_ggtt_wa(arg->vm); vm 2369 drivers/gpu/drm/i915/i915_gem_gtt.c static void bxt_vtd_ggtt_clear_range__BKL(struct i915_address_space *vm, vm 2373 drivers/gpu/drm/i915/i915_gem_gtt.c struct clear_range arg = { vm, start, length }; vm 2378 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen6_ggtt_clear_range(struct i915_address_space *vm, vm 2381 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); vm 2394 drivers/gpu/drm/i915/i915_gem_gtt.c scratch_pte = vm->scratch[0].encode; vm 2399 drivers/gpu/drm/i915/i915_gem_gtt.c static void i915_ggtt_insert_page(struct i915_address_space *vm, vm 2411 drivers/gpu/drm/i915/i915_gem_gtt.c static void i915_ggtt_insert_entries(struct i915_address_space *vm, vm 2423 drivers/gpu/drm/i915/i915_gem_gtt.c static void i915_ggtt_clear_range(struct i915_address_space *vm, vm 2433 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = vma->vm->i915; vm 2444 drivers/gpu/drm/i915/i915_gem_gtt.c vma->vm->insert_entries(vma->vm, vma, cache_level, pte_flags); vm 2460 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = vma->vm->i915; vm 2464 drivers/gpu/drm/i915/i915_gem_gtt.c vma->vm->clear_range(vma->vm, vma->node.start, vma->size); vm 2471 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = vma->vm->i915; vm 2481 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ppgtt *alias = i915_vm_to_ggtt(vma->vm)->alias; vm 2484 drivers/gpu/drm/i915/i915_gem_gtt.c ret = alias->vm.allocate_va_range(&alias->vm, vm 2491 drivers/gpu/drm/i915/i915_gem_gtt.c alias->vm.insert_entries(&alias->vm, vma, vm 2499 drivers/gpu/drm/i915/i915_gem_gtt.c vma->vm->insert_entries(vma->vm, vma, vm 2509 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = vma->vm->i915; vm 2512 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *vm = vma->vm; vm 2516 drivers/gpu/drm/i915/i915_gem_gtt.c vm->clear_range(vm, 
vma->node.start, vma->size); vm 2520 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_address_space *vm = vm 2521 drivers/gpu/drm/i915/i915_gem_gtt.c &i915_vm_to_ggtt(vma->vm)->alias->vm; vm 2523 drivers/gpu/drm/i915/i915_gem_gtt.c vm->clear_range(vm, vma->node.start, vma->size); vm 2583 drivers/gpu/drm/i915/i915_gem_gtt.c ppgtt = i915_ppgtt_create(ggtt->vm.i915); vm 2587 drivers/gpu/drm/i915/i915_gem_gtt.c if (GEM_WARN_ON(ppgtt->vm.total < ggtt->vm.total)) { vm 2598 drivers/gpu/drm/i915/i915_gem_gtt.c err = ppgtt->vm.allocate_va_range(&ppgtt->vm, 0, ggtt->vm.total); vm 2604 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(ggtt->vm.vma_ops.bind_vma != ggtt_bind_vma); vm 2605 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.bind_vma = aliasing_gtt_bind_vma; vm 2607 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(ggtt->vm.vma_ops.unbind_vma != ggtt_unbind_vma); vm 2608 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.unbind_vma = aliasing_gtt_unbind_vma; vm 2613 drivers/gpu/drm/i915/i915_gem_gtt.c i915_vm_put(&ppgtt->vm); vm 2619 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = ggtt->vm.i915; vm 2628 drivers/gpu/drm/i915/i915_gem_gtt.c i915_vm_put(&ppgtt->vm); vm 2630 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.bind_vma = ggtt_bind_vma; vm 2631 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.unbind_vma = ggtt_unbind_vma; vm 2642 drivers/gpu/drm/i915/i915_gem_gtt.c if (!USES_GUC(ggtt->vm.i915)) vm 2645 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(ggtt->vm.total <= GUC_GGTT_TOP); vm 2646 drivers/gpu/drm/i915/i915_gem_gtt.c size = ggtt->vm.total - GUC_GGTT_TOP; vm 2648 drivers/gpu/drm/i915/i915_gem_gtt.c ret = i915_gem_gtt_reserve(&ggtt->vm, &ggtt->uc_fw, size, vm 2691 drivers/gpu/drm/i915/i915_gem_gtt.c intel_wopcm_guc_size(&ggtt->vm.i915->wopcm)); vm 2698 drivers/gpu/drm/i915/i915_gem_gtt.c ret = drm_mm_insert_node_in_range(&ggtt->vm.mm, &ggtt->error_capture, vm 2715 drivers/gpu/drm/i915/i915_gem_gtt.c drm_mm_for_each_hole(entry, &ggtt->vm.mm, hole_start, hole_end) { vm 2718 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range(&ggtt->vm, hole_start, vm 2723 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range(&ggtt->vm, ggtt->vm.total - PAGE_SIZE, PAGE_SIZE); vm 2751 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = ggtt->vm.i915; vm 2754 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.closed = true; vm 2761 drivers/gpu/drm/i915/i915_gem_gtt.c list_for_each_entry_safe(vma, vn, &ggtt->vm.bound_list, vm_link) vm 2769 drivers/gpu/drm/i915/i915_gem_gtt.c if (drm_mm_initialized(&ggtt->vm.mm)) { vm 2771 drivers/gpu/drm/i915/i915_gem_gtt.c i915_address_space_fini(&ggtt->vm); vm 2774 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.cleanup(&ggtt->vm); vm 2839 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *dev_priv = ggtt->vm.i915; vm 2863 drivers/gpu/drm/i915/i915_gem_gtt.c ret = setup_scratch_page(&ggtt->vm, GFP_DMA32); vm 2871 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.scratch[0].encode = vm 2872 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.pte_encode(px_dma(&ggtt->vm.scratch[0]), vm 2959 drivers/gpu/drm/i915/i915_gem_gtt.c static void gen6_gmch_remove(struct i915_address_space *vm) vm 2961 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); vm 2964 drivers/gpu/drm/i915/i915_gem_gtt.c cleanup_scratch_page(vm); vm 2983 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *dev_priv = ggtt->vm.i915; vm 3007 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.total = (size / sizeof(gen8_pte_t)) * 
I915_GTT_PAGE_SIZE; vm 3008 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.cleanup = gen6_gmch_remove; vm 3009 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.insert_page = gen8_ggtt_insert_page; vm 3010 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range = nop_clear_range; vm 3012 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range = gen8_ggtt_clear_range; vm 3014 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.insert_entries = gen8_ggtt_insert_entries; vm 3019 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.insert_entries = bxt_vtd_ggtt_insert_entries__BKL; vm 3020 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.insert_page = bxt_vtd_ggtt_insert_page__BKL; vm 3021 drivers/gpu/drm/i915/i915_gem_gtt.c if (ggtt->vm.clear_range != nop_clear_range) vm 3022 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range = bxt_vtd_ggtt_clear_range__BKL; vm 3027 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.bind_vma = ggtt_bind_vma; vm 3028 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.unbind_vma = ggtt_unbind_vma; vm 3029 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.set_pages = ggtt_set_pages; vm 3030 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.clear_pages = clear_pages; vm 3032 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.pte_encode = gen8_pte_encode; vm 3041 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *dev_priv = ggtt->vm.i915; vm 3068 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.total = (size / sizeof(gen6_pte_t)) * I915_GTT_PAGE_SIZE; vm 3070 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range = nop_clear_range; vm 3072 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range = gen6_ggtt_clear_range; vm 3073 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.insert_page = gen6_ggtt_insert_page; vm 3074 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.insert_entries = gen6_ggtt_insert_entries; vm 3075 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.cleanup = gen6_gmch_remove; vm 3080 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.pte_encode = iris_pte_encode; vm 3082 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.pte_encode = hsw_pte_encode; vm 3084 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.pte_encode = byt_pte_encode; vm 3086 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.pte_encode = ivb_pte_encode; vm 3088 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.pte_encode = snb_pte_encode; vm 3090 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.bind_vma = ggtt_bind_vma; vm 3091 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.unbind_vma = ggtt_unbind_vma; vm 3092 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.set_pages = ggtt_set_pages; vm 3093 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.clear_pages = clear_pages; vm 3098 drivers/gpu/drm/i915/i915_gem_gtt.c static void i915_gmch_remove(struct i915_address_space *vm) vm 3105 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *dev_priv = ggtt->vm.i915; vm 3115 drivers/gpu/drm/i915/i915_gem_gtt.c intel_gtt_get(&ggtt->vm.total, &gmadr_base, &ggtt->mappable_end); vm 3122 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.insert_page = i915_ggtt_insert_page; vm 3123 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.insert_entries = i915_ggtt_insert_entries; vm 3124 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range = i915_ggtt_clear_range; vm 3125 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.cleanup = i915_gmch_remove; vm 3129 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.bind_vma = ggtt_bind_vma; vm 3130 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.unbind_vma = ggtt_unbind_vma;
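
The probe entries above (i915_gem_gtt.c lines 3008-3130) show the GGTT being configured as a table of function pointers: each hardware generation installs its own insert_page, insert_entries, clear_range, cleanup and pte_encode, and a no-op clear_range can be swapped in outright. What follows is a minimal standalone sketch of that dispatch pattern; all names, flag bits and the generation threshold are invented for illustration and are not the i915 driver's own.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t pte_t;

struct addr_space {
	uint64_t total;
	/* implementations are chosen once at probe time */
	pte_t (*pte_encode)(uint64_t addr, unsigned int flags);
	void (*clear_range)(struct addr_space *as, uint64_t start, uint64_t len);
};

static pte_t pte_encode_old(uint64_t addr, unsigned int flags)
{
	return addr | flags | 1;	/* fictitious valid bit */
}

static pte_t pte_encode_new(uint64_t addr, unsigned int flags)
{
	return addr | flags | 3;	/* fictitious valid + cache bits */
}

static void clear_range_real(struct addr_space *as, uint64_t start, uint64_t len)
{
	(void)as;
	printf("clear [%#llx, %#llx)\n",
	       (unsigned long long)start, (unsigned long long)(start + len));
}

static void nop_clear_range(struct addr_space *as, uint64_t start, uint64_t len)
{
	(void)as; (void)start; (void)len;	/* clearing not required */
}

static void probe(struct addr_space *as, int gen, int scratch_is_preclear)
{
	as->pte_encode = gen >= 8 ? pte_encode_new : pte_encode_old;
	as->clear_range = scratch_is_preclear ? nop_clear_range : clear_range_real;
}

int main(void)
{
	struct addr_space as = { .total = 1ull << 32 };

	probe(&as, 8, 0);
	printf("pte = %#llx\n", (unsigned long long)as.pte_encode(0x1000, 0));
	as.clear_range(&as, 0, as.total);
	return 0;
}

After probing, callers never branch on the generation again; they go through the pointers, which is why forms like ggtt->vm.clear_range(&ggtt->vm, ...) and ggtt->vm.insert_page(&ggtt->vm, ...) recur throughout the indexed file.

vm 3131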
drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.set_pages = ggtt_set_pages; vm 3132 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.vma_ops.clear_pages = clear_pages; vm 3146 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.gt = gt; vm 3147 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.i915 = i915; vm 3148 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.dma = &i915->drm.pdev->dev; vm 3159 drivers/gpu/drm/i915/i915_gem_gtt.c if ((ggtt->vm.total - 1) >> 32) { vm 3162 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.total >> 20); vm 3163 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.total = 1ULL << 32; vm 3165 drivers/gpu/drm/i915/i915_gem_gtt.c min_t(u64, ggtt->mappable_end, ggtt->vm.total); vm 3168 drivers/gpu/drm/i915/i915_gem_gtt.c if (ggtt->mappable_end > ggtt->vm.total) { vm 3171 drivers/gpu/drm/i915/i915_gem_gtt.c &ggtt->mappable_end, ggtt->vm.total); vm 3172 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->mappable_end = ggtt->vm.total; vm 3176 drivers/gpu/drm/i915/i915_gem_gtt.c DRM_DEBUG_DRIVER("GGTT size = %lluM\n", ggtt->vm.total >> 20); vm 3204 drivers/gpu/drm/i915/i915_gem_gtt.c struct drm_i915_private *i915 = ggtt->vm.i915; vm 3209 drivers/gpu/drm/i915/i915_gem_gtt.c i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT); vm 3211 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.is_ggtt = true; vm 3214 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.has_read_only = IS_VALLEYVIEW(i915); vm 3217 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.mm.color_adjust = i915_gtt_color_adjust; vm 3222 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.cleanup(&ggtt->vm); vm 3267 drivers/gpu/drm/i915/i915_gem_gtt.c dev_priv->ggtt.vm.cleanup(&dev_priv->ggtt.vm); vm 3307 drivers/gpu/drm/i915/i915_gem_gtt.c intel_gt_check_and_clear_faults(ggtt->vm.gt); vm 3309 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_lock(&ggtt->vm.mutex); vm 3312 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.clear_range(&ggtt->vm, 0, ggtt->vm.total); vm 3313 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.closed = true; /* skip rewriting PTE on VMA unbind */ vm 3316 drivers/gpu/drm/i915/i915_gem_gtt.c list_for_each_entry_safe(vma, vn, &ggtt->vm.bound_list, vm_link) { vm 3322 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_unlock(&ggtt->vm.mutex); vm 3336 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_lock(&ggtt->vm.mutex); vm 3339 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.closed = false; vm 3342 drivers/gpu/drm/i915/i915_gem_gtt.c mutex_unlock(&ggtt->vm.mutex); vm 3631 drivers/gpu/drm/i915/i915_gem_gtt.c int i915_gem_gtt_reserve(struct i915_address_space *vm, vm 3641 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(range_overflows(offset, size, vm->total)); vm 3642 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm); vm 3649 drivers/gpu/drm/i915/i915_gem_gtt.c err = drm_mm_reserve_node(&vm->mm, node); vm 3656 drivers/gpu/drm/i915/i915_gem_gtt.c err = i915_gem_evict_for_node(vm, node, flags); vm 3658 drivers/gpu/drm/i915/i915_gem_gtt.c err = drm_mm_reserve_node(&vm->mm, node); vm 3722 drivers/gpu/drm/i915/i915_gem_gtt.c int i915_gem_gtt_insert(struct i915_address_space *vm, vm 3731 drivers/gpu/drm/i915/i915_gem_gtt.c lockdep_assert_held(&vm->i915->drm.struct_mutex); vm 3739 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm); vm 3764 drivers/gpu/drm/i915/i915_gem_gtt.c err = drm_mm_insert_node_in_range(&vm->mm, node, vm 3771 drivers/gpu/drm/i915/i915_gem_gtt.c err = drm_mm_insert_node_in_range(&vm->mm, node, vm 3807 drivers/gpu/drm/i915/i915_gem_gtt.c err = i915_gem_gtt_reserve(vm, node, size, offset, color, flags); vm 3815 
drivers/gpu/drm/i915/i915_gem_gtt.c err = i915_gem_evict_something(vm, size, alignment, color, vm 3820 drivers/gpu/drm/i915/i915_gem_gtt.c return drm_mm_insert_node_in_range(&vm->mm, node, vm 76 drivers/gpu/drm/i915/i915_gem_gtt.h #define ggtt_total_entries(ggtt) ((ggtt)->vm.total >> PAGE_SHIFT) vm 344 drivers/gpu/drm/i915/i915_gem_gtt.h int (*allocate_va_range)(struct i915_address_space *vm, vm 346 drivers/gpu/drm/i915/i915_gem_gtt.h void (*clear_range)(struct i915_address_space *vm, vm 348 drivers/gpu/drm/i915/i915_gem_gtt.h void (*insert_page)(struct i915_address_space *vm, vm 353 drivers/gpu/drm/i915/i915_gem_gtt.h void (*insert_entries)(struct i915_address_space *vm, vm 357 drivers/gpu/drm/i915/i915_gem_gtt.h void (*cleanup)(struct i915_address_space *vm); vm 365 drivers/gpu/drm/i915/i915_gem_gtt.h #define i915_is_ggtt(vm) ((vm)->is_ggtt) vm 368 drivers/gpu/drm/i915/i915_gem_gtt.h i915_vm_is_4lvl(const struct i915_address_space *vm) vm 370 drivers/gpu/drm/i915/i915_gem_gtt.h return (vm->total - 1) >> 32; vm 374 drivers/gpu/drm/i915/i915_gem_gtt.h i915_vm_has_scratch_64K(struct i915_address_space *vm) vm 376 drivers/gpu/drm/i915/i915_gem_gtt.h return vm->scratch_order == get_order(I915_GTT_PAGE_SIZE_64K); vm 387 drivers/gpu/drm/i915/i915_gem_gtt.h struct i915_address_space vm; vm 423 drivers/gpu/drm/i915/i915_gem_gtt.h struct i915_address_space vm; vm 535 drivers/gpu/drm/i915/i915_gem_gtt.h return px_dma(pt ?: px_base(&ppgtt->vm.scratch[ppgtt->vm.top])); vm 539 drivers/gpu/drm/i915/i915_gem_gtt.h i915_vm_to_ggtt(struct i915_address_space *vm) vm 541 drivers/gpu/drm/i915/i915_gem_gtt.h BUILD_BUG_ON(offsetof(struct i915_ggtt, vm)); vm 542 drivers/gpu/drm/i915/i915_gem_gtt.h GEM_BUG_ON(!i915_is_ggtt(vm)); vm 543 drivers/gpu/drm/i915/i915_gem_gtt.h return container_of(vm, struct i915_ggtt, vm); vm 547 drivers/gpu/drm/i915/i915_gem_gtt.h i915_vm_to_ppgtt(struct i915_address_space *vm) vm 549 drivers/gpu/drm/i915/i915_gem_gtt.h BUILD_BUG_ON(offsetof(struct i915_ppgtt, vm)); vm 550 drivers/gpu/drm/i915/i915_gem_gtt.h GEM_BUG_ON(i915_is_ggtt(vm)); vm 551 drivers/gpu/drm/i915/i915_gem_gtt.h return container_of(vm, struct i915_ppgtt, vm); vm 567 drivers/gpu/drm/i915/i915_gem_gtt.h i915_vm_get(struct i915_address_space *vm) vm 569 drivers/gpu/drm/i915/i915_gem_gtt.h kref_get(&vm->ref); vm 570 drivers/gpu/drm/i915/i915_gem_gtt.h return vm; vm 575 drivers/gpu/drm/i915/i915_gem_gtt.h static inline void i915_vm_put(struct i915_address_space *vm) vm 577 drivers/gpu/drm/i915/i915_gem_gtt.h kref_put(&vm->ref, i915_vm_release); vm 592 drivers/gpu/drm/i915/i915_gem_gtt.h int i915_gem_gtt_reserve(struct i915_address_space *vm, vm 597 drivers/gpu/drm/i915/i915_gem_gtt.h int i915_gem_gtt_insert(struct i915_address_space *vm, vm 995 drivers/gpu/drm/i915/i915_gpu_error.c ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0); vm 1669 drivers/gpu/drm/i915/i915_gpu_error.c ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE); vm 460 drivers/gpu/drm/i915/i915_trace.h __field(struct i915_address_space *, vm) vm 468 drivers/gpu/drm/i915/i915_trace.h __entry->vm = vma->vm; vm 477 drivers/gpu/drm/i915/i915_trace.h __entry->vm) vm 486 drivers/gpu/drm/i915/i915_trace.h __field(struct i915_address_space *, vm) vm 493 drivers/gpu/drm/i915/i915_trace.h __entry->vm = vma->vm; vm 499 drivers/gpu/drm/i915/i915_trace.h __entry->obj, __entry->offset, __entry->size, __entry->vm) vm 593 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct i915_address_space *vm, u64 size, u64 align, unsigned int flags),
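
The i915_gem_gtt.h helpers indexed just above (lines 539-577) depend on struct i915_address_space being embedded at offset zero of its container, so i915_vm_to_ggtt() and i915_vm_to_ppgtt() reduce to a checked container_of, while i915_vm_get()/i915_vm_put() manage the lifetime through a kref. Below is a self-contained sketch of both patterns; the struct names are invented, and a plain int counter stands in for the kernel's atomic struct kref.

#include <stddef.h>
#include <stdio.h>

/* same definition the kernel uses, restated so the sketch compiles alone */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct address_space {
	int ref;			/* simplification of struct kref */
	int is_ggtt;
};

struct ggtt {
	struct address_space vm;	/* first member: offsetof(...) == 0 */
	unsigned long mappable_end;
};

static struct ggtt *vm_to_ggtt(struct address_space *vm)
{
	/* effectively a cast, because vm sits at offset zero */
	return container_of(vm, struct ggtt, vm);
}

static struct address_space *vm_get(struct address_space *vm)
{
	vm->ref++;
	return vm;
}

static void vm_put(struct address_space *vm)
{
	if (--vm->ref == 0)
		printf("last reference dropped, releasing\n");
}

int main(void)
{
	struct ggtt g = { .vm = { .ref = 1, .is_ggtt = 1 }, .mappable_end = 256ul << 20 };
	struct address_space *vm = vm_get(&g.vm);	/* take a second reference */

	printf("mappable_end = %lu\n", vm_to_ggtt(vm)->mappable_end);
	vm_put(vm);
	vm_put(&g.vm);		/* count hits zero, release runs */
	return 0;
}

The BUILD_BUG_ON(offsetof(struct i915_ggtt, vm)) in the real header is the compile-time proof that the container_of is free; the counter here plays the role of kref_get(&vm->ref) and kref_put(&vm->ref, i915_vm_release).

vm 594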
drivers/gpu/drm/i915/i915_trace.h TP_ARGS(vm, size, align, flags), vm 598 drivers/gpu/drm/i915/i915_trace.h __field(struct i915_address_space *, vm) vm 605 drivers/gpu/drm/i915/i915_trace.h __entry->dev = vm->i915->drm.primary->index; vm 606 drivers/gpu/drm/i915/i915_trace.h __entry->vm = vm; vm 613 drivers/gpu/drm/i915/i915_trace.h __entry->dev, __entry->vm, __entry->size, __entry->align, vm 618 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct i915_address_space *vm, struct drm_mm_node *node, unsigned int flags), vm 619 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(vm, node, flags), vm 623 drivers/gpu/drm/i915/i915_trace.h __field(struct i915_address_space *, vm) vm 631 drivers/gpu/drm/i915/i915_trace.h __entry->dev = vm->i915->drm.primary->index; vm 632 drivers/gpu/drm/i915/i915_trace.h __entry->vm = vm; vm 640 drivers/gpu/drm/i915/i915_trace.h __entry->dev, __entry->vm, vm 646 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct i915_address_space *vm), vm 647 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(vm), vm 651 drivers/gpu/drm/i915/i915_trace.h __field(struct i915_address_space *, vm) vm 655 drivers/gpu/drm/i915/i915_trace.h __entry->dev = vm->i915->drm.primary->index; vm 656 drivers/gpu/drm/i915/i915_trace.h __entry->vm = vm; vm 659 drivers/gpu/drm/i915/i915_trace.h TP_printk("dev=%d, vm=%p", __entry->dev, __entry->vm) vm 921 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct i915_address_space *vm), vm 922 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(vm), vm 925 drivers/gpu/drm/i915/i915_trace.h __field(struct i915_address_space *, vm) vm 930 drivers/gpu/drm/i915/i915_trace.h __entry->vm = vm; vm 931 drivers/gpu/drm/i915/i915_trace.h __entry->dev = vm->i915->drm.primary->index; vm 934 drivers/gpu/drm/i915/i915_trace.h TP_printk("dev=%u, vm=%p", __entry->dev, __entry->vm) vm 938 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct i915_address_space *vm), vm 939 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(vm) vm 943 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct i915_address_space *vm), vm 944 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(vm) vm 962 drivers/gpu/drm/i915/i915_trace.h __field(struct i915_address_space *, vm) vm 969 drivers/gpu/drm/i915/i915_trace.h __entry->vm = ctx->vm; vm 973 drivers/gpu/drm/i915/i915_trace.h __entry->dev, __entry->ctx, __entry->vm, __entry->hw_id) vm 131 drivers/gpu/drm/i915/i915_vgpu.c ggtt->vm.reserved -= node->size; vm 146 drivers/gpu/drm/i915/i915_vgpu.c if (!intel_vgpu_active(ggtt->vm.i915)) vm 167 drivers/gpu/drm/i915/i915_vgpu.c ret = i915_gem_gtt_reserve(&ggtt->vm, node, vm 171 drivers/gpu/drm/i915/i915_vgpu.c ggtt->vm.reserved += size; vm 222 drivers/gpu/drm/i915/i915_vgpu.c struct intel_uncore *uncore = &ggtt->vm.i915->uncore; vm 223 drivers/gpu/drm/i915/i915_vgpu.c unsigned long ggtt_end = ggtt->vm.total; vm 229 drivers/gpu/drm/i915/i915_vgpu.c if (!intel_vgpu_active(ggtt->vm.i915)) vm 100 drivers/gpu/drm/i915/i915_vma.c struct i915_address_space *vm, vm 107 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm); vm 113 drivers/gpu/drm/i915/i915_vma.c vma->vm = vm; vm 114 drivers/gpu/drm/i915/i915_vma.c vma->ops = &vm->vma_ops; vm 120 drivers/gpu/drm/i915/i915_vma.c i915_active_init(vm->i915, &vma->active, vm 151 drivers/gpu/drm/i915/i915_vma.c if (unlikely(vma->size > vm->total)) vm 156 drivers/gpu/drm/i915/i915_vma.c if (i915_is_ggtt(vm)) { vm 160 drivers/gpu/drm/i915/i915_vma.c vma->fence_size = i915_gem_fence_size(vm->i915, vma->size, vm 164 drivers/gpu/drm/i915/i915_vma.c vma->fence_size > vm->total)) vm 169 
drivers/gpu/drm/i915/i915_vma.c vma->fence_alignment = i915_gem_fence_alignment(vm->i915, vma->size, vm 193 drivers/gpu/drm/i915/i915_vma.c cmp = i915_vma_compare(pos, vm, view); vm 221 drivers/gpu/drm/i915/i915_vma.c mutex_lock(&vm->mutex); vm 222 drivers/gpu/drm/i915/i915_vma.c list_add(&vma->vm_link, &vm->unbound_list); vm 223 drivers/gpu/drm/i915/i915_vma.c mutex_unlock(&vm->mutex); vm 234 drivers/gpu/drm/i915/i915_vma.c struct i915_address_space *vm, vm 244 drivers/gpu/drm/i915/i915_vma.c cmp = i915_vma_compare(vma, vm, view); vm 274 drivers/gpu/drm/i915/i915_vma.c struct i915_address_space *vm, vm 279 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(view && !i915_is_ggtt(vm)); vm 280 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(vm->closed); vm 283 drivers/gpu/drm/i915/i915_vma.c vma = vma_lookup(obj, vm, view); vm 288 drivers/gpu/drm/i915/i915_vma.c vma = vma_create(obj, vm, view); vm 290 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(!IS_ERR(vma) && i915_vma_compare(vma, vm, view)); vm 316 drivers/gpu/drm/i915/i915_vma.c vma->vm->total))) vm 353 drivers/gpu/drm/i915/i915_vma.c assert_rpm_wakelock_held(&vma->vm->i915->runtime_pm); vm 355 drivers/gpu/drm/i915/i915_vma.c lockdep_assert_held(&vma->vm->i915->drm.struct_mutex); vm 366 drivers/gpu/drm/i915/i915_vma.c ptr = io_mapping_map_wc(&i915_vm_to_ggtt(vma->vm)->iomap, vm 397 drivers/gpu/drm/i915/i915_vma.c intel_gt_flush_ggtt_writes(vma->vm->gt); vm 404 drivers/gpu/drm/i915/i915_vma.c lockdep_assert_held(&vma->vm->i915->drm.struct_mutex); vm 472 drivers/gpu/drm/i915/i915_vma.c mappable = vma->node.start + vma->fence_size <= i915_vm_to_ggtt(vma->vm)->mappable_end; vm 497 drivers/gpu/drm/i915/i915_vma.c if (vma->vm->mm.color_adjust == NULL) vm 544 drivers/gpu/drm/i915/i915_vma.c struct drm_i915_private *dev_priv = vma->vm->i915; vm 568 drivers/gpu/drm/i915/i915_vma.c end = vma->vm->total; vm 610 drivers/gpu/drm/i915/i915_vma.c ret = i915_gem_gtt_reserve(vma->vm, &vma->node, vm 649 drivers/gpu/drm/i915/i915_vma.c ret = i915_gem_gtt_insert(vma->vm, &vma->node, vm 661 drivers/gpu/drm/i915/i915_vma.c mutex_lock(&vma->vm->mutex); vm 662 drivers/gpu/drm/i915/i915_vma.c list_move_tail(&vma->vm_link, &vma->vm->bound_list); vm 663 drivers/gpu/drm/i915/i915_vma.c mutex_unlock(&vma->vm->mutex); vm 688 drivers/gpu/drm/i915/i915_vma.c mutex_lock(&vma->vm->mutex); vm 690 drivers/gpu/drm/i915/i915_vma.c list_move_tail(&vma->vm_link, &vma->vm->unbound_list); vm 691 drivers/gpu/drm/i915/i915_vma.c mutex_unlock(&vma->vm->mutex); vm 718 drivers/gpu/drm/i915/i915_vma.c lockdep_assert_held(&vma->vm->i915->drm.struct_mutex); vm 759 drivers/gpu/drm/i915/i915_vma.c struct drm_i915_private *i915 = vma->vm->i915; vm 783 drivers/gpu/drm/i915/i915_vma.c struct drm_i915_private *i915 = vma->vm->i915; vm 803 drivers/gpu/drm/i915/i915_vma.c mutex_lock(&vma->vm->mutex); vm 805 drivers/gpu/drm/i915/i915_vma.c mutex_unlock(&vma->vm->mutex); vm 823 drivers/gpu/drm/i915/i915_vma.c lockdep_assert_held(&vma->vm->i915->drm.struct_mutex); vm 867 drivers/gpu/drm/i915/i915_vma.c lockdep_assert_held(&vma->vm->mutex); vm 876 drivers/gpu/drm/i915/i915_vma.c unmap_mapping_range(vma->vm->i915->drm.anon_inode->i_mapping, vm 937 drivers/gpu/drm/i915/i915_vma.c lockdep_assert_held(&vma->vm->i915->drm.struct_mutex); vm 985 drivers/gpu/drm/i915/i915_vma.c mutex_lock(&vma->vm->mutex); vm 987 drivers/gpu/drm/i915/i915_vma.c mutex_unlock(&vma->vm->mutex); vm 992 drivers/gpu/drm/i915/i915_vma.c mutex_lock(&vma->vm->mutex); vm 994 drivers/gpu/drm/i915/i915_vma.c mutex_unlock(&vma->vm->mutex); vm 1002 
drivers/gpu/drm/i915/i915_vma.c if (likely(!vma->vm->closed)) { vm 55 drivers/gpu/drm/i915/i915_vma.h struct i915_address_space *vm; vm 150 drivers/gpu/drm/i915/i915_vma.h struct i915_address_space *vm, vm 225 drivers/gpu/drm/i915/i915_vma.h return i915_vm_to_ggtt(vma->vm)->pin_bias; vm 254 drivers/gpu/drm/i915/i915_vma.h struct i915_address_space *vm, vm 259 drivers/gpu/drm/i915/i915_vma.h GEM_BUG_ON(view && !i915_is_ggtt(vm)); vm 261 drivers/gpu/drm/i915/i915_vma.h cmp = ptrdiff(vma->vm, vm); vm 8205 drivers/gpu/drm/i915/intel_pm.c const int vm = vd - 1125; vm 8208 drivers/gpu/drm/i915/intel_pm.c return vm > 0 ? vm : 0; vm 50 drivers/gpu/drm/i915/selftests/i915_gem.c ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0); vm 60 drivers/gpu/drm/i915/selftests/i915_gem.c ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE); vm 73 drivers/gpu/drm/i915/selftests/i915_gem_evict.c count, i915->ggtt.vm.total / PAGE_SIZE); vm 99 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (list_empty(&i915->ggtt.vm.bound_list)) { vm 112 drivers/gpu/drm/i915/selftests/i915_gem_evict.c mutex_lock(&ggtt->vm.mutex); vm 113 drivers/gpu/drm/i915/selftests/i915_gem_evict.c list_for_each_entry(vma, &i915->ggtt.vm.bound_list, vm_link) vm 116 drivers/gpu/drm/i915/selftests/i915_gem_evict.c mutex_unlock(&ggtt->vm.mutex); vm 151 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_something(&ggtt->vm, vm 164 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_something(&ggtt->vm, vm 233 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_for_node(&ggtt->vm, &target, 0); vm 243 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_for_node(&ggtt->vm, &target, 0); vm 282 drivers/gpu/drm/i915/selftests/i915_gem_evict.c ggtt->vm.mm.color_adjust = mock_color_adjust; vm 320 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_for_node(&ggtt->vm, &target, 0); vm 331 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_for_node(&ggtt->vm, &target, 0); vm 343 drivers/gpu/drm/i915/selftests/i915_gem_evict.c ggtt->vm.mm.color_adjust = NULL; vm 361 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_vm(&ggtt->vm); vm 370 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_vm(&ggtt->vm); vm 416 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_gtt_insert(&i915->ggtt.vm, &hole, vm 418 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 0, i915->ggtt.vm.total, vm 434 drivers/gpu/drm/i915/selftests/i915_gem_evict.c if (i915_gem_gtt_insert(&i915->ggtt.vm, &r->node, vm 436 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 0, i915->ggtt.vm.total, vm 164 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (!ppgtt->vm.allocate_va_range) vm 175 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c limit = min(ppgtt->vm.total, limit); vm 179 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = ppgtt->vm.allocate_va_range(&ppgtt->vm, 0, size); vm 191 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c ppgtt->vm.clear_range(&ppgtt->vm, 0, size); vm 196 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = ppgtt->vm.allocate_va_range(&ppgtt->vm, vm 211 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_vm_put(&ppgtt->vm); vm 216 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 252 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c GEM_BUG_ON(count * BIT_ULL(size) > vm->total); vm 279 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c GEM_BUG_ON(addr + BIT_ULL(size) > vm->total); vm 288 
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (vm->allocate_va_range && vm 289 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vm->allocate_va_range(vm, addr, BIT_ULL(size))) vm 297 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vm->insert_entries(vm, &mock_vma, I915_CACHE_NONE, 0); vm 306 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c GEM_BUG_ON(addr + BIT_ULL(size) > vm->total); vm 307 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vm->clear_range(vm, addr, BIT_ULL(size)); vm 322 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm) vm 330 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 343 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 360 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (i915_is_ggtt(vm)) vm 391 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 428 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 464 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 501 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 543 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c close_object_list(&objects, vm); vm 550 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c close_object_list(&objects, vm); vm 555 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 568 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (i915_is_ggtt(vm)) vm 581 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 639 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 650 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (i915_is_ggtt(vm)) vm 657 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 715 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 724 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (i915_is_ggtt(vm)) vm 766 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 825 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 850 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, vm, NULL); vm 886 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c close_object_list(&objects, vm); vm 892 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 899 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vm->fault_attr.probability = 999; vm 900 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c atomic_set(&vm->fault_attr.times, -1); vm 903 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vm->fault_attr.interval = prime; vm 904 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = __shrink_hole(i915, vm, hole_start, hole_end, end_time); vm 909 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c memset(&vm->fault_attr, 0, sizeof(vm->fault_attr)); vm 915 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 941 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(purge, vm, NULL); vm 960 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vm->fault_attr.probability = 100; vm 961 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vm->fault_attr.interval = 1; vm 962 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c atomic_set(&vm->fault_attr.times, -1); vm 964 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(explode, vm, NULL); vm 979 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 
memset(&vm->fault_attr, 0, sizeof(vm->fault_attr)); vm 989 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c memset(&vm->fault_attr, 0, sizeof(vm->fault_attr)); vm 995 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 1017 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c GEM_BUG_ON(offset_in_page(ppgtt->vm.total)); vm 1018 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c GEM_BUG_ON(ppgtt->vm.closed); vm 1020 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = func(dev_priv, &ppgtt->vm, 0, ppgtt->vm.total, end_time); vm 1022 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c i915_vm_put(&ppgtt->vm); vm 1078 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 1090 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_sort(NULL, &ggtt->vm.mm.hole_stack, sort_holes); vm 1091 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c drm_mm_for_each_hole(node, &ggtt->vm.mm, hole_start, hole_end) { vm 1095 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c if (ggtt->vm.mm.color_adjust) vm 1096 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c ggtt->vm.mm.color_adjust(node, 0, vm 1101 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = func(i915, &ggtt->vm, hole_start, hole_end, end_time); vm 1164 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = drm_mm_insert_node_in_range(&ggtt->vm.mm, &tmp, vm 1177 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c ggtt->vm.insert_page(&ggtt->vm, vm 1196 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c intel_gt_flush_ggtt_writes(ggtt->vm.gt); vm 1218 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c ggtt->vm.clear_range(&ggtt->vm, tmp.start, tmp.size); vm 1239 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c mutex_lock(&vma->vm->mutex); vm 1240 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c list_move_tail(&vma->vm_link, &vma->vm->bound_list); vm 1241 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c mutex_unlock(&vma->vm->mutex); vm 1246 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c struct i915_address_space *vm, vm 1259 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = func(i915, ctx->vm, 0, min(ctx->vm->total, limit), end_time); vm 1269 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c return exercise_mock(ggtt->vm.i915, fill_hole); vm 1276 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c return exercise_mock(ggtt->vm.i915, walk_hole); vm 1283 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c return exercise_mock(ggtt->vm.i915, pot_hole); vm 1290 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c return exercise_mock(ggtt->vm.i915, drunk_hole); vm 1308 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total + 2 * I915_GTT_PAGE_SIZE <= ggtt->vm.total; vm 1312 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(ggtt->vm.i915, vm 1327 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); vm 1333 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_gtt_reserve(&ggtt->vm, &vma->node, vm 1340 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total, ggtt->vm.total, err); vm 1358 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total + 2 * I915_GTT_PAGE_SIZE <= ggtt->vm.total; vm 1362 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(ggtt->vm.i915, vm 1377 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); vm 1383 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_gtt_reserve(&ggtt->vm, &vma->node, vm 1390 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total, ggtt->vm.total, err); vm 1411 
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); vm 1423 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c offset = random_offset(0, ggtt->vm.total, vm 1427 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_gtt_reserve(&ggtt->vm, &vma->node, vm 1434 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total, ggtt->vm.total, err); vm 1469 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c ggtt->vm.total + I915_GTT_PAGE_SIZE, 0, vm 1470 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 0, ggtt->vm.total, vm 1500 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_gtt_insert(&ggtt->vm, &tmp, vm 1515 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total + I915_GTT_PAGE_SIZE <= ggtt->vm.total; vm 1519 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(ggtt->vm.i915, vm 1534 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); vm 1540 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_gtt_insert(&ggtt->vm, &vma->node, vm 1542 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 0, ggtt->vm.total, vm 1551 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total, ggtt->vm.total, err); vm 1563 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); vm 1583 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); vm 1598 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_gtt_insert(&ggtt->vm, &vma->node, vm 1600 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 0, ggtt->vm.total, vm 1604 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total, ggtt->vm.total, err); vm 1620 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total + 2 * I915_GTT_PAGE_SIZE <= ggtt->vm.total; vm 1624 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c obj = i915_gem_object_create_internal(ggtt->vm.i915, vm 1639 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c vma = i915_vma_instance(obj, &ggtt->vm, NULL); vm 1645 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c err = i915_gem_gtt_insert(&ggtt->vm, &vma->node, vm 1647 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 0, ggtt->vm.total, vm 1651 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c total, ggtt->vm.total, err); vm 1725 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c GEM_BUG_ON(offset_in_page(i915->ggtt.vm.total)); vm 640 drivers/gpu/drm/i915/selftests/i915_request.c vma = i915_vma_instance(obj, &i915->ggtt.vm, NULL); vm 764 drivers/gpu/drm/i915/selftests/i915_request.c struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm; vm 775 drivers/gpu/drm/i915/selftests/i915_request.c vma = i915_vma_instance(obj, vm, NULL); vm 825 drivers/gpu/drm/i915/selftests/i915_request.c intel_gt_chipset_flush(batch->vm->gt); vm 41 drivers/gpu/drm/i915/selftests/i915_vma.c if (vma->vm != ctx->vm) { vm 63 drivers/gpu/drm/i915/selftests/i915_vma.c struct i915_address_space *vm, vm 69 drivers/gpu/drm/i915/selftests/i915_vma.c vma = i915_vma_instance(obj, vm, view); vm 74 drivers/gpu/drm/i915/selftests/i915_vma.c if (vma->vm != vm) { vm 76 drivers/gpu/drm/i915/selftests/i915_vma.c vma->vm, vm); vm 80 drivers/gpu/drm/i915/selftests/i915_vma.c if (i915_is_ggtt(vm) != i915_vma_is_ggtt(vma)) { vm 82 drivers/gpu/drm/i915/selftests/i915_vma.c i915_vma_is_ggtt(vma), i915_is_ggtt(vm)); vm 86 drivers/gpu/drm/i915/selftests/i915_vma.c if (i915_vma_compare(vma, vm, view)) { vm 91 drivers/gpu/drm/i915/selftests/i915_vma.c if (i915_vma_compare(vma, vma->vm, vm 116 drivers/gpu/drm/i915/selftests/i915_vma.c struct i915_address_space *vm = 
ctx->vm; vm 120 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, vm, NULL); vm 148 drivers/gpu/drm/i915/selftests/i915_vma.c struct drm_i915_private *i915 = ggtt->vm.i915; vm 267 drivers/gpu/drm/i915/selftests/i915_vma.c VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)), vm 271 drivers/gpu/drm/i915/selftests/i915_vma.c VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)), vm 272 drivers/gpu/drm/i915/selftests/i915_vma.c INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total), vm 280 drivers/gpu/drm/i915/selftests/i915_vma.c VALID(ggtt->vm.total - 4096, PIN_GLOBAL), vm 281 drivers/gpu/drm/i915/selftests/i915_vma.c VALID(ggtt->vm.total, PIN_GLOBAL), vm 282 drivers/gpu/drm/i915/selftests/i915_vma.c NOSPACE(ggtt->vm.total + 4096, PIN_GLOBAL), vm 285 drivers/gpu/drm/i915/selftests/i915_vma.c INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)), vm 297 drivers/gpu/drm/i915/selftests/i915_vma.c NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | ggtt->vm.total), vm 299 drivers/gpu/drm/i915/selftests/i915_vma.c NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)), vm 315 drivers/gpu/drm/i915/selftests/i915_vma.c GEM_BUG_ON(!drm_mm_clean(&ggtt->vm.mm)); vm 317 drivers/gpu/drm/i915/selftests/i915_vma.c obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE); vm 321 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, &ggtt->vm, NULL); vm 476 drivers/gpu/drm/i915/selftests/i915_vma.c struct i915_address_space *vm = &ggtt->vm; vm 508 drivers/gpu/drm/i915/selftests/i915_vma.c obj = i915_gem_object_create_internal(vm->i915, max_pages * PAGE_SIZE); vm 532 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, vm, &view); vm 698 drivers/gpu/drm/i915/selftests/i915_vma.c struct i915_address_space *vm = &ggtt->vm; vm 716 drivers/gpu/drm/i915/selftests/i915_vma.c obj = i915_gem_object_create_internal(vm->i915, npages * PAGE_SIZE); vm 735 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, vm, &view); vm 777 drivers/gpu/drm/i915/selftests/i915_vma.c vma = checked_vma_instance(obj, vm, NULL); vm 101 drivers/gpu/drm/i915/selftests/igt_spinner.c GEM_BUG_ON(spin->gt != ce->vm->gt); vm 103 drivers/gpu/drm/i915/selftests/igt_spinner.c vma = i915_vma_instance(spin->obj, ce->vm, NULL); vm 107 drivers/gpu/drm/i915/selftests/igt_spinner.c hws = i915_vma_instance(spin->hws, ce->vm, NULL); vm 27 drivers/gpu/drm/i915/selftests/mock_gtt.c static void mock_insert_page(struct i915_address_space *vm, vm 35 drivers/gpu/drm/i915/selftests/mock_gtt.c static void mock_insert_entries(struct i915_address_space *vm, vm 54 drivers/gpu/drm/i915/selftests/mock_gtt.c static void mock_cleanup(struct i915_address_space *vm) vm 66 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.i915 = i915; vm 67 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.total = round_down(U64_MAX, PAGE_SIZE); vm 68 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.file = ERR_PTR(-ENODEV); vm 70 drivers/gpu/drm/i915/selftests/mock_gtt.c i915_address_space_init(&ppgtt->vm, VM_CLASS_PPGTT); vm 72 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.clear_range = nop_clear_range; vm 73 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.insert_page = mock_insert_page; vm 74 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.insert_entries = mock_insert_entries; vm 75 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.cleanup = mock_cleanup; vm 77 drivers/gpu/drm/i915/selftests/mock_gtt.c 
ppgtt->vm.vma_ops.bind_vma = mock_bind_ppgtt; vm 78 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.vma_ops.unbind_vma = mock_unbind_ppgtt; vm 79 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.vma_ops.set_pages = ppgtt_set_pages; vm 80 drivers/gpu/drm/i915/selftests/mock_gtt.c ppgtt->vm.vma_ops.clear_pages = clear_pages; vm 101 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.gt = &i915->gt; vm 102 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.i915 = i915; vm 103 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.is_ggtt = true; vm 107 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.total = 4096 * PAGE_SIZE; vm 109 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.clear_range = nop_clear_range; vm 110 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.insert_page = mock_insert_page; vm 111 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.insert_entries = mock_insert_entries; vm 112 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.cleanup = mock_cleanup; vm 114 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.vma_ops.bind_vma = mock_bind_ggtt; vm 115 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.vma_ops.unbind_vma = mock_unbind_ggtt; vm 116 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.vma_ops.set_pages = ggtt_set_pages; vm 117 drivers/gpu/drm/i915/selftests/mock_gtt.c ggtt->vm.vma_ops.clear_pages = clear_pages; vm 119 drivers/gpu/drm/i915/selftests/mock_gtt.c i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT); vm 126 drivers/gpu/drm/i915/selftests/mock_gtt.c i915_address_space_fini(&ggtt->vm); vm 215 drivers/gpu/drm/imx/ipuv3-crtc.c struct videomode vm; vm 218 drivers/gpu/drm/imx/ipuv3-crtc.c drm_display_mode_to_videomode(adjusted_mode, &vm); vm 220 drivers/gpu/drm/imx/ipuv3-crtc.c ret = ipu_di_adjust_videomode(ipu_crtc->di, &vm); vm 224 drivers/gpu/drm/imx/ipuv3-crtc.c if ((vm.vsync_len == 0) || (vm.hsync_len == 0)) vm 227 drivers/gpu/drm/imx/ipuv3-crtc.c drm_display_mode_from_videomode(&vm, adjusted_mode); vm 208 drivers/gpu/drm/lima/lima_drv.c priv->vm = lima_vm_create(ldev); vm 209 drivers/gpu/drm/lima/lima_drv.c if (!priv->vm) { vm 229 drivers/gpu/drm/lima/lima_drv.c lima_vm_put(priv->vm); vm 20 drivers/gpu/drm/lima/lima_drv.h struct lima_vm *vm; vm 53 drivers/gpu/drm/lima/lima_gem.c struct lima_vm *vm = priv->vm; vm 55 drivers/gpu/drm/lima/lima_gem.c return lima_vm_bo_add(vm, bo, true); vm 62 drivers/gpu/drm/lima/lima_gem.c struct lima_vm *vm = priv->vm; vm 64 drivers/gpu/drm/lima/lima_gem.c lima_vm_bo_del(vm, bo); vm 72 drivers/gpu/drm/lima/lima_gem.c struct lima_vm *vm = priv->vm; vm 81 drivers/gpu/drm/lima/lima_gem.c *va = lima_vm_get_va(vm, bo); vm 236 drivers/gpu/drm/lima/lima_gem.c struct lima_vm *vm = priv->vm; vm 262 drivers/gpu/drm/lima/lima_gem.c err = lima_vm_bo_add(vm, bo, false); vm 277 drivers/gpu/drm/lima/lima_gem.c bos, submit->nr_bos, vm); vm 326 drivers/gpu/drm/lima/lima_gem.c lima_vm_bo_del(vm, bos[i]); vm 103 drivers/gpu/drm/lima/lima_mmu.c void lima_mmu_switch_vm(struct lima_ip *ip, struct lima_vm *vm) vm 112 drivers/gpu/drm/lima/lima_mmu.c if (vm) vm 113 drivers/gpu/drm/lima/lima_mmu.c mmu_write(LIMA_MMU_DTE_ADDR, vm->pd.dma); vm 13 drivers/gpu/drm/lima/lima_mmu.h void lima_mmu_switch_vm(struct lima_ip *ip, struct lima_vm *vm); vm 111 drivers/gpu/drm/lima/lima_sched.c struct lima_vm *vm) vm 122 drivers/gpu/drm/lima/lima_sched.c err = drm_sched_job_init(&task->base, &context->base, vm); vm 129 drivers/gpu/drm/lima/lima_sched.c task->vm = lima_vm_get(vm); vm 155 drivers/gpu/drm/lima/lima_sched.c lima_vm_put(task->vm); vm 199 
drivers/gpu/drm/lima/lima_sched.c struct lima_vm *vm = NULL, *last_vm = NULL; vm 236 drivers/gpu/drm/lima/lima_sched.c if (task->vm != pipe->current_vm) { vm 237 drivers/gpu/drm/lima/lima_sched.c vm = lima_vm_get(task->vm); vm 239 drivers/gpu/drm/lima/lima_sched.c pipe->current_vm = task->vm; vm 243 drivers/gpu/drm/lima/lima_sched.c lima_mmu_switch_vm(pipe->bcast_mmu, vm); vm 246 drivers/gpu/drm/lima/lima_sched.c lima_mmu_switch_vm(pipe->mmu[i], vm); vm 301 drivers/gpu/drm/lima/lima_sched.c struct lima_vm *vm = task->vm; vm 308 drivers/gpu/drm/lima/lima_sched.c lima_vm_bo_del(vm, bos[i]); vm 14 drivers/gpu/drm/lima/lima_sched.h struct lima_vm *vm; vm 78 drivers/gpu/drm/lima/lima_sched.h struct lima_vm *vm); vm 18 drivers/gpu/drm/lima/lima_vm.c struct lima_vm *vm; vm 35 drivers/gpu/drm/lima/lima_vm.c static void lima_vm_unmap_page_table(struct lima_vm *vm, u32 start, u32 end) vm 43 drivers/gpu/drm/lima/lima_vm.c vm->bts[pbe].cpu[bte] = 0; vm 47 drivers/gpu/drm/lima/lima_vm.c static int lima_vm_map_page_table(struct lima_vm *vm, dma_addr_t *dma, vm 57 drivers/gpu/drm/lima/lima_vm.c if (!vm->bts[pbe].cpu) { vm 62 drivers/gpu/drm/lima/lima_vm.c vm->bts[pbe].cpu = dma_alloc_wc( vm 63 drivers/gpu/drm/lima/lima_vm.c vm->dev->dev, LIMA_PAGE_SIZE << LIMA_VM_NUM_PT_PER_BT_SHIFT, vm 64 drivers/gpu/drm/lima/lima_vm.c &vm->bts[pbe].dma, GFP_KERNEL | __GFP_ZERO); vm 65 drivers/gpu/drm/lima/lima_vm.c if (!vm->bts[pbe].cpu) { vm 67 drivers/gpu/drm/lima/lima_vm.c lima_vm_unmap_page_table(vm, start, addr - 1); vm 71 drivers/gpu/drm/lima/lima_vm.c pts = vm->bts[pbe].dma; vm 72 drivers/gpu/drm/lima/lima_vm.c pd = vm->pd.cpu + (pbe << LIMA_VM_NUM_PT_PER_BT_SHIFT); vm 79 drivers/gpu/drm/lima/lima_vm.c vm->bts[pbe].cpu[bte] = dma[i++] | LIMA_VM_FLAGS_CACHE; vm 86 drivers/gpu/drm/lima/lima_vm.c lima_vm_bo_find(struct lima_vm *vm, struct lima_bo *bo) vm 91 drivers/gpu/drm/lima/lima_vm.c if (bo_va->vm == vm) { vm 100 drivers/gpu/drm/lima/lima_vm.c int lima_vm_bo_add(struct lima_vm *vm, struct lima_bo *bo, bool create) vm 107 drivers/gpu/drm/lima/lima_vm.c bo_va = lima_vm_bo_find(vm, bo); vm 126 drivers/gpu/drm/lima/lima_vm.c bo_va->vm = vm; vm 129 drivers/gpu/drm/lima/lima_vm.c mutex_lock(&vm->lock); vm 131 drivers/gpu/drm/lima/lima_vm.c err = drm_mm_insert_node(&vm->mm, &bo_va->node, bo->gem.size); vm 135 drivers/gpu/drm/lima/lima_vm.c err = lima_vm_map_page_table(vm, bo->pages_dma_addr, bo_va->node.start, vm 140 drivers/gpu/drm/lima/lima_vm.c mutex_unlock(&vm->lock); vm 150 drivers/gpu/drm/lima/lima_vm.c mutex_unlock(&vm->lock); vm 157 drivers/gpu/drm/lima/lima_vm.c void lima_vm_bo_del(struct lima_vm *vm, struct lima_bo *bo) vm 163 drivers/gpu/drm/lima/lima_vm.c bo_va = lima_vm_bo_find(vm, bo); vm 169 drivers/gpu/drm/lima/lima_vm.c mutex_lock(&vm->lock); vm 171 drivers/gpu/drm/lima/lima_vm.c lima_vm_unmap_page_table(vm, bo_va->node.start, vm 176 drivers/gpu/drm/lima/lima_vm.c mutex_unlock(&vm->lock); vm 185 drivers/gpu/drm/lima/lima_vm.c u32 lima_vm_get_va(struct lima_vm *vm, struct lima_bo *bo) vm 192 drivers/gpu/drm/lima/lima_vm.c bo_va = lima_vm_bo_find(vm, bo); vm 202 drivers/gpu/drm/lima/lima_vm.c struct lima_vm *vm; vm 204 drivers/gpu/drm/lima/lima_vm.c vm = kzalloc(sizeof(*vm), GFP_KERNEL); vm 205 drivers/gpu/drm/lima/lima_vm.c if (!vm) vm 208 drivers/gpu/drm/lima/lima_vm.c vm->dev = dev; vm 209 drivers/gpu/drm/lima/lima_vm.c mutex_init(&vm->lock); vm 210 drivers/gpu/drm/lima/lima_vm.c kref_init(&vm->refcount); vm 212 drivers/gpu/drm/lima/lima_vm.c vm->pd.cpu = dma_alloc_wc(dev->dev, LIMA_PAGE_SIZE, 
&vm->pd.dma, vm 214 drivers/gpu/drm/lima/lima_vm.c if (!vm->pd.cpu) vm 219 drivers/gpu/drm/lima/lima_vm.c vm, &dev->dlbu_dma, LIMA_VA_RESERVE_DLBU, vm 225 drivers/gpu/drm/lima/lima_vm.c drm_mm_init(&vm->mm, dev->va_start, dev->va_end - dev->va_start); vm 227 drivers/gpu/drm/lima/lima_vm.c return vm; vm 230 drivers/gpu/drm/lima/lima_vm.c dma_free_wc(dev->dev, LIMA_PAGE_SIZE, vm->pd.cpu, vm->pd.dma); vm 232 drivers/gpu/drm/lima/lima_vm.c kfree(vm); vm 238 drivers/gpu/drm/lima/lima_vm.c struct lima_vm *vm = container_of(kref, struct lima_vm, refcount); vm 241 drivers/gpu/drm/lima/lima_vm.c drm_mm_takedown(&vm->mm); vm 244 drivers/gpu/drm/lima/lima_vm.c if (vm->bts[i].cpu) vm 245 drivers/gpu/drm/lima/lima_vm.c dma_free_wc(vm->dev->dev, LIMA_PAGE_SIZE << LIMA_VM_NUM_PT_PER_BT_SHIFT, vm 246 drivers/gpu/drm/lima/lima_vm.c vm->bts[i].cpu, vm->bts[i].dma); vm 249 drivers/gpu/drm/lima/lima_vm.c if (vm->pd.cpu) vm 250 drivers/gpu/drm/lima/lima_vm.c dma_free_wc(vm->dev->dev, LIMA_PAGE_SIZE, vm->pd.cpu, vm->pd.dma); vm 252 drivers/gpu/drm/lima/lima_vm.c kfree(vm); vm 255 drivers/gpu/drm/lima/lima_vm.c void lima_vm_print(struct lima_vm *vm) vm 260 drivers/gpu/drm/lima/lima_vm.c if (!vm->pd.cpu) vm 263 drivers/gpu/drm/lima/lima_vm.c pd = vm->pd.cpu; vm 265 drivers/gpu/drm/lima/lima_vm.c if (!vm->bts[i].cpu) vm 268 drivers/gpu/drm/lima/lima_vm.c pt = vm->bts[i].cpu; vm 41 drivers/gpu/drm/lima/lima_vm.h int lima_vm_bo_add(struct lima_vm *vm, struct lima_bo *bo, bool create); vm 42 drivers/gpu/drm/lima/lima_vm.h void lima_vm_bo_del(struct lima_vm *vm, struct lima_bo *bo); vm 44 drivers/gpu/drm/lima/lima_vm.h u32 lima_vm_get_va(struct lima_vm *vm, struct lima_bo *bo); vm 49 drivers/gpu/drm/lima/lima_vm.h static inline struct lima_vm *lima_vm_get(struct lima_vm *vm) vm 51 drivers/gpu/drm/lima/lima_vm.h kref_get(&vm->refcount); vm 52 drivers/gpu/drm/lima/lima_vm.h return vm; vm 55 drivers/gpu/drm/lima/lima_vm.h static inline void lima_vm_put(struct lima_vm *vm) vm 57 drivers/gpu/drm/lima/lima_vm.h kref_put(&vm->refcount, lima_vm_release); vm 60 drivers/gpu/drm/lima/lima_vm.h void lima_vm_print(struct lima_vm *vm); vm 425 drivers/gpu/drm/mediatek/mtk_dpi.c struct videomode vm = { 0 }; vm 431 drivers/gpu/drm/mediatek/mtk_dpi.c drm_display_mode_to_videomode(mode, &vm); vm 432 drivers/gpu/drm/mediatek/mtk_dpi.c pll_rate = vm.pixelclock * factor; vm 435 drivers/gpu/drm/mediatek/mtk_dpi.c pll_rate, vm.pixelclock); vm 440 drivers/gpu/drm/mediatek/mtk_dpi.c vm.pixelclock = pll_rate / factor; vm 441 drivers/gpu/drm/mediatek/mtk_dpi.c clk_set_rate(dpi->pixel_clk, vm.pixelclock); vm 442 drivers/gpu/drm/mediatek/mtk_dpi.c vm.pixelclock = clk_get_rate(dpi->pixel_clk); vm 445 drivers/gpu/drm/mediatek/mtk_dpi.c pll_rate, vm.pixelclock); vm 454 drivers/gpu/drm/mediatek/mtk_dpi.c dpi_pol.hsync_pol = vm.flags & DISPLAY_FLAGS_HSYNC_HIGH ? vm 456 drivers/gpu/drm/mediatek/mtk_dpi.c dpi_pol.vsync_pol = vm.flags & DISPLAY_FLAGS_VSYNC_HIGH ? 
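
The lima_vm.c entries above outline a two-level GPU MMU table: a kref-counted vm holds a page directory (vm->pd) whose slots point at block tables (vm->bts[pbe]) that are dma_alloc_wc'd only on first use, and the final entry, indexed by bte, is the page's DMA address tagged with LIMA_VM_FLAGS_CACHE. Here is a rough userspace sketch of the lazy second-level allocation; the table sizes and the flag value are made up for illustration, not Mali's.

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#define PT_ENTRIES	1024
#define PD_ENTRIES	1024
#define PAGE_SHIFT	12
#define FLAG_PRESENT	0x1u	/* placeholder for LIMA_VM_FLAGS_CACHE */

struct block_table {
	uint32_t pte[PT_ENTRIES];
};

struct vm_table {
	struct block_table *bts[PD_ENTRIES];	/* allocated lazily */
};

static int vm_map_page(struct vm_table *vm, uint32_t va, uint32_t pa)
{
	uint32_t idx = va >> PAGE_SHIFT;
	uint32_t pbe = idx / PT_ENTRIES;	/* directory slot */
	uint32_t bte = idx % PT_ENTRIES;	/* entry inside the block table */

	if (!vm->bts[pbe]) {
		vm->bts[pbe] = calloc(1, sizeof(*vm->bts[pbe]));
		if (!vm->bts[pbe])
			return -1;	/* caller unwinds earlier mappings */
	}
	vm->bts[pbe]->pte[bte] = pa | FLAG_PRESENT;
	return 0;
}

int main(void)
{
	struct vm_table vm = { 0 };

	if (vm_map_page(&vm, 0x00401000u, 0x80000000u) == 0)
		printf("pte = %08x\n", vm.bts[1]->pte[1]);
	return 0;
}

As in the indexed lima_vm_map_page_table(), a mid-range allocation failure does not leave stale entries behind: the real code walks lima_vm_unmap_page_table() over the range already written before returning the error.
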
vm 458 drivers/gpu/drm/mediatek/mtk_dpi.c hsync.sync_width = vm.hsync_len; vm 459 drivers/gpu/drm/mediatek/mtk_dpi.c hsync.back_porch = vm.hback_porch; vm 460 drivers/gpu/drm/mediatek/mtk_dpi.c hsync.front_porch = vm.hfront_porch; vm 462 drivers/gpu/drm/mediatek/mtk_dpi.c vsync_lodd.sync_width = vm.vsync_len; vm 463 drivers/gpu/drm/mediatek/mtk_dpi.c vsync_lodd.back_porch = vm.vback_porch; vm 464 drivers/gpu/drm/mediatek/mtk_dpi.c vsync_lodd.front_porch = vm.vfront_porch; vm 467 drivers/gpu/drm/mediatek/mtk_dpi.c if (vm.flags & DISPLAY_FLAGS_INTERLACED && vm 474 drivers/gpu/drm/mediatek/mtk_dpi.c } else if (vm.flags & DISPLAY_FLAGS_INTERLACED && vm 478 drivers/gpu/drm/mediatek/mtk_dpi.c } else if (!(vm.flags & DISPLAY_FLAGS_INTERLACED) && vm 492 drivers/gpu/drm/mediatek/mtk_dpi.c mtk_dpi_config_interface(dpi, !!(vm.flags & vm 494 drivers/gpu/drm/mediatek/mtk_dpi.c if (vm.flags & DISPLAY_FLAGS_INTERLACED) vm 495 drivers/gpu/drm/mediatek/mtk_dpi.c mtk_dpi_config_fb_size(dpi, vm.hactive, vm.vactive >> 1); vm 497 drivers/gpu/drm/mediatek/mtk_dpi.c mtk_dpi_config_fb_size(dpi, vm.hactive, vm.vactive); vm 174 drivers/gpu/drm/mediatek/mtk_dsi.c struct videomode vm; vm 307 drivers/gpu/drm/mediatek/mtk_dsi.c struct videomode *vm = &dsi->vm; vm 316 drivers/gpu/drm/mediatek/mtk_dsi.c ps_wc = vm->hactive * dsi_buf_bpp; vm 334 drivers/gpu/drm/mediatek/mtk_dsi.c writel(vm->vactive, dsi->regs + DSI_VACT_NL); vm 395 drivers/gpu/drm/mediatek/mtk_dsi.c tmp_reg += dsi->vm.hactive * dsi_tmp_buf_bpp & DSI_PS_WC; vm 406 drivers/gpu/drm/mediatek/mtk_dsi.c struct videomode *vm = &dsi->vm; vm 413 drivers/gpu/drm/mediatek/mtk_dsi.c writel(vm->vsync_len, dsi->regs + DSI_VSA_NL); vm 414 drivers/gpu/drm/mediatek/mtk_dsi.c writel(vm->vback_porch, dsi->regs + DSI_VBP_NL); vm 415 drivers/gpu/drm/mediatek/mtk_dsi.c writel(vm->vfront_porch, dsi->regs + DSI_VFP_NL); vm 416 drivers/gpu/drm/mediatek/mtk_dsi.c writel(vm->vactive, dsi->regs + DSI_VACT_NL); vm 418 drivers/gpu/drm/mediatek/mtk_dsi.c horizontal_sync_active_byte = (vm->hsync_len * dsi_tmp_buf_bpp - 10); vm 422 drivers/gpu/drm/mediatek/mtk_dsi.c (vm->hback_porch * dsi_tmp_buf_bpp - 10); vm 424 drivers/gpu/drm/mediatek/mtk_dsi.c horizontal_backporch_byte = ((vm->hback_porch + vm->hsync_len) * vm 427 drivers/gpu/drm/mediatek/mtk_dsi.c horizontal_frontporch_byte = (vm->hfront_porch * dsi_tmp_buf_bpp - 12); vm 553 drivers/gpu/drm/mediatek/mtk_dsi.c pixel_clock = dsi->vm.pixelclock; vm 554 drivers/gpu/drm/mediatek/mtk_dsi.c htotal = dsi->vm.hactive + dsi->vm.hback_porch + dsi->vm.hfront_porch + vm 555 drivers/gpu/drm/mediatek/mtk_dsi.c dsi->vm.hsync_len; vm 729 drivers/gpu/drm/mediatek/mtk_dsi.c drm_display_mode_to_videomode(adjusted, &dsi->vm); vm 64 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_memory_map(p,o,vm,va,av,ac) \ vm 65 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h (p)->func->map((p),(o),(vm),(va),(av),(ac)) vm 24 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.h u32 size, u32 align, bool zero, u64 vm, u64 push, vm 45 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c struct videomode vm; vm 601 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c .vm = &ddata->vm, vm 895 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c r = dsicm_set_update_window(ddata, 0, 0, ddata->vm.hactive, vm 896 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c ddata->vm.vactive); vm 1027 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c ddata->vm.hactive * ddata->vm.vactive * 3); vm 1117 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c return omapdss_display_get_modes(connector, 
&ddata->vm); vm 1126 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c if (mode->hdisplay != ddata->vm.hactive) vm 1129 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c if (mode->vdisplay != ddata->vm.vactive) vm 1136 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c ddata->vm.hactive, ddata->vm.vactive); vm 1188 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c videomode_from_timing(&timing, &ddata->vm); vm 1189 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c if (!ddata->vm.pixelclock) vm 1190 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c ddata->vm.pixelclock = vm 1191 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c ddata->vm.hactive * ddata->vm.vactive * 60; vm 1253 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c ddata->vm.hactive = 864; vm 1254 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c ddata->vm.vactive = 480; vm 1255 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c ddata->vm.pixelclock = 864 * 480 * 60; vm 102 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm, vm 2142 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm, u16 pos_x, vm 2152 drivers/gpu/drm/omapdrm/dss/dispc.c nonactive = vm->hactive + vm->hfront_porch + vm->hsync_len + vm 2153 drivers/gpu/drm/omapdrm/dss/dispc.c vm->hback_porch - out_width; vm 2160 drivers/gpu/drm/omapdrm/dss/dispc.c blank = div_u64((u64)(vm->hback_porch + vm->hsync_len + vm->hfront_porch) * vm 2196 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm, u16 width, vm 2207 drivers/gpu/drm/omapdrm/dss/dispc.c unsigned int ppl = vm->hactive; vm 2290 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm, vm 2339 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm, vm 2363 drivers/gpu/drm/omapdrm/dss/dispc.c *core_clk = calc_core_clk_five_taps(pclk, vm, vm 2371 drivers/gpu/drm/omapdrm/dss/dispc.c error = check_horiz_timing_omap3(pclk, lclk, vm, vm 2406 drivers/gpu/drm/omapdrm/dss/dispc.c if (check_horiz_timing_omap3(pclk, lclk, vm, pos_x, in_width, vm 2427 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm, vm 2496 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm, vm 2527 drivers/gpu/drm/omapdrm/dss/dispc.c if (!mem_to_mem && (pclk == 0 || vm->pixelclock == 0)) { vm 2553 drivers/gpu/drm/omapdrm/dss/dispc.c ret = dispc->feat->calc_scaling(dispc, pclk, lclk, vm, width, height, vm 2598 drivers/gpu/drm/omapdrm/dss/dispc.c bool replication, const struct videomode *vm, vm 2612 drivers/gpu/drm/omapdrm/dss/dispc.c bool ilace = !!(vm->flags & DISPLAY_FLAGS_INTERLACED); vm 2621 drivers/gpu/drm/omapdrm/dss/dispc.c pclk = vm->pixelclock; vm 2652 drivers/gpu/drm/omapdrm/dss/dispc.c r = dispc_ovl_calc_scaling(dispc, plane, pclk, lclk, caps, vm, in_width, vm 2765 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm, bool mem_to_mem, vm 2784 drivers/gpu/drm/omapdrm/dss/dispc.c oi->rotation_type, replication, vm, mem_to_mem); vm 2791 drivers/gpu/drm/omapdrm/dss/dispc.c bool mem_to_mem, const struct videomode *vm, vm 2801 drivers/gpu/drm/omapdrm/dss/dispc.c int in_width = vm->hactive; vm 2802 drivers/gpu/drm/omapdrm/dss/dispc.c int in_height = vm->vactive; vm 2806 drivers/gpu/drm/omapdrm/dss/dispc.c if (vm->flags & DISPLAY_FLAGS_INTERLACED) vm 2817 drivers/gpu/drm/omapdrm/dss/dispc.c replication, vm, mem_to_mem); vm 2855 drivers/gpu/drm/omapdrm/dss/dispc.c wbdelay = vm->vsync_len + vm->vback_porch; vm 2857 drivers/gpu/drm/omapdrm/dss/dispc.c wbdelay = vm->vfront_porch + vm->vsync_len + vm 2858 drivers/gpu/drm/omapdrm/dss/dispc.c vm->vback_porch; vm 2860 drivers/gpu/drm/omapdrm/dss/dispc.c 
if (vm->flags & DISPLAY_FLAGS_INTERLACED) vm 3098 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm) vm 3100 drivers/gpu/drm/omapdrm/dss/dispc.c if (!_dispc_mgr_size_ok(dispc, vm->hactive, vm->vactive)) vm 3103 drivers/gpu/drm/omapdrm/dss/dispc.c if (!_dispc_mgr_pclk_ok(dispc, channel, vm->pixelclock)) vm 3108 drivers/gpu/drm/omapdrm/dss/dispc.c if (vm->flags & DISPLAY_FLAGS_INTERLACED) vm 3111 drivers/gpu/drm/omapdrm/dss/dispc.c if (!_dispc_lcd_timings_ok(dispc, vm->hsync_len, vm 3112 drivers/gpu/drm/omapdrm/dss/dispc.c vm->hfront_porch, vm->hback_porch, vm 3113 drivers/gpu/drm/omapdrm/dss/dispc.c vm->vsync_len, vm->vfront_porch, vm 3114 drivers/gpu/drm/omapdrm/dss/dispc.c vm->vback_porch)) vm 3123 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm) vm 3128 drivers/gpu/drm/omapdrm/dss/dispc.c timing_h = FLD_VAL(vm->hsync_len - 1, dispc->feat->sw_start, 0) | vm 3129 drivers/gpu/drm/omapdrm/dss/dispc.c FLD_VAL(vm->hfront_porch - 1, dispc->feat->fp_start, 8) | vm 3130 drivers/gpu/drm/omapdrm/dss/dispc.c FLD_VAL(vm->hback_porch - 1, dispc->feat->bp_start, 20); vm 3131 drivers/gpu/drm/omapdrm/dss/dispc.c timing_v = FLD_VAL(vm->vsync_len - 1, dispc->feat->sw_start, 0) | vm 3132 drivers/gpu/drm/omapdrm/dss/dispc.c FLD_VAL(vm->vfront_porch, dispc->feat->fp_start, 8) | vm 3133 drivers/gpu/drm/omapdrm/dss/dispc.c FLD_VAL(vm->vback_porch, dispc->feat->bp_start, 20); vm 3138 drivers/gpu/drm/omapdrm/dss/dispc.c if (vm->flags & DISPLAY_FLAGS_VSYNC_HIGH) vm 3143 drivers/gpu/drm/omapdrm/dss/dispc.c if (vm->flags & DISPLAY_FLAGS_HSYNC_HIGH) vm 3148 drivers/gpu/drm/omapdrm/dss/dispc.c if (vm->flags & DISPLAY_FLAGS_DE_HIGH) vm 3153 drivers/gpu/drm/omapdrm/dss/dispc.c if (vm->flags & DISPLAY_FLAGS_PIXDATA_POSEDGE) vm 3161 drivers/gpu/drm/omapdrm/dss/dispc.c if (vm->flags & DISPLAY_FLAGS_SYNC_POSEDGE) vm 3212 drivers/gpu/drm/omapdrm/dss/dispc.c const struct videomode *vm) vm 3216 drivers/gpu/drm/omapdrm/dss/dispc.c struct videomode t = *vm; vm 3231 drivers/gpu/drm/omapdrm/dss/dispc.c ht = vm->pixelclock / xtot; vm 3232 drivers/gpu/drm/omapdrm/dss/dispc.c vt = vm->pixelclock / xtot / ytot; vm 3234 drivers/gpu/drm/omapdrm/dss/dispc.c DSSDBG("pck %lu\n", vm->pixelclock); vm 4552 drivers/gpu/drm/omapdrm/dss/dispc.c struct videomode vm; vm 4557 drivers/gpu/drm/omapdrm/dss/dispc.c .vm = { vm 4654 drivers/gpu/drm/omapdrm/dss/dispc.c dispc_ovl_setup(dispc, OMAP_DSS_GFX, &ovli, &i734.vm, false, vm 4663 drivers/gpu/drm/omapdrm/dss/dispc.c dispc_mgr_set_timings(dispc, OMAP_DSS_CHANNEL_LCD, &i734.vm); vm 53 drivers/gpu/drm/omapdrm/dss/display.c const struct videomode *vm) vm 61 drivers/gpu/drm/omapdrm/dss/display.c drm_display_mode_from_videomode(vm, mode); vm 287 drivers/gpu/drm/omapdrm/dss/dsi.c struct videomode vm; vm 412 drivers/gpu/drm/omapdrm/dss/dsi.c struct videomode vm; vm 3243 drivers/gpu/drm/omapdrm/dss/dsi.c const struct videomode *vm = &dsi->vm; vm 3248 drivers/gpu/drm/omapdrm/dss/dsi.c if (dsi->line_buffer_size <= vm->hactive * bpp / 8) vm 3372 drivers/gpu/drm/omapdrm/dss/dsi.c const struct videomode *vm = &dsi->vm; vm 3413 drivers/gpu/drm/omapdrm/dss/dsi.c width_bytes = DIV_ROUND_UP(vm->hactive * bpp, 8); vm 3622 drivers/gpu/drm/omapdrm/dss/dsi.c const struct videomode *vm = &dsi->vm; vm 3630 drivers/gpu/drm/omapdrm/dss/dsi.c width_bytes = DIV_ROUND_UP(vm->hactive * bpp, 8); vm 3639 drivers/gpu/drm/omapdrm/dss/dsi.c vsa, vm->vactive); vm 3655 drivers/gpu/drm/omapdrm/dss/dsi.c r = FLD_MOD(r, vm->vactive, 14, 0); /* VACT */ vm 3764 drivers/gpu/drm/omapdrm/dss/dsi.c word_count = 
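The dispc_mgr_set_lcd_timings() entries above pack each sync and porch value, minus one, into a register bitfield with FLD_VAL. A small sketch of that packing, assuming the OMAP convention that 'start' is the high bit and 'end' the low bit; the concrete positions (7, 19, 31) are illustrative, since the real driver reads them from per-SoC feature data in dispc->feat:

    #include <stdio.h>
    #include <stdint.h>

    /* OMAP-style bitfield helpers: 'start' is the high bit, 'end' the low bit. */
    #define FLD_MASK(start, end)  (((1u << ((start) - (end) + 1)) - 1) << (end))
    #define FLD_VAL(val, start, end) \
        (((uint32_t)(val) << (end)) & FLD_MASK(start, end))

    int main(void)
    {
        /* Example sync/porch widths in pixel clocks (made up). */
        unsigned hsw = 4, hfp = 8, hbp = 16;

        /* Each field is programmed minus one, as in the entries above. */
        uint32_t timing_h = FLD_VAL(hsw - 1, 7, 0) |
                            FLD_VAL(hfp - 1, 19, 8) |
                            FLD_VAL(hbp - 1, 31, 20);

        printf("DISPC_TIMING_H = 0x%08x\n", timing_h);
        return 0;
    }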
DIV_ROUND_UP(dsi->vm.hactive * bpp, 8); vm 3821 drivers/gpu/drm/omapdrm/dss/dsi.c u16 w = dsi->vm.hactive; vm 3822 drivers/gpu/drm/omapdrm/dss/dsi.c u16 h = dsi->vm.vactive; vm 3953 drivers/gpu/drm/omapdrm/dss/dsi.c dw = dsi->vm.hactive; vm 3954 drivers/gpu/drm/omapdrm/dss/dsi.c dh = dsi->vm.vactive; vm 4245 drivers/gpu/drm/omapdrm/dss/dsi.c static void print_dispc_vm(const char *str, const struct videomode *vm) vm 4247 drivers/gpu/drm/omapdrm/dss/dsi.c unsigned long pck = vm->pixelclock; vm 4250 drivers/gpu/drm/omapdrm/dss/dsi.c hact = vm->hactive; vm 4251 drivers/gpu/drm/omapdrm/dss/dsi.c bl = vm->hsync_len + vm->hback_porch + vm->hfront_porch; vm 4260 drivers/gpu/drm/omapdrm/dss/dsi.c vm->hsync_len, vm->hback_porch, hact, vm->hfront_porch, vm 4262 drivers/gpu/drm/omapdrm/dss/dsi.c TO_DISPC_T(vm->hsync_len), vm 4263 drivers/gpu/drm/omapdrm/dss/dsi.c TO_DISPC_T(vm->hback_porch), vm 4265 drivers/gpu/drm/omapdrm/dss/dsi.c TO_DISPC_T(vm->hfront_porch), vm 4276 drivers/gpu/drm/omapdrm/dss/dsi.c struct videomode vm = { 0 }; vm 4287 drivers/gpu/drm/omapdrm/dss/dsi.c vm.pixelclock = pck; vm 4288 drivers/gpu/drm/omapdrm/dss/dsi.c vm.hsync_len = div64_u64((u64)(t->hsa + t->hse) * pck, byteclk); vm 4289 drivers/gpu/drm/omapdrm/dss/dsi.c vm.hback_porch = div64_u64((u64)t->hbp * pck, byteclk); vm 4290 drivers/gpu/drm/omapdrm/dss/dsi.c vm.hfront_porch = div64_u64((u64)t->hfp * pck, byteclk); vm 4291 drivers/gpu/drm/omapdrm/dss/dsi.c vm.hactive = t->hact; vm 4293 drivers/gpu/drm/omapdrm/dss/dsi.c print_dispc_vm(str, &vm); vm 4301 drivers/gpu/drm/omapdrm/dss/dsi.c struct videomode *vm = &ctx->vm; vm 4308 drivers/gpu/drm/omapdrm/dss/dsi.c *vm = *ctx->config->vm; vm 4309 drivers/gpu/drm/omapdrm/dss/dsi.c vm->pixelclock = pck; vm 4310 drivers/gpu/drm/omapdrm/dss/dsi.c vm->hactive = ctx->config->vm->hactive; vm 4311 drivers/gpu/drm/omapdrm/dss/dsi.c vm->vactive = ctx->config->vm->vactive; vm 4312 drivers/gpu/drm/omapdrm/dss/dsi.c vm->hsync_len = vm->hfront_porch = vm->hback_porch = vm->vsync_len = 1; vm 4313 drivers/gpu/drm/omapdrm/dss/dsi.c vm->vfront_porch = vm->vback_porch = 0; vm 4366 drivers/gpu/drm/omapdrm/dss/dsi.c pck = cfg->vm->pixelclock; vm 4408 drivers/gpu/drm/omapdrm/dss/dsi.c req_vm = cfg->vm; vm 4542 drivers/gpu/drm/omapdrm/dss/dsi.c dispc_vm = &ctx->vm; vm 4599 drivers/gpu/drm/omapdrm/dss/dsi.c print_dispc_vm("dispc", &ctx->vm); vm 4601 drivers/gpu/drm/omapdrm/dss/dsi.c print_dispc_vm("req ", ctx->config->vm); vm 4652 drivers/gpu/drm/omapdrm/dss/dsi.c const struct videomode *vm = cfg->vm; vm 4668 drivers/gpu/drm/omapdrm/dss/dsi.c ctx->req_pck_min = vm->pixelclock - 1000; vm 4669 drivers/gpu/drm/omapdrm/dss/dsi.c ctx->req_pck_nom = vm->pixelclock; vm 4670 drivers/gpu/drm/omapdrm/dss/dsi.c ctx->req_pck_max = vm->pixelclock + 1000; vm 4726 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm = ctx.vm; vm 4732 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags &= ~DISPLAY_FLAGS_INTERLACED; vm 4733 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags &= ~DISPLAY_FLAGS_HSYNC_LOW; vm 4734 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags |= DISPLAY_FLAGS_HSYNC_HIGH; vm 4735 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags &= ~DISPLAY_FLAGS_VSYNC_LOW; vm 4736 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags |= DISPLAY_FLAGS_VSYNC_HIGH; vm 4742 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags &= ~DISPLAY_FLAGS_PIXDATA_NEGEDGE; vm 4743 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags |= DISPLAY_FLAGS_PIXDATA_POSEDGE; vm 4744 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags &= ~DISPLAY_FLAGS_DE_LOW; vm 4745 
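The print_dispc_vm() entries above convert DSI-side durations, counted in byte clocks, back into pixel-clock units with the ratio pck/byteclk. A runnable sketch of that arithmetic with made-up clock values:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint64_t byteclk = 112500000;   /* DSI byte clock, Hz (assumed) */
        uint64_t pck     = 74250000;    /* target pixel clock, Hz (assumed) */
        unsigned hsa = 4, hse = 2, hbp = 30, hfp = 50; /* byte-clock ticks */

        /* Mirrors vm.hsync_len = (hsa + hse) * pck / byteclk etc. above. */
        unsigned hsync_len    = (unsigned)(((hsa + hse) * pck) / byteclk);
        unsigned hback_porch  = (unsigned)((hbp * pck) / byteclk);
        unsigned hfront_porch = (unsigned)((hfp * pck) / byteclk);

        printf("hsw=%u hbp=%u hfp=%u (in pixel clocks)\n",
               hsync_len, hback_porch, hfront_porch);
        return 0;
    }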
drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags |= DISPLAY_FLAGS_DE_HIGH; vm 4746 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags &= ~DISPLAY_FLAGS_SYNC_POSEDGE; vm 4747 drivers/gpu/drm/omapdrm/dss/dsi.c dsi->vm.flags |= DISPLAY_FLAGS_SYNC_NEGEDGE; vm 4749 drivers/gpu/drm/omapdrm/dss/dsi.c dss_mgr_set_timings(&dsi->output, &dsi->vm); vm 176 drivers/gpu/drm/omapdrm/dss/hdmi.h struct videomode vm; vm 307 drivers/gpu/drm/omapdrm/dss/hdmi.h const struct videomode *vm); vm 309 drivers/gpu/drm/omapdrm/dss/hdmi.h const struct videomode *vm); vm 311 drivers/gpu/drm/omapdrm/dss/hdmi.h struct videomode *vm, const struct hdmi_config *param); vm 146 drivers/gpu/drm/omapdrm/dss/hdmi4.c const struct videomode *vm; vm 159 drivers/gpu/drm/omapdrm/dss/hdmi4.c vm = &hdmi->cfg.vm; vm 161 drivers/gpu/drm/omapdrm/dss/hdmi4.c DSSDBG("hdmi_power_on hactive= %d vactive = %d\n", vm->hactive, vm 162 drivers/gpu/drm/omapdrm/dss/hdmi4.c vm->vactive); vm 164 drivers/gpu/drm/omapdrm/dss/hdmi4.c pc = vm->pixelclock; vm 165 drivers/gpu/drm/omapdrm/dss/hdmi4.c if (vm->flags & DISPLAY_FLAGS_DOUBLECLK) vm 247 drivers/gpu/drm/omapdrm/dss/hdmi4.c drm_display_mode_to_videomode(mode, &hdmi->cfg.vm); vm 323 drivers/gpu/drm/omapdrm/dss/hdmi4.c hdmi->cfg.vm.pixelclock); vm 556 drivers/gpu/drm/omapdrm/dss/hdmi4.c hd->cfg.vm.pixelclock); vm 301 drivers/gpu/drm/omapdrm/dss/hdmi4_core.c struct videomode vm; vm 309 drivers/gpu/drm/omapdrm/dss/hdmi4_core.c hdmi_wp_init_vid_fmt_timings(&video_format, &vm, cfg); vm 311 drivers/gpu/drm/omapdrm/dss/hdmi4_core.c hdmi_wp_video_config_timing(wp, &vm); vm 318 drivers/gpu/drm/omapdrm/dss/hdmi4_core.c hdmi_wp_video_config_interface(wp, &vm); vm 145 drivers/gpu/drm/omapdrm/dss/hdmi5.c const struct videomode *vm; vm 153 drivers/gpu/drm/omapdrm/dss/hdmi5.c vm = &hdmi->cfg.vm; vm 155 drivers/gpu/drm/omapdrm/dss/hdmi5.c DSSDBG("hdmi_power_on hactive= %d vactive = %d\n", vm->hactive, vm 156 drivers/gpu/drm/omapdrm/dss/hdmi5.c vm->vactive); vm 158 drivers/gpu/drm/omapdrm/dss/hdmi5.c pc = vm->pixelclock; vm 159 drivers/gpu/drm/omapdrm/dss/hdmi5.c if (vm->flags & DISPLAY_FLAGS_DOUBLECLK) vm 246 drivers/gpu/drm/omapdrm/dss/hdmi5.c drm_display_mode_to_videomode(mode, &hdmi->cfg.vm); vm 331 drivers/gpu/drm/omapdrm/dss/hdmi5.c hdmi->cfg.vm.pixelclock); vm 550 drivers/gpu/drm/omapdrm/dss/hdmi5.c hd->cfg.vm.pixelclock); vm 283 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm = cfg->vm; vm 287 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->hblank = cfg->vm.hfront_porch + vm 288 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c cfg->vm.hback_porch + cfg->vm.hsync_len; vm 290 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->vblank = cfg->vm.vsync_len + cfg->vm.vfront_porch + vm 291 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c cfg->vm.vback_porch; vm 294 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c if (cfg->vm.flags & DISPLAY_FLAGS_INTERLACED) { vm 299 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm.vactive /= 2; vm 301 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm.vfront_porch /= 2; vm 302 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm.vsync_len /= 2; vm 303 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm.vback_porch /= 2; vm 306 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c if (cfg->vm.flags & DISPLAY_FLAGS_DOUBLECLK) { vm 307 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm.hactive *= 2; vm 309 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm.hfront_porch *= 2; vm 310 
drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm.hsync_len *= 2; vm 311 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c video_cfg->v_fc_config.vm.hback_porch *= 2; vm 320 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c const struct videomode *vm = &cfg->v_fc_config.vm; vm 324 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c vsync_pol = !!(vm->flags & DISPLAY_FLAGS_VSYNC_HIGH); vm 325 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c hsync_pol = !!(vm->flags & DISPLAY_FLAGS_HSYNC_HIGH); vm 333 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c r = FLD_MOD(r, !!(vm->flags & DISPLAY_FLAGS_INTERLACED), 0, 0); vm 337 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_INHACTIV1, vm->hactive >> 8, 4, 0); vm 338 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_INHACTIV0, vm->hactive & 0xFF, 7, 0); vm 341 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_INVACTIV1, vm->vactive >> 8, 4, 0); vm 342 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_INVACTIV0, vm->vactive & 0xFF, 7, 0); vm 352 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_HSYNCINDELAY1, vm->hfront_porch >> 8, vm 354 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_HSYNCINDELAY0, vm->hfront_porch & 0xFF, vm 358 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_VSYNCINDELAY, vm->vfront_porch, 7, 0); vm 361 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_HSYNCINWIDTH1, (vm->hsync_len >> 8), vm 363 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_HSYNCINWIDTH0, vm->hsync_len & 0xFF, vm 367 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c REG_FLD_MOD(base, HDMI_CORE_FC_VSYNCINWIDTH, vm->vsync_len, 5, 0); vm 373 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c if (vm->flags & DISPLAY_FLAGS_DOUBLECLK) vm 600 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c struct videomode vm; vm 608 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c hdmi_wp_init_vid_fmt_timings(&video_format, &vm, cfg); vm 610 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c hdmi_wp_video_config_timing(wp, &vm); vm 617 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c hdmi_wp_video_config_interface(wp, &vm); vm 144 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c const struct videomode *vm) vm 150 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vsync_inv = !!(vm->flags & DISPLAY_FLAGS_VSYNC_LOW); vm 151 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c hsync_inv = !!(vm->flags & DISPLAY_FLAGS_HSYNC_LOW); vm 158 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c r = FLD_MOD(r, !!(vm->flags & DISPLAY_FLAGS_INTERLACED), 3, 3); vm 164 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c const struct videomode *vm) vm 181 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c timing_h |= FLD_VAL(vm->hback_porch, 31, 20); vm 182 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c timing_h |= FLD_VAL(vm->hfront_porch, 19, 8); vm 183 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c timing_h |= FLD_VAL(vm->hsync_len - hsync_len_offset, 7, 0); vm 186 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c timing_v |= FLD_VAL(vm->vback_porch, 31, 20); vm 187 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c timing_v |= FLD_VAL(vm->vfront_porch, 19, 8); vm 188 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c timing_v |= FLD_VAL(vm->vsync_len, 7, 0); vm 193 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c struct videomode *vm, const struct hdmi_config *param) vm 198 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c video_fmt->y_res = param->vm.vactive; vm 199 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c video_fmt->x_res = param->vm.hactive; vm 201 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->hback_porch = param->vm.hback_porch; vm 
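The hdmi5_core entries above adjust the frame-composer timings in two directions: interlaced modes halve the vertical values (they become per-field counts) and double-clocked modes double the horizontal ones (pixel repetition). A compact sketch with stand-in types; the flag bits are assumptions, not the kernel's values:

    #include <stdio.h>

    #define FLAG_INTERLACED (1u << 0)   /* assumed bit, not the kernel value */
    #define FLAG_DOUBLECLK  (1u << 1)

    struct vm_sketch {
        unsigned hactive, hfront_porch, hsync_len, hback_porch;
        unsigned vactive, vfront_porch, vsync_len, vback_porch;
        unsigned flags;
    };

    static void adjust(struct vm_sketch *vm)
    {
        if (vm->flags & FLAG_INTERLACED) {  /* per-field vertical timings */
            vm->vactive /= 2;
            vm->vfront_porch /= 2;
            vm->vsync_len /= 2;
            vm->vback_porch /= 2;
        }
        if (vm->flags & FLAG_DOUBLECLK) {   /* pixel repetition */
            vm->hactive *= 2;
            vm->hfront_porch *= 2;
            vm->hsync_len *= 2;
            vm->hback_porch *= 2;
        }
    }

    int main(void)
    {
        struct vm_sketch vm = { 1440, 24, 126, 138, 480, 4, 6, 30,
                                FLAG_INTERLACED };
        adjust(&vm);
        printf("per-field vactive = %u\n", vm.vactive);  /* 240 */
        return 0;
    }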
202 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->hfront_porch = param->vm.hfront_porch; vm 203 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->hsync_len = param->vm.hsync_len; vm 204 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->vback_porch = param->vm.vback_porch; vm 205 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->vfront_porch = param->vm.vfront_porch; vm 206 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->vsync_len = param->vm.vsync_len; vm 208 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->flags = param->vm.flags; vm 210 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c if (param->vm.flags & DISPLAY_FLAGS_INTERLACED) { vm 212 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->vback_porch /= 2; vm 213 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->vfront_porch /= 2; vm 214 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->vsync_len /= 2; vm 217 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c if (param->vm.flags & DISPLAY_FLAGS_DOUBLECLK) { vm 219 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->hfront_porch *= 2; vm 220 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->hsync_len *= 2; vm 221 drivers/gpu/drm/omapdrm/dss/hdmi_wp.c vm->hback_porch *= 2; vm 214 drivers/gpu/drm/omapdrm/dss/omapdss.h const struct videomode *vm; vm 466 drivers/gpu/drm/omapdrm/dss/omapdss.h const struct videomode *vm); vm 528 drivers/gpu/drm/omapdrm/dss/omapdss.h const struct videomode *vm); vm 546 drivers/gpu/drm/omapdrm/dss/omapdss.h const struct videomode *vm); vm 594 drivers/gpu/drm/omapdrm/dss/omapdss.h const struct videomode *vm); vm 597 drivers/gpu/drm/omapdrm/dss/omapdss.h const struct videomode *vm); vm 611 drivers/gpu/drm/omapdrm/dss/omapdss.h const struct videomode *vm, bool mem_to_mem, vm 620 drivers/gpu/drm/omapdrm/dss/omapdss.h bool mem_to_mem, const struct videomode *vm, vm 79 drivers/gpu/drm/omapdrm/dss/output.c const struct videomode *vm) vm 82 drivers/gpu/drm/omapdrm/dss/output.c dssdev->dispc_channel, vm); vm 38 drivers/gpu/drm/omapdrm/omap_crtc.c struct videomode vm; vm 59 drivers/gpu/drm/omapdrm/omap_crtc.c return &omap_crtc->vm; vm 190 drivers/gpu/drm/omapdrm/omap_crtc.c &omap_crtc->vm); vm 207 drivers/gpu/drm/omapdrm/omap_crtc.c const struct videomode *vm) vm 213 drivers/gpu/drm/omapdrm/omap_crtc.c omap_crtc->vm = *vm; vm 494 drivers/gpu/drm/omapdrm/omap_crtc.c struct videomode vm = {0}; vm 497 drivers/gpu/drm/omapdrm/omap_crtc.c drm_display_mode_to_videomode(mode, &vm); vm 508 drivers/gpu/drm/omapdrm/omap_crtc.c &vm); vm 551 drivers/gpu/drm/omapdrm/omap_crtc.c drm_display_mode_to_videomode(mode, &omap_crtc->vm); vm 44 drivers/gpu/drm/omapdrm/omap_encoder.c static void omap_encoder_update_videomode_flags(struct videomode *vm, vm 47 drivers/gpu/drm/omapdrm/omap_encoder.c if (!(vm->flags & (DISPLAY_FLAGS_DE_LOW | vm 50 drivers/gpu/drm/omapdrm/omap_encoder.c vm->flags |= DISPLAY_FLAGS_DE_LOW; vm 52 drivers/gpu/drm/omapdrm/omap_encoder.c vm->flags |= DISPLAY_FLAGS_DE_HIGH; vm 55 drivers/gpu/drm/omapdrm/omap_encoder.c if (!(vm->flags & (DISPLAY_FLAGS_PIXDATA_POSEDGE | vm 58 drivers/gpu/drm/omapdrm/omap_encoder.c vm->flags |= DISPLAY_FLAGS_PIXDATA_POSEDGE; vm 60 drivers/gpu/drm/omapdrm/omap_encoder.c vm->flags |= DISPLAY_FLAGS_PIXDATA_NEGEDGE; vm 63 drivers/gpu/drm/omapdrm/omap_encoder.c if (!(vm->flags & (DISPLAY_FLAGS_SYNC_POSEDGE | vm 66 drivers/gpu/drm/omapdrm/omap_encoder.c vm->flags |= DISPLAY_FLAGS_SYNC_POSEDGE; vm 68 drivers/gpu/drm/omapdrm/omap_encoder.c vm->flags |= DISPLAY_FLAGS_SYNC_NEGEDGE; vm 106 drivers/gpu/drm/omapdrm/omap_encoder.c struct videomode vm = { 0 }; vm 114 drivers/gpu/drm/omapdrm/omap_encoder.c drm_display_mode_to_videomode(adjusted_mode, &vm); vm 126 
drivers/gpu/drm/omapdrm/omap_encoder.c omap_encoder_update_videomode_flags(&vm, dssdev->bus_flags); vm 133 drivers/gpu/drm/omapdrm/omap_encoder.c omap_encoder_update_videomode_flags(&vm, bus_flags); vm 137 drivers/gpu/drm/omapdrm/omap_encoder.c omap_encoder_update_videomode_flags(&vm, bus_flags); vm 140 drivers/gpu/drm/omapdrm/omap_encoder.c dss_mgr_set_timings(output, &vm); vm 278 drivers/gpu/drm/panel/panel-ilitek-ili9322.c struct videomode vm; vm 32 drivers/gpu/drm/panel/panel-lg-lg4573.c struct videomode vm; vm 99 drivers/gpu/drm/panel/panel-samsung-ld9040.c struct videomode vm; vm 276 drivers/gpu/drm/panel/panel-samsung-ld9040.c drm_display_mode_from_videomode(&ctx->vm, mode); vm 302 drivers/gpu/drm/panel/panel-samsung-ld9040.c ret = of_get_videomode(np, &ctx->vm, 0); vm 106 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c struct videomode vm; vm 935 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c drm_display_mode_from_videomode(&ctx->vm, mode); vm 961 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c ret = of_get_videomode(np, &ctx->vm, 0); vm 71 drivers/gpu/drm/panel/panel-seiko-43wvf1g.c struct videomode vm; vm 73 drivers/gpu/drm/panel/panel-seiko-43wvf1g.c videomode_from_timing(dt, &vm); vm 81 drivers/gpu/drm/panel/panel-seiko-43wvf1g.c drm_display_mode_from_videomode(&vm, mode); vm 130 drivers/gpu/drm/panel/panel-simple.c struct videomode vm; vm 132 drivers/gpu/drm/panel/panel-simple.c videomode_from_timing(dt, &vm); vm 140 drivers/gpu/drm/panel/panel-simple.c drm_display_mode_from_videomode(&vm, mode); vm 375 drivers/gpu/drm/panel/panel-simple.c struct videomode vm; vm 403 drivers/gpu/drm/panel/panel-simple.c videomode_from_timing(ot, &vm); vm 404 drivers/gpu/drm/panel/panel-simple.c drm_display_mode_from_videomode(&vm, &panel->override_mode); vm 3736 drivers/gpu/drm/radeon/cik.c unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; vm 137 drivers/gpu/drm/radeon/cik_sdma.c u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; vm 1428 drivers/gpu/drm/radeon/ni.c unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; vm 126 drivers/gpu/drm/radeon/ni_dma.c unsigned vm_id = ib->vm ? 
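The omap_encoder_update_videomode_flags() entries above apply a defaulting rule: a polarity pair (DE level, pixel-data edge, sync edge) is only filled from the bus flags when neither flag of the pair is already set, so explicit mode flags always win. Sketch of the DE case with illustrative flag values:

    #include <stdio.h>

    #define DE_LOW     (1u << 0)    /* assumed bits, not DISPLAY_FLAGS_* values */
    #define DE_HIGH    (1u << 1)
    #define BUS_DE_LOW (1u << 0)    /* assumed bus flag */

    static unsigned update_de(unsigned vm_flags, unsigned bus_flags)
    {
        /* Only default when neither polarity is set yet. */
        if (!(vm_flags & (DE_LOW | DE_HIGH)))
            vm_flags |= (bus_flags & BUS_DE_LOW) ? DE_LOW : DE_HIGH;
        return vm_flags;
    }

    int main(void)
    {
        printf("defaulted: %#x\n", update_de(0, BUS_DE_LOW));      /* DE_LOW */
        printf("kept:      %#x\n", update_de(DE_HIGH, BUS_DE_LOW)); /* DE_HIGH */
        return 0;
    }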
ib->vm->ids[ib->ring].id : 0; vm 482 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm; vm 828 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm; vm 963 drivers/gpu/drm/radeon/radeon.h struct radeon_vm vm; vm 1006 drivers/gpu/drm/radeon/radeon.h struct radeon_ib *ib, struct radeon_vm *vm, vm 1885 drivers/gpu/drm/radeon/radeon.h } vm; vm 2709 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_init(rdev) (rdev)->asic->vm.init((rdev)) vm 2710 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_fini(rdev) (rdev)->asic->vm.fini((rdev)) vm 2711 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_copy_pages(rdev, ib, pe, src, count) ((rdev)->asic->vm.copy_pages((rdev), (ib), (pe), (src), (count))) vm 2712 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_write_pages(rdev, ib, pe, addr, count, incr, flags) ((rdev)->asic->vm.write_pages((rdev), (ib), (pe), (addr), (count), (incr), (flags))) vm 2713 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_set_pages(rdev, ib, pe, addr, count, incr, flags) ((rdev)->asic->vm.set_pages((rdev), (ib), (pe), (addr), (count), (incr), (flags))) vm 2714 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_pad_ib(rdev, ib) ((rdev)->asic->vm.pad_ib((ib))) vm 2832 drivers/gpu/drm/radeon/radeon.h int radeon_vm_init(struct radeon_device *rdev, struct radeon_vm *vm); vm 2833 drivers/gpu/drm/radeon/radeon.h void radeon_vm_fini(struct radeon_device *rdev, struct radeon_vm *vm); vm 2835 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm, vm 2838 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm, int ring); vm 2840 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm, vm 2843 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm, vm 2847 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm); vm 2849 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm); vm 2851 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm); vm 2857 drivers/gpu/drm/radeon/radeon.h struct radeon_bo_va *radeon_vm_bo_find(struct radeon_vm *vm, vm 2860 drivers/gpu/drm/radeon/radeon.h struct radeon_vm *vm, vm 1678 drivers/gpu/drm/radeon/radeon_asic.c .vm = { vm 1796 drivers/gpu/drm/radeon/radeon_asic.c .vm = { vm 1934 drivers/gpu/drm/radeon/radeon_asic.c .vm = { vm 2104 drivers/gpu/drm/radeon/radeon_asic.c .vm = { vm 2217 drivers/gpu/drm/radeon/radeon_asic.c .vm = { vm 196 drivers/gpu/drm/radeon/radeon_cs.c p->vm_bos = radeon_vm_get_bos(p->rdev, p->ib.vm, vm 498 drivers/gpu/drm/radeon/radeon_cs.c struct radeon_vm *vm) vm 504 drivers/gpu/drm/radeon/radeon_cs.c r = radeon_vm_update_page_directory(rdev, vm); vm 508 drivers/gpu/drm/radeon/radeon_cs.c r = radeon_vm_clear_freed(rdev, vm); vm 512 drivers/gpu/drm/radeon/radeon_cs.c if (vm->ib_bo_va == NULL) { vm 517 drivers/gpu/drm/radeon/radeon_cs.c r = radeon_vm_bo_update(rdev, vm->ib_bo_va, vm 526 drivers/gpu/drm/radeon/radeon_cs.c bo_va = radeon_vm_bo_find(vm, bo); vm 528 drivers/gpu/drm/radeon/radeon_cs.c dev_err(rdev->dev, "bo %p not in vm %p\n", bo, vm); vm 539 drivers/gpu/drm/radeon/radeon_cs.c return radeon_vm_clear_invalids(rdev, vm); vm 546 drivers/gpu/drm/radeon/radeon_cs.c struct radeon_vm *vm = &fpriv->vm; vm 569 drivers/gpu/drm/radeon/radeon_cs.c mutex_lock(&vm->mutex); vm 570 drivers/gpu/drm/radeon/radeon_cs.c r = radeon_bo_vm_update_pte(parser, vm); vm 590 drivers/gpu/drm/radeon/radeon_cs.c mutex_unlock(&vm->mutex); vm 607 drivers/gpu/drm/radeon/radeon_cs.c struct radeon_vm *vm = NULL; vm 615 drivers/gpu/drm/radeon/radeon_cs.c vm = &fpriv->vm; vm 625 drivers/gpu/drm/radeon/radeon_cs.c vm, ib_chunk->length_dw * 4); vm 
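The cik.c/ni.c/si.c entries above all share one idiom: an indirect buffer submitted without a VM runs under VMID 0, which the hardware reserves for the kernel's own GART mapping, while per-process VMs carry a per-ring id. A stand-in sketch of that selection:

    #include <stdio.h>

    #define NUM_RINGS 8

    struct vm_id { unsigned id; };
    struct radeon_vm_sketch { struct vm_id ids[NUM_RINGS]; };
    struct ib_sketch { struct radeon_vm_sketch *vm; int ring; };

    /* Mirrors: unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; */
    static unsigned ib_vm_id(const struct ib_sketch *ib)
    {
        return ib->vm ? ib->vm->ids[ib->ring].id : 0;
    }

    int main(void)
    {
        struct radeon_vm_sketch vm = { .ids[3].id = 5 };
        struct ib_sketch kernel_ib = { NULL, 3 }, user_ib = { &vm, 3 };

        printf("kernel IB -> VMID %u, user IB -> VMID %u\n",
               ib_vm_id(&kernel_ib), ib_vm_id(&user_ib));
        return 0;
    }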
647 drivers/gpu/drm/radeon/radeon_cs.c vm, ib_chunk->length_dw * 4); vm 153 drivers/gpu/drm/radeon/radeon_gem.c struct radeon_vm *vm = &fpriv->vm; vm 167 drivers/gpu/drm/radeon/radeon_gem.c bo_va = radeon_vm_bo_find(vm, rbo); vm 169 drivers/gpu/drm/radeon/radeon_gem.c bo_va = radeon_vm_bo_add(rdev, vm, rbo); vm 184 drivers/gpu/drm/radeon/radeon_gem.c struct radeon_vm *vm = &fpriv->vm; vm 199 drivers/gpu/drm/radeon/radeon_gem.c bo_va = radeon_vm_bo_find(vm, rbo); vm 565 drivers/gpu/drm/radeon/radeon_gem.c vm_bos = radeon_vm_get_bos(rdev, bo_va->vm, &list); vm 581 drivers/gpu/drm/radeon/radeon_gem.c mutex_lock(&bo_va->vm->mutex); vm 582 drivers/gpu/drm/radeon/radeon_gem.c r = radeon_vm_clear_freed(rdev, bo_va->vm); vm 590 drivers/gpu/drm/radeon/radeon_gem.c mutex_unlock(&bo_va->vm->mutex); vm 673 drivers/gpu/drm/radeon/radeon_gem.c bo_va = radeon_vm_bo_find(&fpriv->vm, rbo); vm 59 drivers/gpu/drm/radeon/radeon_ib.c struct radeon_ib *ib, struct radeon_vm *vm, vm 75 drivers/gpu/drm/radeon/radeon_ib.c ib->vm = vm; vm 76 drivers/gpu/drm/radeon/radeon_ib.c if (vm) { vm 145 drivers/gpu/drm/radeon/radeon_ib.c if (ib->vm) { vm 147 drivers/gpu/drm/radeon/radeon_ib.c vm_id_fence = radeon_vm_grab_id(rdev, ib->vm, ib->ring); vm 159 drivers/gpu/drm/radeon/radeon_ib.c if (ib->vm) vm 160 drivers/gpu/drm/radeon/radeon_ib.c radeon_vm_flush(rdev, ib->vm, ib->ring, vm 178 drivers/gpu/drm/radeon/radeon_ib.c if (ib->vm) vm 179 drivers/gpu/drm/radeon/radeon_ib.c radeon_vm_fence(rdev, ib->vm, ib->fence); vm 647 drivers/gpu/drm/radeon/radeon_kms.c struct radeon_vm *vm; vm 656 drivers/gpu/drm/radeon/radeon_kms.c vm = &fpriv->vm; vm 657 drivers/gpu/drm/radeon/radeon_kms.c r = radeon_vm_init(rdev, vm); vm 665 drivers/gpu/drm/radeon/radeon_kms.c radeon_vm_fini(rdev, vm); vm 672 drivers/gpu/drm/radeon/radeon_kms.c vm->ib_bo_va = radeon_vm_bo_add(rdev, vm, vm 674 drivers/gpu/drm/radeon/radeon_kms.c r = radeon_vm_bo_set_addr(rdev, vm->ib_bo_va, vm 679 drivers/gpu/drm/radeon/radeon_kms.c radeon_vm_fini(rdev, vm); vm 722 drivers/gpu/drm/radeon/radeon_kms.c struct radeon_vm *vm = &fpriv->vm; vm 728 drivers/gpu/drm/radeon/radeon_kms.c if (vm->ib_bo_va) vm 729 drivers/gpu/drm/radeon/radeon_kms.c radeon_vm_bo_rmv(rdev, vm->ib_bo_va); vm 732 drivers/gpu/drm/radeon/radeon_kms.c radeon_vm_fini(rdev, vm); vm 129 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm, vm 135 drivers/gpu/drm/radeon/radeon_vm.c list = kvmalloc_array(vm->max_pde_used + 2, vm 141 drivers/gpu/drm/radeon/radeon_vm.c list[0].robj = vm->page_directory; vm 144 drivers/gpu/drm/radeon/radeon_vm.c list[0].tv.bo = &vm->page_directory->tbo; vm 149 drivers/gpu/drm/radeon/radeon_vm.c for (i = 0, idx = 1; i <= vm->max_pde_used; i++) { vm 150 drivers/gpu/drm/radeon/radeon_vm.c if (!vm->page_tables[i].bo) vm 153 drivers/gpu/drm/radeon/radeon_vm.c list[idx].robj = vm->page_tables[i].bo; vm 178 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm, int ring) vm 181 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm_id *vm_id = &vm->ids[ring]; vm 237 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm, vm 240 drivers/gpu/drm/radeon/radeon_vm.c uint64_t pd_addr = radeon_bo_gpu_offset(vm->page_directory); vm 241 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm_id *vm_id = &vm->ids[ring]; vm 246 drivers/gpu/drm/radeon/radeon_vm.c trace_radeon_vm_flush(pd_addr, ring, vm->ids[ring].id); vm 269 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm, vm 272 drivers/gpu/drm/radeon/radeon_vm.c unsigned vm_id = vm->ids[fence->ring].id; vm 277 
drivers/gpu/drm/radeon/radeon_vm.c radeon_fence_unref(&vm->ids[fence->ring].last_id_use); vm 278 drivers/gpu/drm/radeon/radeon_vm.c vm->ids[fence->ring].last_id_use = radeon_fence_ref(fence); vm 293 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_bo_va *radeon_vm_bo_find(struct radeon_vm *vm, vm 299 drivers/gpu/drm/radeon/radeon_vm.c if (bo_va->vm == vm) { vm 320 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm, vm 329 drivers/gpu/drm/radeon/radeon_vm.c bo_va->vm = vm; vm 338 drivers/gpu/drm/radeon/radeon_vm.c mutex_lock(&vm->mutex); vm 340 drivers/gpu/drm/radeon/radeon_vm.c mutex_unlock(&vm->mutex); vm 452 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm = bo_va->vm; vm 477 drivers/gpu/drm/radeon/radeon_vm.c mutex_lock(&vm->mutex); vm 482 drivers/gpu/drm/radeon/radeon_vm.c it = interval_tree_iter_first(&vm->va, soffset, eoffset); vm 490 drivers/gpu/drm/radeon/radeon_vm.c mutex_unlock(&vm->mutex); vm 501 drivers/gpu/drm/radeon/radeon_vm.c mutex_unlock(&vm->mutex); vm 507 drivers/gpu/drm/radeon/radeon_vm.c tmp->vm = vm; vm 510 drivers/gpu/drm/radeon/radeon_vm.c interval_tree_remove(&bo_va->it, &vm->va); vm 511 drivers/gpu/drm/radeon/radeon_vm.c spin_lock(&vm->status_lock); vm 515 drivers/gpu/drm/radeon/radeon_vm.c list_add(&tmp->vm_status, &vm->freed); vm 516 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 520 drivers/gpu/drm/radeon/radeon_vm.c spin_lock(&vm->status_lock); vm 523 drivers/gpu/drm/radeon/radeon_vm.c list_add(&bo_va->vm_status, &vm->cleared); vm 524 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 525 drivers/gpu/drm/radeon/radeon_vm.c interval_tree_insert(&bo_va->it, &vm->va); vm 535 drivers/gpu/drm/radeon/radeon_vm.c if (eoffset > vm->max_pde_used) vm 536 drivers/gpu/drm/radeon/radeon_vm.c vm->max_pde_used = eoffset; vm 544 drivers/gpu/drm/radeon/radeon_vm.c if (vm->page_tables[pt_idx].bo) vm 548 drivers/gpu/drm/radeon/radeon_vm.c mutex_unlock(&vm->mutex); vm 564 drivers/gpu/drm/radeon/radeon_vm.c mutex_lock(&vm->mutex); vm 565 drivers/gpu/drm/radeon/radeon_vm.c if (vm->page_tables[pt_idx].bo) { vm 567 drivers/gpu/drm/radeon/radeon_vm.c mutex_unlock(&vm->mutex); vm 569 drivers/gpu/drm/radeon/radeon_vm.c mutex_lock(&vm->mutex); vm 573 drivers/gpu/drm/radeon/radeon_vm.c vm->page_tables[pt_idx].addr = 0; vm 574 drivers/gpu/drm/radeon/radeon_vm.c vm->page_tables[pt_idx].bo = pt; vm 577 drivers/gpu/drm/radeon/radeon_vm.c mutex_unlock(&vm->mutex); vm 642 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm) vm 644 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_bo *pd = vm->page_directory; vm 656 drivers/gpu/drm/radeon/radeon_vm.c ndw += vm->max_pde_used * 6; vm 668 drivers/gpu/drm/radeon/radeon_vm.c for (pt_idx = 0; pt_idx <= vm->max_pde_used; ++pt_idx) { vm 669 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_bo *bo = vm->page_tables[pt_idx].bo; vm 676 drivers/gpu/drm/radeon/radeon_vm.c if (vm->page_tables[pt_idx].addr == pt) vm 678 drivers/gpu/drm/radeon/radeon_vm.c vm->page_tables[pt_idx].addr = pt; vm 815 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm, vm 828 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_bo *pt = vm->page_tables[pt_idx].bo; vm 886 drivers/gpu/drm/radeon/radeon_vm.c static void radeon_vm_fence_pts(struct radeon_vm *vm, vm 896 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_fence(vm->page_tables[i].bo, fence, true); vm 916 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm = bo_va->vm; vm 925 drivers/gpu/drm/radeon/radeon_vm.c bo_va->bo, vm); vm 929 drivers/gpu/drm/radeon/radeon_vm.c 
spin_lock(&vm->status_lock); vm 932 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 938 drivers/gpu/drm/radeon/radeon_vm.c list_add(&bo_va->vm_status, &vm->cleared); vm 940 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 1009 drivers/gpu/drm/radeon/radeon_vm.c radeon_sync_fence(&ib.sync, vm->ids[i].last_id_use); vm 1012 drivers/gpu/drm/radeon/radeon_vm.c r = radeon_vm_update_ptes(rdev, vm, &ib, bo_va->it.start, vm 1029 drivers/gpu/drm/radeon/radeon_vm.c radeon_vm_fence_pts(vm, bo_va->it.start, bo_va->it.last + 1, ib.fence); vm 1049 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm) vm 1054 drivers/gpu/drm/radeon/radeon_vm.c spin_lock(&vm->status_lock); vm 1055 drivers/gpu/drm/radeon/radeon_vm.c while (!list_empty(&vm->freed)) { vm 1056 drivers/gpu/drm/radeon/radeon_vm.c bo_va = list_first_entry(&vm->freed, vm 1058 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 1063 drivers/gpu/drm/radeon/radeon_vm.c spin_lock(&vm->status_lock); vm 1070 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 1087 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm) vm 1092 drivers/gpu/drm/radeon/radeon_vm.c spin_lock(&vm->status_lock); vm 1093 drivers/gpu/drm/radeon/radeon_vm.c while (!list_empty(&vm->invalidated)) { vm 1094 drivers/gpu/drm/radeon/radeon_vm.c bo_va = list_first_entry(&vm->invalidated, vm 1096 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 1102 drivers/gpu/drm/radeon/radeon_vm.c spin_lock(&vm->status_lock); vm 1104 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 1122 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_vm *vm = bo_va->vm; vm 1126 drivers/gpu/drm/radeon/radeon_vm.c mutex_lock(&vm->mutex); vm 1128 drivers/gpu/drm/radeon/radeon_vm.c interval_tree_remove(&bo_va->it, &vm->va); vm 1130 drivers/gpu/drm/radeon/radeon_vm.c spin_lock(&vm->status_lock); vm 1134 drivers/gpu/drm/radeon/radeon_vm.c list_add(&bo_va->vm_status, &vm->freed); vm 1139 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&vm->status_lock); vm 1141 drivers/gpu/drm/radeon/radeon_vm.c mutex_unlock(&vm->mutex); vm 1159 drivers/gpu/drm/radeon/radeon_vm.c spin_lock(&bo_va->vm->status_lock); vm 1162 drivers/gpu/drm/radeon/radeon_vm.c list_add(&bo_va->vm_status, &bo_va->vm->invalidated); vm 1163 drivers/gpu/drm/radeon/radeon_vm.c spin_unlock(&bo_va->vm->status_lock); vm 1175 drivers/gpu/drm/radeon/radeon_vm.c int radeon_vm_init(struct radeon_device *rdev, struct radeon_vm *vm) vm 1182 drivers/gpu/drm/radeon/radeon_vm.c vm->ib_bo_va = NULL; vm 1184 drivers/gpu/drm/radeon/radeon_vm.c vm->ids[i].id = 0; vm 1185 drivers/gpu/drm/radeon/radeon_vm.c vm->ids[i].flushed_updates = NULL; vm 1186 drivers/gpu/drm/radeon/radeon_vm.c vm->ids[i].last_id_use = NULL; vm 1188 drivers/gpu/drm/radeon/radeon_vm.c mutex_init(&vm->mutex); vm 1189 drivers/gpu/drm/radeon/radeon_vm.c vm->va = RB_ROOT_CACHED; vm 1190 drivers/gpu/drm/radeon/radeon_vm.c spin_lock_init(&vm->status_lock); vm 1191 drivers/gpu/drm/radeon/radeon_vm.c INIT_LIST_HEAD(&vm->invalidated); vm 1192 drivers/gpu/drm/radeon/radeon_vm.c INIT_LIST_HEAD(&vm->freed); vm 1193 drivers/gpu/drm/radeon/radeon_vm.c INIT_LIST_HEAD(&vm->cleared); vm 1200 drivers/gpu/drm/radeon/radeon_vm.c vm->page_tables = kzalloc(pts_size, GFP_KERNEL); vm 1201 drivers/gpu/drm/radeon/radeon_vm.c if (vm->page_tables == NULL) { vm 1208 drivers/gpu/drm/radeon/radeon_vm.c NULL, &vm->page_directory); vm 1212 drivers/gpu/drm/radeon/radeon_vm.c r = radeon_vm_clear_bo(rdev, vm->page_directory); vm 
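The radeon_vm_clear_freed() entries above show a classic lock-drop walk: each bo_va is popped from vm->freed under vm->status_lock, the lock is released for the potentially sleeping page-table update, then re-taken for the next entry. A userspace sketch of the same shape, with a pthread mutex standing in for the spinlock:

    #include <pthread.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct bo_va { struct bo_va *next; int id; };

    static pthread_mutex_t status_lock = PTHREAD_MUTEX_INITIALIZER;
    static struct bo_va *freed;         /* stands in for vm->freed */

    static void clear_freed(void)
    {
        pthread_mutex_lock(&status_lock);
        while (freed) {
            struct bo_va *bo_va = freed;
            freed = bo_va->next;
            pthread_mutex_unlock(&status_lock);

            /* Work done without the lock held (may sleep in the kernel). */
            printf("updating PTEs for bo_va %d\n", bo_va->id);
            free(bo_va);

            pthread_mutex_lock(&status_lock);
        }
        pthread_mutex_unlock(&status_lock);
    }

    int main(void)
    {
        for (int i = 0; i < 3; i++) {
            struct bo_va *b = malloc(sizeof(*b));
            b->id = i; b->next = freed; freed = b;
        }
        clear_freed();
        return 0;
    }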
1214 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_unref(&vm->page_directory); vm 1215 drivers/gpu/drm/radeon/radeon_vm.c vm->page_directory = NULL; vm 1231 drivers/gpu/drm/radeon/radeon_vm.c void radeon_vm_fini(struct radeon_device *rdev, struct radeon_vm *vm) vm 1236 drivers/gpu/drm/radeon/radeon_vm.c if (!RB_EMPTY_ROOT(&vm->va.rb_root)) { vm 1240 drivers/gpu/drm/radeon/radeon_vm.c &vm->va.rb_root, it.rb) { vm 1241 drivers/gpu/drm/radeon/radeon_vm.c interval_tree_remove(&bo_va->it, &vm->va); vm 1250 drivers/gpu/drm/radeon/radeon_vm.c list_for_each_entry_safe(bo_va, tmp, &vm->freed, vm_status) { vm 1257 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_unref(&vm->page_tables[i].bo); vm 1258 drivers/gpu/drm/radeon/radeon_vm.c kfree(vm->page_tables); vm 1260 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_unref(&vm->page_directory); vm 1263 drivers/gpu/drm/radeon/radeon_vm.c radeon_fence_unref(&vm->ids[i].flushed_updates); vm 1264 drivers/gpu/drm/radeon/radeon_vm.c radeon_fence_unref(&vm->ids[i].last_id_use); vm 1267 drivers/gpu/drm/radeon/radeon_vm.c mutex_destroy(&vm->mutex); vm 3407 drivers/gpu/drm/radeon/si.c unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; vm 549 drivers/gpu/drm/stm/ltdc.c struct videomode vm; vm 563 drivers/gpu/drm/stm/ltdc.c drm_display_mode_to_videomode(mode, &vm); vm 566 drivers/gpu/drm/stm/ltdc.c DRM_DEBUG_DRIVER("Video mode: %dx%d", vm.hactive, vm.vactive); vm 568 drivers/gpu/drm/stm/ltdc.c vm.hfront_porch, vm.hback_porch, vm.hsync_len, vm 569 drivers/gpu/drm/stm/ltdc.c vm.vfront_porch, vm.vback_porch, vm.vsync_len); vm 572 drivers/gpu/drm/stm/ltdc.c hsync = vm.hsync_len - 1; vm 573 drivers/gpu/drm/stm/ltdc.c vsync = vm.vsync_len - 1; vm 574 drivers/gpu/drm/stm/ltdc.c accum_hbp = hsync + vm.hback_porch; vm 575 drivers/gpu/drm/stm/ltdc.c accum_vbp = vsync + vm.vback_porch; vm 576 drivers/gpu/drm/stm/ltdc.c accum_act_w = accum_hbp + vm.hactive; vm 577 drivers/gpu/drm/stm/ltdc.c accum_act_h = accum_vbp + vm.vactive; vm 578 drivers/gpu/drm/stm/ltdc.c total_width = accum_act_w + vm.hfront_porch; vm 579 drivers/gpu/drm/stm/ltdc.c total_height = accum_act_h + vm.vfront_porch; vm 584 drivers/gpu/drm/stm/ltdc.c if (vm.flags & DISPLAY_FLAGS_HSYNC_HIGH) vm 587 drivers/gpu/drm/stm/ltdc.c if (vm.flags & DISPLAY_FLAGS_VSYNC_HIGH) vm 590 drivers/gpu/drm/stm/ltdc.c if (vm.flags & DISPLAY_FLAGS_DE_LOW) vm 593 drivers/gpu/drm/stm/ltdc.c if (vm.flags & DISPLAY_FLAGS_PIXDATA_NEGEDGE) vm 79 drivers/gpu/drm/sun4i/sun8i_vi_layer.c u32 vn = 0, vm = 0; vm 155 drivers/gpu/drm/sun4i/sun8i_vi_layer.c vm = src_h; vm 193 drivers/gpu/drm/sun4i/sun8i_vi_layer.c SUN8I_MIXER_CHAN_VI_DS_M(vm)); vm 197 drivers/gpu/drm/sun4i/sun8i_vi_layer.c SUN8I_MIXER_CHAN_VI_DS_M(vm)); vm 147 drivers/gpu/drm/tilcdc/tilcdc_panel.c struct videomode vm; vm 149 drivers/gpu/drm/tilcdc/tilcdc_panel.c if (videomode_from_timings(timings, &vm, i)) vm 152 drivers/gpu/drm/tilcdc/tilcdc_panel.c drm_display_mode_from_videomode(&vm, mode); vm 111 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c if (ctx->vm && ctx->vm_size_left < PAGE_SIZE) { vm 112 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int ret = ctx->vm->reserve_mem(ctx->vm, ctx->vm->gran); vm 117 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c ctx->vm_size_left += ctx->vm->gran; vm 118 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c ctx->total_mem += ctx->vm->gran; vm 125 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c if (ctx->vm) vm 157 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c if (ctx->vm && ctx->total_mem) { vm 158 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c ctx->vm->unreserve_mem(ctx->vm, 
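The ltdc entries above program cumulative positions: the sync width minus one is the base, and back porch, active area, and front porch are accumulated on top, so every register value is a position minus one. A runnable sketch with illustrative 640x480 numbers:

    #include <stdio.h>

    int main(void)
    {
        /* Classic 640x480@60 timings, purely illustrative. */
        unsigned hactive = 640, hfp = 16, hsw = 96, hbp = 48;
        unsigned vactive = 480, vfp = 10, vsw = 2,  vbp = 33;

        unsigned hsync        = hsw - 1;
        unsigned accum_hbp    = hsync + hbp;
        unsigned accum_act_w  = accum_hbp + hactive;
        unsigned total_width  = accum_act_w + hfp;

        unsigned vsync        = vsw - 1;
        unsigned accum_vbp    = vsync + vbp;
        unsigned accum_act_h  = accum_vbp + vactive;
        unsigned total_height = accum_act_h + vfp;

        printf("H: %u/%u/%u/%u  V: %u/%u/%u/%u\n",
               hsync, accum_hbp, accum_act_w, total_width,
               vsync, accum_vbp, accum_act_h, total_height);
        return 0;
    }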
ctx->total_mem); vm 86 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h struct vmw_validation_mem *vm; vm 141 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h struct vmw_validation_mem *vm) vm 143 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h ctx->vm = vm; vm 361 drivers/gpu/drm/zte/zx_vou.c struct videomode vm; vm 367 drivers/gpu/drm/zte/zx_vou.c drm_display_mode_to_videomode(mode, &vm); vm 370 drivers/gpu/drm/zte/zx_vou.c val = V_ACTIVE((interlaced ? vm.vactive / 2 : vm.vactive) - 1); vm 371 drivers/gpu/drm/zte/zx_vou.c val |= H_ACTIVE(vm.hactive - 1); vm 374 drivers/gpu/drm/zte/zx_vou.c val = SYNC_WIDE(vm.hsync_len - 1); vm 375 drivers/gpu/drm/zte/zx_vou.c val |= BACK_PORCH(vm.hback_porch - 1); vm 376 drivers/gpu/drm/zte/zx_vou.c val |= FRONT_PORCH(vm.hfront_porch - 1); vm 379 drivers/gpu/drm/zte/zx_vou.c val = SYNC_WIDE(vm.vsync_len - 1); vm 380 drivers/gpu/drm/zte/zx_vou.c val |= BACK_PORCH(vm.vback_porch - 1); vm 381 drivers/gpu/drm/zte/zx_vou.c val |= FRONT_PORCH(vm.vfront_porch - 1); vm 390 drivers/gpu/drm/zte/zx_vou.c val |= ((vm.vactive / 2 - 1) << shift) & mask; vm 393 drivers/gpu/drm/zte/zx_vou.c val = SYNC_WIDE(vm.vsync_len - 1); vm 398 drivers/gpu/drm/zte/zx_vou.c val |= BACK_PORCH(vm.vback_porch); vm 399 drivers/gpu/drm/zte/zx_vou.c val |= FRONT_PORCH(vm.vfront_porch - 1); vm 404 drivers/gpu/drm/zte/zx_vou.c if (vm.flags & DISPLAY_FLAGS_VSYNC_LOW) vm 406 drivers/gpu/drm/zte/zx_vou.c if (vm.flags & DISPLAY_FLAGS_HSYNC_LOW) vm 430 drivers/gpu/drm/zte/zx_vou.c vm.hactive << CHN_SCREEN_W_SHIFT); vm 432 drivers/gpu/drm/zte/zx_vou.c vm.vactive << CHN_SCREEN_H_SHIFT); vm 134 drivers/irqchip/irq-gic-v3-its.c struct its_vm *vm; vm 178 drivers/irqchip/irq-gic-v3-its.c static u16 get_its_list(struct its_vm *vm) vm 187 drivers/irqchip/irq-gic-v3-its.c if (vm->vlpi_count[its->list_nr]) vm 1066 drivers/irqchip/irq-gic-v3-its.c va = page_address(its_dev->event_map.vm->vprop_page); vm 1219 drivers/irqchip/irq-gic-v3-its.c static void its_map_vm(struct its_node *its, struct its_vm *vm) vm 1233 drivers/irqchip/irq-gic-v3-its.c vm->vlpi_count[its->list_nr]++; vm 1235 drivers/irqchip/irq-gic-v3-its.c if (vm->vlpi_count[its->list_nr] == 1) { vm 1238 drivers/irqchip/irq-gic-v3-its.c for (i = 0; i < vm->nr_vpes; i++) { vm 1239 drivers/irqchip/irq-gic-v3-its.c struct its_vpe *vpe = vm->vpes[i]; vm 1253 drivers/irqchip/irq-gic-v3-its.c static void its_unmap_vm(struct its_node *its, struct its_vm *vm) vm 1263 drivers/irqchip/irq-gic-v3-its.c if (!--vm->vlpi_count[its->list_nr]) { vm 1266 drivers/irqchip/irq-gic-v3-its.c for (i = 0; i < vm->nr_vpes; i++) vm 1267 drivers/irqchip/irq-gic-v3-its.c its_send_vmapp(its, vm->vpes[i], false); vm 1284 drivers/irqchip/irq-gic-v3-its.c if (!its_dev->event_map.vm) { vm 1294 drivers/irqchip/irq-gic-v3-its.c its_dev->event_map.vm = info->map->vm; vm 1296 drivers/irqchip/irq-gic-v3-its.c } else if (its_dev->event_map.vm != info->map->vm) { vm 1309 drivers/irqchip/irq-gic-v3-its.c its_map_vm(its_dev->its, info->map->vm); vm 1343 drivers/irqchip/irq-gic-v3-its.c if (!its_dev->event_map.vm || vm 1344 drivers/irqchip/irq-gic-v3-its.c !its_dev->event_map.vlpi_maps[event].vm) { vm 1365 drivers/irqchip/irq-gic-v3-its.c if (!its_dev->event_map.vm || !irqd_is_forwarded_to_vcpu(d)) { vm 1381 drivers/irqchip/irq-gic-v3-its.c its_unmap_vm(its_dev->its, its_dev->event_map.vm); vm 1388 drivers/irqchip/irq-gic-v3-its.c its_dev->event_map.vm = NULL; vm 1401 drivers/irqchip/irq-gic-v3-its.c if (!its_dev->event_map.vm || !irqd_is_forwarded_to_vcpu(d)) vm 3055 drivers/irqchip/irq-gic-v3-its.c struct 
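The its_map_vm()/its_unmap_vm() entries above are reference counted per ITS: the first device routing a VLPI through an ITS maps every vPE of the VM there (VMAPP), and the last one unmaps them. A sketch of that refcount shape, simplified to a single ITS (the kernel keeps one count per its->list_nr):

    #include <stdio.h>

    struct vm_sketch { int vlpi_count; int nr_vpes; };

    static void map_vm(struct vm_sketch *vm)
    {
        if (++vm->vlpi_count == 1)          /* first user of this ITS */
            for (int i = 0; i < vm->nr_vpes; i++)
                printf("VMAPP vpe %d\n", i);    /* its_send_vmapp(.., true) */
    }

    static void unmap_vm(struct vm_sketch *vm)
    {
        if (--vm->vlpi_count == 0)          /* last user gone */
            for (int i = 0; i < vm->nr_vpes; i++)
                printf("VUNMAPP vpe %d\n", i);  /* its_send_vmapp(.., false) */
    }

    int main(void)
    {
        struct vm_sketch vm = { 0, 2 };
        map_vm(&vm); map_vm(&vm);       /* second mapping: no VMAPP */
        unmap_vm(&vm); unmap_vm(&vm);   /* only the last unmap sends VUNMAPP */
        return 0;
    }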
its_vm *vm = domain->host_data; vm 3065 drivers/irqchip/irq-gic-v3-its.c BUG_ON(vm != vpe->its_vm); vm 3067 drivers/irqchip/irq-gic-v3-its.c clear_bit(data->hwirq, vm->db_bitmap); vm 3072 drivers/irqchip/irq-gic-v3-its.c if (bitmap_empty(vm->db_bitmap, vm->nr_db_lpis)) { vm 3073 drivers/irqchip/irq-gic-v3-its.c its_lpi_free(vm->db_bitmap, vm->db_lpi_base, vm->nr_db_lpis); vm 3074 drivers/irqchip/irq-gic-v3-its.c its_free_prop_table(vm->vprop_page); vm 3081 drivers/irqchip/irq-gic-v3-its.c struct its_vm *vm = args; vm 3086 drivers/irqchip/irq-gic-v3-its.c BUG_ON(!vm); vm 3103 drivers/irqchip/irq-gic-v3-its.c vm->db_bitmap = bitmap; vm 3104 drivers/irqchip/irq-gic-v3-its.c vm->db_lpi_base = base; vm 3105 drivers/irqchip/irq-gic-v3-its.c vm->nr_db_lpis = nr_ids; vm 3106 drivers/irqchip/irq-gic-v3-its.c vm->vprop_page = vprop_page; vm 3109 drivers/irqchip/irq-gic-v3-its.c vm->vpes[i]->vpe_db_lpi = base + i; vm 3110 drivers/irqchip/irq-gic-v3-its.c err = its_vpe_init(vm->vpes[i]); vm 3114 drivers/irqchip/irq-gic-v3-its.c vm->vpes[i]->vpe_db_lpi); vm 3118 drivers/irqchip/irq-gic-v3-its.c &its_vpe_irq_chip, vm->vpes[i]); vm 89 drivers/irqchip/irq-gic-v4.c int its_alloc_vcpu_irqs(struct its_vm *vm) vm 93 drivers/irqchip/irq-gic-v4.c vm->fwnode = irq_domain_alloc_named_id_fwnode("GICv4-vpe", vm 95 drivers/irqchip/irq-gic-v4.c if (!vm->fwnode) vm 98 drivers/irqchip/irq-gic-v4.c vm->domain = irq_domain_create_hierarchy(gic_domain, 0, vm->nr_vpes, vm 99 drivers/irqchip/irq-gic-v4.c vm->fwnode, vpe_domain_ops, vm 100 drivers/irqchip/irq-gic-v4.c vm); vm 101 drivers/irqchip/irq-gic-v4.c if (!vm->domain) vm 104 drivers/irqchip/irq-gic-v4.c for (i = 0; i < vm->nr_vpes; i++) { vm 105 drivers/irqchip/irq-gic-v4.c vm->vpes[i]->its_vm = vm; vm 106 drivers/irqchip/irq-gic-v4.c vm->vpes[i]->idai = true; vm 109 drivers/irqchip/irq-gic-v4.c vpe_base_irq = __irq_domain_alloc_irqs(vm->domain, -1, vm->nr_vpes, vm 110 drivers/irqchip/irq-gic-v4.c NUMA_NO_NODE, vm, vm 115 drivers/irqchip/irq-gic-v4.c for (i = 0; i < vm->nr_vpes; i++) vm 116 drivers/irqchip/irq-gic-v4.c vm->vpes[i]->irq = vpe_base_irq + i; vm 121 drivers/irqchip/irq-gic-v4.c if (vm->domain) vm 122 drivers/irqchip/irq-gic-v4.c irq_domain_remove(vm->domain); vm 123 drivers/irqchip/irq-gic-v4.c if (vm->fwnode) vm 124 drivers/irqchip/irq-gic-v4.c irq_domain_free_fwnode(vm->fwnode); vm 129 drivers/irqchip/irq-gic-v4.c void its_free_vcpu_irqs(struct its_vm *vm) vm 131 drivers/irqchip/irq-gic-v4.c irq_domain_free_irqs(vm->vpes[0]->irq, vm->nr_vpes); vm 132 drivers/irqchip/irq-gic-v4.c irq_domain_remove(vm->domain); vm 133 drivers/irqchip/irq-gic-v4.c irq_domain_free_fwnode(vm->fwnode); vm 79 drivers/media/radio/si4713/radio-platform-si4713.c struct v4l2_modulator *vm) vm 82 drivers/media/radio/si4713/radio-platform-si4713.c g_modulator, vm); vm 86 drivers/media/radio/si4713/radio-platform-si4713.c const struct v4l2_modulator *vm) vm 89 drivers/media/radio/si4713/radio-platform-si4713.c s_modulator, vm); vm 77 drivers/media/radio/si4713/radio-usb-si4713.c struct v4l2_modulator *vm) vm 81 drivers/media/radio/si4713/radio-usb-si4713.c return v4l2_subdev_call(radio->v4l2_subdev, tuner, g_modulator, vm); vm 85 drivers/media/radio/si4713/radio-usb-si4713.c const struct v4l2_modulator *vm) vm 89 drivers/media/radio/si4713/radio-usb-si4713.c return v4l2_subdev_call(radio->v4l2_subdev, tuner, s_modulator, vm); vm 1055 drivers/media/radio/si4713/si4713.c struct v4l2_modulator vm; vm 1064 drivers/media/radio/si4713/si4713.c vm.index = 0; vm 1066 
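The its_alloc_vcpu_irqs() entries above use the usual goto-unwind shape: each setup step is checked, and a single error label frees whatever was already created, testing each pointer first. A stand-in sketch with malloc/free in place of the fwnode and domain allocators; the caller zero-initializes the struct so the unwind can test every member:

    #include <stdio.h>
    #include <stdlib.h>

    struct vm { void *fwnode, *domain; };

    static int alloc_vcpu_irqs(struct vm *vm)
    {
        vm->fwnode = malloc(16);    /* stands in for the fwnode allocation */
        if (!vm->fwnode)
            goto err;

        vm->domain = malloc(16);    /* stands in for the domain creation */
        if (!vm->domain)
            goto err;

        return 0;

    err:
        if (vm->domain)             /* free only what was created */
            free(vm->domain);
        if (vm->fwnode)
            free(vm->fwnode);
        return -1;
    }

    int main(void)
    {
        struct vm vm = { 0 };

        if (!alloc_vcpu_irqs(&vm)) {
            printf("allocated\n");
            free(vm.domain);
            free(vm.fwnode);
        }
        return 0;
    }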
drivers/media/radio/si4713/si4713.c vm.txsubchans = V4L2_TUNER_SUB_STEREO; vm 1068 drivers/media/radio/si4713/si4713.c vm.txsubchans = V4L2_TUNER_SUB_MONO; vm 1070 drivers/media/radio/si4713/si4713.c vm.txsubchans |= V4L2_TUNER_SUB_RDS; vm 1071 drivers/media/radio/si4713/si4713.c si4713_s_modulator(&sdev->sd, &vm); vm 1255 drivers/media/radio/si4713/si4713.c static int si4713_g_modulator(struct v4l2_subdev *sd, struct v4l2_modulator *vm) vm 1263 drivers/media/radio/si4713/si4713.c if (vm->index > 0) vm 1266 drivers/media/radio/si4713/si4713.c strscpy(vm->name, "FM Modulator", sizeof(vm->name)); vm 1267 drivers/media/radio/si4713/si4713.c vm->capability = V4L2_TUNER_CAP_STEREO | V4L2_TUNER_CAP_LOW | vm 1271 drivers/media/radio/si4713/si4713.c vm->rangelow = si4713_to_v4l2(FREQ_RANGE_LOW); vm 1272 drivers/media/radio/si4713/si4713.c vm->rangehigh = si4713_to_v4l2(FREQ_RANGE_HIGH); vm 1287 drivers/media/radio/si4713/si4713.c vm->txsubchans = V4L2_TUNER_SUB_STEREO; vm 1289 drivers/media/radio/si4713/si4713.c vm->txsubchans = V4L2_TUNER_SUB_MONO; vm 1293 drivers/media/radio/si4713/si4713.c vm->txsubchans |= V4L2_TUNER_SUB_RDS; vm 1295 drivers/media/radio/si4713/si4713.c vm->txsubchans &= ~V4L2_TUNER_SUB_RDS; vm 1301 drivers/media/radio/si4713/si4713.c static int si4713_s_modulator(struct v4l2_subdev *sd, const struct v4l2_modulator *vm) vm 1311 drivers/media/radio/si4713/si4713.c if (vm->index > 0) vm 1315 drivers/media/radio/si4713/si4713.c if (vm->txsubchans & V4L2_TUNER_SUB_STEREO) vm 1317 drivers/media/radio/si4713/si4713.c else if (vm->txsubchans & V4L2_TUNER_SUB_MONO) vm 1322 drivers/media/radio/si4713/si4713.c rds = !!(vm->txsubchans & V4L2_TUNER_SUB_RDS); vm 559 drivers/media/usb/pvrusb2/pvrusb2-v4l2.c static int pvr2_querymenu(struct file *file, void *priv, struct v4l2_querymenu *vm) vm 566 drivers/media/usb/pvrusb2/pvrusb2-v4l2.c ret = pvr2_ctrl_get_valname(pvr2_hdw_get_ctrl_v4l(hdw, vm->id), vm 567 drivers/media/usb/pvrusb2/pvrusb2-v4l2.c vm->index, vm 568 drivers/media/usb/pvrusb2/pvrusb2-v4l2.c vm->name, sizeof(vm->name) - 1, vm 570 drivers/media/usb/pvrusb2/pvrusb2-v4l2.c vm->name[cnt] = 0; vm 389 drivers/media/v4l2-core/v4l2-dev.c static int v4l2_mmap(struct file *filp, struct vm_area_struct *vm) vm 397 drivers/media/v4l2-core/v4l2-dev.c ret = vdev->fops->mmap(filp, vm); vm 367 drivers/misc/cxl/api.c int cxl_fd_mmap(struct file *file, struct vm_area_struct *vm) vm 369 drivers/misc/cxl/api.c return afu_mmap(file, vm); vm 1043 drivers/misc/cxl/cxl.h int afu_mmap(struct file *file, struct vm_area_struct *vm); vm 339 drivers/misc/cxl/file.c int afu_mmap(struct file *file, struct vm_area_struct *vm) vm 350 drivers/misc/cxl/file.c return cxl_context_iomap(ctx, vm); vm 229 drivers/misc/habanalabs/debugfs.c struct hl_vm *vm; vm 271 drivers/misc/habanalabs/debugfs.c vm = &ctx->hdev->vm; vm 272 drivers/misc/habanalabs/debugfs.c spin_lock(&vm->idr_lock); vm 274 drivers/misc/habanalabs/debugfs.c if (!idr_is_empty(&vm->phys_pg_pack_handles)) vm 277 drivers/misc/habanalabs/debugfs.c idr_for_each_entry(&vm->phys_pg_pack_handles, phys_pg_pack, i) { vm 291 drivers/misc/habanalabs/debugfs.c spin_unlock(&vm->idr_lock); vm 1287 drivers/misc/habanalabs/habanalabs.h struct hl_vm vm; vm 57 drivers/misc/habanalabs/memory.c struct hl_vm *vm = &hdev->vm; vm 73 drivers/misc/habanalabs/memory.c paddr = (u64) gen_pool_alloc(vm->dram_pg_pool, total_size); vm 108 drivers/misc/habanalabs/memory.c vm->dram_pg_pool, vm 121 drivers/misc/habanalabs/memory.c spin_lock(&vm->idr_lock); vm 122 
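The si4713_g_modulator() entries above report subchannels as stereo or mono, with the RDS bit OR'ed in when RDS output is enabled. Sketch of that flag logic; the constants are chosen to match the V4L2_TUNER_SUB_* UAPI values as I recall them, so verify against videodev2.h:

    #include <stdio.h>

    #define SUB_MONO   0x0001
    #define SUB_STEREO 0x0002
    #define SUB_RDS    0x0010

    static unsigned txsubchans(int stereo, int rds)
    {
        unsigned sub = stereo ? SUB_STEREO : SUB_MONO;
        if (rds)
            sub |= SUB_RDS;     /* RDS rides on top of the audio mode */
        return sub;
    }

    int main(void)
    {
        printf("stereo+rds: %#x\n", txsubchans(1, 1));  /* 0x12 */
        printf("mono:       %#x\n", txsubchans(0, 0));  /* 0x1 */
        return 0;
    }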
drivers/misc/habanalabs/memory.c handle = idr_alloc(&vm->phys_pg_pack_handles, phys_pg_pack, 1, 0, vm 124 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 133 drivers/misc/habanalabs/memory.c kref_get(&vm->dram_pg_pool_refcount); vm 148 drivers/misc/habanalabs/memory.c gen_pool_free(vm->dram_pg_pool, phys_pg_pack->pages[i], vm 156 drivers/misc/habanalabs/memory.c gen_pool_free(vm->dram_pg_pool, paddr, total_size); vm 244 drivers/misc/habanalabs/memory.c struct hl_vm *vm = container_of(ref, struct hl_vm, vm 251 drivers/misc/habanalabs/memory.c idr_destroy(&vm->phys_pg_pack_handles); vm 252 drivers/misc/habanalabs/memory.c gen_pool_destroy(vm->dram_pg_pool); vm 269 drivers/misc/habanalabs/memory.c struct hl_vm *vm = &hdev->vm; vm 274 drivers/misc/habanalabs/memory.c gen_pool_free(vm->dram_pg_pool, phys_pg_pack->pages[0], vm 278 drivers/misc/habanalabs/memory.c kref_put(&vm->dram_pg_pool_refcount, vm 282 drivers/misc/habanalabs/memory.c gen_pool_free(vm->dram_pg_pool, vm 285 drivers/misc/habanalabs/memory.c kref_put(&vm->dram_pg_pool_refcount, vm 307 drivers/misc/habanalabs/memory.c struct hl_vm *vm = &hdev->vm; vm 310 drivers/misc/habanalabs/memory.c spin_lock(&vm->idr_lock); vm 311 drivers/misc/habanalabs/memory.c phys_pg_pack = idr_find(&vm->phys_pg_pack_handles, handle); vm 316 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 325 drivers/misc/habanalabs/memory.c idr_remove(&vm->phys_pg_pack_handles, handle); vm 326 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 333 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 790 drivers/misc/habanalabs/memory.c struct hl_vm *vm = &hdev->vm; vm 795 drivers/misc/habanalabs/memory.c spin_lock(&vm->idr_lock); vm 796 drivers/misc/habanalabs/memory.c phys_pg_pack = idr_find(&vm->phys_pg_pack_handles, handle); vm 798 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 805 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 828 drivers/misc/habanalabs/memory.c struct hl_vm *vm = &hdev->vm; vm 862 drivers/misc/habanalabs/memory.c spin_lock(&vm->idr_lock); vm 863 drivers/misc/habanalabs/memory.c phys_pg_pack = idr_find(&vm->phys_pg_pack_handles, handle); vm 865 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 874 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 1618 drivers/misc/habanalabs/memory.c struct hl_vm *vm = &hdev->vm; vm 1636 drivers/misc/habanalabs/memory.c spin_lock(&vm->idr_lock); vm 1637 drivers/misc/habanalabs/memory.c idr_for_each_entry(&vm->phys_pg_pack_handles, phys_pg_list, i) vm 1645 drivers/misc/habanalabs/memory.c idr_remove(&vm->phys_pg_pack_handles, i); vm 1647 drivers/misc/habanalabs/memory.c spin_unlock(&vm->idr_lock); vm 1669 drivers/misc/habanalabs/memory.c struct hl_vm *vm = &hdev->vm; vm 1672 drivers/misc/habanalabs/memory.c vm->dram_pg_pool = gen_pool_create(__ffs(prop->dram_page_size), -1); vm 1673 drivers/misc/habanalabs/memory.c if (!vm->dram_pg_pool) { vm 1678 drivers/misc/habanalabs/memory.c kref_init(&vm->dram_pg_pool_refcount); vm 1680 drivers/misc/habanalabs/memory.c rc = gen_pool_add(vm->dram_pg_pool, prop->dram_user_base_address, vm 1690 drivers/misc/habanalabs/memory.c spin_lock_init(&vm->idr_lock); vm 1691 drivers/misc/habanalabs/memory.c idr_init(&vm->phys_pg_pack_handles); vm 1695 drivers/misc/habanalabs/memory.c vm->init_done = true; vm 1700 drivers/misc/habanalabs/memory.c gen_pool_destroy(vm->dram_pg_pool); vm 1717 drivers/misc/habanalabs/memory.c struct hl_vm *vm = &hdev->vm; vm 1719 
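The habanalabs entries above reference count the DRAM page pool with a kref: hl_vm_fini() expects its kref_put() to drop the last reference (kref_put() returns 1 when the release function ran), and anything else means user allocations were leaked. A minimal refcount stand-in of that teardown check:

    #include <stdio.h>

    struct pool { int refcount; };

    /* Simplified kref_put(): returns 1 when the object was released. */
    static int put(struct pool *p, void (*release)(struct pool *))
    {
        if (--p->refcount == 0) {
            release(p);
            return 1;
        }
        return 0;
    }

    static void do_release(struct pool *p)
    {
        printf("pool destroyed\n");     /* gen_pool_destroy() in the driver */
    }

    int main(void)
    {
        struct pool pool = { .refcount = 2 };   /* init ref + one user mapping */

        if (put(&pool, do_release) != 1)
            printf("memory still mapped at fini, driver would warn\n");
        put(&pool, do_release);                 /* last ref: release runs */
        return 0;
    }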
drivers/misc/habanalabs/memory.c if (!vm->init_done) vm 1726 drivers/misc/habanalabs/memory.c if (kref_put(&vm->dram_pg_pool_refcount, dram_pg_pool_do_release) != 1) vm 1730 drivers/misc/habanalabs/memory.c vm->init_done = false; vm 435 drivers/net/ethernet/hisilicon/hns/hns_dsaf_mac.h int hns_mac_vm_config_bc_en(struct hns_mac_cb *mac_cb, u32 vm, bool enable); vm 176 drivers/s390/cio/device_id.c int vm = 0; vm 183 drivers/s390/cio/device_id.c vm = 1; vm 189 drivers/s390/cio/device_id.c senseid->dev_model, vm ? " (diag210)" : ""); vm 3051 drivers/s390/net/qeth_core_main.c EBCASC(info322->vm[0].name, sizeof(info322->vm[0].name)); vm 3052 drivers/s390/net/qeth_core_main.c memcpy(tid->vmname, info322->vm[0].name, sizeof(tid->vmname)); vm 44 drivers/scsi/cxlflash/backend.h int (*fd_mmap)(struct file *file, struct vm_area_struct *vm); vm 141 drivers/scsi/cxlflash/cxl_hw.c static int cxlflash_fd_mmap(struct file *file, struct vm_area_struct *vm) vm 143 drivers/scsi/cxlflash/cxl_hw.c return cxl_fd_mmap(file, vm); vm 952 drivers/video/fbdev/atmel_lcdfb.c struct videomode vm; vm 1025 drivers/video/fbdev/atmel_lcdfb.c ret = of_get_videomode(display_np, &vm, OF_USE_NATIVE_MODE); vm 1031 drivers/video/fbdev/atmel_lcdfb.c ret = fb_videomode_from_videomode(&vm, &fb_vm); vm 1311 drivers/video/fbdev/core/fbmon.c int fb_videomode_from_videomode(const struct videomode *vm, vm 1316 drivers/video/fbdev/core/fbmon.c fbmode->xres = vm->hactive; vm 1317 drivers/video/fbdev/core/fbmon.c fbmode->left_margin = vm->hback_porch; vm 1318 drivers/video/fbdev/core/fbmon.c fbmode->right_margin = vm->hfront_porch; vm 1319 drivers/video/fbdev/core/fbmon.c fbmode->hsync_len = vm->hsync_len; vm 1321 drivers/video/fbdev/core/fbmon.c fbmode->yres = vm->vactive; vm 1322 drivers/video/fbdev/core/fbmon.c fbmode->upper_margin = vm->vback_porch; vm 1323 drivers/video/fbdev/core/fbmon.c fbmode->lower_margin = vm->vfront_porch; vm 1324 drivers/video/fbdev/core/fbmon.c fbmode->vsync_len = vm->vsync_len; vm 1327 drivers/video/fbdev/core/fbmon.c fbmode->pixclock = vm->pixelclock ? 
vm 1328 drivers/video/fbdev/core/fbmon.c KHZ2PICOS(vm->pixelclock / 1000) : 0; vm 1332 drivers/video/fbdev/core/fbmon.c if (vm->flags & DISPLAY_FLAGS_HSYNC_HIGH) vm 1334 drivers/video/fbdev/core/fbmon.c if (vm->flags & DISPLAY_FLAGS_VSYNC_HIGH) vm 1336 drivers/video/fbdev/core/fbmon.c if (vm->flags & DISPLAY_FLAGS_INTERLACED) vm 1338 drivers/video/fbdev/core/fbmon.c if (vm->flags & DISPLAY_FLAGS_DOUBLESCAN) vm 1342 drivers/video/fbdev/core/fbmon.c htotal = vm->hactive + vm->hfront_porch + vm->hback_porch + vm 1343 drivers/video/fbdev/core/fbmon.c vm->hsync_len; vm 1344 drivers/video/fbdev/core/fbmon.c vtotal = vm->vactive + vm->vfront_porch + vm->vback_porch + vm 1345 drivers/video/fbdev/core/fbmon.c vm->vsync_len; vm 1348 drivers/video/fbdev/core/fbmon.c fbmode->refresh = vm->pixelclock / (htotal * vtotal); vm 1382 drivers/video/fbdev/core/fbmon.c struct videomode vm; vm 1385 drivers/video/fbdev/core/fbmon.c ret = of_get_videomode(np, &vm, index); vm 1389 drivers/video/fbdev/core/fbmon.c ret = fb_videomode_from_videomode(&vm, fb); vm 1394 drivers/video/fbdev/core/fbmon.c np, vm.hactive, vm.vactive); vm 1283 drivers/video/fbdev/matrox/matroxfb_base.c vaddr_t vm; vm 1292 drivers/video/fbdev/matrox/matroxfb_base.c vm = minfo->video.vbase; vm 1304 drivers/video/fbdev/matrox/matroxfb_base.c *tmp++ = mga_readb(vm, offs); vm 1306 drivers/video/fbdev/matrox/matroxfb_base.c mga_writeb(vm, offs, 0x02); vm 1309 drivers/video/fbdev/matrox/matroxfb_base.c if (mga_readb(vm, offs) != 0x02) vm 1311 drivers/video/fbdev/matrox/matroxfb_base.c mga_writeb(vm, offs, mga_readb(vm, offs) - 0x02); vm 1312 drivers/video/fbdev/matrox/matroxfb_base.c if (mga_readb(vm, offs)) vm 1317 drivers/video/fbdev/matrox/matroxfb_base.c mga_writeb(vm, offs2, *tmp++); vm 619 drivers/video/fbdev/mmp/hw/mmp_ctrl.h #define CFG_GRA_VM_ENA(vm) ((vm)<<15) vm 621 drivers/video/fbdev/mmp/hw/mmp_ctrl.h #define CFG_DMA_VM_ENA(vm) ((vm)<<13) vm 623 drivers/video/fbdev/mmp/hw/mmp_ctrl.h #define CFG_CMD_VM_ENA(vm) ((vm)<<12) vm 159 drivers/video/fbdev/omap2/omapfb/displays/panel-dpi.c struct videomode vm; vm 175 drivers/video/fbdev/omap2/omapfb/displays/panel-dpi.c videomode_from_timing(pdata->display_timing, &vm); vm 176 drivers/video/fbdev/omap2/omapfb/displays/panel-dpi.c videomode_to_omap_video_timings(&vm, &ddata->videomode); vm 204 drivers/video/fbdev/omap2/omapfb/displays/panel-dpi.c struct videomode vm; vm 221 drivers/video/fbdev/omap2/omapfb/displays/panel-dpi.c videomode_from_timing(&timing, &vm); vm 222 drivers/video/fbdev/omap2/omapfb/displays/panel-dpi.c videomode_to_omap_video_timings(&vm, &ddata->videomode); vm 259 drivers/video/fbdev/omap2/omapfb/dss/display.c void videomode_to_omap_video_timings(const struct videomode *vm, vm 264 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->pixelclock = vm->pixelclock; vm 265 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->x_res = vm->hactive; vm 266 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->hbp = vm->hback_porch; vm 267 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->hfp = vm->hfront_porch; vm 268 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->hsw = vm->hsync_len; vm 269 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->y_res = vm->vactive; vm 270 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->vbp = vm->vback_porch; vm 271 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->vfp = vm->vfront_porch; vm 272 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->vsw = vm->vsync_len; vm 274 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->vsync_level = 
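The fb_videomode_from_videomode() entries above convert in two steps fbdev cares about: the pixel clock becomes a period in picoseconds via KHZ2PICOS, and the refresh rate divides the clock by the full frame including blanking. Runnable sketch with 1024x768-style numbers:

    #include <stdio.h>

    #define KHZ2PICOS(a) (1000000000UL / (a))   /* as in linux/fb.h */

    int main(void)
    {
        unsigned long pixelclock = 65000000;    /* 65 MHz */
        unsigned hactive = 1024, hfp = 24, hbp = 160, hsw = 136;
        unsigned vactive = 768,  vfp = 3,  vbp = 29,  vsw = 6;

        unsigned long pixclock =
            pixelclock ? KHZ2PICOS(pixelclock / 1000) : 0;
        unsigned htotal = hactive + hfp + hbp + hsw;
        unsigned vtotal = vactive + vfp + vbp + vsw;

        printf("pixclock = %lu ps, refresh = %lu Hz\n",
               pixclock, pixelclock / (htotal * vtotal)); /* 15384 ps, 60 Hz */
        return 0;
    }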
vm->flags & DISPLAY_FLAGS_VSYNC_HIGH ? vm 277 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->hsync_level = vm->flags & DISPLAY_FLAGS_HSYNC_HIGH ? vm 280 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->de_level = vm->flags & DISPLAY_FLAGS_DE_HIGH ? vm 283 drivers/video/fbdev/omap2/omapfb/dss/display.c ovt->data_pclk_edge = vm->flags & DISPLAY_FLAGS_PIXDATA_POSEDGE ? vm 292 drivers/video/fbdev/omap2/omapfb/dss/display.c struct videomode *vm) vm 294 drivers/video/fbdev/omap2/omapfb/dss/display.c memset(vm, 0, sizeof(*vm)); vm 296 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->pixelclock = ovt->pixelclock; vm 298 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->hactive = ovt->x_res; vm 299 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->hback_porch = ovt->hbp; vm 300 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->hfront_porch = ovt->hfp; vm 301 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->hsync_len = ovt->hsw; vm 302 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->vactive = ovt->y_res; vm 303 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->vback_porch = ovt->vbp; vm 304 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->vfront_porch = ovt->vfp; vm 305 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->vsync_len = ovt->vsw; vm 308 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->flags |= DISPLAY_FLAGS_HSYNC_HIGH; vm 310 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->flags |= DISPLAY_FLAGS_HSYNC_LOW; vm 313 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->flags |= DISPLAY_FLAGS_VSYNC_HIGH; vm 315 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->flags |= DISPLAY_FLAGS_VSYNC_LOW; vm 318 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->flags |= DISPLAY_FLAGS_DE_HIGH; vm 320 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->flags |= DISPLAY_FLAGS_DE_LOW; vm 323 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->flags |= DISPLAY_FLAGS_PIXDATA_POSEDGE; vm 325 drivers/video/fbdev/omap2/omapfb/dss/display.c vm->flags |= DISPLAY_FLAGS_PIXDATA_NEGEDGE; vm 4380 drivers/video/fbdev/omap2/omapfb/dss/dsi.c struct omap_video_timings vm = { 0 }; vm 4391 drivers/video/fbdev/omap2/omapfb/dss/dsi.c vm.pixelclock = pck; vm 4392 drivers/video/fbdev/omap2/omapfb/dss/dsi.c vm.hsw = div64_u64((u64)(t->hsa + t->hse) * pck, byteclk); vm 4393 drivers/video/fbdev/omap2/omapfb/dss/dsi.c vm.hbp = div64_u64((u64)t->hbp * pck, byteclk); vm 4394 drivers/video/fbdev/omap2/omapfb/dss/dsi.c vm.hfp = div64_u64((u64)t->hfp * pck, byteclk); vm 4395 drivers/video/fbdev/omap2/omapfb/dss/dsi.c vm.x_res = t->hact; vm 4397 drivers/video/fbdev/omap2/omapfb/dss/dsi.c print_dispc_vm(str, &vm); vm 389 drivers/video/fbdev/pxa168fb.h #define CFG_GRA_VM_ENA(vm) ((vm) << 15) /* gfx */ vm 391 drivers/video/fbdev/pxa168fb.h #define CFG_DMA_VM_ENA(vm) ((vm) << 13) /* video */ vm 393 drivers/video/fbdev/pxa168fb.h #define CFG_CMD_VM_ENA(vm) ((vm) << 13) vm 2106 drivers/video/fbdev/pxafb.c struct videomode vm; vm 2135 drivers/video/fbdev/pxafb.c ret = videomode_from_timings(timings, &vm, i); vm 2141 drivers/video/fbdev/pxafb.c if (vm.flags & DISPLAY_FLAGS_PIXDATA_POSEDGE) vm 2143 drivers/video/fbdev/pxafb.c if (vm.flags & DISPLAY_FLAGS_PIXDATA_NEGEDGE) vm 2145 drivers/video/fbdev/pxafb.c if (vm.flags & DISPLAY_FLAGS_DE_HIGH) vm 2147 drivers/video/fbdev/pxafb.c if (vm.flags & DISPLAY_FLAGS_DE_LOW) vm 2149 drivers/video/fbdev/pxafb.c if (vm.flags & DISPLAY_FLAGS_HSYNC_HIGH) vm 2151 drivers/video/fbdev/pxafb.c if (vm.flags & DISPLAY_FLAGS_VSYNC_HIGH) vm 2154 drivers/video/fbdev/pxafb.c 
info->modes[i].pixclock = 1000000000UL / (vm.pixelclock / 1000); vm 2155 drivers/video/fbdev/pxafb.c info->modes[i].xres = vm.hactive; vm 2156 drivers/video/fbdev/pxafb.c info->modes[i].yres = vm.vactive; vm 2157 drivers/video/fbdev/pxafb.c info->modes[i].hsync_len = vm.hsync_len; vm 2158 drivers/video/fbdev/pxafb.c info->modes[i].left_margin = vm.hback_porch; vm 2159 drivers/video/fbdev/pxafb.c info->modes[i].right_margin = vm.hfront_porch; vm 2160 drivers/video/fbdev/pxafb.c info->modes[i].vsync_len = vm.vsync_len; vm 2161 drivers/video/fbdev/pxafb.c info->modes[i].upper_margin = vm.vback_porch; vm 2162 drivers/video/fbdev/pxafb.c info->modes[i].lower_margin = vm.vfront_porch; vm 199 drivers/video/fbdev/sh7760fb.c struct fb_videomode *vm = par->pd->def_mode; vm 209 drivers/video/fbdev/sh7760fb.c if (par->rot && (vm->xres > 320)) { vm 215 drivers/video/fbdev/sh7760fb.c hsynp = vm->right_margin + vm->xres; vm 216 drivers/video/fbdev/sh7760fb.c hsynw = vm->hsync_len; vm 217 drivers/video/fbdev/sh7760fb.c htcn = vm->left_margin + hsynp + hsynw; vm 218 drivers/video/fbdev/sh7760fb.c hdcn = vm->xres; vm 219 drivers/video/fbdev/sh7760fb.c vsynp = vm->lower_margin + vm->yres; vm 220 drivers/video/fbdev/sh7760fb.c vsynw = vm->vsync_len; vm 221 drivers/video/fbdev/sh7760fb.c vtln = vm->upper_margin + vsynp + vsynw; vm 222 drivers/video/fbdev/sh7760fb.c vdln = vm->yres; vm 241 drivers/video/fbdev/sh7760fb.c if (!(vm->sync & FB_SYNC_HOR_HIGH_ACT)) vm 243 drivers/video/fbdev/sh7760fb.c if (!(vm->sync & FB_SYNC_VERT_HIGH_ACT)) vm 422 drivers/video/fbdev/tgafb.c min_diff = delta, vm = m, va = a, vr = r; \ vm 430 drivers/video/fbdev/tgafb.c int r,a,m,vm = 34, va = 1, vr = 30; vm 496 drivers/video/fbdev/tgafb.c TGA_WRITE_REG(par, (vm >> r) & 1, TGA_CLOCK_REG); vm 30 drivers/video/of_videomode.c int of_get_videomode(struct device_node *np, struct videomode *vm, vm 45 drivers/video/of_videomode.c ret = videomode_from_timings(disp, vm, index); vm 14 drivers/video/videomode.c struct videomode *vm) vm 16 drivers/video/videomode.c vm->pixelclock = dt->pixelclock.typ; vm 17 drivers/video/videomode.c vm->hactive = dt->hactive.typ; vm 18 drivers/video/videomode.c vm->hfront_porch = dt->hfront_porch.typ; vm 19 drivers/video/videomode.c vm->hback_porch = dt->hback_porch.typ; vm 20 drivers/video/videomode.c vm->hsync_len = dt->hsync_len.typ; vm 22 drivers/video/videomode.c vm->vactive = dt->vactive.typ; vm 23 drivers/video/videomode.c vm->vfront_porch = dt->vfront_porch.typ; vm 24 drivers/video/videomode.c vm->vback_porch = dt->vback_porch.typ; vm 25 drivers/video/videomode.c vm->vsync_len = dt->vsync_len.typ; vm 27 drivers/video/videomode.c vm->flags = dt->flags; vm 32 drivers/video/videomode.c struct videomode *vm, unsigned int index) vm 40 drivers/video/videomode.c videomode_from_timing(dt, vm); vm 139 drivers/xen/sys-hypervisor.c char *vm, *val; vm 146 drivers/xen/sys-hypervisor.c vm = xenbus_read(XBT_NIL, "vm", "", NULL); vm 147 drivers/xen/sys-hypervisor.c if (IS_ERR(vm)) vm 148 drivers/xen/sys-hypervisor.c return PTR_ERR(vm); vm 149 drivers/xen/sys-hypervisor.c val = xenbus_read(XBT_NIL, vm, "uuid", NULL); vm 150 drivers/xen/sys-hypervisor.c kfree(vm); vm 493 include/drm/drm_modes.h void drm_display_mode_from_videomode(const struct videomode *vm, vm 496 include/drm/drm_modes.h struct videomode *vm); vm 497 include/drm/drm_modes.h void drm_bus_flags_from_videomode(const struct videomode *vm, u32 *bus_flags); vm 727 include/linux/fb.h extern int fb_videomode_from_videomode(const struct videomode *vm, vm 67 
include/linux/irqchip/arm-gic-v4.h struct its_vm *vm; vm 92 include/linux/irqchip/arm-gic-v4.h int its_alloc_vcpu_irqs(struct its_vm *vm); vm 93 include/linux/irqchip/arm-gic-v4.h void its_free_vcpu_irqs(struct its_vm *vm); vm 74 include/linux/kasan.h void kasan_free_shadow(const struct vm_struct *vm); vm 146 include/linux/kasan.h static inline void kasan_free_shadow(const struct vm_struct *vm) {} vm 68 include/linux/vmalloc.h struct vm_struct *vm; /* in "busy" tree */ vm 167 include/linux/vmalloc.h struct vm_struct *vm = find_vm_area(addr); vm 169 include/linux/vmalloc.h if (vm) vm 170 include/linux/vmalloc.h vm->flags |= VM_FLUSH_RESET_PERMS; vm 204 include/linux/vmalloc.h extern __init void vm_area_add_early(struct vm_struct *vm); vm 205 include/linux/vmalloc.h extern __init void vm_area_register_early(struct vm_struct *vm, size_t align); vm 272 include/media/v4l2-subdev.h int (*g_modulator)(struct v4l2_subdev *sd, struct v4l2_modulator *vm); vm 273 include/media/v4l2-subdev.h int (*s_modulator)(struct v4l2_subdev *sd, const struct v4l2_modulator *vm); vm 197 include/misc/cxl.h int cxl_fd_mmap(struct file *file, struct vm_area_struct *vm); vm 14 include/video/of_videomode.h int of_get_videomode(struct device_node *np, struct videomode *vm, vm 763 include/video/omapfb_dss.h void videomode_to_omap_video_timings(const struct videomode *vm, vm 766 include/video/omapfb_dss.h struct videomode *vm); vm 43 include/video/videomode.h struct videomode *vm); vm 55 include/video/videomode.h struct videomode *vm, unsigned int index); vm 271 kernel/fork.c struct vm_struct *vm = task_stack_vm_area(tsk); vm 273 kernel/fork.c if (vm) { vm 277 kernel/fork.c mod_memcg_page_state(vm->pages[i], vm 281 kernel/fork.c memcg_kmem_uncharge(vm->pages[i], 0); vm 373 kernel/fork.c struct vm_struct *vm = task_stack_vm_area(tsk); vm 377 kernel/fork.c if (vm) { vm 380 kernel/fork.c BUG_ON(vm->nr_pages != THREAD_SIZE / PAGE_SIZE); vm 383 kernel/fork.c mod_zone_page_state(page_zone(vm->pages[i]), vm 405 kernel/fork.c struct vm_struct *vm = task_stack_vm_area(tsk); vm 408 kernel/fork.c if (vm) { vm 418 kernel/fork.c ret = memcg_kmem_charge(vm->pages[i], GFP_KERNEL, 0); vm 422 kernel/fork.c mod_memcg_page_state(vm->pages[i], vm 623 mm/kasan/common.c void kasan_free_shadow(const struct vm_struct *vm) vm 625 mm/kasan/common.c if (vm->flags & VM_KASAN) vm 626 mm/kasan/common.c vfree(kasan_mem_to_shadow(vm->addr)); vm 715 mm/kasan/common.c struct vm_struct *vm; vm 729 mm/kasan/common.c vm = find_vm_area((void *)shadow_start); vm 730 mm/kasan/common.c if (vm) vm 2832 mm/percpu.c static struct vm_struct vm; vm 2885 mm/percpu.c vm.flags = VM_ALLOC; vm 2886 mm/percpu.c vm.size = num_possible_cpus() * ai->unit_size; vm 2887 mm/percpu.c vm_area_register_early(&vm, PAGE_SIZE); vm 2891 mm/percpu.c (unsigned long)vm.addr + unit * ai->unit_size; vm 2919 mm/percpu.c pcpu_setup_first_chunk(ai, vm.addr); vm 1117 mm/vmalloc.c va->vm = NULL; vm 1819 mm/vmalloc.c void __init vm_area_add_early(struct vm_struct *vm) vm 1825 mm/vmalloc.c if (tmp->addr >= vm->addr) { vm 1826 mm/vmalloc.c BUG_ON(tmp->addr < vm->addr + vm->size); vm 1829 mm/vmalloc.c BUG_ON(tmp->addr + tmp->size > vm->addr); vm 1831 mm/vmalloc.c vm->next = *p; vm 1832 mm/vmalloc.c *p = vm; vm 1847 mm/vmalloc.c void __init vm_area_register_early(struct vm_struct *vm, size_t align) vm 1853 mm/vmalloc.c vm_init_off = PFN_ALIGN(addr + vm->size) - VMALLOC_START; vm 1855 mm/vmalloc.c vm->addr = (void *)addr; vm 1857 mm/vmalloc.c vm_area_add_early(vm); vm 1932 mm/vmalloc.c va->vm = tmp; 
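
The mm/vmalloc.c entries for vm_area_add_early() above insert an early vm_struct into a list kept sorted by start address, with BUG_ON() guards against overlap. A minimal sketch of the same insertion logic, assuming stand-in types (struct area, area_add_early and area_list are hypothetical):

#include <assert.h>
#include <stdio.h>

struct area {
	struct area *next;
	unsigned long addr;
	unsigned long size;
};

static struct area *area_list;

/* Mirrors vm_area_add_early(): walk until the first area at or past the
 * new one, asserting that the new area overlaps neither neighbour. */
static void area_add_early(struct area *vm)
{
	struct area *tmp, **p;

	for (p = &area_list; (tmp = *p) != NULL; p = &tmp->next) {
		if (tmp->addr >= vm->addr) {
			assert(tmp->addr >= vm->addr + vm->size);
			break;
		}
		assert(tmp->addr + tmp->size <= vm->addr);
	}
	vm->next = *p;
	*p = vm;
}

int main(void)
{
	struct area a = { .addr = 0x1000, .size = 0x1000 };
	struct area b = { .addr = 0x3000, .size = 0x1000 };
	struct area c = { .addr = 0x2000, .size = 0x1000 };

	area_add_early(&a);
	area_add_early(&b);
	area_add_early(&c);	/* lands between a and b */

	for (struct area *t = area_list; t; t = t->next)
		printf("area @0x%lx\n", t->addr);
	return 0;
}

Keeping the list address-sorted is what lets early users walk it front to back and reason about the gaps between neighbouring areas.
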
vm 2018 mm/vmalloc.c static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va, vm 2022 mm/vmalloc.c vm->flags = flags; vm 2023 mm/vmalloc.c vm->addr = (void *)va->va_start; vm 2024 mm/vmalloc.c vm->size = va->va_end - va->va_start; vm 2025 mm/vmalloc.c vm->caller = caller; vm 2026 mm/vmalloc.c va->vm = vm; vm 2030 mm/vmalloc.c static void clear_vm_uninitialized_flag(struct vm_struct *vm) vm 2038 mm/vmalloc.c vm->flags &= ~VM_UNINITIALIZED; vm 2134 mm/vmalloc.c return va->vm; vm 2155 mm/vmalloc.c if (va && va->vm) { vm 2156 mm/vmalloc.c struct vm_struct *vm = va->vm; vm 2158 mm/vmalloc.c va->vm = NULL; vm 2161 mm/vmalloc.c kasan_free_shadow(vm); vm 2164 mm/vmalloc.c return vm; vm 2849 mm/vmalloc.c struct vm_struct *vm; vm 2863 mm/vmalloc.c if (!va->vm) vm 2866 mm/vmalloc.c vm = va->vm; vm 2867 mm/vmalloc.c vaddr = (char *) vm->addr; vm 2868 mm/vmalloc.c if (addr >= vaddr + get_vm_area_size(vm)) vm 2878 mm/vmalloc.c n = vaddr + get_vm_area_size(vm) - addr; vm 2881 mm/vmalloc.c if (!(vm->flags & VM_IOREMAP)) vm 2928 mm/vmalloc.c struct vm_struct *vm; vm 2943 mm/vmalloc.c if (!va->vm) vm 2946 mm/vmalloc.c vm = va->vm; vm 2947 mm/vmalloc.c vaddr = (char *) vm->addr; vm 2948 mm/vmalloc.c if (addr >= vaddr + get_vm_area_size(vm)) vm 2957 mm/vmalloc.c n = vaddr + get_vm_area_size(vm) - addr; vm 2960 mm/vmalloc.c if (!(vm->flags & VM_IOREMAP)) { vm 3501 mm/vmalloc.c if (!va->vm) { vm 3509 mm/vmalloc.c v = va->vm; vm 297 mm/zsmalloc.c struct vm_struct *vm; /* vm area for mapping object that span pages */ vm 1123 mm/zsmalloc.c if (area->vm) vm 1125 mm/zsmalloc.c area->vm = alloc_vm_area(PAGE_SIZE * 2, NULL); vm 1126 mm/zsmalloc.c if (!area->vm) vm 1133 mm/zsmalloc.c if (area->vm) vm 1134 mm/zsmalloc.c free_vm_area(area->vm); vm 1135 mm/zsmalloc.c area->vm = NULL; vm 1141 mm/zsmalloc.c BUG_ON(map_vm_area(area->vm, PAGE_KERNEL, pages)); vm 1142 mm/zsmalloc.c area->vm_addr = area->vm->addr; vm 439 net/vmw_vsock/hyperv_transport.c union hvs_service_id vm, host; vm 442 net/vmw_vsock/hyperv_transport.c vm.srv_id = srv_id_template; vm 443 net/vmw_vsock/hyperv_transport.c vm.svm_port = vsk->local_addr.svm_port; vm 444 net/vmw_vsock/hyperv_transport.c h->vm_srv_id = vm.srv_id; vm 136 sound/pci/ctxfi/ctatc.c struct ct_vm *vm; vm 142 sound/pci/ctxfi/ctatc.c vm = atc->vm; vm 144 sound/pci/ctxfi/ctatc.c apcm->vm_block = vm->map(vm, apcm->substream, runtime->dma_bytes); vm 154 sound/pci/ctxfi/ctatc.c struct ct_vm *vm; vm 159 sound/pci/ctxfi/ctatc.c vm = atc->vm; vm 161 sound/pci/ctxfi/ctatc.c vm->unmap(vm, apcm->vm_block); vm 168 sound/pci/ctxfi/ctatc.c return atc->vm->get_ptp_phys(atc->vm, index); vm 1240 sound/pci/ctxfi/ctatc.c if (atc->vm) { vm 1241 sound/pci/ctxfi/ctatc.c ct_vm_destroy(atc->vm); vm 1242 sound/pci/ctxfi/ctatc.c atc->vm = NULL; vm 1702 sound/pci/ctxfi/ctatc.c err = ct_vm_create(&atc->vm, pci); vm 85 sound/pci/ctxfi/ctatc.h struct ct_vm *vm; /* device virtual memory manager for this card */ vm 30 sound/pci/ctxfi/ctvmem.c get_vm_block(struct ct_vm *vm, unsigned int size, struct ct_atc *atc) vm 36 sound/pci/ctxfi/ctvmem.c if (size > vm->size) { vm 42 sound/pci/ctxfi/ctvmem.c mutex_lock(&vm->lock); vm 43 sound/pci/ctxfi/ctvmem.c list_for_each(pos, &vm->unused) { vm 48 sound/pci/ctxfi/ctvmem.c if (pos == &vm->unused) vm 53 sound/pci/ctxfi/ctvmem.c list_move(&entry->list, &vm->used); vm 54 sound/pci/ctxfi/ctvmem.c vm->size -= size; vm 65 sound/pci/ctxfi/ctvmem.c list_add(&block->list, &vm->used); vm 68 sound/pci/ctxfi/ctvmem.c vm->size -= size; vm 71 sound/pci/ctxfi/ctvmem.c 
mutex_unlock(&vm->lock); vm 75 sound/pci/ctxfi/ctvmem.c static void put_vm_block(struct ct_vm *vm, struct ct_vm_block *block) vm 82 sound/pci/ctxfi/ctvmem.c mutex_lock(&vm->lock); vm 84 sound/pci/ctxfi/ctvmem.c vm->size += block->size; vm 86 sound/pci/ctxfi/ctvmem.c list_for_each(pos, &vm->unused) { vm 91 sound/pci/ctxfi/ctvmem.c if (pos == &vm->unused) { vm 92 sound/pci/ctxfi/ctvmem.c list_add_tail(&block->list, &vm->unused); vm 107 sound/pci/ctxfi/ctvmem.c while (pre != &vm->unused) { vm 119 sound/pci/ctxfi/ctvmem.c mutex_unlock(&vm->lock); vm 124 sound/pci/ctxfi/ctvmem.c ct_vm_map(struct ct_vm *vm, struct snd_pcm_substream *substream, int size) vm 132 sound/pci/ctxfi/ctvmem.c block = get_vm_block(vm, size, atc); vm 139 sound/pci/ctxfi/ctvmem.c ptp = (unsigned long *)vm->ptp[0].area; vm 152 sound/pci/ctxfi/ctvmem.c static void ct_vm_unmap(struct ct_vm *vm, struct ct_vm_block *block) vm 155 sound/pci/ctxfi/ctvmem.c put_vm_block(vm, block); vm 164 sound/pci/ctxfi/ctvmem.c ct_get_ptp_phys(struct ct_vm *vm, int index) vm 166 sound/pci/ctxfi/ctvmem.c return (index >= CT_PTP_NUM) ? ~0UL : vm->ptp[index].addr; vm 171 sound/pci/ctxfi/ctvmem.c struct ct_vm *vm; vm 177 sound/pci/ctxfi/ctvmem.c vm = kzalloc(sizeof(*vm), GFP_KERNEL); vm 178 sound/pci/ctxfi/ctvmem.c if (!vm) vm 181 sound/pci/ctxfi/ctvmem.c mutex_init(&vm->lock); vm 187 sound/pci/ctxfi/ctvmem.c PAGE_SIZE, &vm->ptp[i]); vm 193 sound/pci/ctxfi/ctvmem.c ct_vm_destroy(vm); vm 196 sound/pci/ctxfi/ctvmem.c vm->size = CT_ADDRS_PER_PAGE * i; vm 197 sound/pci/ctxfi/ctvmem.c vm->map = ct_vm_map; vm 198 sound/pci/ctxfi/ctvmem.c vm->unmap = ct_vm_unmap; vm 199 sound/pci/ctxfi/ctvmem.c vm->get_ptp_phys = ct_get_ptp_phys; vm 200 sound/pci/ctxfi/ctvmem.c INIT_LIST_HEAD(&vm->unused); vm 201 sound/pci/ctxfi/ctvmem.c INIT_LIST_HEAD(&vm->used); vm 205 sound/pci/ctxfi/ctvmem.c block->size = vm->size; vm 206 sound/pci/ctxfi/ctvmem.c list_add(&block->list, &vm->unused); vm 209 sound/pci/ctxfi/ctvmem.c *rvm = vm; vm 215 sound/pci/ctxfi/ctvmem.c void ct_vm_destroy(struct ct_vm *vm) vm 222 sound/pci/ctxfi/ctvmem.c while (!list_empty(&vm->used)) { vm 223 sound/pci/ctxfi/ctvmem.c pos = vm->used.next; vm 228 sound/pci/ctxfi/ctvmem.c while (!list_empty(&vm->unused)) { vm 229 sound/pci/ctxfi/ctvmem.c pos = vm->unused.next; vm 237 sound/pci/ctxfi/ctvmem.c snd_dma_free_pages(&vm->ptp[i]); vm 239 sound/pci/ctxfi/ctvmem.c vm->size = 0; vm 241 sound/pci/ctxfi/ctvmem.c kfree(vm); vm 54 sound/pci/ctxfi/ctvmem.h dma_addr_t (*get_ptp_phys)(struct ct_vm *vm, int index); vm 58 sound/pci/ctxfi/ctvmem.h void ct_vm_destroy(struct ct_vm *vm); vm 151 tools/testing/selftests/kvm/dirty_log_test.c struct kvm_vm *vm = data; vm 156 tools/testing/selftests/kvm/dirty_log_test.c run = vcpu_state(vm, VCPU_ID); vm 158 tools/testing/selftests/kvm/dirty_log_test.c guest_array = addr_gva2hva(vm, (vm_vaddr_t)random_array); vm 163 tools/testing/selftests/kvm/dirty_log_test.c ret = _vcpu_run(vm, VCPU_ID); vm 165 tools/testing/selftests/kvm/dirty_log_test.c if (get_ucall(vm, VCPU_ID, NULL) == UCALL_SYNC) { vm 252 tools/testing/selftests/kvm/dirty_log_test.c struct kvm_vm *vm; vm 255 tools/testing/selftests/kvm/dirty_log_test.c vm = _vm_create(mode, DEFAULT_GUEST_PHY_PAGES + extra_pg_pages, O_RDWR); vm 256 tools/testing/selftests/kvm/dirty_log_test.c kvm_vm_elf_load(vm, program_invocation_name, 0, 0); vm 258 tools/testing/selftests/kvm/dirty_log_test.c vm_create_irqchip(vm); vm 260 tools/testing/selftests/kvm/dirty_log_test.c vm_vcpu_add_default(vm, vcpuid, guest_code); vm 261 
tools/testing/selftests/kvm/dirty_log_test.c return vm; vm 271 tools/testing/selftests/kvm/dirty_log_test.c struct kvm_vm *vm; vm 282 tools/testing/selftests/kvm/dirty_log_test.c vm = create_vm(mode, VCPU_ID, vm 286 tools/testing/selftests/kvm/dirty_log_test.c guest_page_size = vm_get_page_size(vm); vm 292 tools/testing/selftests/kvm/dirty_log_test.c vm_get_page_shift(vm))) + 16; vm 302 tools/testing/selftests/kvm/dirty_log_test.c guest_test_phys_mem = (vm_get_max_gfn(vm) - vm 324 tools/testing/selftests/kvm/dirty_log_test.c vm_enable_cap(vm, &cap); vm 328 tools/testing/selftests/kvm/dirty_log_test.c vm_userspace_mem_region_add(vm, VM_MEM_SRC_ANONYMOUS, vm 335 tools/testing/selftests/kvm/dirty_log_test.c virt_map(vm, guest_test_virt_mem, guest_test_phys_mem, vm 339 tools/testing/selftests/kvm/dirty_log_test.c host_test_mem = addr_gpa2hva(vm, (vm_paddr_t)guest_test_phys_mem); vm 342 tools/testing/selftests/kvm/dirty_log_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 345 tools/testing/selftests/kvm/dirty_log_test.c ucall_init(vm, NULL); vm 349 tools/testing/selftests/kvm/dirty_log_test.c sync_global_to_guest(vm, host_page_size); vm 350 tools/testing/selftests/kvm/dirty_log_test.c sync_global_to_guest(vm, guest_page_size); vm 351 tools/testing/selftests/kvm/dirty_log_test.c sync_global_to_guest(vm, guest_test_virt_mem); vm 352 tools/testing/selftests/kvm/dirty_log_test.c sync_global_to_guest(vm, guest_num_pages); vm 356 tools/testing/selftests/kvm/dirty_log_test.c sync_global_to_guest(vm, iteration); vm 362 tools/testing/selftests/kvm/dirty_log_test.c pthread_create(&vcpu_thread, NULL, vcpu_worker, vm); vm 367 tools/testing/selftests/kvm/dirty_log_test.c kvm_vm_get_dirty_log(vm, TEST_MEM_SLOT_INDEX, bmap); vm 369 tools/testing/selftests/kvm/dirty_log_test.c kvm_vm_clear_dirty_log(vm, TEST_MEM_SLOT_INDEX, bmap, 0, vm 374 tools/testing/selftests/kvm/dirty_log_test.c sync_global_to_guest(vm, iteration); vm 387 tools/testing/selftests/kvm/dirty_log_test.c ucall_uninit(vm); vm 388 tools/testing/selftests/kvm/dirty_log_test.c kvm_vm_free(vm); vm 39 tools/testing/selftests/kvm/include/aarch64/processor.h static inline void get_reg(struct kvm_vm *vm, uint32_t vcpuid, uint64_t id, uint64_t *addr) vm 44 tools/testing/selftests/kvm/include/aarch64/processor.h vcpu_ioctl(vm, vcpuid, KVM_GET_ONE_REG, ®); vm 47 tools/testing/selftests/kvm/include/aarch64/processor.h static inline void set_reg(struct kvm_vm *vm, uint32_t vcpuid, uint64_t id, uint64_t val) vm 52 tools/testing/selftests/kvm/include/aarch64/processor.h vcpu_ioctl(vm, vcpuid, KVM_SET_ONE_REG, ®); vm 55 tools/testing/selftests/kvm/include/aarch64/processor.h void aarch64_vcpu_setup(struct kvm_vm *vm, int vcpuid, struct kvm_vcpu_init *init); vm 56 tools/testing/selftests/kvm/include/aarch64/processor.h void aarch64_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, vm 223 tools/testing/selftests/kvm/include/evmcs.h int vcpu_enable_evmcs(struct kvm_vm *vm, int vcpu_id); vm 69 tools/testing/selftests/kvm/include/kvm_util.h int vm_enable_cap(struct kvm_vm *vm, struct kvm_enable_cap *cap); vm 76 tools/testing/selftests/kvm/include/kvm_util.h void kvm_vm_get_dirty_log(struct kvm_vm *vm, int slot, void *log); vm 77 tools/testing/selftests/kvm/include/kvm_util.h void kvm_vm_clear_dirty_log(struct kvm_vm *vm, int slot, void *log, vm 80 tools/testing/selftests/kvm/include/kvm_util.h int kvm_memcmp_hva_gva(void *hva, struct kvm_vm *vm, const vm_vaddr_t gva, vm 83 tools/testing/selftests/kvm/include/kvm_util.h void 
kvm_vm_elf_load(struct kvm_vm *vm, const char *filename, vm 86 tools/testing/selftests/kvm/include/kvm_util.h void vm_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent); vm 87 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_dump(FILE *stream, struct kvm_vm *vm, uint32_t vcpuid, vm 90 tools/testing/selftests/kvm/include/kvm_util.h void vm_create_irqchip(struct kvm_vm *vm); vm 92 tools/testing/selftests/kvm/include/kvm_util.h void vm_userspace_mem_region_add(struct kvm_vm *vm, vm 97 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_ioctl(struct kvm_vm *vm, uint32_t vcpuid, unsigned long ioctl, vm 99 tools/testing/selftests/kvm/include/kvm_util.h int _vcpu_ioctl(struct kvm_vm *vm, uint32_t vcpuid, unsigned long ioctl, vm 101 tools/testing/selftests/kvm/include/kvm_util.h void vm_ioctl(struct kvm_vm *vm, unsigned long ioctl, void *arg); vm 102 tools/testing/selftests/kvm/include/kvm_util.h void vm_mem_region_set_flags(struct kvm_vm *vm, uint32_t slot, uint32_t flags); vm 103 tools/testing/selftests/kvm/include/kvm_util.h void vm_vcpu_add(struct kvm_vm *vm, uint32_t vcpuid); vm 104 tools/testing/selftests/kvm/include/kvm_util.h vm_vaddr_t vm_vaddr_alloc(struct kvm_vm *vm, size_t sz, vm_vaddr_t vaddr_min, vm 106 tools/testing/selftests/kvm/include/kvm_util.h void virt_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, vm 108 tools/testing/selftests/kvm/include/kvm_util.h void *addr_gpa2hva(struct kvm_vm *vm, vm_paddr_t gpa); vm 109 tools/testing/selftests/kvm/include/kvm_util.h void *addr_gva2hva(struct kvm_vm *vm, vm_vaddr_t gva); vm 110 tools/testing/selftests/kvm/include/kvm_util.h vm_paddr_t addr_hva2gpa(struct kvm_vm *vm, void *hva); vm 111 tools/testing/selftests/kvm/include/kvm_util.h vm_paddr_t addr_gva2gpa(struct kvm_vm *vm, vm_vaddr_t gva); vm 113 tools/testing/selftests/kvm/include/kvm_util.h struct kvm_run *vcpu_state(struct kvm_vm *vm, uint32_t vcpuid); vm 114 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_run(struct kvm_vm *vm, uint32_t vcpuid); vm 115 tools/testing/selftests/kvm/include/kvm_util.h int _vcpu_run(struct kvm_vm *vm, uint32_t vcpuid); vm 116 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_run_complete_io(struct kvm_vm *vm, uint32_t vcpuid); vm 117 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_set_mp_state(struct kvm_vm *vm, uint32_t vcpuid, vm 119 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_regs_get(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_regs *regs); vm 120 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_regs_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_regs *regs); vm 121 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_args_set(struct kvm_vm *vm, uint32_t vcpuid, unsigned int num, ...); vm 122 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_sregs_get(struct kvm_vm *vm, uint32_t vcpuid, vm 124 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_sregs_set(struct kvm_vm *vm, uint32_t vcpuid, vm 126 tools/testing/selftests/kvm/include/kvm_util.h int _vcpu_sregs_set(struct kvm_vm *vm, uint32_t vcpuid, vm 129 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_events_get(struct kvm_vm *vm, uint32_t vcpuid, vm 131 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_events_set(struct kvm_vm *vm, uint32_t vcpuid, vm 135 tools/testing/selftests/kvm/include/kvm_util.h void vcpu_nested_state_get(struct kvm_vm *vm, uint32_t vcpuid, vm 137 tools/testing/selftests/kvm/include/kvm_util.h int vcpu_nested_state_set(struct kvm_vm *vm, uint32_t vcpuid, vm 143 
tools/testing/selftests/kvm/include/kvm_util.h void virt_pgd_alloc(struct kvm_vm *vm, uint32_t pgd_memslot); vm 144 tools/testing/selftests/kvm/include/kvm_util.h void virt_pg_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, vm 146 tools/testing/selftests/kvm/include/kvm_util.h vm_paddr_t vm_phy_page_alloc(struct kvm_vm *vm, vm_paddr_t paddr_min, vm 148 tools/testing/selftests/kvm/include/kvm_util.h vm_paddr_t vm_phy_pages_alloc(struct kvm_vm *vm, size_t num, vm 153 tools/testing/selftests/kvm/include/kvm_util.h void vm_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, void *guest_code); vm 155 tools/testing/selftests/kvm/include/kvm_util.h bool vm_is_unrestricted_guest(struct kvm_vm *vm); vm 157 tools/testing/selftests/kvm/include/kvm_util.h unsigned int vm_get_page_size(struct kvm_vm *vm); vm 158 tools/testing/selftests/kvm/include/kvm_util.h unsigned int vm_get_page_shift(struct kvm_vm *vm); vm 159 tools/testing/selftests/kvm/include/kvm_util.h unsigned int vm_get_max_gfn(struct kvm_vm *vm); vm 162 tools/testing/selftests/kvm/include/kvm_util.h kvm_userspace_memory_region_find(struct kvm_vm *vm, uint64_t start, vm 168 tools/testing/selftests/kvm/include/kvm_util.h int vm_create_device(struct kvm_vm *vm, struct kvm_create_device *cd); vm 170 tools/testing/selftests/kvm/include/kvm_util.h #define sync_global_to_guest(vm, g) ({ \ vm 171 tools/testing/selftests/kvm/include/kvm_util.h typeof(g) *_p = addr_gva2hva(vm, (vm_vaddr_t)&(g)); \ vm 175 tools/testing/selftests/kvm/include/kvm_util.h #define sync_global_from_guest(vm, g) ({ \ vm 176 tools/testing/selftests/kvm/include/kvm_util.h typeof(g) *_p = addr_gva2hva(vm, (vm_vaddr_t)&(g)); \ vm 195 tools/testing/selftests/kvm/include/kvm_util.h void ucall_init(struct kvm_vm *vm, void *arg); vm 196 tools/testing/selftests/kvm/include/kvm_util.h void ucall_uninit(struct kvm_vm *vm); vm 198 tools/testing/selftests/kvm/include/kvm_util.h uint64_t get_ucall(struct kvm_vm *vm, uint32_t vcpu_id, struct ucall *uc); vm 307 tools/testing/selftests/kvm/include/x86_64/processor.h struct kvm_x86_state *vcpu_save_state(struct kvm_vm *vm, uint32_t vcpuid); vm 308 tools/testing/selftests/kvm/include/x86_64/processor.h void vcpu_load_state(struct kvm_vm *vm, uint32_t vcpuid, vm 312 tools/testing/selftests/kvm/include/x86_64/processor.h void vcpu_set_cpuid(struct kvm_vm *vm, uint32_t vcpuid, vm 324 tools/testing/selftests/kvm/include/x86_64/processor.h uint64_t vcpu_get_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index); vm 325 tools/testing/selftests/kvm/include/x86_64/processor.h void vcpu_set_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index, vm 578 tools/testing/selftests/kvm/include/x86_64/vmx.h struct vmx_pages *vcpu_alloc_vmx(struct kvm_vm *vm, vm_vaddr_t *p_vmx_gva); vm 585 tools/testing/selftests/kvm/include/x86_64/vmx.h void nested_pg_map(struct vmx_pages *vmx, struct kvm_vm *vm, vm 587 tools/testing/selftests/kvm/include/x86_64/vmx.h void nested_map(struct vmx_pages *vmx, struct kvm_vm *vm, vm 590 tools/testing/selftests/kvm/include/x86_64/vmx.h void nested_map_memslot(struct vmx_pages *vmx, struct kvm_vm *vm, vm 592 tools/testing/selftests/kvm/include/x86_64/vmx.h void prepare_eptp(struct vmx_pages *vmx, struct kvm_vm *vm, vm 24 tools/testing/selftests/kvm/kvm_create_max_vcpus.c struct kvm_vm *vm; vm 30 tools/testing/selftests/kvm/kvm_create_max_vcpus.c vm = vm_create(VM_MODE_DEFAULT, DEFAULT_GUEST_PHY_PAGES, O_RDWR); vm 36 tools/testing/selftests/kvm/kvm_create_max_vcpus.c vm_vcpu_add(vm, vcpu_id); vm 39 
tools/testing/selftests/kvm/kvm_create_max_vcpus.c kvm_vm_free(vm); vm 19 tools/testing/selftests/kvm/lib/aarch64/processor.c static uint64_t page_align(struct kvm_vm *vm, uint64_t v) vm 21 tools/testing/selftests/kvm/lib/aarch64/processor.c return (v + vm->page_size) & ~(vm->page_size - 1); vm 24 tools/testing/selftests/kvm/lib/aarch64/processor.c static uint64_t pgd_index(struct kvm_vm *vm, vm_vaddr_t gva) vm 26 tools/testing/selftests/kvm/lib/aarch64/processor.c unsigned int shift = (vm->pgtable_levels - 1) * (vm->page_shift - 3) + vm->page_shift; vm 27 tools/testing/selftests/kvm/lib/aarch64/processor.c uint64_t mask = (1UL << (vm->va_bits - shift)) - 1; vm 32 tools/testing/selftests/kvm/lib/aarch64/processor.c static uint64_t pud_index(struct kvm_vm *vm, vm_vaddr_t gva) vm 34 tools/testing/selftests/kvm/lib/aarch64/processor.c unsigned int shift = 2 * (vm->page_shift - 3) + vm->page_shift; vm 35 tools/testing/selftests/kvm/lib/aarch64/processor.c uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; vm 37 tools/testing/selftests/kvm/lib/aarch64/processor.c TEST_ASSERT(vm->pgtable_levels == 4, vm 38 tools/testing/selftests/kvm/lib/aarch64/processor.c "Mode %d does not have 4 page table levels", vm->mode); vm 43 tools/testing/selftests/kvm/lib/aarch64/processor.c static uint64_t pmd_index(struct kvm_vm *vm, vm_vaddr_t gva) vm 45 tools/testing/selftests/kvm/lib/aarch64/processor.c unsigned int shift = (vm->page_shift - 3) + vm->page_shift; vm 46 tools/testing/selftests/kvm/lib/aarch64/processor.c uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; vm 48 tools/testing/selftests/kvm/lib/aarch64/processor.c TEST_ASSERT(vm->pgtable_levels >= 3, vm 49 tools/testing/selftests/kvm/lib/aarch64/processor.c "Mode %d does not have >= 3 page table levels", vm->mode); vm 54 tools/testing/selftests/kvm/lib/aarch64/processor.c static uint64_t pte_index(struct kvm_vm *vm, vm_vaddr_t gva) vm 56 tools/testing/selftests/kvm/lib/aarch64/processor.c uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; vm 57 tools/testing/selftests/kvm/lib/aarch64/processor.c return (gva >> vm->page_shift) & mask; vm 60 tools/testing/selftests/kvm/lib/aarch64/processor.c static uint64_t pte_addr(struct kvm_vm *vm, uint64_t entry) vm 62 tools/testing/selftests/kvm/lib/aarch64/processor.c uint64_t mask = ((1UL << (vm->va_bits - vm->page_shift)) - 1) << vm->page_shift; vm 66 tools/testing/selftests/kvm/lib/aarch64/processor.c static uint64_t ptrs_per_pgd(struct kvm_vm *vm) vm 68 tools/testing/selftests/kvm/lib/aarch64/processor.c unsigned int shift = (vm->pgtable_levels - 1) * (vm->page_shift - 3) + vm->page_shift; vm 69 tools/testing/selftests/kvm/lib/aarch64/processor.c return 1 << (vm->va_bits - shift); vm 72 tools/testing/selftests/kvm/lib/aarch64/processor.c static uint64_t __maybe_unused ptrs_per_pte(struct kvm_vm *vm) vm 74 tools/testing/selftests/kvm/lib/aarch64/processor.c return 1 << (vm->page_shift - 3); vm 77 tools/testing/selftests/kvm/lib/aarch64/processor.c void virt_pgd_alloc(struct kvm_vm *vm, uint32_t pgd_memslot) vm 79 tools/testing/selftests/kvm/lib/aarch64/processor.c if (!vm->pgd_created) { vm 80 tools/testing/selftests/kvm/lib/aarch64/processor.c vm_paddr_t paddr = vm_phy_pages_alloc(vm, vm 81 tools/testing/selftests/kvm/lib/aarch64/processor.c page_align(vm, ptrs_per_pgd(vm) * 8) / vm->page_size, vm 83 tools/testing/selftests/kvm/lib/aarch64/processor.c vm->pgd = paddr; vm 84 tools/testing/selftests/kvm/lib/aarch64/processor.c vm->pgd_created = true; vm 88 tools/testing/selftests/kvm/lib/aarch64/processor.c 
void _virt_pg_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, vm 94 tools/testing/selftests/kvm/lib/aarch64/processor.c TEST_ASSERT((vaddr % vm->page_size) == 0, vm 96 tools/testing/selftests/kvm/lib/aarch64/processor.c " vaddr: 0x%lx vm->page_size: 0x%x", vaddr, vm->page_size); vm 97 tools/testing/selftests/kvm/lib/aarch64/processor.c TEST_ASSERT(sparsebit_is_set(vm->vpages_valid, vm 98 tools/testing/selftests/kvm/lib/aarch64/processor.c (vaddr >> vm->page_shift)), vm 100 tools/testing/selftests/kvm/lib/aarch64/processor.c TEST_ASSERT((paddr % vm->page_size) == 0, vm 102 tools/testing/selftests/kvm/lib/aarch64/processor.c " paddr: 0x%lx vm->page_size: 0x%x", paddr, vm->page_size); vm 103 tools/testing/selftests/kvm/lib/aarch64/processor.c TEST_ASSERT((paddr >> vm->page_shift) <= vm->max_gfn, vm 106 tools/testing/selftests/kvm/lib/aarch64/processor.c paddr, vm->max_gfn, vm->page_size); vm 108 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, vm->pgd) + pgd_index(vm, vaddr) * 8; vm 110 tools/testing/selftests/kvm/lib/aarch64/processor.c *ptep = vm_phy_page_alloc(vm, KVM_GUEST_PAGE_TABLE_MIN_PADDR, pgd_memslot); vm 114 tools/testing/selftests/kvm/lib/aarch64/processor.c switch (vm->pgtable_levels) { vm 116 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pud_index(vm, vaddr) * 8; vm 118 tools/testing/selftests/kvm/lib/aarch64/processor.c *ptep = vm_phy_page_alloc(vm, KVM_GUEST_PAGE_TABLE_MIN_PADDR, pgd_memslot); vm 123 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pmd_index(vm, vaddr) * 8; vm 125 tools/testing/selftests/kvm/lib/aarch64/processor.c *ptep = vm_phy_page_alloc(vm, KVM_GUEST_PAGE_TABLE_MIN_PADDR, pgd_memslot); vm 130 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pte_index(vm, vaddr) * 8; vm 140 tools/testing/selftests/kvm/lib/aarch64/processor.c void virt_pg_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, vm 145 tools/testing/selftests/kvm/lib/aarch64/processor.c _virt_pg_map(vm, vaddr, paddr, pgd_memslot, attr_idx); vm 148 tools/testing/selftests/kvm/lib/aarch64/processor.c vm_paddr_t addr_gva2gpa(struct kvm_vm *vm, vm_vaddr_t gva) vm 152 tools/testing/selftests/kvm/lib/aarch64/processor.c if (!vm->pgd_created) vm 155 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, vm->pgd) + pgd_index(vm, gva) * 8; vm 159 tools/testing/selftests/kvm/lib/aarch64/processor.c switch (vm->pgtable_levels) { vm 161 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pud_index(vm, gva) * 8; vm 166 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pmd_index(vm, gva) * 8; vm 171 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, pte_addr(vm, *ptep)) + pte_index(vm, gva) * 8; vm 179 tools/testing/selftests/kvm/lib/aarch64/processor.c return pte_addr(vm, *ptep) + (gva & (vm->page_size - 1)); vm 187 tools/testing/selftests/kvm/lib/aarch64/processor.c static void pte_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent, uint64_t page, int level) vm 196 tools/testing/selftests/kvm/lib/aarch64/processor.c for (pte = page; pte < page + ptrs_per_pte(vm) * 8; pte += 8) { vm 197 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, pte); vm 201 tools/testing/selftests/kvm/lib/aarch64/processor.c pte_dump(stream, vm, indent + 1, pte_addr(vm, *ptep), 
level + 1); vm 206 tools/testing/selftests/kvm/lib/aarch64/processor.c void virt_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) vm 208 tools/testing/selftests/kvm/lib/aarch64/processor.c int level = 4 - (vm->pgtable_levels - 1); vm 211 tools/testing/selftests/kvm/lib/aarch64/processor.c if (!vm->pgd_created) vm 214 tools/testing/selftests/kvm/lib/aarch64/processor.c for (pgd = vm->pgd; pgd < vm->pgd + ptrs_per_pgd(vm) * 8; pgd += 8) { vm 215 tools/testing/selftests/kvm/lib/aarch64/processor.c ptep = addr_gpa2hva(vm, pgd); vm 219 tools/testing/selftests/kvm/lib/aarch64/processor.c pte_dump(stream, vm, indent + 1, pte_addr(vm, *ptep), level); vm 228 tools/testing/selftests/kvm/lib/aarch64/processor.c struct kvm_vm *vm; vm 230 tools/testing/selftests/kvm/lib/aarch64/processor.c vm = vm_create(VM_MODE_DEFAULT, DEFAULT_GUEST_PHY_PAGES + extra_pg_pages, O_RDWR); vm 232 tools/testing/selftests/kvm/lib/aarch64/processor.c kvm_vm_elf_load(vm, program_invocation_name, 0, 0); vm 233 tools/testing/selftests/kvm/lib/aarch64/processor.c vm_vcpu_add_default(vm, vcpuid, guest_code); vm 235 tools/testing/selftests/kvm/lib/aarch64/processor.c return vm; vm 238 tools/testing/selftests/kvm/lib/aarch64/processor.c void aarch64_vcpu_setup(struct kvm_vm *vm, int vcpuid, struct kvm_vcpu_init *init) vm 248 tools/testing/selftests/kvm/lib/aarch64/processor.c vm_ioctl(vm, KVM_ARM_PREFERRED_TARGET, &preferred); vm 252 tools/testing/selftests/kvm/lib/aarch64/processor.c vcpu_ioctl(vm, vcpuid, KVM_ARM_VCPU_INIT, init); vm 258 tools/testing/selftests/kvm/lib/aarch64/processor.c set_reg(vm, vcpuid, ARM64_SYS_REG(CPACR_EL1), 3 << 20); vm 260 tools/testing/selftests/kvm/lib/aarch64/processor.c get_reg(vm, vcpuid, ARM64_SYS_REG(SCTLR_EL1), &sctlr_el1); vm 261 tools/testing/selftests/kvm/lib/aarch64/processor.c get_reg(vm, vcpuid, ARM64_SYS_REG(TCR_EL1), &tcr_el1); vm 263 tools/testing/selftests/kvm/lib/aarch64/processor.c switch (vm->mode) { vm 291 tools/testing/selftests/kvm/lib/aarch64/processor.c TEST_ASSERT(false, "Unknown guest mode, mode: 0x%x", vm->mode); vm 297 tools/testing/selftests/kvm/lib/aarch64/processor.c tcr_el1 |= (64 - vm->va_bits) /* T0SZ */; vm 299 tools/testing/selftests/kvm/lib/aarch64/processor.c set_reg(vm, vcpuid, ARM64_SYS_REG(SCTLR_EL1), sctlr_el1); vm 300 tools/testing/selftests/kvm/lib/aarch64/processor.c set_reg(vm, vcpuid, ARM64_SYS_REG(TCR_EL1), tcr_el1); vm 301 tools/testing/selftests/kvm/lib/aarch64/processor.c set_reg(vm, vcpuid, ARM64_SYS_REG(MAIR_EL1), DEFAULT_MAIR_EL1); vm 302 tools/testing/selftests/kvm/lib/aarch64/processor.c set_reg(vm, vcpuid, ARM64_SYS_REG(TTBR0_EL1), vm->pgd); vm 305 tools/testing/selftests/kvm/lib/aarch64/processor.c void vcpu_dump(FILE *stream, struct kvm_vm *vm, uint32_t vcpuid, uint8_t indent) vm 309 tools/testing/selftests/kvm/lib/aarch64/processor.c get_reg(vm, vcpuid, ARM64_CORE_REG(regs.pstate), &pstate); vm 310 tools/testing/selftests/kvm/lib/aarch64/processor.c get_reg(vm, vcpuid, ARM64_CORE_REG(regs.pc), &pc); vm 316 tools/testing/selftests/kvm/lib/aarch64/processor.c void aarch64_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, vm 319 tools/testing/selftests/kvm/lib/aarch64/processor.c size_t stack_size = vm->page_size == 4096 ? 
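
The aarch64 selftest helpers above (pgd_index, pud_index, pmd_index, pte_index) derive every page-table index from a guest VA with nothing but shifts and masks. A sketch of that arithmetic for one assumed geometry, 4 KiB pages, 4 translation levels and 48 VA bits (idx() is a hypothetical helper):

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12
#define LEVELS     4
#define VA_BITS    48
#define PTRS_BITS  (PAGE_SHIFT - 3)	/* 9 index bits per level */

static uint64_t idx(uint64_t gva, unsigned int shift, unsigned int bits)
{
	return (gva >> shift) & ((1ULL << bits) - 1);
}

int main(void)
{
	uint64_t gva = 0x0000aabbccddee00ULL;
	unsigned int pgd_shift = (LEVELS - 1) * PTRS_BITS + PAGE_SHIFT; /* 39 */

	printf("pgd %llu\n", (unsigned long long)idx(gva, pgd_shift, VA_BITS - pgd_shift));
	printf("pud %llu\n", (unsigned long long)idx(gva, 2 * PTRS_BITS + PAGE_SHIFT, PTRS_BITS));
	printf("pmd %llu\n", (unsigned long long)idx(gva, PTRS_BITS + PAGE_SHIFT, PTRS_BITS));
	printf("pte %llu\n", (unsigned long long)idx(gva, PAGE_SHIFT, PTRS_BITS));
	return 0;
}

Each level consumes page_shift - 3 bits because a 4 KiB page holds 512 eight-byte descriptors; only the top-level index width depends on va_bits, exactly as in pgd_index() above.
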
vm 320 tools/testing/selftests/kvm/lib/aarch64/processor.c DEFAULT_STACK_PGS * vm->page_size : vm 321 tools/testing/selftests/kvm/lib/aarch64/processor.c vm->page_size; vm 322 tools/testing/selftests/kvm/lib/aarch64/processor.c uint64_t stack_vaddr = vm_vaddr_alloc(vm, stack_size, vm 325 tools/testing/selftests/kvm/lib/aarch64/processor.c vm_vcpu_add(vm, vcpuid); vm 326 tools/testing/selftests/kvm/lib/aarch64/processor.c aarch64_vcpu_setup(vm, vcpuid, init); vm 328 tools/testing/selftests/kvm/lib/aarch64/processor.c set_reg(vm, vcpuid, ARM64_CORE_REG(sp_el1), stack_vaddr + stack_size); vm 329 tools/testing/selftests/kvm/lib/aarch64/processor.c set_reg(vm, vcpuid, ARM64_CORE_REG(regs.pc), (uint64_t)guest_code); vm 332 tools/testing/selftests/kvm/lib/aarch64/processor.c void vm_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, void *guest_code) vm 334 tools/testing/selftests/kvm/lib/aarch64/processor.c aarch64_vcpu_add_default(vm, vcpuid, NULL, guest_code); vm 12 tools/testing/selftests/kvm/lib/aarch64/ucall.c static bool ucall_mmio_init(struct kvm_vm *vm, vm_paddr_t gpa) vm 14 tools/testing/selftests/kvm/lib/aarch64/ucall.c if (kvm_userspace_memory_region_find(vm, gpa, gpa + 1)) vm 17 tools/testing/selftests/kvm/lib/aarch64/ucall.c virt_pg_map(vm, gpa, gpa, 0); vm 20 tools/testing/selftests/kvm/lib/aarch64/ucall.c sync_global_to_guest(vm, ucall_exit_mmio_addr); vm 25 tools/testing/selftests/kvm/lib/aarch64/ucall.c void ucall_init(struct kvm_vm *vm, void *arg) vm 33 tools/testing/selftests/kvm/lib/aarch64/ucall.c ret = ucall_mmio_init(vm, gpa); vm 54 tools/testing/selftests/kvm/lib/aarch64/ucall.c bits = vm->va_bits - 1; vm 55 tools/testing/selftests/kvm/lib/aarch64/ucall.c bits = vm->pa_bits < bits ? vm->pa_bits : bits; vm 60 tools/testing/selftests/kvm/lib/aarch64/ucall.c if (ucall_mmio_init(vm, start - offset)) vm 62 tools/testing/selftests/kvm/lib/aarch64/ucall.c if (ucall_mmio_init(vm, start + offset)) vm 68 tools/testing/selftests/kvm/lib/aarch64/ucall.c void ucall_uninit(struct kvm_vm *vm) vm 71 tools/testing/selftests/kvm/lib/aarch64/ucall.c sync_global_to_guest(vm, ucall_exit_mmio_addr); vm 92 tools/testing/selftests/kvm/lib/aarch64/ucall.c uint64_t get_ucall(struct kvm_vm *vm, uint32_t vcpu_id, struct ucall *uc) vm 94 tools/testing/selftests/kvm/lib/aarch64/ucall.c struct kvm_run *run = vcpu_state(vm, vcpu_id); vm 104 tools/testing/selftests/kvm/lib/aarch64/ucall.c memcpy(&ucall, addr_gva2hva(vm, gva), sizeof(ucall)); vm 106 tools/testing/selftests/kvm/lib/aarch64/ucall.c vcpu_run_complete_io(vm, vcpu_id); vm 114 tools/testing/selftests/kvm/lib/elf.c void kvm_vm_elf_load(struct kvm_vm *vm, const char *filename, vm 162 tools/testing/selftests/kvm/lib/elf.c seg_vstart &= ~(vm_vaddr_t)(vm->page_size - 1); vm 164 tools/testing/selftests/kvm/lib/elf.c seg_vend |= vm->page_size - 1; vm 167 tools/testing/selftests/kvm/lib/elf.c vm_vaddr_t vaddr = vm_vaddr_alloc(vm, seg_size, seg_vstart, vm 175 tools/testing/selftests/kvm/lib/elf.c memset(addr_gva2hva(vm, vaddr), 0, seg_size); vm 192 tools/testing/selftests/kvm/lib/elf.c test_read(fd, addr_gva2hva(vm, phdr.p_vaddr), vm 77 tools/testing/selftests/kvm/lib/kvm_util.c int vm_enable_cap(struct kvm_vm *vm, struct kvm_enable_cap *cap) vm 81 tools/testing/selftests/kvm/lib/kvm_util.c ret = ioctl(vm->fd, KVM_ENABLE_CAP, cap); vm 88 tools/testing/selftests/kvm/lib/kvm_util.c static void vm_open(struct kvm_vm *vm, int perm) vm 90 tools/testing/selftests/kvm/lib/kvm_util.c vm->kvm_fd = open(KVM_DEV_PATH, perm); vm 91 
tools/testing/selftests/kvm/lib/kvm_util.c if (vm->kvm_fd < 0) vm 99 tools/testing/selftests/kvm/lib/kvm_util.c vm->fd = ioctl(vm->kvm_fd, KVM_CREATE_VM, vm->type); vm 100 tools/testing/selftests/kvm/lib/kvm_util.c TEST_ASSERT(vm->fd >= 0, "KVM_CREATE_VM ioctl failed, " vm 101 tools/testing/selftests/kvm/lib/kvm_util.c "rc: %i errno: %i", vm->fd, errno); vm 137 tools/testing/selftests/kvm/lib/kvm_util.c struct kvm_vm *vm; vm 141 tools/testing/selftests/kvm/lib/kvm_util.c vm = calloc(1, sizeof(*vm)); vm 142 tools/testing/selftests/kvm/lib/kvm_util.c TEST_ASSERT(vm != NULL, "Insufficient Memory"); vm 144 tools/testing/selftests/kvm/lib/kvm_util.c vm->mode = mode; vm 145 tools/testing/selftests/kvm/lib/kvm_util.c vm->type = 0; vm 148 tools/testing/selftests/kvm/lib/kvm_util.c switch (vm->mode) { vm 150 tools/testing/selftests/kvm/lib/kvm_util.c vm->pgtable_levels = 4; vm 151 tools/testing/selftests/kvm/lib/kvm_util.c vm->pa_bits = 52; vm 152 tools/testing/selftests/kvm/lib/kvm_util.c vm->va_bits = 48; vm 153 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_size = 0x1000; vm 154 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_shift = 12; vm 157 tools/testing/selftests/kvm/lib/kvm_util.c vm->pgtable_levels = 3; vm 158 tools/testing/selftests/kvm/lib/kvm_util.c vm->pa_bits = 52; vm 159 tools/testing/selftests/kvm/lib/kvm_util.c vm->va_bits = 48; vm 160 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_size = 0x10000; vm 161 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_shift = 16; vm 164 tools/testing/selftests/kvm/lib/kvm_util.c vm->pgtable_levels = 4; vm 165 tools/testing/selftests/kvm/lib/kvm_util.c vm->pa_bits = 48; vm 166 tools/testing/selftests/kvm/lib/kvm_util.c vm->va_bits = 48; vm 167 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_size = 0x1000; vm 168 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_shift = 12; vm 171 tools/testing/selftests/kvm/lib/kvm_util.c vm->pgtable_levels = 3; vm 172 tools/testing/selftests/kvm/lib/kvm_util.c vm->pa_bits = 48; vm 173 tools/testing/selftests/kvm/lib/kvm_util.c vm->va_bits = 48; vm 174 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_size = 0x10000; vm 175 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_shift = 16; vm 178 tools/testing/selftests/kvm/lib/kvm_util.c vm->pgtable_levels = 4; vm 179 tools/testing/selftests/kvm/lib/kvm_util.c vm->pa_bits = 40; vm 180 tools/testing/selftests/kvm/lib/kvm_util.c vm->va_bits = 48; vm 181 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_size = 0x1000; vm 182 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_shift = 12; vm 185 tools/testing/selftests/kvm/lib/kvm_util.c vm->pgtable_levels = 3; vm 186 tools/testing/selftests/kvm/lib/kvm_util.c vm->pa_bits = 40; vm 187 tools/testing/selftests/kvm/lib/kvm_util.c vm->va_bits = 48; vm 188 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_size = 0x10000; vm 189 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_shift = 16; vm 193 tools/testing/selftests/kvm/lib/kvm_util.c kvm_get_cpu_address_width(&vm->pa_bits, &vm->va_bits); vm 194 tools/testing/selftests/kvm/lib/kvm_util.c TEST_ASSERT(vm->va_bits == 48, "Linear address width " vm 195 tools/testing/selftests/kvm/lib/kvm_util.c "(%d bits) not supported", vm->va_bits); vm 196 tools/testing/selftests/kvm/lib/kvm_util.c vm->pgtable_levels = 4; vm 197 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_size = 0x1000; vm 198 tools/testing/selftests/kvm/lib/kvm_util.c vm->page_shift = 12; vm 200 tools/testing/selftests/kvm/lib/kvm_util.c vm->pa_bits); vm 211 
tools/testing/selftests/kvm/lib/kvm_util.c if (vm->pa_bits != 40) vm 212 tools/testing/selftests/kvm/lib/kvm_util.c vm->type = KVM_VM_TYPE_ARM_IPA_SIZE(vm->pa_bits); vm 215 tools/testing/selftests/kvm/lib/kvm_util.c vm_open(vm, perm); vm 218 tools/testing/selftests/kvm/lib/kvm_util.c vm->vpages_valid = sparsebit_alloc(); vm 219 tools/testing/selftests/kvm/lib/kvm_util.c sparsebit_set_num(vm->vpages_valid, vm 220 tools/testing/selftests/kvm/lib/kvm_util.c 0, (1ULL << (vm->va_bits - 1)) >> vm->page_shift); vm 221 tools/testing/selftests/kvm/lib/kvm_util.c sparsebit_set_num(vm->vpages_valid, vm 222 tools/testing/selftests/kvm/lib/kvm_util.c (~((1ULL << (vm->va_bits - 1)) - 1)) >> vm->page_shift, vm 223 tools/testing/selftests/kvm/lib/kvm_util.c (1ULL << (vm->va_bits - 1)) >> vm->page_shift); vm 226 tools/testing/selftests/kvm/lib/kvm_util.c vm->max_gfn = ((1ULL << vm->pa_bits) >> vm->page_shift) - 1; vm 229 tools/testing/selftests/kvm/lib/kvm_util.c vm->vpages_mapped = sparsebit_alloc(); vm 231 tools/testing/selftests/kvm/lib/kvm_util.c vm_userspace_mem_region_add(vm, VM_MEM_SRC_ANONYMOUS, vm 234 tools/testing/selftests/kvm/lib/kvm_util.c return vm; vm 277 tools/testing/selftests/kvm/lib/kvm_util.c void kvm_vm_get_dirty_log(struct kvm_vm *vm, int slot, void *log) vm 282 tools/testing/selftests/kvm/lib/kvm_util.c ret = ioctl(vm->fd, KVM_GET_DIRTY_LOG, &args); vm 287 tools/testing/selftests/kvm/lib/kvm_util.c void kvm_vm_clear_dirty_log(struct kvm_vm *vm, int slot, void *log, vm 295 tools/testing/selftests/kvm/lib/kvm_util.c ret = ioctl(vm->fd, KVM_CLEAR_DIRTY_LOG, &args); vm 320 tools/testing/selftests/kvm/lib/kvm_util.c userspace_mem_region_find(struct kvm_vm *vm, uint64_t start, uint64_t end) vm 324 tools/testing/selftests/kvm/lib/kvm_util.c for (region = vm->userspace_mem_region_head; region; vm 353 tools/testing/selftests/kvm/lib/kvm_util.c kvm_userspace_memory_region_find(struct kvm_vm *vm, uint64_t start, vm 358 tools/testing/selftests/kvm/lib/kvm_util.c region = userspace_mem_region_find(vm, start, end); vm 381 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu_find(struct kvm_vm *vm, uint32_t vcpuid) vm 385 tools/testing/selftests/kvm/lib/kvm_util.c for (vcpup = vm->vcpu_head; vcpup; vcpup = vcpup->next) { vm 406 tools/testing/selftests/kvm/lib/kvm_util.c static void vm_vcpu_rm(struct kvm_vm *vm, uint32_t vcpuid) vm 408 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 423 tools/testing/selftests/kvm/lib/kvm_util.c vm->vcpu_head = vcpu->next; vm 506 tools/testing/selftests/kvm/lib/kvm_util.c int kvm_memcmp_hva_gva(void *hva, struct kvm_vm *vm, vm_vaddr_t gva, size_t len) vm 521 tools/testing/selftests/kvm/lib/kvm_util.c uintptr_t ptr2 = (uintptr_t)addr_gva2hva(vm, gva + offset); vm 528 tools/testing/selftests/kvm/lib/kvm_util.c if ((ptr1 >> vm->page_shift) != ((ptr1 + amt) >> vm->page_shift)) vm 529 tools/testing/selftests/kvm/lib/kvm_util.c amt = vm->page_size - (ptr1 % vm->page_size); vm 530 tools/testing/selftests/kvm/lib/kvm_util.c if ((ptr2 >> vm->page_shift) != ((ptr2 + amt) >> vm->page_shift)) vm 531 tools/testing/selftests/kvm/lib/kvm_util.c amt = vm->page_size - (ptr2 % vm->page_size); vm 533 tools/testing/selftests/kvm/lib/kvm_util.c assert((ptr1 >> vm->page_shift) == ((ptr1 + amt - 1) >> vm->page_shift)); vm 534 tools/testing/selftests/kvm/lib/kvm_util.c assert((ptr2 >> vm->page_shift) == ((ptr2 + amt - 1) >> vm->page_shift)); vm 575 tools/testing/selftests/kvm/lib/kvm_util.c void vm_userspace_mem_region_add(struct kvm_vm *vm, 
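
The sparsebit_set_num() calls in vm_create() above mark two canonical halves of the guest VA space as valid: a low half starting at page 0, and a high half whose addresses have every bit above va_bits - 1 set. A sketch of the same arithmetic, assuming va_bits = 48 and 4 KiB pages:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	unsigned int va_bits = 48, page_shift = 12;
	uint64_t half = 1ULL << (va_bits - 1);

	uint64_t low_pages  = half >> page_shift;          /* pages per half */
	uint64_t high_first = (~(half - 1)) >> page_shift; /* first high-half page */

	printf("low  half: first page 0x0, 0x%llx pages\n",
	       (unsigned long long)low_pages);
	printf("high half: first page 0x%llx, 0x%llx pages\n",
	       (unsigned long long)high_first,
	       (unsigned long long)low_pages);
	return 0;
}

The gap between the two halves is the non-canonical range for a 48-bit VA space, so the unused-gap search in vm_vaddr_unused_gap() never has to consider it.
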
vm 582 tools/testing/selftests/kvm/lib/kvm_util.c size_t huge_page_size = KVM_UTIL_PGS_PER_HUGEPG * vm->page_size; vm 585 tools/testing/selftests/kvm/lib/kvm_util.c TEST_ASSERT((guest_paddr % vm->page_size) == 0, "Guest physical " vm 588 tools/testing/selftests/kvm/lib/kvm_util.c guest_paddr, vm->page_size); vm 589 tools/testing/selftests/kvm/lib/kvm_util.c TEST_ASSERT((((guest_paddr >> vm->page_shift) + npages) - 1) vm 590 tools/testing/selftests/kvm/lib/kvm_util.c <= vm->max_gfn, "Physical range beyond maximum " vm 594 tools/testing/selftests/kvm/lib/kvm_util.c guest_paddr, npages, vm->max_gfn, vm->page_size); vm 601 tools/testing/selftests/kvm/lib/kvm_util.c vm, guest_paddr, (guest_paddr + npages * vm->page_size) - 1); vm 608 tools/testing/selftests/kvm/lib/kvm_util.c guest_paddr, npages, vm->page_size, vm 613 tools/testing/selftests/kvm/lib/kvm_util.c for (region = vm->userspace_mem_region_head; region; vm 631 tools/testing/selftests/kvm/lib/kvm_util.c region->mmap_size = npages * vm->page_size; vm 661 tools/testing/selftests/kvm/lib/kvm_util.c ret = madvise(region->host_mem, npages * vm->page_size, vm 667 tools/testing/selftests/kvm/lib/kvm_util.c region->host_mem, npages * vm->page_size, src_type); vm 672 tools/testing/selftests/kvm/lib/kvm_util.c guest_paddr >> vm->page_shift, npages); vm 676 tools/testing/selftests/kvm/lib/kvm_util.c region->region.memory_size = npages * vm->page_size; vm 678 tools/testing/selftests/kvm/lib/kvm_util.c ret = ioctl(vm->fd, KVM_SET_USER_MEMORY_REGION, ®ion->region); vm 687 tools/testing/selftests/kvm/lib/kvm_util.c if (vm->userspace_mem_region_head) vm 688 tools/testing/selftests/kvm/lib/kvm_util.c vm->userspace_mem_region_head->prev = region; vm 689 tools/testing/selftests/kvm/lib/kvm_util.c region->next = vm->userspace_mem_region_head; vm 690 tools/testing/selftests/kvm/lib/kvm_util.c vm->userspace_mem_region_head = region; vm 709 tools/testing/selftests/kvm/lib/kvm_util.c memslot2region(struct kvm_vm *vm, uint32_t memslot) vm 713 tools/testing/selftests/kvm/lib/kvm_util.c for (region = vm->userspace_mem_region_head; region; vm 722 tools/testing/selftests/kvm/lib/kvm_util.c vm_dump(stderr, vm, 2); vm 743 tools/testing/selftests/kvm/lib/kvm_util.c void vm_mem_region_set_flags(struct kvm_vm *vm, uint32_t slot, uint32_t flags) vm 748 tools/testing/selftests/kvm/lib/kvm_util.c region = memslot2region(vm, slot); vm 752 tools/testing/selftests/kvm/lib/kvm_util.c ret = ioctl(vm->fd, KVM_SET_USER_MEMORY_REGION, ®ion->region); vm 804 tools/testing/selftests/kvm/lib/kvm_util.c void vm_vcpu_add(struct kvm_vm *vm, uint32_t vcpuid) vm 809 tools/testing/selftests/kvm/lib/kvm_util.c vcpu = vcpu_find(vm, vcpuid); vm 821 tools/testing/selftests/kvm/lib/kvm_util.c vcpu->fd = ioctl(vm->fd, KVM_CREATE_VCPU, vcpuid); vm 834 tools/testing/selftests/kvm/lib/kvm_util.c if (vm->vcpu_head) vm 835 tools/testing/selftests/kvm/lib/kvm_util.c vm->vcpu_head->prev = vcpu; vm 836 tools/testing/selftests/kvm/lib/kvm_util.c vcpu->next = vm->vcpu_head; vm 837 tools/testing/selftests/kvm/lib/kvm_util.c vm->vcpu_head = vcpu; vm 860 tools/testing/selftests/kvm/lib/kvm_util.c static vm_vaddr_t vm_vaddr_unused_gap(struct kvm_vm *vm, size_t sz, vm 863 tools/testing/selftests/kvm/lib/kvm_util.c uint64_t pages = (sz + vm->page_size - 1) >> vm->page_shift; vm 866 tools/testing/selftests/kvm/lib/kvm_util.c uint64_t pgidx_start = (vaddr_min + vm->page_size - 1) >> vm->page_shift; vm 867 tools/testing/selftests/kvm/lib/kvm_util.c if ((pgidx_start * vm->page_size) < vaddr_min) vm 871 
tools/testing/selftests/kvm/lib/kvm_util.c if (!sparsebit_is_set_num(vm->vpages_valid, vm 873 tools/testing/selftests/kvm/lib/kvm_util.c pgidx_start = sparsebit_next_set_num(vm->vpages_valid, vm 882 tools/testing/selftests/kvm/lib/kvm_util.c if (sparsebit_is_clear_num(vm->vpages_mapped, vm 885 tools/testing/selftests/kvm/lib/kvm_util.c pgidx_start = sparsebit_next_clear_num(vm->vpages_mapped, vm 894 tools/testing/selftests/kvm/lib/kvm_util.c if (!sparsebit_is_set_num(vm->vpages_valid, vm 897 tools/testing/selftests/kvm/lib/kvm_util.c vm->vpages_valid, pgidx_start, pages); vm 911 tools/testing/selftests/kvm/lib/kvm_util.c TEST_ASSERT(sparsebit_is_set_num(vm->vpages_valid, vm 917 tools/testing/selftests/kvm/lib/kvm_util.c TEST_ASSERT(sparsebit_is_clear_num(vm->vpages_mapped, vm 924 tools/testing/selftests/kvm/lib/kvm_util.c return pgidx_start * vm->page_size; vm 948 tools/testing/selftests/kvm/lib/kvm_util.c vm_vaddr_t vm_vaddr_alloc(struct kvm_vm *vm, size_t sz, vm_vaddr_t vaddr_min, vm 951 tools/testing/selftests/kvm/lib/kvm_util.c uint64_t pages = (sz >> vm->page_shift) + ((sz % vm->page_size) != 0); vm 953 tools/testing/selftests/kvm/lib/kvm_util.c virt_pgd_alloc(vm, pgd_memslot); vm 959 tools/testing/selftests/kvm/lib/kvm_util.c vm_vaddr_t vaddr_start = vm_vaddr_unused_gap(vm, sz, vaddr_min); vm 963 tools/testing/selftests/kvm/lib/kvm_util.c pages--, vaddr += vm->page_size) { vm 966 tools/testing/selftests/kvm/lib/kvm_util.c paddr = vm_phy_page_alloc(vm, vm 967 tools/testing/selftests/kvm/lib/kvm_util.c KVM_UTIL_MIN_PFN * vm->page_size, data_memslot); vm 969 tools/testing/selftests/kvm/lib/kvm_util.c virt_pg_map(vm, vaddr, paddr, pgd_memslot); vm 971 tools/testing/selftests/kvm/lib/kvm_util.c sparsebit_set(vm->vpages_mapped, vm 972 tools/testing/selftests/kvm/lib/kvm_util.c vaddr >> vm->page_shift); vm 995 tools/testing/selftests/kvm/lib/kvm_util.c void virt_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, vm 998 tools/testing/selftests/kvm/lib/kvm_util.c size_t page_size = vm->page_size; vm 1005 tools/testing/selftests/kvm/lib/kvm_util.c virt_pg_map(vm, vaddr, paddr, pgd_memslot); vm 1028 tools/testing/selftests/kvm/lib/kvm_util.c void *addr_gpa2hva(struct kvm_vm *vm, vm_paddr_t gpa) vm 1031 tools/testing/selftests/kvm/lib/kvm_util.c for (region = vm->userspace_mem_region_head; region; vm 1061 tools/testing/selftests/kvm/lib/kvm_util.c vm_paddr_t addr_hva2gpa(struct kvm_vm *vm, void *hva) vm 1064 tools/testing/selftests/kvm/lib/kvm_util.c for (region = vm->userspace_mem_region_head; region; vm 1091 tools/testing/selftests/kvm/lib/kvm_util.c void vm_create_irqchip(struct kvm_vm *vm) vm 1095 tools/testing/selftests/kvm/lib/kvm_util.c ret = ioctl(vm->fd, KVM_CREATE_IRQCHIP, 0); vm 1099 tools/testing/selftests/kvm/lib/kvm_util.c vm->has_irqchip = true; vm 1117 tools/testing/selftests/kvm/lib/kvm_util.c struct kvm_run *vcpu_state(struct kvm_vm *vm, uint32_t vcpuid) vm 1119 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1139 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_run(struct kvm_vm *vm, uint32_t vcpuid) vm 1141 tools/testing/selftests/kvm/lib/kvm_util.c int ret = _vcpu_run(vm, vcpuid); vm 1146 tools/testing/selftests/kvm/lib/kvm_util.c int _vcpu_run(struct kvm_vm *vm, uint32_t vcpuid) vm 1148 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1158 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_run_complete_io(struct kvm_vm *vm, uint32_t vcpuid) vm 1160 
tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1189 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_set_mp_state(struct kvm_vm *vm, uint32_t vcpuid, vm 1192 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1217 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_regs_get(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_regs *regs) vm 1219 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1244 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_regs_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_regs *regs) vm 1246 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1257 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_events_get(struct kvm_vm *vm, uint32_t vcpuid, vm 1260 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1270 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_events_set(struct kvm_vm *vm, uint32_t vcpuid, vm 1273 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1285 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_nested_state_get(struct kvm_vm *vm, uint32_t vcpuid, vm 1288 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1299 tools/testing/selftests/kvm/lib/kvm_util.c int vcpu_nested_state_set(struct kvm_vm *vm, uint32_t vcpuid, vm 1302 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1333 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_sregs_get(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_sregs *sregs) vm 1335 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1360 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_sregs_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_sregs *sregs) vm 1362 tools/testing/selftests/kvm/lib/kvm_util.c int ret = _vcpu_sregs_set(vm, vcpuid, sregs); vm 1367 tools/testing/selftests/kvm/lib/kvm_util.c int _vcpu_sregs_set(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_sregs *sregs) vm 1369 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1389 tools/testing/selftests/kvm/lib/kvm_util.c void vcpu_ioctl(struct kvm_vm *vm, uint32_t vcpuid, vm 1394 tools/testing/selftests/kvm/lib/kvm_util.c ret = _vcpu_ioctl(vm, vcpuid, cmd, arg); vm 1399 tools/testing/selftests/kvm/lib/kvm_util.c int _vcpu_ioctl(struct kvm_vm *vm, uint32_t vcpuid, vm 1402 tools/testing/selftests/kvm/lib/kvm_util.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1424 tools/testing/selftests/kvm/lib/kvm_util.c void vm_ioctl(struct kvm_vm *vm, unsigned long cmd, void *arg) vm 1428 tools/testing/selftests/kvm/lib/kvm_util.c ret = ioctl(vm->fd, cmd, arg); vm 1448 tools/testing/selftests/kvm/lib/kvm_util.c void vm_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) vm 1453 tools/testing/selftests/kvm/lib/kvm_util.c fprintf(stream, "%*smode: 0x%x\n", indent, "", vm->mode); vm 1454 tools/testing/selftests/kvm/lib/kvm_util.c fprintf(stream, "%*sfd: %i\n", indent, "", vm->fd); vm 1455 tools/testing/selftests/kvm/lib/kvm_util.c fprintf(stream, "%*spage_size: 0x%x\n", indent, "", vm->page_size); vm 1457 tools/testing/selftests/kvm/lib/kvm_util.c for (region = vm->userspace_mem_region_head; region; vm 1468 tools/testing/selftests/kvm/lib/kvm_util.c sparsebit_dump(stream, vm->vpages_mapped, indent + 2); vm 1470 tools/testing/selftests/kvm/lib/kvm_util.c vm->pgd_created); vm 1471 
tools/testing/selftests/kvm/lib/kvm_util.c if (vm->pgd_created) { vm 1474 tools/testing/selftests/kvm/lib/kvm_util.c virt_dump(stream, vm, indent + 4); vm 1477 tools/testing/selftests/kvm/lib/kvm_util.c for (vcpu = vm->vcpu_head; vcpu; vcpu = vcpu->next) vm 1478 tools/testing/selftests/kvm/lib/kvm_util.c vcpu_dump(stream, vm, vcpu->id, indent + 2); vm 1557 tools/testing/selftests/kvm/lib/kvm_util.c vm_paddr_t vm_phy_pages_alloc(struct kvm_vm *vm, size_t num, vm 1565 tools/testing/selftests/kvm/lib/kvm_util.c TEST_ASSERT((paddr_min % vm->page_size) == 0, "Min physical address " vm 1568 tools/testing/selftests/kvm/lib/kvm_util.c paddr_min, vm->page_size); vm 1570 tools/testing/selftests/kvm/lib/kvm_util.c region = memslot2region(vm, memslot); vm 1571 tools/testing/selftests/kvm/lib/kvm_util.c base = pg = paddr_min >> vm->page_shift; vm 1585 tools/testing/selftests/kvm/lib/kvm_util.c paddr_min, vm->page_size, memslot); vm 1587 tools/testing/selftests/kvm/lib/kvm_util.c vm_dump(stderr, vm, 2); vm 1594 tools/testing/selftests/kvm/lib/kvm_util.c return base * vm->page_size; vm 1597 tools/testing/selftests/kvm/lib/kvm_util.c vm_paddr_t vm_phy_page_alloc(struct kvm_vm *vm, vm_paddr_t paddr_min, vm 1600 tools/testing/selftests/kvm/lib/kvm_util.c return vm_phy_pages_alloc(vm, 1, paddr_min, memslot); vm 1615 tools/testing/selftests/kvm/lib/kvm_util.c void *addr_gva2hva(struct kvm_vm *vm, vm_vaddr_t gva) vm 1617 tools/testing/selftests/kvm/lib/kvm_util.c return addr_gpa2hva(vm, addr_gva2gpa(vm, gva)); vm 1632 tools/testing/selftests/kvm/lib/kvm_util.c bool vm_is_unrestricted_guest(struct kvm_vm *vm) vm 1638 tools/testing/selftests/kvm/lib/kvm_util.c if (vm == NULL) { vm 1656 tools/testing/selftests/kvm/lib/kvm_util.c unsigned int vm_get_page_size(struct kvm_vm *vm) vm 1658 tools/testing/selftests/kvm/lib/kvm_util.c return vm->page_size; vm 1661 tools/testing/selftests/kvm/lib/kvm_util.c unsigned int vm_get_page_shift(struct kvm_vm *vm) vm 1663 tools/testing/selftests/kvm/lib/kvm_util.c return vm->page_shift; vm 1666 tools/testing/selftests/kvm/lib/kvm_util.c unsigned int vm_get_max_gfn(struct kvm_vm *vm) vm 1668 tools/testing/selftests/kvm/lib/kvm_util.c return vm->max_gfn; vm 66 tools/testing/selftests/kvm/lib/kvm_util_internal.h struct vcpu *vcpu_find(struct kvm_vm *vm, uint32_t vcpuid); vm 67 tools/testing/selftests/kvm/lib/kvm_util_internal.h void virt_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent); vm 72 tools/testing/selftests/kvm/lib/kvm_util_internal.h memslot2region(struct kvm_vm *vm, uint32_t memslot); vm 18 tools/testing/selftests/kvm/lib/s390x/processor.c void virt_pgd_alloc(struct kvm_vm *vm, uint32_t memslot) vm 22 tools/testing/selftests/kvm/lib/s390x/processor.c TEST_ASSERT(vm->page_size == 4096, "Unsupported page size: 0x%x", vm 23 tools/testing/selftests/kvm/lib/s390x/processor.c vm->page_size); vm 25 tools/testing/selftests/kvm/lib/s390x/processor.c if (vm->pgd_created) vm 28 tools/testing/selftests/kvm/lib/s390x/processor.c paddr = vm_phy_pages_alloc(vm, PAGES_PER_REGION, vm 30 tools/testing/selftests/kvm/lib/s390x/processor.c memset(addr_gpa2hva(vm, paddr), 0xff, PAGES_PER_REGION * vm->page_size); vm 32 tools/testing/selftests/kvm/lib/s390x/processor.c vm->pgd = paddr; vm 33 tools/testing/selftests/kvm/lib/s390x/processor.c vm->pgd_created = true; vm 41 tools/testing/selftests/kvm/lib/s390x/processor.c static uint64_t virt_alloc_region(struct kvm_vm *vm, int ri, uint32_t memslot) vm 45 tools/testing/selftests/kvm/lib/s390x/processor.c taddr = vm_phy_pages_alloc(vm, ri < 4 ? 
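Editor's note: vm_phy_pages_alloc() above hands out contiguous, page-aligned guest-physical pages from a memslot (calling vm_dump() and dying if the slot is exhausted), and addr_gpa2hva() turns the result into a pointer the host can dereference. A sketch, with the four-page count, the 0x10000 floor, and memslot 0 all chosen arbitrarily for illustration:

	#include <string.h>
	#include "kvm_util.h"

	static vm_paddr_t alloc_scratch(struct kvm_vm *vm)
	{
		/* Four contiguous guest pages from memslot 0, at or above 0x10000. */
		vm_paddr_t gpa = vm_phy_pages_alloc(vm, 4, 0x10000, 0);

		/* The host can initialize guest-physical memory directly. */
		memset(addr_gpa2hva(vm, gpa), 0, 4 * vm_get_page_size(vm));
		return gpa;
	}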
PAGES_PER_REGION : 1, vm 47 tools/testing/selftests/kvm/lib/s390x/processor.c memset(addr_gpa2hva(vm, taddr), 0xff, PAGES_PER_REGION * vm->page_size); vm 70 tools/testing/selftests/kvm/lib/s390x/processor.c void virt_pg_map(struct kvm_vm *vm, uint64_t gva, uint64_t gpa, vm 76 tools/testing/selftests/kvm/lib/s390x/processor.c TEST_ASSERT((gva % vm->page_size) == 0, vm 79 tools/testing/selftests/kvm/lib/s390x/processor.c gva, vm->page_size); vm 80 tools/testing/selftests/kvm/lib/s390x/processor.c TEST_ASSERT(sparsebit_is_set(vm->vpages_valid, vm 81 tools/testing/selftests/kvm/lib/s390x/processor.c (gva >> vm->page_shift)), vm 84 tools/testing/selftests/kvm/lib/s390x/processor.c TEST_ASSERT((gpa % vm->page_size) == 0, vm 87 tools/testing/selftests/kvm/lib/s390x/processor.c gva, vm->page_size); vm 88 tools/testing/selftests/kvm/lib/s390x/processor.c TEST_ASSERT((gpa >> vm->page_shift) <= vm->max_gfn, vm 91 tools/testing/selftests/kvm/lib/s390x/processor.c gva, vm->max_gfn, vm->page_size); vm 94 tools/testing/selftests/kvm/lib/s390x/processor.c entry = addr_gpa2hva(vm, vm->pgd); vm 98 tools/testing/selftests/kvm/lib/s390x/processor.c entry[idx] = virt_alloc_region(vm, ri, memslot); vm 99 tools/testing/selftests/kvm/lib/s390x/processor.c entry = addr_gpa2hva(vm, entry[idx] & REGION_ENTRY_ORIGIN); vm 130 tools/testing/selftests/kvm/lib/s390x/processor.c vm_paddr_t addr_gva2gpa(struct kvm_vm *vm, vm_vaddr_t gva) vm 135 tools/testing/selftests/kvm/lib/s390x/processor.c TEST_ASSERT(vm->page_size == 4096, "Unsupported page size: 0x%x", vm 136 tools/testing/selftests/kvm/lib/s390x/processor.c vm->page_size); vm 138 tools/testing/selftests/kvm/lib/s390x/processor.c entry = addr_gpa2hva(vm, vm->pgd); vm 144 tools/testing/selftests/kvm/lib/s390x/processor.c entry = addr_gpa2hva(vm, entry[idx] & REGION_ENTRY_ORIGIN); vm 155 tools/testing/selftests/kvm/lib/s390x/processor.c static void virt_dump_ptes(FILE *stream, struct kvm_vm *vm, uint8_t indent, vm 161 tools/testing/selftests/kvm/lib/s390x/processor.c pte = addr_gpa2hva(vm, ptea); vm 169 tools/testing/selftests/kvm/lib/s390x/processor.c static void virt_dump_region(FILE *stream, struct kvm_vm *vm, uint8_t indent, vm 175 tools/testing/selftests/kvm/lib/s390x/processor.c entry = addr_gpa2hva(vm, addr); vm 182 tools/testing/selftests/kvm/lib/s390x/processor.c virt_dump_region(stream, vm, indent + 2, vm 185 tools/testing/selftests/kvm/lib/s390x/processor.c virt_dump_ptes(stream, vm, indent + 2, vm 191 tools/testing/selftests/kvm/lib/s390x/processor.c void virt_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) vm 193 tools/testing/selftests/kvm/lib/s390x/processor.c if (!vm->pgd_created) vm 196 tools/testing/selftests/kvm/lib/s390x/processor.c virt_dump_region(stream, vm, indent, vm->pgd); vm 223 tools/testing/selftests/kvm/lib/s390x/processor.c struct kvm_vm *vm; vm 225 tools/testing/selftests/kvm/lib/s390x/processor.c vm = vm_create(VM_MODE_DEFAULT, vm 228 tools/testing/selftests/kvm/lib/s390x/processor.c kvm_vm_elf_load(vm, program_invocation_name, 0, 0); vm 229 tools/testing/selftests/kvm/lib/s390x/processor.c vm_vcpu_add_default(vm, vcpuid, guest_code); vm 231 tools/testing/selftests/kvm/lib/s390x/processor.c return vm; vm 241 tools/testing/selftests/kvm/lib/s390x/processor.c void vm_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, void *guest_code) vm 249 tools/testing/selftests/kvm/lib/s390x/processor.c TEST_ASSERT(vm->page_size == 4096, "Unsupported page size: 0x%x", vm 250 tools/testing/selftests/kvm/lib/s390x/processor.c vm->page_size); vm 
252 tools/testing/selftests/kvm/lib/s390x/processor.c stack_vaddr = vm_vaddr_alloc(vm, stack_size, vm 255 tools/testing/selftests/kvm/lib/s390x/processor.c vm_vcpu_add(vm, vcpuid); vm 258 tools/testing/selftests/kvm/lib/s390x/processor.c vcpu_regs_get(vm, vcpuid, &regs); vm 260 tools/testing/selftests/kvm/lib/s390x/processor.c vcpu_regs_set(vm, vcpuid, &regs); vm 262 tools/testing/selftests/kvm/lib/s390x/processor.c vcpu_sregs_get(vm, vcpuid, &sregs); vm 264 tools/testing/selftests/kvm/lib/s390x/processor.c sregs.crs[1] = vm->pgd | 0xf; /* Primary region table */ vm 265 tools/testing/selftests/kvm/lib/s390x/processor.c vcpu_sregs_set(vm, vcpuid, &sregs); vm 267 tools/testing/selftests/kvm/lib/s390x/processor.c run = vcpu_state(vm, vcpuid); vm 272 tools/testing/selftests/kvm/lib/s390x/processor.c void vcpu_dump(FILE *stream, struct kvm_vm *vm, uint32_t vcpuid, uint8_t indent) vm 274 tools/testing/selftests/kvm/lib/s390x/processor.c struct vcpu *vcpu = vm->vcpu_head; vm 9 tools/testing/selftests/kvm/lib/s390x/ucall.c void ucall_init(struct kvm_vm *vm, void *arg) vm 13 tools/testing/selftests/kvm/lib/s390x/ucall.c void ucall_uninit(struct kvm_vm *vm) vm 36 tools/testing/selftests/kvm/lib/s390x/ucall.c uint64_t get_ucall(struct kvm_vm *vm, uint32_t vcpu_id, struct ucall *uc) vm 38 tools/testing/selftests/kvm/lib/s390x/ucall.c struct kvm_run *run = vcpu_state(vm, vcpu_id); vm 47 tools/testing/selftests/kvm/lib/s390x/ucall.c memcpy(&ucall, addr_gva2hva(vm, run->s.regs.gprs[reg]), vm 50 tools/testing/selftests/kvm/lib/s390x/ucall.c vcpu_run_complete_io(vm, vcpu_id); vm 229 tools/testing/selftests/kvm/lib/x86_64/processor.c void virt_pgd_alloc(struct kvm_vm *vm, uint32_t pgd_memslot) vm 231 tools/testing/selftests/kvm/lib/x86_64/processor.c TEST_ASSERT(vm->mode == VM_MODE_PXXV48_4K, "Attempt to use " vm 232 tools/testing/selftests/kvm/lib/x86_64/processor.c "unknown or unsupported guest mode, mode: 0x%x", vm->mode); vm 235 tools/testing/selftests/kvm/lib/x86_64/processor.c if (!vm->pgd_created) { vm 236 tools/testing/selftests/kvm/lib/x86_64/processor.c vm_paddr_t paddr = vm_phy_page_alloc(vm, vm 238 tools/testing/selftests/kvm/lib/x86_64/processor.c vm->pgd = paddr; vm 239 tools/testing/selftests/kvm/lib/x86_64/processor.c vm->pgd_created = true; vm 258 tools/testing/selftests/kvm/lib/x86_64/processor.c void virt_pg_map(struct kvm_vm *vm, uint64_t vaddr, uint64_t paddr, vm 264 tools/testing/selftests/kvm/lib/x86_64/processor.c TEST_ASSERT(vm->mode == VM_MODE_PXXV48_4K, "Attempt to use " vm 265 tools/testing/selftests/kvm/lib/x86_64/processor.c "unknown or unsupported guest mode, mode: 0x%x", vm->mode); vm 267 tools/testing/selftests/kvm/lib/x86_64/processor.c TEST_ASSERT((vaddr % vm->page_size) == 0, vm 270 tools/testing/selftests/kvm/lib/x86_64/processor.c vaddr, vm->page_size); vm 271 tools/testing/selftests/kvm/lib/x86_64/processor.c TEST_ASSERT(sparsebit_is_set(vm->vpages_valid, vm 272 tools/testing/selftests/kvm/lib/x86_64/processor.c (vaddr >> vm->page_shift)), vm 275 tools/testing/selftests/kvm/lib/x86_64/processor.c TEST_ASSERT((paddr % vm->page_size) == 0, vm 278 tools/testing/selftests/kvm/lib/x86_64/processor.c paddr, vm->page_size); vm 279 tools/testing/selftests/kvm/lib/x86_64/processor.c TEST_ASSERT((paddr >> vm->page_shift) <= vm->max_gfn, vm 282 tools/testing/selftests/kvm/lib/x86_64/processor.c paddr, vm->max_gfn, vm->page_size); vm 290 tools/testing/selftests/kvm/lib/x86_64/processor.c pml4e = addr_gpa2hva(vm, vm->pgd); vm 292 tools/testing/selftests/kvm/lib/x86_64/processor.c
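Editor's note: virt_pg_map() — the s390 region-table walk above and the x86 four-level walk indexed here — validates the same invariants before touching page tables: page-aligned gva/gpa, gva set in vm->vpages_valid, gpa below max_gfn. Callers typically pair it with a physical allocation. A sketch (the gva value and memslot 0 are illustrative, and the trailing memslot argument is inferred from the truncated prototypes above):

	static void map_one_page(struct kvm_vm *vm)
	{
		vm_vaddr_t gva = 0x400000;	/* arbitrary, page-aligned */
		vm_paddr_t gpa = vm_phy_page_alloc(vm, 0x10000, 0);

		virt_pg_map(vm, gva, gpa, 0);	/* wire one 4K page into the tables */
	}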
pml4e[index[3]].address = vm_phy_page_alloc(vm, vm 294 tools/testing/selftests/kvm/lib/x86_64/processor.c >> vm->page_shift; vm 301 tools/testing/selftests/kvm/lib/x86_64/processor.c pdpe = addr_gpa2hva(vm, pml4e[index[3]].address * vm->page_size); vm 303 tools/testing/selftests/kvm/lib/x86_64/processor.c pdpe[index[2]].address = vm_phy_page_alloc(vm, vm 305 tools/testing/selftests/kvm/lib/x86_64/processor.c >> vm->page_shift; vm 312 tools/testing/selftests/kvm/lib/x86_64/processor.c pde = addr_gpa2hva(vm, pdpe[index[2]].address * vm->page_size); vm 314 tools/testing/selftests/kvm/lib/x86_64/processor.c pde[index[1]].address = vm_phy_page_alloc(vm, vm 316 tools/testing/selftests/kvm/lib/x86_64/processor.c >> vm->page_shift; vm 323 tools/testing/selftests/kvm/lib/x86_64/processor.c pte = addr_gpa2hva(vm, pde[index[1]].address * vm->page_size); vm 324 tools/testing/selftests/kvm/lib/x86_64/processor.c pte[index[0]].address = paddr >> vm->page_shift; vm 343 tools/testing/selftests/kvm/lib/x86_64/processor.c void virt_dump(FILE *stream, struct kvm_vm *vm, uint8_t indent) vm 350 tools/testing/selftests/kvm/lib/x86_64/processor.c if (!vm->pgd_created) vm 358 tools/testing/selftests/kvm/lib/x86_64/processor.c pml4e_start = (struct pageMapL4Entry *) addr_gpa2hva(vm, vm 359 tools/testing/selftests/kvm/lib/x86_64/processor.c vm->pgd); vm 368 tools/testing/selftests/kvm/lib/x86_64/processor.c addr_hva2gpa(vm, pml4e), (uint64_t) pml4e->address, vm 371 tools/testing/selftests/kvm/lib/x86_64/processor.c pdpe_start = addr_gpa2hva(vm, pml4e->address vm 372 tools/testing/selftests/kvm/lib/x86_64/processor.c * vm->page_size); vm 381 tools/testing/selftests/kvm/lib/x86_64/processor.c addr_hva2gpa(vm, pdpe), vm 385 tools/testing/selftests/kvm/lib/x86_64/processor.c pde_start = addr_gpa2hva(vm, vm 386 tools/testing/selftests/kvm/lib/x86_64/processor.c pdpe->address * vm->page_size); vm 394 tools/testing/selftests/kvm/lib/x86_64/processor.c addr_hva2gpa(vm, pde), vm 398 tools/testing/selftests/kvm/lib/x86_64/processor.c pte_start = addr_gpa2hva(vm, vm 399 tools/testing/selftests/kvm/lib/x86_64/processor.c pde->address * vm->page_size); vm 409 tools/testing/selftests/kvm/lib/x86_64/processor.c addr_hva2gpa(vm, pte), vm 441 tools/testing/selftests/kvm/lib/x86_64/processor.c static void kvm_seg_fill_gdt_64bit(struct kvm_vm *vm, struct kvm_segment *segp) vm 443 tools/testing/selftests/kvm/lib/x86_64/processor.c void *gdt = addr_gva2hva(vm, vm->gdt); vm 477 tools/testing/selftests/kvm/lib/x86_64/processor.c static void kvm_seg_set_kernel_code_64bit(struct kvm_vm *vm, uint16_t selector, vm 490 tools/testing/selftests/kvm/lib/x86_64/processor.c if (vm) vm 491 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_seg_fill_gdt_64bit(vm, segp); vm 508 tools/testing/selftests/kvm/lib/x86_64/processor.c static void kvm_seg_set_kernel_data_64bit(struct kvm_vm *vm, uint16_t selector, vm 520 tools/testing/selftests/kvm/lib/x86_64/processor.c if (vm) vm 521 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_seg_fill_gdt_64bit(vm, segp); vm 542 tools/testing/selftests/kvm/lib/x86_64/processor.c vm_paddr_t addr_gva2gpa(struct kvm_vm *vm, vm_vaddr_t gva) vm 550 tools/testing/selftests/kvm/lib/x86_64/processor.c TEST_ASSERT(vm->mode == VM_MODE_PXXV48_4K, "Attempt to use " vm 551 tools/testing/selftests/kvm/lib/x86_64/processor.c "unknown or unsupported guest mode, mode: 0x%x", vm->mode); vm 558 tools/testing/selftests/kvm/lib/x86_64/processor.c if (!vm->pgd_created) vm 560 tools/testing/selftests/kvm/lib/x86_64/processor.c 
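Editor's note: addr_gva2gpa() re-walks those same tables in software — index the PML4/PDPT/PD/PT levels with slices of the gva, multiply each entry's page-frame address field by the page size, then OR in the low 12 offset bits. Combined with addr_gpa2hva() this is how tests poke guest virtual memory from the host; the addr_gva2hva() one-liner in kvm_util.c above is exactly this composition:

	static void poke_guest_byte(struct kvm_vm *vm, vm_vaddr_t gva, uint8_t val)
	{
		/* gva -> gpa via the software page-table walk ... */
		vm_paddr_t gpa = addr_gva2gpa(vm, gva);
		/* ... then gpa -> hva via the memslot's host mapping. */
		uint8_t *hva = addr_gpa2hva(vm, gpa);

		*hva = val;	/* immediately visible to the guest at 'gva' */
	}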
pml4e = addr_gpa2hva(vm, vm->pgd); vm 564 tools/testing/selftests/kvm/lib/x86_64/processor.c pdpe = addr_gpa2hva(vm, pml4e[index[3]].address * vm->page_size); vm 568 tools/testing/selftests/kvm/lib/x86_64/processor.c pde = addr_gpa2hva(vm, pdpe[index[2]].address * vm->page_size); vm 572 tools/testing/selftests/kvm/lib/x86_64/processor.c pte = addr_gpa2hva(vm, pde[index[1]].address * vm->page_size); vm 576 tools/testing/selftests/kvm/lib/x86_64/processor.c return (pte[index[0]].address * vm->page_size) + (gva & 0xfffu); vm 584 tools/testing/selftests/kvm/lib/x86_64/processor.c static void kvm_setup_gdt(struct kvm_vm *vm, struct kvm_dtable *dt, int gdt_memslot, vm 587 tools/testing/selftests/kvm/lib/x86_64/processor.c if (!vm->gdt) vm 588 tools/testing/selftests/kvm/lib/x86_64/processor.c vm->gdt = vm_vaddr_alloc(vm, getpagesize(), vm 591 tools/testing/selftests/kvm/lib/x86_64/processor.c dt->base = vm->gdt; vm 595 tools/testing/selftests/kvm/lib/x86_64/processor.c static void kvm_setup_tss_64bit(struct kvm_vm *vm, struct kvm_segment *segp, vm 599 tools/testing/selftests/kvm/lib/x86_64/processor.c if (!vm->tss) vm 600 tools/testing/selftests/kvm/lib/x86_64/processor.c vm->tss = vm_vaddr_alloc(vm, getpagesize(), vm 604 tools/testing/selftests/kvm/lib/x86_64/processor.c segp->base = vm->tss; vm 609 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_seg_fill_gdt_64bit(vm, segp); vm 612 tools/testing/selftests/kvm/lib/x86_64/processor.c static void vcpu_setup(struct kvm_vm *vm, int vcpuid, int pgd_memslot, int gdt_memslot) vm 617 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_sregs_get(vm, vcpuid, &sregs); vm 621 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_setup_gdt(vm, &sregs.gdt, gdt_memslot, pgd_memslot); vm 623 tools/testing/selftests/kvm/lib/x86_64/processor.c switch (vm->mode) { vm 630 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_seg_set_kernel_code_64bit(vm, 0x8, &sregs.cs); vm 631 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_seg_set_kernel_data_64bit(vm, 0x10, &sregs.ds); vm 632 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_seg_set_kernel_data_64bit(vm, 0x10, &sregs.es); vm 633 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_setup_tss_64bit(vm, &sregs.tr, 0x18, gdt_memslot, pgd_memslot); vm 637 tools/testing/selftests/kvm/lib/x86_64/processor.c TEST_ASSERT(false, "Unknown guest mode, mode: 0x%x", vm->mode); vm 640 tools/testing/selftests/kvm/lib/x86_64/processor.c sregs.cr3 = vm->pgd; vm 641 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_sregs_set(vm, vcpuid, &sregs); vm 649 tools/testing/selftests/kvm/lib/x86_64/processor.c void vm_vcpu_add_default(struct kvm_vm *vm, uint32_t vcpuid, void *guest_code) vm 654 tools/testing/selftests/kvm/lib/x86_64/processor.c stack_vaddr = vm_vaddr_alloc(vm, DEFAULT_STACK_PGS * getpagesize(), vm 658 tools/testing/selftests/kvm/lib/x86_64/processor.c vm_vcpu_add(vm, vcpuid); vm 659 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_setup(vm, vcpuid, 0, 0); vm 662 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_regs_get(vm, vcpuid, &regs); vm 666 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_regs_set(vm, vcpuid, &regs); vm 670 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_set_mp_state(vm, vcpuid, &mp_state); vm 782 tools/testing/selftests/kvm/lib/x86_64/processor.c void vcpu_set_cpuid(struct kvm_vm *vm, vm 785 tools/testing/selftests/kvm/lib/x86_64/processor.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 813 tools/testing/selftests/kvm/lib/x86_64/processor.c
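Editor's note: vcpu_setup() above gives every new x86 vcpu a flat 64-bit GDT, a TSS, and CR3 pointing at vm->pgd, so tests that need a non-default control-register state fetch the prepared sregs and edit only what they care about. A sketch mirroring what cr4_cpuid_sync_test does further down this index; X86_CR4_OSXSAVE is assumed to be a (1ul << 18) style bit-mask macro available to the test:

	static void clear_cr4_osxsave(struct kvm_vm *vm, uint32_t vcpuid)
	{
		struct kvm_sregs sregs;

		vcpu_sregs_get(vm, vcpuid, &sregs);
		sregs.cr4 &= ~X86_CR4_OSXSAVE;	/* flip one bit, keep the rest */
		vcpu_sregs_set(vm, vcpuid, &sregs);
	}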
struct kvm_vm *vm; vm 824 tools/testing/selftests/kvm/lib/x86_64/processor.c vm = vm_create(VM_MODE_DEFAULT, vm 829 tools/testing/selftests/kvm/lib/x86_64/processor.c kvm_vm_elf_load(vm, program_invocation_name, 0, 0); vm 832 tools/testing/selftests/kvm/lib/x86_64/processor.c vm_create_irqchip(vm); vm 835 tools/testing/selftests/kvm/lib/x86_64/processor.c vm_vcpu_add_default(vm, vcpuid, guest_code); vm 837 tools/testing/selftests/kvm/lib/x86_64/processor.c return vm; vm 853 tools/testing/selftests/kvm/lib/x86_64/processor.c uint64_t vcpu_get_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index) vm 855 tools/testing/selftests/kvm/lib/x86_64/processor.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 886 tools/testing/selftests/kvm/lib/x86_64/processor.c void vcpu_set_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index, vm 889 tools/testing/selftests/kvm/lib/x86_64/processor.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 922 tools/testing/selftests/kvm/lib/x86_64/processor.c void vcpu_args_set(struct kvm_vm *vm, uint32_t vcpuid, unsigned int num, ...) vm 932 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_regs_get(vm, vcpuid, &regs); vm 952 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_regs_set(vm, vcpuid, &regs); vm 972 tools/testing/selftests/kvm/lib/x86_64/processor.c void vcpu_dump(FILE *stream, struct kvm_vm *vm, uint32_t vcpuid, uint8_t indent) vm 980 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_regs_get(vm, vcpuid, &regs); vm 984 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_sregs_get(vm, vcpuid, &sregs); vm 1003 tools/testing/selftests/kvm/lib/x86_64/processor.c static int kvm_get_num_msrs(struct kvm_vm *vm) vm 1009 tools/testing/selftests/kvm/lib/x86_64/processor.c r = ioctl(vm->kvm_fd, KVM_GET_MSR_INDEX_LIST, &nmsrs); vm 1016 tools/testing/selftests/kvm/lib/x86_64/processor.c struct kvm_x86_state *vcpu_save_state(struct kvm_vm *vm, uint32_t vcpuid) vm 1018 tools/testing/selftests/kvm/lib/x86_64/processor.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 1037 tools/testing/selftests/kvm/lib/x86_64/processor.c vcpu_run_complete_io(vm, vcpuid); vm 1039 tools/testing/selftests/kvm/lib/x86_64/processor.c nmsrs = kvm_get_num_msrs(vm); vm 1042 tools/testing/selftests/kvm/lib/x86_64/processor.c r = ioctl(vm->kvm_fd, KVM_GET_MSR_INDEX_LIST, list); vm 1099 tools/testing/selftests/kvm/lib/x86_64/processor.c void vcpu_load_state(struct kvm_vm *vm, uint32_t vcpuid, struct kvm_x86_state *state) vm 1101 tools/testing/selftests/kvm/lib/x86_64/processor.c struct vcpu *vcpu = vcpu_find(vm, vcpuid); vm 11 tools/testing/selftests/kvm/lib/x86_64/ucall.c void ucall_init(struct kvm_vm *vm, void *arg) vm 15 tools/testing/selftests/kvm/lib/x86_64/ucall.c void ucall_uninit(struct kvm_vm *vm) vm 38 tools/testing/selftests/kvm/lib/x86_64/ucall.c uint64_t get_ucall(struct kvm_vm *vm, uint32_t vcpu_id, struct ucall *uc) vm 40 tools/testing/selftests/kvm/lib/x86_64/ucall.c struct kvm_run *run = vcpu_state(vm, vcpu_id); vm 46 tools/testing/selftests/kvm/lib/x86_64/ucall.c vcpu_regs_get(vm, vcpu_id, &regs); vm 47 tools/testing/selftests/kvm/lib/x86_64/ucall.c memcpy(&ucall, addr_gva2hva(vm, (vm_vaddr_t)regs.rdi), vm 50 tools/testing/selftests/kvm/lib/x86_64/ucall.c vcpu_run_complete_io(vm, vcpu_id); vm 46 tools/testing/selftests/kvm/lib/x86_64/vmx.c int vcpu_enable_evmcs(struct kvm_vm *vm, int vcpu_id) vm 55 tools/testing/selftests/kvm/lib/x86_64/vmx.c vcpu_ioctl(vm, vcpu_id, KVM_ENABLE_CAP, &enable_evmcs_cap); vm 78 tools/testing/selftests/kvm/lib/x86_64/vmx.c
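Editor's note: these pieces compose into the canonical x86 test skeleton — vm_create_default() builds the VM, loads the test ELF, and adds the vcpu; vcpu_set_cpuid() gives it the host's CPUID; then the test bounces between _vcpu_run() and get_ucall() until the guest reports done. A condensed sketch (guest_code and VCPU_ID are illustrative stand-ins):

	static void guest_code(void)
	{
		GUEST_DONE();	/* ucall back to the host */
	}

	int main(void)
	{
		struct kvm_vm *vm = vm_create_default(VCPU_ID, 0, guest_code);
		struct ucall uc;

		vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid());

		for (;;) {
			_vcpu_run(vm, VCPU_ID);
			if (get_ucall(vm, VCPU_ID, &uc) == UCALL_DONE)
				break;
		}
		kvm_vm_free(vm);
		return 0;
	}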
vcpu_alloc_vmx(struct kvm_vm *vm, vm_vaddr_t *p_vmx_gva) vm 80 tools/testing/selftests/kvm/lib/x86_64/vmx.c vm_vaddr_t vmx_gva = vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); vm 81 tools/testing/selftests/kvm/lib/x86_64/vmx.c struct vmx_pages *vmx = addr_gva2hva(vm, vmx_gva); vm 84 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmxon = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); vm 85 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmxon_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmxon); vm 86 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmxon_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmxon); vm 89 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmcs = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); vm 90 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs); vm 91 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs); vm 94 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->msr = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); vm 95 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->msr_hva = addr_gva2hva(vm, (uintptr_t)vmx->msr); vm 96 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->msr_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->msr); vm 100 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->shadow_vmcs = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); vm 101 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->shadow_vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->shadow_vmcs); vm 102 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->shadow_vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->shadow_vmcs); vm 105 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmread = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); vm 106 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmread_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmread); vm 107 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmread_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmread); vm 110 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmwrite = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); vm 111 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmwrite_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmwrite); vm 112 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vmwrite_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmwrite); vm 116 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vp_assist = (void *)vm_vaddr_alloc(vm, getpagesize(), vm 118 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vp_assist_hva = addr_gva2hva(vm, (uintptr_t)vmx->vp_assist); vm 119 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->vp_assist_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vp_assist); vm 122 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->enlightened_vmcs = (void *)vm_vaddr_alloc(vm, getpagesize(), vm 125 tools/testing/selftests/kvm/lib/x86_64/vmx.c addr_gva2hva(vm, (uintptr_t)vmx->enlightened_vmcs); vm 127 tools/testing/selftests/kvm/lib/x86_64/vmx.c addr_gva2gpa(vm, (uintptr_t)vmx->enlightened_vmcs); vm 392 tools/testing/selftests/kvm/lib/x86_64/vmx.c void nested_pg_map(struct vmx_pages *vmx, struct kvm_vm *vm, vm 398 tools/testing/selftests/kvm/lib/x86_64/vmx.c TEST_ASSERT(vm->mode == VM_MODE_PXXV48_4K, "Attempt to use " vm 399 tools/testing/selftests/kvm/lib/x86_64/vmx.c "unknown or unsupported guest mode, mode: 0x%x", vm->mode); vm 401 tools/testing/selftests/kvm/lib/x86_64/vmx.c TEST_ASSERT((nested_paddr % vm->page_size) == 0, vm 404 tools/testing/selftests/kvm/lib/x86_64/vmx.c nested_paddr, vm->page_size); 
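Editor's note: vcpu_alloc_vmx() above allocates every VMX scratch page (vmxon, vmcs, MSR and VMREAD/VMWRITE bitmaps, plus the eVMCS pages when enabled) in guest memory and records each one's gva/gpa/hva triple in a vmx_pages struct; the test then hands that struct's gva to the L1 guest as its first argument. The prologue shared by the nested tests indexed below:

	vm_vaddr_t vmx_pages_gva;
	struct kvm_vm *vm;

	vm = vm_create_default(VCPU_ID, 0, (void *) l1_guest_code);
	vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid());

	/* Carve out the VMX pages and tell L1 where they live. */
	vcpu_alloc_vmx(vm, &vmx_pages_gva);
	vcpu_args_set(vm, VCPU_ID, 1, vmx_pages_gva);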
vm 405 tools/testing/selftests/kvm/lib/x86_64/vmx.c TEST_ASSERT((nested_paddr >> vm->page_shift) <= vm->max_gfn, vm 408 tools/testing/selftests/kvm/lib/x86_64/vmx.c paddr, vm->max_gfn, vm->page_size); vm 409 tools/testing/selftests/kvm/lib/x86_64/vmx.c TEST_ASSERT((paddr % vm->page_size) == 0, vm 412 tools/testing/selftests/kvm/lib/x86_64/vmx.c paddr, vm->page_size); vm 413 tools/testing/selftests/kvm/lib/x86_64/vmx.c TEST_ASSERT((paddr >> vm->page_shift) <= vm->max_gfn, vm 416 tools/testing/selftests/kvm/lib/x86_64/vmx.c paddr, vm->max_gfn, vm->page_size); vm 426 tools/testing/selftests/kvm/lib/x86_64/vmx.c pml4e[index[3]].address = vm_phy_page_alloc(vm, vm 428 tools/testing/selftests/kvm/lib/x86_64/vmx.c >> vm->page_shift; vm 436 tools/testing/selftests/kvm/lib/x86_64/vmx.c pdpe = addr_gpa2hva(vm, pml4e[index[3]].address * vm->page_size); vm 438 tools/testing/selftests/kvm/lib/x86_64/vmx.c pdpe[index[2]].address = vm_phy_page_alloc(vm, vm 440 tools/testing/selftests/kvm/lib/x86_64/vmx.c >> vm->page_shift; vm 448 tools/testing/selftests/kvm/lib/x86_64/vmx.c pde = addr_gpa2hva(vm, pdpe[index[2]].address * vm->page_size); vm 450 tools/testing/selftests/kvm/lib/x86_64/vmx.c pde[index[1]].address = vm_phy_page_alloc(vm, vm 452 tools/testing/selftests/kvm/lib/x86_64/vmx.c >> vm->page_shift; vm 460 tools/testing/selftests/kvm/lib/x86_64/vmx.c pte = addr_gpa2hva(vm, pde[index[1]].address * vm->page_size); vm 461 tools/testing/selftests/kvm/lib/x86_64/vmx.c pte[index[0]].address = paddr >> vm->page_shift; vm 491 tools/testing/selftests/kvm/lib/x86_64/vmx.c void nested_map(struct vmx_pages *vmx, struct kvm_vm *vm, vm 495 tools/testing/selftests/kvm/lib/x86_64/vmx.c size_t page_size = vm->page_size; vm 502 tools/testing/selftests/kvm/lib/x86_64/vmx.c nested_pg_map(vmx, vm, nested_paddr, paddr, eptp_memslot); vm 511 tools/testing/selftests/kvm/lib/x86_64/vmx.c void nested_map_memslot(struct vmx_pages *vmx, struct kvm_vm *vm, vm 516 tools/testing/selftests/kvm/lib/x86_64/vmx.c memslot2region(vm, memslot); vm 518 tools/testing/selftests/kvm/lib/x86_64/vmx.c i = (region->region.guest_phys_addr >> vm->page_shift) - 1; vm 519 tools/testing/selftests/kvm/lib/x86_64/vmx.c last = i + (region->region.memory_size >> vm->page_shift); vm 525 tools/testing/selftests/kvm/lib/x86_64/vmx.c nested_map(vmx, vm, vm 526 tools/testing/selftests/kvm/lib/x86_64/vmx.c (uint64_t)i << vm->page_shift, vm 527 tools/testing/selftests/kvm/lib/x86_64/vmx.c (uint64_t)i << vm->page_shift, vm 528 tools/testing/selftests/kvm/lib/x86_64/vmx.c 1 << vm->page_shift, vm 533 tools/testing/selftests/kvm/lib/x86_64/vmx.c void prepare_eptp(struct vmx_pages *vmx, struct kvm_vm *vm, vm 536 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->eptp = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); vm 537 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->eptp_hva = addr_gva2hva(vm, (uintptr_t)vmx->eptp); vm 538 tools/testing/selftests/kvm/lib/x86_64/vmx.c vmx->eptp_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->eptp); vm 34 tools/testing/selftests/kvm/s390x/memop.c struct kvm_vm *vm; vm 50 tools/testing/selftests/kvm/s390x/memop.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 51 tools/testing/selftests/kvm/s390x/memop.c run = vcpu_state(vm, VCPU_ID); vm 57 tools/testing/selftests/kvm/s390x/memop.c ksmo.gaddr = addr_gva2gpa(vm, (uintptr_t)mem1); vm 63 tools/testing/selftests/kvm/s390x/memop.c vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 66 tools/testing/selftests/kvm/s390x/memop.c vcpu_run(vm, VCPU_ID); vm 81 
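Editor's note: for EPT-based tests, prepare_eptp() above allocates the EPT root the same way, nested_map_memslot() identity-maps an entire memslot into L2's physical address space, and nested_map() layers individual L2-physical to L1-physical ranges on top. The sequence vmx_dirty_log_test uses (its NESTED_TEST_MEM1/2 and GUEST_TEST_MEM constants shown only for shape):

	prepare_eptp(vmx, vm, 0);
	/* Identity-map all of memslot 0 for L2 ... */
	nested_map_memslot(vmx, vm, 0, 0);
	/* ... then alias two L2 gpas onto the same L1 test page. */
	nested_map(vmx, vm, NESTED_TEST_MEM1, GUEST_TEST_MEM, 4096, 0);
	nested_map(vmx, vm, NESTED_TEST_MEM2, GUEST_TEST_MEM, 4096, 0);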
tools/testing/selftests/kvm/s390x/memop.c vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 93 tools/testing/selftests/kvm/s390x/memop.c rv = _vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 103 tools/testing/selftests/kvm/s390x/memop.c rv = _vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 114 tools/testing/selftests/kvm/s390x/memop.c rv = _vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 124 tools/testing/selftests/kvm/s390x/memop.c rv = _vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 134 tools/testing/selftests/kvm/s390x/memop.c rv = _vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 144 tools/testing/selftests/kvm/s390x/memop.c rv = _vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 151 tools/testing/selftests/kvm/s390x/memop.c vcpu_run(vm, VCPU_ID); /* To sync new state to SIE block */ vm 158 tools/testing/selftests/kvm/s390x/memop.c rv = _vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo); vm 161 tools/testing/selftests/kvm/s390x/memop.c vcpu_run(vm, VCPU_ID); /* Run to sync new state */ vm 163 tools/testing/selftests/kvm/s390x/memop.c kvm_vm_free(vm); vm 68 tools/testing/selftests/kvm/s390x/sync_regs_test.c struct kvm_vm *vm; vm 84 tools/testing/selftests/kvm/s390x/sync_regs_test.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 86 tools/testing/selftests/kvm/s390x/sync_regs_test.c run = vcpu_state(vm, VCPU_ID); vm 90 tools/testing/selftests/kvm/s390x/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 94 tools/testing/selftests/kvm/s390x/sync_regs_test.c vcpu_state(vm, VCPU_ID)->kvm_valid_regs = 0; vm 97 tools/testing/selftests/kvm/s390x/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 101 tools/testing/selftests/kvm/s390x/sync_regs_test.c vcpu_state(vm, VCPU_ID)->kvm_valid_regs = 0; vm 105 tools/testing/selftests/kvm/s390x/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 109 tools/testing/selftests/kvm/s390x/sync_regs_test.c vcpu_state(vm, VCPU_ID)->kvm_dirty_regs = 0; vm 112 tools/testing/selftests/kvm/s390x/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 116 tools/testing/selftests/kvm/s390x/sync_regs_test.c vcpu_state(vm, VCPU_ID)->kvm_dirty_regs = 0; vm 120 tools/testing/selftests/kvm/s390x/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 133 tools/testing/selftests/kvm/s390x/sync_regs_test.c vcpu_regs_get(vm, VCPU_ID, &regs); vm 136 tools/testing/selftests/kvm/s390x/sync_regs_test.c vcpu_sregs_get(vm, VCPU_ID, &sregs); vm 145 tools/testing/selftests/kvm/s390x/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 158 tools/testing/selftests/kvm/s390x/sync_regs_test.c vcpu_regs_get(vm, VCPU_ID, &regs); vm 161 tools/testing/selftests/kvm/s390x/sync_regs_test.c vcpu_sregs_get(vm, VCPU_ID, &sregs); vm 170 tools/testing/selftests/kvm/s390x/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 180 tools/testing/selftests/kvm/s390x/sync_regs_test.c kvm_vm_free(vm); vm 67 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c struct kvm_vm *vm; vm 83 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 84 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 85 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c run = vcpu_state(vm, VCPU_ID); vm 88 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c rc = _vcpu_run(vm, VCPU_ID); vm 96 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c switch (get_ucall(vm, VCPU_ID, &uc)) { vm 99 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c vcpu_sregs_get(vm, VCPU_ID, &sregs); vm 101
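Editor's note: the memop test above drives KVM_S390_MEM_OP with a kvm_s390_mem_op descriptor, alternating vcpu_ioctl() (asserts success) with _vcpu_ioctl() (returns the error for the negative cases). A sketch of the descriptor setup; the field names and the KVM_S390_MEMOP_LOGICAL_WRITE constant are quoted from memory of the kvm uapi header, and the zero-size negative case is one plausible example:

	uint8_t mem1[4096];	/* stand-in for the test's transfer buffer */
	struct kvm_s390_mem_op ksmo = {
		.gaddr	= addr_gva2gpa(vm, (uintptr_t)mem1),
		.size	= sizeof(mem1),
		.op	= KVM_S390_MEMOP_LOGICAL_WRITE,
		.buf	= (uintptr_t)mem1,
	};
	int rv;

	/* Positive path: wrapper asserts the ioctl succeeded. */
	vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo);

	/* Negative path: inspect the return value instead of asserting. */
	ksmo.size = 0;
	rv = _vcpu_ioctl(vm, VCPU_ID, KVM_S390_MEM_OP, &ksmo);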
tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c vcpu_sregs_set(vm, VCPU_ID, &sregs); vm 113 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c kvm_vm_free(vm); vm 77 tools/testing/selftests/kvm/x86_64/evmcs_test.c struct kvm_vm *vm; vm 84 tools/testing/selftests/kvm/x86_64/evmcs_test.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 86 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 94 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_enable_evmcs(vm, VCPU_ID); vm 96 tools/testing/selftests/kvm/x86_64/evmcs_test.c run = vcpu_state(vm, VCPU_ID); vm 98 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_regs_get(vm, VCPU_ID, &regs1); vm 100 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_alloc_vmx(vm, &vmx_pages_gva); vm 101 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_args_set(vm, VCPU_ID, 1, vmx_pages_gva); vm 104 tools/testing/selftests/kvm/x86_64/evmcs_test.c _vcpu_run(vm, VCPU_ID); vm 110 tools/testing/selftests/kvm/x86_64/evmcs_test.c switch (get_ucall(vm, VCPU_ID, &uc)) { vm 128 tools/testing/selftests/kvm/x86_64/evmcs_test.c state = vcpu_save_state(vm, VCPU_ID); vm 130 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_regs_get(vm, VCPU_ID, &regs1); vm 132 tools/testing/selftests/kvm/x86_64/evmcs_test.c kvm_vm_release(vm); vm 135 tools/testing/selftests/kvm/x86_64/evmcs_test.c kvm_vm_restart(vm, O_RDWR); vm 136 tools/testing/selftests/kvm/x86_64/evmcs_test.c vm_vcpu_add(vm, VCPU_ID); vm 137 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 138 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_enable_evmcs(vm, VCPU_ID); vm 139 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_load_state(vm, VCPU_ID, state); vm 140 tools/testing/selftests/kvm/x86_64/evmcs_test.c run = vcpu_state(vm, VCPU_ID); vm 144 tools/testing/selftests/kvm/x86_64/evmcs_test.c vcpu_regs_get(vm, VCPU_ID, &regs2); vm 151 tools/testing/selftests/kvm/x86_64/evmcs_test.c kvm_vm_free(vm); vm 100 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c void test_hv_cpuid_e2big(struct kvm_vm *vm) vm 105 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c ret = _vcpu_ioctl(vm, VCPU_ID, KVM_GET_SUPPORTED_HV_CPUID, &cpuid); vm 113 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c struct kvm_cpuid2 *kvm_get_supported_hv_cpuid(struct kvm_vm *vm) vm 127 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c vcpu_ioctl(vm, VCPU_ID, KVM_GET_SUPPORTED_HV_CPUID, cpuid); vm 135 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c struct kvm_vm *vm; vm 150 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 152 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c test_hv_cpuid_e2big(vm); vm 154 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c hv_cpuid_entries = kvm_get_supported_hv_cpuid(vm); vm 168 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c vcpu_enable_evmcs(vm, VCPU_ID); vm 170 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c hv_cpuid_entries = kvm_get_supported_hv_cpuid(vm); vm 179 tools/testing/selftests/kvm/x86_64/hyperv_cpuid.c kvm_vm_free(vm); vm 38 tools/testing/selftests/kvm/x86_64/platform_info_test.c static void set_msr_platform_info_enabled(struct kvm_vm *vm, bool enable) vm 45 tools/testing/selftests/kvm/x86_64/platform_info_test.c vm_enable_cap(vm, &cap); vm 48 tools/testing/selftests/kvm/x86_64/platform_info_test.c static void test_msr_platform_info_enabled(struct kvm_vm *vm) vm 50
struct kvm_run *run = vcpu_state(vm, VCPU_ID); vm 53 tools/testing/selftests/kvm/x86_64/platform_info_test.c set_msr_platform_info_enabled(vm, true); vm 54 tools/testing/selftests/kvm/x86_64/platform_info_test.c vcpu_run(vm, VCPU_ID); vm 59 tools/testing/selftests/kvm/x86_64/platform_info_test.c get_ucall(vm, VCPU_ID, &uc); vm 69 tools/testing/selftests/kvm/x86_64/platform_info_test.c static void test_msr_platform_info_disabled(struct kvm_vm *vm) vm 71 tools/testing/selftests/kvm/x86_64/platform_info_test.c struct kvm_run *run = vcpu_state(vm, VCPU_ID); vm 73 tools/testing/selftests/kvm/x86_64/platform_info_test.c set_msr_platform_info_enabled(vm, false); vm 74 tools/testing/selftests/kvm/x86_64/platform_info_test.c vcpu_run(vm, VCPU_ID); vm 83 tools/testing/selftests/kvm/x86_64/platform_info_test.c struct kvm_vm *vm; vm 97 tools/testing/selftests/kvm/x86_64/platform_info_test.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 99 tools/testing/selftests/kvm/x86_64/platform_info_test.c msr_platform_info = vcpu_get_msr(vm, VCPU_ID, MSR_PLATFORM_INFO); vm 100 tools/testing/selftests/kvm/x86_64/platform_info_test.c vcpu_set_msr(vm, VCPU_ID, MSR_PLATFORM_INFO, vm 102 tools/testing/selftests/kvm/x86_64/platform_info_test.c test_msr_platform_info_enabled(vm); vm 103 tools/testing/selftests/kvm/x86_64/platform_info_test.c test_msr_platform_info_disabled(vm); vm 104 tools/testing/selftests/kvm/x86_64/platform_info_test.c vcpu_set_msr(vm, VCPU_ID, MSR_PLATFORM_INFO, msr_platform_info); vm 106 tools/testing/selftests/kvm/x86_64/platform_info_test.c kvm_vm_free(vm); vm 30 tools/testing/selftests/kvm/x86_64/set_sregs_test.c struct kvm_vm *vm; vm 37 tools/testing/selftests/kvm/x86_64/set_sregs_test.c vm = vm_create_default(VCPU_ID, 0, NULL); vm 39 tools/testing/selftests/kvm/x86_64/set_sregs_test.c vcpu_sregs_get(vm, VCPU_ID, &sregs); vm 41 tools/testing/selftests/kvm/x86_64/set_sregs_test.c rc = _vcpu_sregs_set(vm, VCPU_ID, &sregs); vm 45 tools/testing/selftests/kvm/x86_64/set_sregs_test.c rc = _vcpu_sregs_set(vm, VCPU_ID, &sregs); vm 49 tools/testing/selftests/kvm/x86_64/set_sregs_test.c kvm_vm_free(vm); vm 93 tools/testing/selftests/kvm/x86_64/smm_test.c struct kvm_vm *vm; vm 99 tools/testing/selftests/kvm/x86_64/smm_test.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 101 tools/testing/selftests/kvm/x86_64/smm_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 103 tools/testing/selftests/kvm/x86_64/smm_test.c run = vcpu_state(vm, VCPU_ID); vm 105 tools/testing/selftests/kvm/x86_64/smm_test.c vm_userspace_mem_region_add(vm, VM_MEM_SRC_ANONYMOUS, SMRAM_GPA, vm 107 tools/testing/selftests/kvm/x86_64/smm_test.c TEST_ASSERT(vm_phy_pages_alloc(vm, SMRAM_PAGES, SMRAM_GPA, SMRAM_MEMSLOT) vm 110 tools/testing/selftests/kvm/x86_64/smm_test.c memset(addr_gpa2hva(vm, SMRAM_GPA), 0x0, SMRAM_SIZE); vm 111 tools/testing/selftests/kvm/x86_64/smm_test.c memcpy(addr_gpa2hva(vm, SMRAM_GPA) + 0x8000, smi_handler, vm 114 tools/testing/selftests/kvm/x86_64/smm_test.c vcpu_set_msr(vm, VCPU_ID, MSR_IA32_SMBASE, SMRAM_GPA); vm 117 tools/testing/selftests/kvm/x86_64/smm_test.c vcpu_alloc_vmx(vm, &vmx_pages_gva); vm 118 tools/testing/selftests/kvm/x86_64/smm_test.c vcpu_args_set(vm, VCPU_ID, 1, vmx_pages_gva); vm 121 tools/testing/selftests/kvm/x86_64/smm_test.c vcpu_args_set(vm, VCPU_ID, 1, 0); vm 125 tools/testing/selftests/kvm/x86_64/smm_test.c _vcpu_run(vm, VCPU_ID); vm 132 tools/testing/selftests/kvm/x86_64/smm_test.c vcpu_regs_get(vm, VCPU_ID, &regs); vm 144
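Editor's note: set_msr_platform_info_enabled() above is a thin vm_enable_cap() wrapper — KVM_CAP_MSR_PLATFORM_INFO controls whether a guest RDMSR of MSR_PLATFORM_INFO succeeds or injects #GP, which is what the enabled/disabled halves of the test observe. A sketch of the wrapper's likely shape, reconstructed from the two indexed lines:

	static void set_msr_platform_info_enabled(struct kvm_vm *vm, bool enable)
	{
		struct kvm_enable_cap cap = {
			.cap = KVM_CAP_MSR_PLATFORM_INFO,
			.args[0] = enable,
		};

		vm_enable_cap(vm, &cap);
	}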
tools/testing/selftests/kvm/x86_64/smm_test.c state = vcpu_save_state(vm, VCPU_ID); vm 145 tools/testing/selftests/kvm/x86_64/smm_test.c kvm_vm_release(vm); vm 146 tools/testing/selftests/kvm/x86_64/smm_test.c kvm_vm_restart(vm, O_RDWR); vm 147 tools/testing/selftests/kvm/x86_64/smm_test.c vm_vcpu_add(vm, VCPU_ID); vm 148 tools/testing/selftests/kvm/x86_64/smm_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 149 tools/testing/selftests/kvm/x86_64/smm_test.c vcpu_load_state(vm, VCPU_ID, state); vm 150 tools/testing/selftests/kvm/x86_64/smm_test.c run = vcpu_state(vm, VCPU_ID); vm 155 tools/testing/selftests/kvm/x86_64/smm_test.c kvm_vm_free(vm); vm 125 tools/testing/selftests/kvm/x86_64/state_test.c struct kvm_vm *vm; vm 132 tools/testing/selftests/kvm/x86_64/state_test.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 133 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 134 tools/testing/selftests/kvm/x86_64/state_test.c run = vcpu_state(vm, VCPU_ID); vm 136 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_regs_get(vm, VCPU_ID, &regs1); vm 139 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_alloc_vmx(vm, &vmx_pages_gva); vm 140 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_args_set(vm, VCPU_ID, 1, vmx_pages_gva); vm 143 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_args_set(vm, VCPU_ID, 1, 0); vm 147 tools/testing/selftests/kvm/x86_64/state_test.c _vcpu_run(vm, VCPU_ID); vm 153 tools/testing/selftests/kvm/x86_64/state_test.c switch (get_ucall(vm, VCPU_ID, &uc)) { vm 171 tools/testing/selftests/kvm/x86_64/state_test.c state = vcpu_save_state(vm, VCPU_ID); vm 173 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_regs_get(vm, VCPU_ID, &regs1); vm 175 tools/testing/selftests/kvm/x86_64/state_test.c kvm_vm_release(vm); vm 178 tools/testing/selftests/kvm/x86_64/state_test.c kvm_vm_restart(vm, O_RDWR); vm 179 tools/testing/selftests/kvm/x86_64/state_test.c vm_vcpu_add(vm, VCPU_ID); vm 180 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 181 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_load_state(vm, VCPU_ID, state); vm 182 tools/testing/selftests/kvm/x86_64/state_test.c run = vcpu_state(vm, VCPU_ID); vm 186 tools/testing/selftests/kvm/x86_64/state_test.c vcpu_regs_get(vm, VCPU_ID, &regs2); vm 193 tools/testing/selftests/kvm/x86_64/state_test.c kvm_vm_free(vm); vm 82 tools/testing/selftests/kvm/x86_64/sync_regs_test.c struct kvm_vm *vm; vm 103 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vm = vm_create_default(VCPU_ID, 0, guest_code); vm 105 tools/testing/selftests/kvm/x86_64/sync_regs_test.c run = vcpu_state(vm, VCPU_ID); vm 109 tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 113 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_state(vm, VCPU_ID)->kvm_valid_regs = 0; vm 116 tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 120 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_state(vm, VCPU_ID)->kvm_valid_regs = 0; vm 124 tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 128 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_state(vm, VCPU_ID)->kvm_dirty_regs = 0; vm 131 tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 135 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_state(vm, VCPU_ID)->kvm_dirty_regs = 0; vm 140
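Editor's note: smm_test and state_test end with the same "migrate to self" dance indexed above — snapshot everything with vcpu_save_state(), tear the vcpu's fds down with kvm_vm_release(), then rebuild and replay into the same kvm_vm. Condensed (regs1/regs2 are the tests' own locals):

	state = vcpu_save_state(vm, VCPU_ID);	/* regs, sregs, MSRs, nested state */
	vcpu_regs_get(vm, VCPU_ID, &regs1);

	kvm_vm_release(vm);

	/* Same kvm_vm, fresh fds: recreate the vcpu and restore the snapshot. */
	kvm_vm_restart(vm, O_RDWR);
	vm_vcpu_add(vm, VCPU_ID);
	vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid());
	vcpu_load_state(vm, VCPU_ID, state);

	/* Registers must survive the round trip. */
	vcpu_regs_get(vm, VCPU_ID, &regs2);
	TEST_ASSERT(!memcmp(&regs1, &regs2, sizeof(regs2)), "regs changed");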
tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 146 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_regs_get(vm, VCPU_ID, &regs); vm 149 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_sregs_get(vm, VCPU_ID, &sregs); vm 152 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_events_get(vm, VCPU_ID, &events); vm 162 tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 174 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_regs_get(vm, VCPU_ID, &regs); vm 177 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_sregs_get(vm, VCPU_ID, &sregs); vm 180 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_events_get(vm, VCPU_ID, &events); vm 189 tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 206 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_regs_set(vm, VCPU_ID, &regs); vm 207 tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 215 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_regs_get(vm, VCPU_ID, &regs); vm 227 tools/testing/selftests/kvm/x86_64/sync_regs_test.c rv = _vcpu_run(vm, VCPU_ID); vm 235 tools/testing/selftests/kvm/x86_64/sync_regs_test.c vcpu_regs_get(vm, VCPU_ID, &regs); vm 240 tools/testing/selftests/kvm/x86_64/sync_regs_test.c kvm_vm_free(vm); vm 28 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c static struct kvm_vm *vm; vm 59 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c vm = vm_create_default(VCPU_ID, 0, (void *) l1_guest_code); vm 60 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 63 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c vcpu_alloc_vmx(vm, &vmx_pages_gva); vm 64 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c vcpu_args_set(vm, VCPU_ID, 1, vmx_pages_gva); vm 67 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c volatile struct kvm_run *run = vcpu_state(vm, VCPU_ID); vm 70 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c vcpu_run(vm, VCPU_ID); vm 79 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c switch (get_ucall(vm, VCPU_ID, &uc)) { vm 76 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c struct kvm_vm *vm; vm 84 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c vm = vm_create_default(VCPU_ID, 0, l1_guest_code); vm 85 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 86 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c vmx = vcpu_alloc_vmx(vm, &vmx_pages_gva); vm 87 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c vcpu_args_set(vm, VCPU_ID, 1, vmx_pages_gva); vm 88 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c run = vcpu_state(vm, VCPU_ID); vm 91 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c vm_userspace_mem_region_add(vm, VM_MEM_SRC_ANONYMOUS, vm 101 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c virt_map(vm, GUEST_TEST_MEM, GUEST_TEST_MEM, vm 111 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c prepare_eptp(vmx, vm, 0); vm 112 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c nested_map_memslot(vmx, vm, 0, 0); vm 113 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c nested_map(vmx, vm, NESTED_TEST_MEM1, GUEST_TEST_MEM, 4096, 0); vm 114 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c nested_map(vmx, vm, NESTED_TEST_MEM2, GUEST_TEST_MEM, 4096, 0); vm 117
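Editor's note: the sync_regs tests exercise run->kvm_valid_regs / kvm_dirty_regs — set a valid bit and KVM mirrors that register class into run->s.regs on every exit; set a dirty bit and KVM consumes the host's copy on the next KVM_RUN, skipping the separate get/set ioctls. On x86 the happy path looks roughly like this (the rbx edit is illustrative):

	struct kvm_run *run = vcpu_state(vm, VCPU_ID);
	int rv;

	run->kvm_valid_regs = KVM_SYNC_X86_REGS;
	rv = _vcpu_run(vm, VCPU_ID);		/* run->s.regs.regs is now current */

	run->s.regs.regs.rbx += 1;		/* edit in place ... */
	run->kvm_dirty_regs = KVM_SYNC_X86_REGS;
	rv = _vcpu_run(vm, VCPU_ID);		/* ... and KVM picks it up here */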
tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c host_test_mem = addr_gpa2hva(vm, GUEST_TEST_MEM); vm 121 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c _vcpu_run(vm, VCPU_ID); vm 127 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c switch (get_ucall(vm, VCPU_ID, &uc)) { vm 137 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c kvm_vm_get_dirty_log(vm, TEST_MEM_SLOT_INDEX, bmap); vm 30 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c void test_nested_state(struct kvm_vm *vm, struct kvm_nested_state *state) vm 32 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c vcpu_nested_state_set(vm, VCPU_ID, state, false); vm 35 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c void test_nested_state_expect_errno(struct kvm_vm *vm, vm 41 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c rv = vcpu_nested_state_set(vm, VCPU_ID, state, true); vm 48 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c void test_nested_state_expect_einval(struct kvm_vm *vm, vm 51 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_errno(vm, state, EINVAL); vm 54 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c void test_nested_state_expect_efault(struct kvm_vm *vm, vm 57 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_errno(vm, state, EFAULT); vm 91 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c void test_vmx_nested_state(struct kvm_vm *vm) vm 101 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 108 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 117 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 121 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 124 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state(vm, state); vm 127 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 138 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 142 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 143 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c vcpu_enable_evmcs(vm, VCPU_ID); vm 145 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state(vm, state); vm 149 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 155 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 160 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 170 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 180 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 186 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 191 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state(vm, state); vm 197 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 202 
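Editor's note: the vmx_set_nested_state helpers above wrap one ioctl two ways — test_nested_state() calls vcpu_nested_state_set() with ignore_error=false so the library asserts success, while the expect_einval/expect_efault variants pass true and check the return value and errno themselves, roughly:

	int rv = vcpu_nested_state_set(vm, VCPU_ID, state, true /* ignore_error */);

	TEST_ASSERT(rv == -1 && errno == EINVAL,
		    "KVM_SET_NESTED_STATE should fail with EINVAL, got rv: %i errno: %i",
		    rv, errno);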
tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, state); vm 212 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state(vm, state); vm 213 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c vcpu_nested_state_get(vm, VCPU_ID, state); vm 225 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c struct kvm_vm *vm; vm 241 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c vm = vm_create_default(VCPU_ID, 0, 0); vm 244 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_efault(vm, NULL); vm 249 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, &state); vm 260 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, &state); vm 268 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_nested_state_expect_einval(vm, &state); vm 270 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c test_vmx_nested_state(vm); vm 272 tools/testing/selftests/kvm/x86_64/vmx_set_nested_state_test.c kvm_vm_free(vm); vm 61 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c static struct kvm_vm *vm; vm 134 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c vm = vm_create_default(VCPU_ID, 0, (void *) l1_guest_code); vm 135 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c vcpu_set_cpuid(vm, VCPU_ID, kvm_get_supported_cpuid()); vm 138 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c vcpu_alloc_vmx(vm, &vmx_pages_gva); vm 139 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c vcpu_args_set(vm, VCPU_ID, 1, vmx_pages_gva); vm 142 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c volatile struct kvm_run *run = vcpu_state(vm, VCPU_ID); vm 145 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c vcpu_run(vm, VCPU_ID); vm 151 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c switch (get_ucall(vm, VCPU_ID, &uc)) { vm 165 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c kvm_vm_free(vm); vm 282 virt/kvm/arm/vgic/vgic-v4.c .vm = &kvm->arch.vgic.its_vm,