vmm 64 arch/xtensa/kernel/syscall.c struct vm_area_struct *vmm;
vmm 86 arch/xtensa/kernel/syscall.c for (vmm = find_vma(current->mm, addr); ; vmm = vmm->vm_next) {
vmm 90 arch/xtensa/kernel/syscall.c if (!vmm || addr + len <= vm_start_gap(vmm))
vmm 92 arch/xtensa/kernel/syscall.c addr = vmm->vm_end;
vmm 39 drivers/gpu/drm/ast/ast_ttm.c struct drm_vram_mm *vmm;
vmm 43 drivers/gpu/drm/ast/ast_ttm.c vmm = drm_vram_helper_alloc_mm(
vmm 46 drivers/gpu/drm/ast/ast_ttm.c if (IS_ERR(vmm)) {
vmm 47 drivers/gpu/drm/ast/ast_ttm.c ret = PTR_ERR(vmm);
vmm 11 drivers/gpu/drm/bochs/bochs_mm.c struct drm_vram_mm *vmm;
vmm 13 drivers/gpu/drm/bochs/bochs_mm.c vmm = drm_vram_helper_alloc_mm(bochs->dev, bochs->fb_base,
vmm 16 drivers/gpu/drm/bochs/bochs_mm.c return PTR_ERR_OR_ZERO(vmm);
vmm 85 drivers/gpu/drm/drm_vram_mm_helper.c struct drm_vram_mm *vmm = drm_vram_mm_of_bdev(bo->bdev);
vmm 87 drivers/gpu/drm/drm_vram_mm_helper.c if (vmm->funcs && vmm->funcs->evict_flags)
vmm 88 drivers/gpu/drm/drm_vram_mm_helper.c vmm->funcs->evict_flags(bo, placement);
vmm 94 drivers/gpu/drm/drm_vram_mm_helper.c struct drm_vram_mm *vmm = drm_vram_mm_of_bdev(bo->bdev);
vmm 96 drivers/gpu/drm/drm_vram_mm_helper.c if (!vmm->funcs || !vmm->funcs->verify_access)
vmm 98 drivers/gpu/drm/drm_vram_mm_helper.c return vmm->funcs->verify_access(bo, filp);
vmm 105 drivers/gpu/drm/drm_vram_mm_helper.c struct drm_vram_mm *vmm = drm_vram_mm_of_bdev(bdev);
vmm 121 drivers/gpu/drm/drm_vram_mm_helper.c mem->bus.base = vmm->vram_base;
vmm 163 drivers/gpu/drm/drm_vram_mm_helper.c int drm_vram_mm_init(struct drm_vram_mm *vmm, struct drm_device *dev,
vmm 169 drivers/gpu/drm/drm_vram_mm_helper.c vmm->vram_base = vram_base;
vmm 170 drivers/gpu/drm/drm_vram_mm_helper.c vmm->vram_size = vram_size;
vmm 171 drivers/gpu/drm/drm_vram_mm_helper.c vmm->funcs = funcs;
vmm 173 drivers/gpu/drm/drm_vram_mm_helper.c ret = ttm_bo_device_init(&vmm->bdev, &bo_driver,
vmm 179 drivers/gpu/drm/drm_vram_mm_helper.c ret = ttm_bo_init_mm(&vmm->bdev, TTM_PL_VRAM, vram_size >> PAGE_SHIFT);
vmm 191 drivers/gpu/drm/drm_vram_mm_helper.c void drm_vram_mm_cleanup(struct drm_vram_mm *vmm)
vmm 193 drivers/gpu/drm/drm_vram_mm_helper.c ttm_bo_device_release(&vmm->bdev);
vmm 208 drivers/gpu/drm/drm_vram_mm_helper.c struct drm_vram_mm *vmm)
vmm 210 drivers/gpu/drm/drm_vram_mm_helper.c return ttm_bo_mmap(filp, vma, &vmm->bdev);
vmm 26 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c struct drm_vram_mm *vmm;
vmm 30 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c vmm = drm_vram_helper_alloc_mm(dev,
vmm 33 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c if (IS_ERR(vmm)) {
vmm 34 drivers/gpu/drm/hisilicon/hibmc/hibmc_ttm.c ret = PTR_ERR(vmm);
vmm 35 drivers/gpu/drm/mgag200/mgag200_ttm.c struct drm_vram_mm *vmm;
vmm 39 drivers/gpu/drm/mgag200/mgag200_ttm.c vmm = drm_vram_helper_alloc_mm(dev, pci_resource_start(dev->pdev, 0),
vmm 42 drivers/gpu/drm/mgag200/mgag200_ttm.c if (IS_ERR(vmm)) {
vmm 43 drivers/gpu/drm/mgag200/mgag200_ttm.c ret = PTR_ERR(vmm);
vmm 9 drivers/gpu/drm/nouveau/include/nvif/cl506e.h __u64 vmm;
vmm 12 drivers/gpu/drm/nouveau/include/nvif/cl506f.h __u64 vmm;
vmm 9 drivers/gpu/drm/nouveau/include/nvif/cl826e.h __u64 vmm;
vmm 12 drivers/gpu/drm/nouveau/include/nvif/cl826f.h __u64 vmm;
vmm 11 drivers/gpu/drm/nouveau/include/nvif/cl906f.h __u64 vmm;
vmm 12 drivers/gpu/drm/nouveau/include/nvif/cla06f.h __u64 vmm;
vmm 12 drivers/gpu/drm/nouveau/include/nvif/clc36f.h __u64 vmm;
vmm 27 drivers/gpu/drm/nouveau/include/nvkm/engine/fifo.h struct nvkm_vmm *vmm;
vmm 113 drivers/gpu/drm/nouveau/include/nvkm/subdev/mmu.h struct nvkm_vmm *vmm;
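The drm_vram_mm hits above (ast, bochs, hibmc, mgag200) all share one probe-time pattern: allocate a VRAM manager with drm_vram_helper_alloc_mm() and unwrap the ERR_PTR. A minimal sketch of that pattern follows; the funcs argument and the header name are assumptions inferred from the drm_vram_mm_init() fields seen in the listing, not a verified reference.

/* Sketch only: header and funcs argument assumed from the v5.3-era helpers. */
#include <drm/drm_vram_mm_helper.h>

static int example_vram_mm_init(struct drm_device *dev,
				const struct drm_vram_mm_funcs *funcs,
				u64 vram_base, size_t vram_size)
{
	struct drm_vram_mm *vmm;

	/* Allocates and initialises the TTM-backed VRAM manager. */
	vmm = drm_vram_helper_alloc_mm(dev, vram_base, vram_size, funcs);
	if (IS_ERR(vmm))
		return PTR_ERR(vmm);	/* helper returns ERR_PTR on failure */
	return 0;
}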
vmm 336 drivers/gpu/drm/nouveau/nouveau_abi16.c ret = nouveau_vma_new(chan->ntfy, chan->chan->vmm,
vmm 201 drivers/gpu/drm/nouveau/nouveau_bo.c struct nvif_vmm *vmm = cli->svm.cli ? &cli->svm.vmm : &cli->vmm.vmm;
vmm 252 drivers/gpu/drm/nouveau/nouveau_bo.c for (i = 0; i < vmm->page_nr; i++) {
vmm 261 drivers/gpu/drm/nouveau/nouveau_bo.c (flags & TTM_PL_FLAG_VRAM) && !vmm->page[i].vram)
vmm 264 drivers/gpu/drm/nouveau/nouveau_bo.c (!vmm->page[i].host || vmm->page[i].shift > PAGE_SHIFT))
vmm 271 drivers/gpu/drm/nouveau/nouveau_bo.c if (pi < 0 || !nvbo->comp || vmm->page[i].comp)
vmm 275 drivers/gpu/drm/nouveau/nouveau_bo.c if (*size >= 1ULL << vmm->page[i].shift)
vmm 283 drivers/gpu/drm/nouveau/nouveau_bo.c if (nvbo->comp && !vmm->page[pi].comp) {
vmm 288 drivers/gpu/drm/nouveau/nouveau_bo.c nvbo->page = vmm->page[pi].shift;
vmm 1097 drivers/gpu/drm/nouveau/nouveau_bo.c struct nvif_vmm *vmm = &drm->client.vmm.vmm;
vmm 1100 drivers/gpu/drm/nouveau/nouveau_bo.c ret = nvif_vmm_get(vmm, LAZY, false, old_mem->mem.page, 0,
vmm 1105 drivers/gpu/drm/nouveau/nouveau_bo.c ret = nvif_vmm_get(vmm, LAZY, false, new_mem->mem.page, 0,
vmm 1110 drivers/gpu/drm/nouveau/nouveau_bo.c ret = nouveau_mem_map(old_mem, vmm, &old_mem->vma[0]);
vmm 1114 drivers/gpu/drm/nouveau/nouveau_bo.c ret = nouveau_mem_map(new_mem, vmm, &old_mem->vma[1]);
vmm 1117 drivers/gpu/drm/nouveau/nouveau_bo.c nvif_vmm_put(vmm, &old_mem->vma[1]);
vmm 1118 drivers/gpu/drm/nouveau/nouveau_bo.c nvif_vmm_put(vmm, &old_mem->vma[0]);
vmm 101 drivers/gpu/drm/nouveau/nouveau_chan.c nouveau_svmm_part(chan->vmm->svmm, chan->inst);
vmm 138 drivers/gpu/drm/nouveau/nouveau_chan.c chan->vmm = cli->svm.cli ? &cli->svm : &cli->vmm;
vmm 166 drivers/gpu/drm/nouveau/nouveau_chan.c ret = nouveau_vma_new(chan->push.buffer, chan->vmm,
vmm 181 drivers/gpu/drm/nouveau/nouveau_chan.c args.limit = chan->vmm->vmm.limit - 1;
vmm 211 drivers/gpu/drm/nouveau/nouveau_chan.c args.limit = chan->vmm->vmm.limit - 1;
vmm 263 drivers/gpu/drm/nouveau/nouveau_chan.c args.volta.vmm = nvif_handle(&chan->vmm->vmm.object);
vmm 272 drivers/gpu/drm/nouveau/nouveau_chan.c args.kepler.vmm = nvif_handle(&chan->vmm->vmm.object);
vmm 280 drivers/gpu/drm/nouveau/nouveau_chan.c args.fermi.vmm = nvif_handle(&chan->vmm->vmm.object);
vmm 287 drivers/gpu/drm/nouveau/nouveau_chan.c args.nv50.vmm = nvif_handle(&chan->vmm->vmm.object);
vmm 383 drivers/gpu/drm/nouveau/nouveau_chan.c args.limit = chan->vmm->vmm.limit - 1;
vmm 400 drivers/gpu/drm/nouveau/nouveau_chan.c args.limit = chan->vmm->vmm.limit - 1;
vmm 412 drivers/gpu/drm/nouveau/nouveau_chan.c args.limit = chan->vmm->vmm.limit - 1;
vmm 502 drivers/gpu/drm/nouveau/nouveau_chan.c ret = nouveau_svmm_join((*pchan)->vmm->svmm, (*pchan)->inst);
vmm 11 drivers/gpu/drm/nouveau/nouveau_chan.h struct nouveau_vmm *vmm;
vmm 180 drivers/gpu/drm/nouveau/nouveau_drm.c nouveau_vmm_fini(&cli->vmm);
vmm 269 drivers/gpu/drm/nouveau/nouveau_drm.c ret = nouveau_vmm_init(cli, vmms[ret].oclass, &cli->vmm);
vmm 99 drivers/gpu/drm/nouveau/nouveau_drv.h struct nouveau_vmm vmm;
vmm 356 drivers/gpu/drm/nouveau/nouveau_fbcon.c ret = nouveau_vma_new(nvbo, chan->vmm, &fb->vma);
vmm 67 drivers/gpu/drm/nouveau/nouveau_gem.c struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : &cli->vmm;
vmm 71 drivers/gpu/drm/nouveau/nouveau_gem.c if (vmm->vmm.object.oclass < NVIF_CLASS_VMM_NV50)
vmm 82 drivers/gpu/drm/nouveau/nouveau_gem.c ret = nouveau_vma_new(nvbo, vmm, &vma);
vmm 132 drivers/gpu/drm/nouveau/nouveau_gem.c nouveau_cli_work_queue(vma->vmm->cli, fence, &work->work);
vmm 142 drivers/gpu/drm/nouveau/nouveau_gem.c struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : & cli->vmm;
vmm 146 drivers/gpu/drm/nouveau/nouveau_gem.c if (vmm->vmm.object.oclass < NVIF_CLASS_VMM_NV50)
vmm 153 drivers/gpu/drm/nouveau/nouveau_gem.c vma = nouveau_vma_find(nvbo, vmm);
vmm 226 drivers/gpu/drm/nouveau/nouveau_gem.c struct nouveau_vmm *vmm = cli->svm.cli ? &cli->svm : &cli->vmm;
vmm 236 drivers/gpu/drm/nouveau/nouveau_gem.c if (vmm->vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {
vmm 237 drivers/gpu/drm/nouveau/nouveau_gem.c vma = nouveau_vma_find(nvbo, vmm);
vmm 344 drivers/gpu/drm/nouveau/nouveau_gem.c if (chan->vmm->vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {
vmm 439 drivers/gpu/drm/nouveau/nouveau_gem.c if (chan->vmm->vmm.object.oclass >= NVIF_CLASS_VMM_NV50) {
vmm 440 drivers/gpu/drm/nouveau/nouveau_gem.c struct nouveau_vmm *vmm = chan->vmm;
vmm 441 drivers/gpu/drm/nouveau/nouveau_gem.c struct nouveau_vma *vma = nouveau_vma_find(nvbo, vmm);
vmm 37 drivers/gpu/drm/nouveau/nouveau_mem.c struct nvif_vmm *vmm, struct nvif_vma *vma)
vmm 47 drivers/gpu/drm/nouveau/nouveau_mem.c switch (vmm->object.oclass) {
vmm 76 drivers/gpu/drm/nouveau/nouveau_mem.c super = vmm->object.client->super;
vmm 77 drivers/gpu/drm/nouveau/nouveau_mem.c vmm->object.client->super = true;
vmm 78 drivers/gpu/drm/nouveau/nouveau_mem.c ret = nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc,
vmm 80 drivers/gpu/drm/nouveau/nouveau_mem.c vmm->object.client->super = super;
vmm 87 drivers/gpu/drm/nouveau/nouveau_mem.c nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]);
vmm 88 drivers/gpu/drm/nouveau/nouveau_mem.c nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]);
vmm 39 drivers/gpu/drm/nouveau/nouveau_sgdma.c ret = nouveau_mem_map(mem, &mem->cli->vmm.vmm, &mem->vma[0]);
vmm 91 drivers/gpu/drm/nouveau/nouveau_svm.c struct nouveau_vmm *vmm;
vmm 104 drivers/gpu/drm/nouveau/nouveau_svm.c NV_DEBUG((s)->vmm->cli->drm, "svm-%p: "f"\n", (s), ##a)
vmm 106 drivers/gpu/drm/nouveau/nouveau_svm.c NV_WARN((s)->vmm->cli->drm, "svm-%p: "f"\n", (s), ##a)
vmm 215 drivers/gpu/drm/nouveau/nouveau_svm.c mutex_lock(&svmm->vmm->cli->drm->svm->mutex);
vmm 216 drivers/gpu/drm/nouveau/nouveau_svm.c ivmm = nouveau_ivmm_find(svmm->vmm->cli->drm->svm, inst);
vmm 221 drivers/gpu/drm/nouveau/nouveau_svm.c mutex_unlock(&svmm->vmm->cli->drm->svm->mutex);
vmm 236 drivers/gpu/drm/nouveau/nouveau_svm.c mutex_lock(&svmm->vmm->cli->drm->svm->mutex);
vmm 237 drivers/gpu/drm/nouveau/nouveau_svm.c list_add(&ivmm->head, &svmm->vmm->cli->drm->svm->inst);
vmm 238 drivers/gpu/drm/nouveau/nouveau_svm.c mutex_unlock(&svmm->vmm->cli->drm->svm->mutex);
vmm 248 drivers/gpu/drm/nouveau/nouveau_svm.c bool super = svmm->vmm->vmm.object.client->super;
vmm 249 drivers/gpu/drm/nouveau/nouveau_svm.c svmm->vmm->vmm.object.client->super = true;
vmm 250 drivers/gpu/drm/nouveau/nouveau_svm.c nvif_object_mthd(&svmm->vmm->vmm.object, NVIF_VMM_V0_PFNCLR,
vmm 255 drivers/gpu/drm/nouveau/nouveau_svm.c svmm->vmm->vmm.object.client->super = super;
vmm 320 drivers/gpu/drm/nouveau/nouveau_svm.c svmm->vmm = &cli->svm;
vmm 338 drivers/gpu/drm/nouveau/nouveau_svm.c ret = nvif_vmm_init(&cli->mmu, cli->vmm.vmm.object.oclass, true,
vmm 342 drivers/gpu/drm/nouveau/nouveau_svm.c }, sizeof(struct gp100_vmm_v0), &cli->svm.vmm);
vmm 384 drivers/gpu/drm/nouveau/nouveau_svm.c WARN_ON(nvif_object_mthd(&svm->drm->client.vmm.vmm.object,
vmm 400 drivers/gpu/drm/nouveau/nouveau_svm.c WARN_ON(nvif_object_mthd(&svm->drm->client.vmm.vmm.object,
vmm 706 drivers/gpu/drm/nouveau/nouveau_svm.c svmm->vmm->vmm.object.client->super = true;
vmm 707 drivers/gpu/drm/nouveau/nouveau_svm.c ret = nvif_object_ioctl(&svmm->vmm->vmm.object,
vmm 711 drivers/gpu/drm/nouveau/nouveau_svm.c svmm->vmm->vmm.object.client->super = false;
vmm 138 drivers/gpu/drm/nouveau/nouveau_ttm.c ret = nvif_vmm_get(&mem->cli->vmm.vmm, PTES, false, 12, 0,
vmm 259 drivers/gpu/drm/nouveau/nouveau_ttm.c drm->gem.gart_available = drm->client.vmm.vmm.limit;
vmm 32 drivers/gpu/drm/nouveau/nouveau_vmm.c nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);
vmm 41 drivers/gpu/drm/nouveau/nouveau_vmm.c int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);
vmm 49 drivers/gpu/drm/nouveau/nouveau_vmm.c nouveau_vma_find(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm)
vmm 54 drivers/gpu/drm/nouveau/nouveau_vmm.c if (vma->vmm == vmm)
vmm 68 drivers/gpu/drm/nouveau/nouveau_vmm.c nvif_vmm_put(&vma->vmm->vmm, &tmp);
vmm 77 drivers/gpu/drm/nouveau/nouveau_vmm.c nouveau_vma_new(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm,
vmm 85 drivers/gpu/drm/nouveau/nouveau_vmm.c if ((vma = *pvma = nouveau_vma_find(nvbo, vmm))) {
vmm 92 drivers/gpu/drm/nouveau/nouveau_vmm.c vma->vmm = vmm;
vmm 101 drivers/gpu/drm/nouveau/nouveau_vmm.c ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0,
vmm 109 drivers/gpu/drm/nouveau/nouveau_vmm.c ret = nvif_vmm_get(&vmm->vmm, PTES, false, mem->mem.page, 0,
vmm 121 drivers/gpu/drm/nouveau/nouveau_vmm.c nouveau_vmm_fini(struct nouveau_vmm *vmm)
vmm 123 drivers/gpu/drm/nouveau/nouveau_vmm.c nouveau_svmm_fini(&vmm->svmm);
vmm 124 drivers/gpu/drm/nouveau/nouveau_vmm.c nvif_vmm_fini(&vmm->vmm);
vmm 125 drivers/gpu/drm/nouveau/nouveau_vmm.c vmm->cli = NULL;
vmm 129 drivers/gpu/drm/nouveau/nouveau_vmm.c nouveau_vmm_init(struct nouveau_cli *cli, s32 oclass, struct nouveau_vmm *vmm)
vmm 132 drivers/gpu/drm/nouveau/nouveau_vmm.c &vmm->vmm);
vmm 136 drivers/gpu/drm/nouveau/nouveau_vmm.c vmm->cli = cli;
vmm 8 drivers/gpu/drm/nouveau/nouveau_vmm.h struct nouveau_vmm *vmm;
vmm 27 drivers/gpu/drm/nouveau/nouveau_vmm.h struct nvif_vmm vmm;
vmm 129 drivers/gpu/drm/nouveau/nv84_fence.c ret = nouveau_vma_new(priv->bo, chan->vmm, &fctx->vma);
vmm 28 drivers/gpu/drm/nouveau/nvif/vmm.c nvif_vmm_unmap(struct nvif_vmm *vmm, u64 addr)
vmm 30 drivers/gpu/drm/nouveau/nvif/vmm.c return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP,
vmm 36 drivers/gpu/drm/nouveau/nvif/vmm.c nvif_vmm_map(struct nvif_vmm *vmm, u64 addr, u64 size, void *argv, u32 argc,
vmm 57 drivers/gpu/drm/nouveau/nvif/vmm.c ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_MAP,
vmm 65 drivers/gpu/drm/nouveau/nvif/vmm.c nvif_vmm_put(struct nvif_vmm *vmm, struct nvif_vma *vma)
vmm 68 drivers/gpu/drm/nouveau/nvif/vmm.c WARN_ON(nvif_object_mthd(&vmm->object, NVIF_VMM_V0_PUT,
vmm 77 drivers/gpu/drm/nouveau/nvif/vmm.c nvif_vmm_get(struct nvif_vmm *vmm, enum nvif_vmm_get type, bool sparse,
vmm 98 drivers/gpu/drm/nouveau/nvif/vmm.c ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_GET,
vmm 108 drivers/gpu/drm/nouveau/nvif/vmm.c nvif_vmm_fini(struct nvif_vmm *vmm)
vmm 110 drivers/gpu/drm/nouveau/nvif/vmm.c kfree(vmm->page);
vmm 111 drivers/gpu/drm/nouveau/nvif/vmm.c nvif_object_fini(&vmm->object);
vmm 116 drivers/gpu/drm/nouveau/nvif/vmm.c u64 size, void *argv, u32 argc, struct nvif_vmm *vmm)
vmm 122 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->object.client = NULL;
vmm 123 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->page = NULL;
vmm 134 drivers/gpu/drm/nouveau/nvif/vmm.c &vmm->object);
vmm 138 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->start = args->addr;
vmm 139 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->limit = args->size;
vmm 141 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->page_nr = args->page_nr;
vmm 142 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->page = kmalloc_array(vmm->page_nr, sizeof(*vmm->page),
vmm 144 drivers/gpu/drm/nouveau/nvif/vmm.c if (!vmm->page) {
vmm 149 drivers/gpu/drm/nouveau/nvif/vmm.c for (i = 0; i < vmm->page_nr; i++) {
vmm 152 drivers/gpu/drm/nouveau/nvif/vmm.c ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_PAGE,
vmm 157 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->page[i].shift = args.shift;
vmm 158 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->page[i].sparse = args.sparse;
vmm 159 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->page[i].vram = args.vram;
vmm 160 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->page[i].host = args.host;
vmm 161 drivers/gpu/drm/nouveau/nvif/vmm.c vmm->page[i].comp = args.comp;
vmm 166 drivers/gpu/drm/nouveau/nvif/vmm.c nvif_vmm_fini(vmm);
vmm 47 drivers/gpu/drm/nouveau/nvkm/core/gpuobj.c struct nvkm_vmm *vmm, struct nvkm_vma *vma,
vmm 50 drivers/gpu/drm/nouveau/nvkm/core/gpuobj.c return nvkm_memory_map(gpuobj->memory, offset, vmm, vma, argv, argc);
vmm 109 drivers/gpu/drm/nouveau/nvkm/core/gpuobj.c struct nvkm_vmm *vmm, struct nvkm_vma *vma,
vmm 113 drivers/gpu/drm/nouveau/nvkm/core/gpuobj.c vmm, vma, argv, argc);
vmm 53 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c device->mmu->vmm->pd->pt[0]->memory;
vmm 120 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c if (chan->vmm)
vmm 121 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c atomic_dec(&chan->vmm->engref[engine->subdev.index]);
vmm 154 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c if (chan->vmm)
vmm 155 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c atomic_inc(&chan->vmm->engref[engine->subdev.index]);
vmm 330 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c if (chan->vmm) {
vmm 331 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c nvkm_vmm_part(chan->vmm, chan->inst->memory);
vmm 332 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c nvkm_vmm_unref(&chan->vmm);
vmm 390 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c struct nvkm_vmm *vmm = nvkm_uvmm_search(client, hvmm);
vmm 391 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c if (IS_ERR(vmm))
vmm 392 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c return PTR_ERR(vmm);
vmm 394 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c if (vmm->mmu != device->mmu)
vmm 397 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c ret = nvkm_vmm_join(vmm, chan->inst->memory);
vmm 401 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c chan->vmm = nvkm_vmm_ref(vmm);
vmm 232 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c g84_fifo_chan_ctor(struct nv50_fifo *fifo, u64 vmm, u64 push,
vmm 239 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c if (!vmm)
vmm 243 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 0x10000, 0x1000, false, vmm, push,
vmm 231 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c nv50_fifo_chan_ctor(struct nv50_fifo *fifo, u64 vmm, u64 push,
vmm 238 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c if (!vmm)
vmm 242 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 0x10000, 0x1000, false, vmm, push,
vmm 21 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.h int nv50_fifo_chan_ctor(struct nv50_fifo *, u64 vmm, u64 push,
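The nouveau_bo.c and nouveau_vmm.c hits above show the nvif_vmm round trip: nvif_vmm_get() reserves a range of GPU address space, nouveau_mem_map() backs it (internally via nvif_vmm_map()), and nvif_vmm_put() releases it on the unwind path. A sketch of that pattern follows; the parameter lists are inferred from the call sites in the listing (LAZY is an enum nvif_vmm_get value), so treat the exact signatures as assumptions.

/* Sketch only: signatures inferred from the nouveau call sites above. */
static int example_map(struct nouveau_drm *drm, struct nouveau_mem *mem)
{
	struct nvif_vmm *vmm = &drm->client.vmm.vmm;
	struct nvif_vma vma;
	int ret;

	/* Reserve address space: page index from the mem, no alignment hint. */
	ret = nvif_vmm_get(vmm, LAZY, false, mem->mem.page, 0,
			   mem->mem.size, &vma);
	if (ret)
		return ret;

	/* Back the reservation with the memory object's pages. */
	ret = nouveau_mem_map(mem, vmm, &vma);
	if (ret)
		nvif_vmm_put(vmm, &vma);	/* unwind the reservation */
	return ret;
}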
vmm 28 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.h int g84_fifo_chan_ctor(struct nv50_fifo *, u64 vmm, u64 push,
vmm 49 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmag84.c args->v0.version, args->v0.vmm, args->v0.pushbuf,
vmm 60 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmag84.c ret = g84_fifo_chan_ctor(fifo, args->v0.vmm, args->v0.pushbuf,
vmm 49 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv50.c args->v0.version, args->v0.vmm, args->v0.pushbuf,
vmm 60 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv50.c ret = nv50_fifo_chan_ctor(fifo, args->v0.vmm, args->v0.pushbuf,
vmm 51 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifog84.c args->v0.version, args->v0.vmm, args->v0.pushbuf,
vmm 62 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifog84.c ret = g84_fifo_chan_ctor(fifo, args->v0.vmm, args->v0.pushbuf,
vmm 129 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c nvkm_vmm_put(chan->base.vmm, &chan->engn[engine->subdev.index].vma);
vmm 149 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c ret = nvkm_vmm_get(chan->base.vmm, 12, chan->engn[engn].inst->size,
vmm 154 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c return nvkm_memory_map(chan->engn[engn].inst, 0, chan->base.vmm,
vmm 230 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c args->v0.version, args->v0.vmm, args->v0.ioffset,
vmm 232 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c if (!args->v0.vmm)
vmm 245 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c 0x1000, 0x1000, true, args->v0.vmm, 0,
vmm 154 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c nvkm_vmm_put(chan->base.vmm, &chan->engn[engine->subdev.index].vma);
vmm 174 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c ret = nvkm_vmm_get(chan->base.vmm, 12, chan->engn[engn].inst->size,
vmm 179 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c return nvkm_memory_map(chan->engn[engn].inst, 0, chan->base.vmm,
vmm 244 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c u64 vmm, u64 ioffset, u64 ilength, u64 *inst, bool priv,
vmm 254 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c if (!vmm || runlist < 0 || runlist >= fifo->runlist_nr)
vmm 276 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c 0x1000, 0x1000, true, vmm, 0, subdevs,
vmm 344 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c args->v0.version, args->v0.vmm, args->v0.ioffset,
vmm 351 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c args->v0.vmm,
vmm 125 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c u64 vmm, u64 ioffset, u64 ilength, u64 *inst, bool priv,
vmm 137 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c if (!vmm || runlist < 0 || runlist >= fifo->runlist_nr)
vmm 155 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c ret = nvkm_fifo_chan_ctor(func, &fifo->base, 0x1000, 0x1000, true, vmm,
vmm 238 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c args->v0.version, args->v0.vmm, args->v0.ioffset,
vmm 245 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c args->v0.vmm,
vmm 51 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifonv50.c args->v0.version, args->v0.vmm, args->v0.pushbuf,
vmm 62 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifonv50.c ret = nv50_fifo_chan_ctor(fifo, args->v0.vmm, args->v0.pushbuf,
vmm 66 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifotu102.c args->v0.version, args->v0.vmm, args->v0.ioffset,
vmm 73 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifotu102.c args->v0.vmm,
vmm 1446 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c struct nvkm_vmm *vmm = NULL;
vmm 1486 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c ret = nvkm_vmm_new(device, 0, 0, NULL, 0, NULL, "grctx", &vmm);
vmm 1490 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c vmm->debug = subdev->debug;
vmm 1492 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c ret = nvkm_vmm_join(vmm, inst);
vmm 1501 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c ret = nvkm_vmm_get(vmm, 0, nvkm_memory_size(data), &ctx);
vmm 1505 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c ret = nvkm_memory_map(data, 0, vmm, ctx, NULL, 0);
vmm 1573 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c nvkm_vmm_put(vmm, &ctx);
vmm 1575 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c nvkm_vmm_part(vmm, inst);
vmm 1576 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c nvkm_vmm_unref(&vmm);
vmm 361 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c nvkm_vmm_put(chan->vmm, &chan->data[i].vma);
vmm 365 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c nvkm_vmm_put(chan->vmm, &chan->mmio_vma);
vmm 367 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c nvkm_vmm_unref(&chan->vmm);
vmm 394 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c chan->vmm = nvkm_vmm_ref(fifoch->vmm);
vmm 406 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c ret = nvkm_vmm_get(fifoch->vmm, 12, 0x1000, &chan->mmio_vma);
vmm 410 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c ret = nvkm_memory_map(chan->mmio, 0, fifoch->vmm,
vmm 423 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c ret = nvkm_vmm_get(fifoch->vmm, 12,
vmm 431 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c ret = nvkm_memory_map(chan->data[i].mem, 0, chan->vmm,
vmm 258 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.h struct nvkm_vmm *vmm;
vmm 36 drivers/gpu/drm/nouveau/nvkm/subdev/bar/base.c return device->bar->func->bar1.vmm(device->bar);
vmm 57 drivers/gpu/drm/nouveau/nvkm/subdev/bar/base.c return bar->func->bar2.vmm(bar);
vmm 50 drivers/gpu/drm/nouveau/nvkm/subdev/bar/g84.c .bar1.vmm = nv50_bar_bar1_vmm,
vmm 54 drivers/gpu/drm/nouveau/nvkm/subdev/bar/g84.c .bar2.vmm = nv50_bar_bar2_vmm,
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c return gf100_bar(base)->bar[1].vmm;
vmm 63 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c return gf100_bar(base)->bar[0].vmm;
vmm 103 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c (bar_nr == 3) ? "bar2" : "bar1", &bar_vm->vmm);
"bar2" : "bar1", &bar_vm->vmm); vmm 107 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c atomic_inc(&bar_vm->vmm->engref[NVKM_SUBDEV_BAR]); vmm 108 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c bar_vm->vmm->debug = bar->base.subdev.debug; vmm 114 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c ret = nvkm_vmm_boot(bar_vm->vmm); vmm 119 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c return nvkm_vmm_join(bar_vm->vmm, bar_vm->inst); vmm 153 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst); vmm 154 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c nvkm_vmm_unref(&bar->bar[1].vmm); vmm 157 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c nvkm_vmm_part(bar->bar[0].vmm, bar->bar[0].inst); vmm 158 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c nvkm_vmm_unref(&bar->bar[0].vmm); vmm 183 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c .bar1.vmm = gf100_bar_bar1_vmm, vmm 187 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.c .bar2.vmm = gf100_bar_bar2_vmm, vmm 9 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gf100.h struct nvkm_vmm *vmm; vmm 30 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gk20a.c .bar1.vmm = gf100_bar_bar1_vmm, vmm 53 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gm107.c .bar1.vmm = gf100_bar_bar1_vmm, vmm 57 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gm107.c .bar2.vmm = gf100_bar_bar2_vmm, vmm 30 drivers/gpu/drm/nouveau/nvkm/subdev/bar/gm20b.c .bar1.vmm = gf100_bar_bar1_vmm, vmm 242 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c .bar1.vmm = nv50_bar_bar1_vmm, vmm 246 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c .bar2.vmm = nv50_bar_bar2_vmm, vmm 19 drivers/gpu/drm/nouveau/nvkm/subdev/bar/priv.h struct nvkm_vmm *(*vmm)(struct nvkm_bar *); vmm 86 drivers/gpu/drm/nouveau/nvkm/subdev/bar/tu102.c .bar1.vmm = gf100_bar_bar1_vmm, vmm 90 drivers/gpu/drm/nouveau/nvkm/subdev/bar/tu102.c .bar2.vmm = gf100_bar_bar2_vmm, vmm 38 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ram.c nvkm_vram_map(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm, vmm 48 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ram.c return nvkm_vmm_map(vmm, vma, argv, argc, &map); vmm 281 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c gk20a_instobj_map(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm, vmm 291 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c return nvkm_vmm_map(vmm, vma, argv, argc, &map); vmm 120 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nv50_instobj_kmap(struct nv50_instobj *iobj, struct nvkm_vmm *vmm) vmm 137 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) { vmm 158 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nvkm_vmm_put(vmm, &ebar); vmm 162 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c ret = nvkm_memory_map(memory, 0, vmm, bar, NULL, 0); vmm 167 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nvkm_vmm_put(vmm, &bar); vmm 178 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nvkm_vmm_put(vmm, &iobj->bar); vmm 183 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nv50_instobj_map(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm, vmm 187 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c return nvkm_memory_map(memory, offset, vmm, vma, argv, argc); vmm 220 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c struct nvkm_vmm *vmm; vmm 237 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) { vmm 239 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nv50_instobj_kmap(iobj, vmm); vmm 260 
vmm 274 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nv50_instobj_kmap(iobj, vmm);
vmm 326 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c struct nvkm_vmm *vmm = nvkm_bar_bar2_vmm(imem->subdev.device);
vmm 328 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c if (likely(vmm)) /* Can be NULL during BAR destructor. */
vmm 329 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nvkm_vmm_put(vmm, &bar);
vmm 378 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c if (mmu->func->vmm.global) {
vmm 380 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c "gart", &mmu->vmm);
vmm 402 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c nvkm_vmm_unref(&mmu->vmm);
vmm 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/g84.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV50}, nv50_vmm_new, false, 0x0200 },
vmm 80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gf100.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_GF100}, gf100_vmm_new },
vmm 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gk104.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_GF100}, gk104_vmm_new },
vmm 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gk20a.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_GF100}, gk20a_vmm_new },
vmm 76 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gm200.c .vmm = {{ -1, 0, NVIF_CLASS_VMM_GM200}, gm200_vmm_new },
vmm 86 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gm200.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_GM200}, gm200_vmm_new_fixed },
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gm20b.c .vmm = {{ -1, 0, NVIF_CLASS_VMM_GM200}, gm20b_vmm_new },
vmm 44 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gm20b.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_GM200}, gm20b_vmm_new_fixed },
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gp100.c .vmm = {{ -1, 0, NVIF_CLASS_VMM_GP100}, gp100_vmm_new },
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gp10b.c .vmm = {{ -1, 0, NVIF_CLASS_VMM_GP100}, gp10b_vmm_new },
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gv100.c .vmm = {{ -1, 0, NVIF_CLASS_VMM_GP100}, gv100_vmm_new },
vmm 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mcp77.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV50}, mcp77_vmm_new, false, 0x0200 },
vmm 70 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c nvkm_mem_map_dma(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm,
vmm 79 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c return nvkm_vmm_map(vmm, vma, argv, argc, &map);
vmm 110 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c nvkm_mem_map_sgl(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm,
vmm 119 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c return nvkm_vmm_map(vmm, vma, argv, argc, &map);
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv04.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV04}, nv04_vmm_new, true },
vmm 35 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv41.c nvkm_wr32(device, 0x100800, 0x00000002 | mmu->vmm->pd->pt[0]->addr);
vmm 46 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv41.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV04}, nv41_vmm_new, true },
vmm 35 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv44.c struct nvkm_memory *pt = mmu->vmm->pd->pt[0]->memory;
vmm 46 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv44.c nvkm_wr32(device, 0x100818, mmu->vmm->null);
vmm 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv44.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV04}, nv44_vmm_new, true },
vmm 68 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv50.c .vmm = {{ -1, -1, NVIF_CLASS_VMM_NV50}, nv50_vmm_new, false, 0x1400 },
vmm 36 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/priv.h } vmm;
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/tu102.c .vmm = {{ -1, 0, NVIF_CLASS_VMM_GP100}, tu102_vmm_new },
vmm 45 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ummu.c if (mmu->func->vmm.user.oclass) {
vmm 47 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ummu.c oclass->base = mmu->func->vmm.user;
vmm 42 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c return nvkm_uvmm(object)->vmm;
vmm 52 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c struct nvkm_vmm *vmm = uvmm->vmm;
vmm 66 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_lock(&vmm->mutex);
vmm 67 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c ret = nvkm_vmm_pfn_unmap(vmm, addr, size);
vmm 68 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_unlock(&vmm->mutex);
vmm 81 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c struct nvkm_vmm *vmm = uvmm->vmm;
vmm 100 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_lock(&vmm->mutex);
vmm 101 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c ret = nvkm_vmm_pfn_map(vmm, page, addr, size, phys);
vmm 102 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_unlock(&vmm->mutex);
vmm 115 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c struct nvkm_vmm *vmm = uvmm->vmm;
vmm 125 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_lock(&vmm->mutex);
vmm 126 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c vma = nvkm_vmm_node_search(vmm, addr);
vmm 128 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "lookup %016llx: %016llx",
vmm 134 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "denied %016llx: %d %d %d", addr,
vmm 140 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "unmapped");
vmm 144 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c nvkm_vmm_unmap_locked(vmm, vma, false);
vmm 147 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_unlock(&vmm->mutex);
vmm 159 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c struct nvkm_vmm *vmm = uvmm->vmm;
vmm 174 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "memory %016llx %ld\n", handle, PTR_ERR(memory));
vmm 178 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_lock(&vmm->mutex);
vmm 179 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c if (ret = -ENOENT, !(vma = nvkm_vmm_node_search(vmm, addr))) {
vmm 180 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "lookup %016llx", addr);
vmm 185 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "denied %016llx: %d %d %d", addr,
vmm 191 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "pfnmap %016llx", addr);
vmm 198 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "split %d %d %d "
vmm 205 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c vma = nvkm_vmm_node_split(vmm, vma, addr, size);
vmm 212 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_unlock(&vmm->mutex);
vmm 214 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c ret = nvkm_memory_map(memory, offset, vmm, vma, argv, argc);
vmm 221 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_lock(&vmm->mutex);
vmm 223 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c nvkm_vmm_unmap_region(vmm, vma);
vmm 225 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_unlock(&vmm->mutex);
vmm 237 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c struct nvkm_vmm *vmm = uvmm->vmm;
vmm 247 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_lock(&vmm->mutex);
vmm 248 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c vma = nvkm_vmm_node_search(vmm, args->v0.addr);
vmm 250 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "lookup %016llx: %016llx %d", addr,
vmm 256 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c VMM_DEBUG(vmm, "denied %016llx: %d %d %d", addr,
%d", addr, vmm 261 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c nvkm_vmm_put_locked(vmm, vma); vmm 264 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_unlock(&vmm->mutex); vmm 275 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c struct nvkm_vmm *vmm = uvmm->vmm; vmm 292 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_lock(&vmm->mutex); vmm 293 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c ret = nvkm_vmm_get_locked(vmm, getref, mapref, sparse, vmm 295 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c mutex_unlock(&vmm->mutex); vmm 314 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c page = uvmm->vmm->func->page; vmm 345 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c if (uvmm->vmm->func->mthd) { vmm 346 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c return uvmm->vmm->func->mthd(uvmm->vmm, vmm 361 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c nvkm_vmm_unref(&uvmm->vmm); vmm 398 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c if (!mmu->vmm) { vmm 399 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c ret = mmu->func->vmm.ctor(mmu, managed, addr, size, argv, argc, vmm 400 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c NULL, "user", &uvmm->vmm); vmm 404 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c uvmm->vmm->debug = max(uvmm->vmm->debug, oclass->client->debug); vmm 409 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c uvmm->vmm = nvkm_vmm_ref(mmu->vmm); vmm 412 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c page = uvmm->vmm->func->page; vmm 416 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c args->v0.addr = uvmm->vmm->start; vmm 417 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.c args->v0.size = uvmm->vmm->limit; vmm 9 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/uvmm.h struct nvkm_vmm *vmm; vmm 75 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm *vmm; vmm 113 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_TRACE(_it->vmm, "%s "f, _buf, ##a); \ vmm 129 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (it->vmm->func->flush) { vmm 131 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c it->vmm->func->flush(it->vmm, it->flush); vmm 145 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm *vmm = it->vmm; vmm 159 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c func->sparse(vmm, pgd->pt[0], pdei, 1); vmm 162 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c func->unmap(vmm, pgd->pt[0], pdei, 1); vmm 170 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c func->pde(vmm, pgd, pdei); vmm 177 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c func->pde(vmm, pgd, pdei); vmm 190 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_mmu_ptc_put(vmm->mmu, vmm->bootstrapped, &pt); vmm 203 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm *vmm = it->vmm; vmm 244 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pair->func->sparse(vmm, pgt->pt[0], pteb, ptes); vmm 252 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pair->func->invalid(vmm, pgt->pt[0], pteb, ptes); vmm 267 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c dma = desc->func->pfn_clear(it->vmm, pgt->pt[type], ptei, ptes); vmm 272 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->pfn_unmap(it->vmm, pgt->pt[type], ptei, ptes); vmm 302 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm *vmm = it->vmm; vmm 348 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->sparse(vmm, pgt->pt[1], spti, sptc); vmm 351 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pair->func->unmap(vmm, pgt->pt[0], pteb, ptes); vmm 358 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pair->func->unmap(vmm, pgt->pt[0], pteb, ptes); vmm 419 
vmm 420 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_mmu *mmu = vmm->mmu;
vmm 457 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->sparse(vmm, pt, pteb, ptes);
vmm 459 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->invalid(vmm, pt, pteb, ptes);
vmm 462 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->unmap(vmm, pt, pteb, ptes);
vmm 470 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->sparse(vmm, pt, 0, pten);
vmm 472 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->invalid(vmm, pt, 0, pten);
vmm 478 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c it->desc[it->lvl].func->pde(it->vmm, pgd, pdei);
vmm 501 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_iter(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 513 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c it.vmm = vmm;
vmm 523 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c it.pt[it.max] = vmm->pd;
vmm 567 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c MAP_PTES(vmm, pt, ptei, ptes, map);
vmm 569 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c CLR_PTES(vmm, pt, ptei, ptes);
vmm 602 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_sparse_put(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 605 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_iter(vmm, page, addr, size, "sparse unref", false, false,
vmm 612 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_sparse_get(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 616 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c u64 fail = nvkm_vmm_iter(vmm, page, addr, size, "sparse ref",
vmm 621 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_sparse_put(vmm, page, addr, size);
vmm 630 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_sparse(struct nvkm_vmm *vmm, u64 addr, u64 size, bool ref)
vmm 632 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const struct nvkm_vmm_page *page = vmm->func->page;
vmm 662 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c int ret = nvkm_vmm_ptes_sparse_get(vmm, &page[i], addr, block);
vmm 665 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_sparse(vmm, start, size, false);
vmm 669 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_sparse_put(vmm, &page[i], addr, block);
vmm 680 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_unmap_put(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 684 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_iter(vmm, page, addr, size, "unmap + unref",
vmm 691 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_get_map(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 695 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c u64 fail = nvkm_vmm_iter(vmm, page, addr, size, "ref + map", true,
vmm 699 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_unmap_put(vmm, page, addr, size, false, false);
vmm 706 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_unmap(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 710 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_iter(vmm, page, addr, size, "unmap", false, pfn,
vmm 717 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_map(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 721 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_iter(vmm, page, addr, size, "map", false, false,
vmm 726 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_put(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 729 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_iter(vmm, page, addr, size, "unref", false, false,
vmm 734 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_get(struct nvkm_vmm *vmm, const struct nvkm_vmm_page *page,
vmm 737 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c u64 fail = nvkm_vmm_iter(vmm, page, addr, size, "ref", true, false,
vmm 741 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_put(vmm, page, addr, fail - addr);
vmm 785 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_remove(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 787 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c rb_erase(&vma->tree, &vmm->free);
vmm 791 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_delete(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 793 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_remove(vmm, vma);
vmm 799 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_insert(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 801 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct rb_node **ptr = &vmm->free.rb_node;
vmm 823 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c rb_insert_color(&vma->tree, &vmm->free);
vmm 827 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_remove(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 829 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c rb_erase(&vma->tree, &vmm->root);
vmm 833 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_delete(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 835 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_remove(vmm, vma);
vmm 841 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_insert(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 843 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct rb_node **ptr = &vmm->root.rb_node;
vmm 859 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c rb_insert_color(&vma->tree, &vmm->root);
vmm 863 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_search(struct nvkm_vmm *vmm, u64 addr)
vmm 865 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct rb_node *node = vmm->root.rb_node;
vmm 879 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c #define node(root, dir) (((root)->head.dir == &vmm->list) ? NULL : \
vmm 883 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_merge(struct nvkm_vmm *vmm, struct nvkm_vma *prev,
vmm 889 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_delete(vmm, next);
vmm 892 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_delete(vmm, vma);
vmm 899 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_remove(vmm, next);
vmm 903 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_insert(vmm, next);
vmm 909 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_remove(vmm, vma);
vmm 913 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_insert(vmm, vma);
vmm 916 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_delete(vmm, vma);
vmm 925 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_split(struct nvkm_vmm *vmm,
vmm 935 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_insert(vmm, vma);
vmm 941 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_merge(vmm, prev, vma, NULL, vma->size);
vmm 945 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_insert(vmm, tmp);
vmm 969 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_dump(struct nvkm_vmm *vmm)
vmm 972 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c list_for_each_entry(vma, &vmm->list, head) {
vmm 978 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_dtor(struct nvkm_vmm *vmm)
vmm 984 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_dump(vmm);
vmm 986 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c while ((node = rb_first(&vmm->root))) {
vmm 988 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put(vmm, &vma);
vmm 991 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm->bootstrapped) {
vmm 992 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const struct nvkm_vmm_page *page = vmm->func->page;
vmm 993 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const u64 limit = vmm->limit - vmm->start;
vmm 998 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_mmu_ptc_dump(vmm->mmu);
vmm 999 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_put(vmm, page, vmm->start, limit);
vmm 1002 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vma = list_first_entry(&vmm->list, typeof(*vma), head);
vmm 1005 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c WARN_ON(!list_empty(&vmm->list));
vmm 1007 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm->nullp) {
vmm 1008 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c dma_free_coherent(vmm->mmu->subdev.device->dev, 16 * 1024,
vmm 1009 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->nullp, vmm->null);
vmm 1012 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm->pd) {
vmm 1013 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_mmu_ptc_put(vmm->mmu, true, &vmm->pd->pt[0]);
vmm 1014 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_pt_del(&vmm->pd);
vmm 1019 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ctor_managed(struct nvkm_vmm *vmm, u64 addr, u64 size)
vmm 1028 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_insert(vmm, vma);
vmm 1029 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c list_add_tail(&vma->head, &vmm->list);
vmm 1037 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm *vmm)
vmm 1045 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->func = func;
vmm 1046 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->mmu = mmu;
vmm 1047 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->name = name;
vmm 1048 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->debug = mmu->subdev.debug;
vmm 1049 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c kref_init(&vmm->kref);
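The nvkm_vmm_node_search() and nvkm_vmm_node_insert() hits above imply that vmm->root is an rb-tree of struct nvkm_vma nodes keyed by address range. A sketch of the lookup that shape suggests follows; fields beyond those visible in the listing (addr, size, tree) are assumptions, and example_node_search is a hypothetical name.

/* Sketch only: an address-range rb-tree lookup in the style implied above. */
static struct nvkm_vma *
example_node_search(struct nvkm_vmm *vmm, u64 addr)
{
	struct rb_node *node = vmm->root.rb_node;

	while (node) {
		struct nvkm_vma *vma = rb_entry(node, typeof(*vma), tree);

		if (addr < vma->addr)
			node = node->rb_left;	/* addr below this range */
		else if (addr >= vma->addr + vma->size)
			node = node->rb_right;	/* addr above this range */
		else
			return vma;		/* addr falls inside vma */
	}
	return NULL;
}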
vmm 1051 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c __mutex_init(&vmm->mutex, "&vmm->mutex", key ? key : &_key);
vmm 1072 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->pd = nvkm_vmm_pt_new(desc, false, NULL);
vmm 1073 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (!vmm->pd)
vmm 1075 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->pd->refs[0] = 1;
vmm 1076 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c INIT_LIST_HEAD(&vmm->join);
vmm 1083 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->pd->pt[0] = nvkm_mmu_ptc_get(mmu, size, desc->align, true);
vmm 1084 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (!vmm->pd->pt[0])
vmm 1089 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c INIT_LIST_HEAD(&vmm->list);
vmm 1090 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->free = RB_ROOT;
vmm 1091 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->root = RB_ROOT;
vmm 1098 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->start = 0;
vmm 1099 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->limit = 1ULL << bits;
vmm 1100 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (addr + size < addr || addr + size > vmm->limit)
vmm 1104 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (addr && (ret = nvkm_vmm_ctor_managed(vmm, 0, addr)))
vmm 1111 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_insert(vmm, vma);
vmm 1112 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c list_add_tail(&vma->head, &vmm->list);
vmm 1117 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c size = vmm->limit - addr;
vmm 1118 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (size && (ret = nvkm_vmm_ctor_managed(vmm, addr, size)))
vmm 1124 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->start = addr;
vmm 1125 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->limit = size ? (addr + size) : (1ULL << bits);
vmm 1126 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm->start > vmm->limit || vmm->limit > (1ULL << bits))
vmm 1129 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (!(vma = nvkm_vma_new(vmm->start, vmm->limit - vmm->start)))
vmm 1132 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_insert(vmm, vma);
vmm 1133 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c list_add(&vma->head, &vmm->list);
vmm 1151 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_pfn_split_merge(struct nvkm_vmm *vmm, struct nvkm_vma *vma,
vmm 1169 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c return nvkm_vmm_node_merge(vmm, prev, vma, next, size);
vmm 1170 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c return nvkm_vmm_node_split(vmm, vma, addr, size);
vmm 1174 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_pfn_unmap(struct nvkm_vmm *vmm, u64 addr, u64 size)
vmm 1176 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vma *vma = nvkm_vmm_node_search(vmm, addr);
vmm 1190 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_unmap_put(vmm, &vmm->func->page[vma->refd],
vmm 1193 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c next = nvkm_vmm_pfn_split_merge(vmm, vma, start, size, 0, false);
vmm 1211 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_pfn_map(struct nvkm_vmm *vmm, u8 shift, u64 addr, u64 size, u64 *pfn)
vmm 1213 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const struct nvkm_vmm_page *page = vmm->func->page;
vmm 1229 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c addr + size < addr || addr + size > vmm->limit) {
vmm 1230 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "paged map %d %d %016llx %016llx\n",
vmm 1235 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (!(vma = nvkm_vmm_node_search(vmm, addr)))
vmm 1275 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c tmp = nvkm_vmm_pfn_split_merge(vmm, vma, addr, size,
vmm 1277 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->func->page, map);
vmm 1284 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c tmp->refd = page - vmm->func->page;
vmm 1297 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_ptes_get_map(vmm, page, addr,
vmm 1301 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_map(vmm, page, addr, size, &args,
vmm 1306 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_unmap_put(vmm, page, addr, size,
vmm 1334 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unmap_region(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 1339 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_memory_tags_put(vma->memory, vmm->mmu->subdev.device, &vma->tags);
vmm 1347 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_merge(vmm, prev, vma, next, vma->size);
vmm 1351 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unmap_locked(struct nvkm_vmm *vmm, struct nvkm_vma *vma, bool pfn)
vmm 1353 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const struct nvkm_vmm_page *page = &vmm->func->page[vma->refd];
vmm 1356 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_unmap_put(vmm, page, vma->addr, vma->size, vma->sparse, pfn);
vmm 1359 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_unmap(vmm, page, vma->addr, vma->size, vma->sparse, pfn);
vmm 1362 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unmap_region(vmm, vma);
vmm 1366 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unmap(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 1369 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_lock(&vmm->mutex);
vmm 1370 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unmap_locked(vmm, vma, false);
vmm 1371 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_unlock(&vmm->mutex);
vmm 1376 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_map_valid(struct nvkm_vmm *vmm, struct nvkm_vma *vma,
vmm 1382 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "%d !VRAM", map->page->shift);
vmm 1389 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "%d !HOST", map->page->shift);
vmm 1402 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "alignment %016llx %016llx %016llx %d %d",
vmm 1408 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c return vmm->func->valid(vmm, argv, argc, map);
vmm 1412 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_map_choose(struct nvkm_vmm *vmm, struct nvkm_vma *vma,
vmm 1415 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c for (map->page = vmm->func->page; map->page->shift; map->page++) {
vmm 1416 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "trying %d", map->page->shift);
vmm 1417 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (!nvkm_vmm_map_valid(vmm, vma, argv, argc, map))
vmm 1424 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_map_locked(struct nvkm_vmm *vmm, struct nvkm_vma *vma,
vmm 1432 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "overrun %016llx %016llx %016llx",
vmm 1442 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const u32 debug = vmm->debug;
vmm 1443 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->debug = 0;
vmm 1444 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_map_choose(vmm, vma, argv, argc, map);
vmm 1445 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->debug = debug;
vmm 1447 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "invalid at any page size");
vmm 1448 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_map_choose(vmm, vma, argv, argc, map);
vmm 1454 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c map->page = &vmm->func->page[vma->refd];
vmm 1456 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c map->page = &vmm->func->page[vma->page];
vmm 1458 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_map_valid(vmm, vma, argv, argc, map);
vmm 1460 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "invalid %d\n", ret);
vmm 1492 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_ptes_get_map(vmm, map->page, vma->addr, vma->size, map, func);
vmm 1496 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vma->refd = map->page - vmm->func->page;
vmm 1498 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_map(vmm, map->page, vma->addr, vma->size, map, func);
vmm 1501 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_memory_tags_put(vma->memory, vmm->mmu->subdev.device, &vma->tags);
vmm 1510 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_map(struct nvkm_vmm *vmm, struct nvkm_vma *vma, void *argv, u32 argc,
vmm 1514 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_lock(&vmm->mutex);
vmm 1515 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_map_locked(vmm, vma, argv, argc, map);
vmm 1517 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_unlock(&vmm->mutex);
vmm 1522 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put_region(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 1529 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_delete(vmm, prev);
vmm 1534 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_delete(vmm, next);
vmm 1537 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_insert(vmm, vma);
vmm 1541 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put_locked(struct nvkm_vmm *vmm, struct nvkm_vma *vma)
vmm 1543 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const struct nvkm_vmm_page *page = vmm->func->page;
vmm 1568 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_unmap_put(vmm, &page[refd], addr,
vmm 1574 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_put(vmm, &page[refd], addr, size);
vmm 1586 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unmap_region(vmm, next);
vmm 1598 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_sparse_put(vmm, &page[vma->refd], vma->addr, vma->size);
vmm 1609 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ptes_sparse(vmm, vma->addr, vma->size, false);
vmm 1613 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_remove(vmm, vma);
vmm 1620 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put_region(vmm, vma);
vmm 1624 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put(struct nvkm_vmm *vmm, struct nvkm_vma **pvma)
vmm 1628 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_lock(&vmm->mutex);
vmm 1629 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put_locked(vmm, vma);
vmm 1630 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_unlock(&vmm->mutex);
vmm 1636 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_get_locked(struct nvkm_vmm *vmm, bool getref, bool mapref, bool sparse,
vmm 1639 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const struct nvkm_vmm_page *page = &vmm->func->page[NVKM_VMA_PAGE_NONE];
vmm 1645 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_TRACE(vmm, "getref %d mapref %d sparse %d "
vmm 1651 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "args %016llx %d %d %d",
vmm 1662 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (unlikely((getref || vmm->func->page_block) && !shift)) {
vmm 1663 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "page size required: %d %016llx",
vmm 1664 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c getref, vmm->func->page_block);
vmm 1672 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c for (page = vmm->func->page; page->shift; page++) {
vmm 1678 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c VMM_DEBUG(vmm, "page %d %016llx", shift, size);
vmm 1687 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c temp = vmm->free.rb_node;
vmm 1708 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const int p = page - vmm->func->page;
vmm 1711 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm->func->page_block && prev && prev->page != p)
vmm 1712 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c addr = ALIGN(addr, vmm->func->page_block);
vmm 1716 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm->func->page_block && next && next->page != p)
vmm 1717 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c tail = ALIGN_DOWN(tail, vmm->func->page_block);
vmm 1720 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_remove(vmm, this);
vmm 1734 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put_region(vmm, vma);
vmm 1737 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_insert(vmm, vma);
vmm 1743 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put_region(vmm, vma);
vmm 1746 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_free_insert(vmm, tmp);
vmm 1751 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_ptes_sparse_get(vmm, page, vma->addr, vma->size);
vmm 1753 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_ptes_sparse(vmm, vma->addr, vma->size, true);
vmm 1755 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_ptes_get(vmm, page, vma->addr, vma->size);
vmm 1759 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_put_region(vmm, vma);
vmm 1765 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vma->page = page - vmm->func->page;
vmm 1768 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_node_insert(vmm, vma);
vmm 1774 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_get(struct nvkm_vmm *vmm, u8 page, u64 size, struct nvkm_vma **pvma)
vmm 1777 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_lock(&vmm->mutex);
vmm 1778 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_get_locked(vmm, false, true, false, page, 0, size, pvma);
vmm 1779 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_unlock(&vmm->mutex);
vmm 1784 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_part(struct nvkm_vmm *vmm, struct nvkm_memory *inst)
vmm 1786 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (inst && vmm && vmm->func->part) {
vmm 1787 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_lock(&vmm->mutex);
vmm 1788 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->func->part(vmm, inst);
vmm 1789 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_unlock(&vmm->mutex);
vmm 1794 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst)
vmm 1797 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm->func->join) {
vmm 1798 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_lock(&vmm->mutex);
vmm 1799 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = vmm->func->join(vmm, inst);
vmm 1800 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c mutex_unlock(&vmm->mutex);
vmm 1810 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_memory_boot(it->pt[0]->pt[type]->memory, it->vmm);
vmm 1815 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_boot(struct nvkm_vmm *vmm)
vmm 1817 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const struct nvkm_vmm_page *page = vmm->func->page;
vmm 1818 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c const u64 limit = vmm->limit - vmm->start;
vmm 1824 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = nvkm_vmm_ptes_get(vmm, page, vmm->start, limit);
vmm 1828 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_iter(vmm, page, vmm->start, limit, "bootstrap", false, false,
vmm 1830 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->bootstrapped = true;
vmm 1837 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm *vmm = container_of(kref, typeof(*vmm), kref);
vmm 1838 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_dtor(vmm);
vmm 1839 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c kfree(vmm);
vmm 1845 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm *vmm = *pvmm;
vmm 1846 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm) {
vmm 1847 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c kref_put(&vmm->kref, nvkm_vmm_del);
vmm 1853 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ref(struct nvkm_vmm *vmm)
vmm 1855 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (vmm)
vmm 1856 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c kref_get(&vmm->kref);
vmm 1857 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c return vmm;
vmm 1866 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm *vmm = NULL;
vmm 1868 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c ret = mmu->func->vmm.ctor(mmu, false, addr, size, argv, argc,
vmm 1869 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c key, name, &vmm);
vmm 1871 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unref(&vmm);
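
The references above complete the public surface of the core VMM: nvkm_vmm_get()/nvkm_vmm_put() for address-space allocation, nvkm_vmm_join()/nvkm_vmm_part() for attaching instance blocks, and kref-based lifetime via nvkm_vmm_ref()/nvkm_vmm_unref(). A minimal consumer-side sketch of the allocate/map/release cycle, assembled from the secboot/gm200.c call sites indexed later in this listing (vmm, memory and size are assumed to be in scope):

struct nvkm_vma *vma;
int ret;

/* Allocate address space with 4KiB pages (page shift 12). */
ret = nvkm_vmm_get(vmm, 12, size, &vma);
if (ret)
	return ret;

/* Back the VMA with a memory object; release the VMA on failure. */
ret = nvkm_memory_map(memory, 0, vmm, vma, NULL, 0);
if (ret)
	nvkm_vmm_put(vmm, &vma);
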
vmm 1872 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c *pvmm = vmm;
vmm 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 44 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_WO064(pt, vmm, ptei++ * 8, data);
vmm 51 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_WO064(pt, vmm, ptei++ * 8, data);
vmm 58 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
vmm 65 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 69 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
vmm 73 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_WO064(pt, vmm, ptei++ * 8, data);
vmm 80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
vmm 84 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 87 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
vmm 91 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_unmap(struct nvkm_vmm *vmm,
vmm 94 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_FO064(pt, vmm, ptei * 8, 0ULL, ptes);
vmm 106 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgd_pde(struct nvkm_vmm *vmm, struct nvkm_vmm_pt *pgd, u32 pdei)
vmm 142 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_WO064(pd, vmm, pdei * 8, data);
vmm 181 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_invalidate_pdb(struct nvkm_vmm *vmm, u64 addr)
vmm 183 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_device *device = vmm->mmu->subdev.device;
vmm 188 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_invalidate(struct nvkm_vmm *vmm, u32 type)
vmm 190 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_subdev *subdev = &vmm->mmu->subdev;
vmm 192 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_mmu_pt *pd = vmm->pd->pt[0];
vmm 213 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c addr |= (vmm->pd->pt[0]->addr >> 12) << 4;
vmm 215 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c vmm->func->invalidate_pdb(vmm, addr);
vmm 229 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_flush(struct nvkm_vmm *vmm, int depth)
vmm 232 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))
vmm 234 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_invalidate(vmm, type);
vmm 238 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc,
vmm 248 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_device *device = vmm->mmu->subdev.device;
vmm 269 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_DEBUG(vmm, "args");
vmm 273 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c aper = vmm->func->aper(target);
vmm 277 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c kindm = vmm->mmu->func->kind(vmm->mmu, &kindn);
vmm 279 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_DEBUG(vmm, "kind %02x", kind);
vmm 287 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_DEBUG(vmm, "comp %d %02x", aper, page->type);
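
All of the vmmgf100.c page-table helpers indexed above funnel into the same write pattern: a gf100 PTE is 64 bits, so entry ptei lives at byte offset ptei * 8 and is emitted with VMM_WO064(), while the VMM_MAP_ITER_* macros walk SGL/DMA/MEM sources and call the per-PTE helper. Schematically (next_pte_value() is a hypothetical stand-in for the encoding logic, which the index does not show):

/* Write 'ptes' consecutive 64-bit PTEs starting at index 'ptei'. */
while (ptes--) {
	u64 data = next_pte_value(map);	/* hypothetical encoding helper */
	VMM_WO064(pt, vmm, ptei++ * 8, data);
}
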
%d %02x", aper, page->type); vmm 295 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_DEBUG(vmm, "comp %d", ret); vmm 335 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_part(struct nvkm_vmm *vmm, struct nvkm_memory *inst) vmm 341 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base) vmm 343 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_mmu_pt *pd = vmm->pd->pt[0]; vmm 359 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c nvkm_wo64(inst, 0x0208, vmm->limit - 1); vmm 365 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst) vmm 367 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c return gf100_vmm_join_(vmm, inst, 0); vmm 25 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk104.c gk104_vmm_lpt_invalid(struct nvkm_vmm *vmm, vmm 29 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk104.c VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes); vmm 28 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm, vmm 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes); vmm 53 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c gm200_vmm_pgd_sparse(struct nvkm_vmm *vmm, vmm 57 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. */, pdes); vmm 96 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c gm200_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base) vmm 98 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c if (vmm->func->page[1].shift == 16) vmm 100 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c return gf100_vmm_join_(vmm, inst, base); vmm 104 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c gm200_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst) vmm 106 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c return gm200_vmm_join_(vmm, inst, 0); vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm, vmm 37 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct device *dev = vmm->mmu->subdev.device->dev; vmm 55 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pfn_clear(struct nvkm_vmm *vmm, vmm 65 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0)); vmm 75 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, vmm 78 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct device *dev = vmm->mmu->subdev.device->dev; vmm 102 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pt, vmm, ptei++ * 8, data); vmm 109 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, vmm 117 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pt, vmm, ptei++ * 8, data); vmm 123 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, vmm 126 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte); vmm 130 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, vmm 134 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); vmm 138 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pt, vmm, ptei++ * 8, data); vmm 145 
vmm 149 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 152 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte);
vmm 156 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_sparse(struct nvkm_vmm *vmm,
vmm 160 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(3) /* VOL. */, ptes);
vmm 176 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_lpt_invalid(struct nvkm_vmm *vmm,
vmm 180 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(5) /* PRIV. */, ptes);
vmm 192 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pd0_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 200 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO128(pt, vmm, ptei++ * 0x10, data, 0ULL);
vmm 206 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pd0_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 209 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gp100_vmm_pd0_pte);
vmm 230 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pd0_pde(struct nvkm_vmm *vmm, struct nvkm_vmm_pt *pgd, u32 pdei)
vmm 242 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO128(pd, vmm, pdei * 0x10, data[0], data[1]);
vmm 247 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pd0_sparse(struct nvkm_vmm *vmm,
vmm 251 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_FO128(pt, vmm, pdei * 0x10, BIT_ULL(3) /* VOL_BIG. */, 0ULL, pdes);
vmm 255 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pd0_unmap(struct nvkm_vmm *vmm,
vmm 258 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_FO128(pt, vmm, pdei * 0x10, 0ULL, 0ULL, pdes);
vmm 270 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pd1_pde(struct nvkm_vmm *vmm, struct nvkm_vmm_pt *pgd, u32 pdei)
vmm 280 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pd, vmm, pdei * 8, data);
vmm 312 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc,
vmm 321 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_device *device = vmm->mmu->subdev.device;
vmm 342 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_DEBUG(vmm, "args");
vmm 346 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c aper = vmm->func->aper(target);
vmm 350 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c kindm = vmm->mmu->func->kind(vmm->mmu, &kindn);
vmm 352 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_DEBUG(vmm, "kind %02x", kind);
vmm 359 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_DEBUG(vmm, "comp %d %02x", aper, page->type);
vmm 367 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_DEBUG(vmm, "comp %d", ret);
vmm 391 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_fault_cancel(struct nvkm_vmm *vmm, void *argv, u32 argc)
vmm 393 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_device *device = vmm->mmu->subdev.device;
vmm 413 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gf100_vmm_invalidate(vmm, 0x0000001b
vmm 426 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_fault_replay(struct nvkm_vmm *vmm, void *argv, u32 argc)
vmm 434 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gf100_vmm_invalidate(vmm, 0x0000000b); /* REPLAY_GLOBAL. */
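
gp100_vmm_mthd(), indexed just below, routes object methods to the two fault helpers referenced above. A sketch of that dispatch shape; the index shows only the two return statements, so the full signature and the method-name constants here are assumptions:

static int
gp100_vmm_mthd(struct nvkm_vmm *vmm, struct nvkm_client *client,
	       u32 mthd, void *argv, u32 argc)
{
	switch (mthd) {
	case GP100_VMM_VN_FAULT_REPLAY:		/* assumed constant name */
		return gp100_vmm_fault_replay(vmm, argv, argc);
	case GP100_VMM_VN_FAULT_CANCEL:		/* assumed constant name */
		return gp100_vmm_fault_cancel(vmm, argv, argc);
	default:
		return -EINVAL;
	}
}
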
vmm 441 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_mthd(struct nvkm_vmm *vmm,
vmm 447 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c return gp100_vmm_fault_replay(vmm, argv, argc);
vmm 449 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c return gp100_vmm_fault_cancel(vmm, argv, argc);
vmm 458 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_invalidate_pdb(struct nvkm_vmm *vmm, u64 addr)
vmm 460 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_device *device = vmm->mmu->subdev.device;
vmm 466 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_flush(struct nvkm_vmm *vmm, int depth)
vmm 470 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))
vmm 473 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gf100_vmm_invalidate(vmm, type);
vmm 477 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst)
vmm 480 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c if (vmm->replay) {
vmm 484 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c return gf100_vmm_join_(vmm, inst, base);
vmm 31 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgv100.c gv100_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst)
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgv100.c int ret = gp100_vmm_join(vmm, inst), i;
vmm 28 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);
vmm 39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 42 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
vmm 46 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 52 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);
vmm 55 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
vmm 60 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm,
vmm 63 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes);
vmm 80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nv04_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc,
vmm 88 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_DEBUG(vmm, "args");
vmm 126 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c struct nvkm_vmm *vmm;
vmm 130 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c argv, argc, key, name, &vmm);
vmm 131 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c *pvmm = vmm;
vmm 135 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c mem = vmm->pd->pt[0]->memory;
vmm 138 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nvkm_wo32(mem, 0x00004, vmm->limit - 1);
vmm 27 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_WO032(pt, vmm, ptei++ * 4, data);
vmm 38 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 41 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
vmm 45 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
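
Two details of the nv04-class GART are visible above: PTEs are 32-bit words placed after an 8-byte table header (the ctor writes the table limit at offset 0x00004, hence the "8 +" in every PTE offset), and the DMA fast path ORs control bits straight into the page address:

/* Write one 32-bit nv04 GART PTE; the low bits (0x00000003 here) mark
 * the entry valid (exact bit meaning assumed, not shown by the index). */
VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);
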
vmm 52 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_WO032(pt, vmm, ptei++ * 4, data);
vmm 56 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
vmm 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm,
vmm 64 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_FO032(pt, vmm, ptei * 4, 0, ptes);
vmm 81 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nv41_vmm_flush(struct nvkm_vmm *vmm, int level)
vmm 83 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c struct nvkm_subdev *subdev = &vmm->mmu->subdev;
vmm 27 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c u32 addr = (list ? *list++ : vmm->null) >> 12;
vmm 66 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]);
vmm 67 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]);
vmm 68 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]);
vmm 69 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000);
vmm 73 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 82 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten);
vmm 90 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27);
vmm 91 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22);
vmm 92 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[2] >> 10 | tmp[3] << 17);
vmm 93 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[3] >> 15 | 0x40000000);
vmm 100 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, ptes);
vmm 105 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 108 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv44_vmm_pgt_pte);
vmm 112 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 119 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, map->dma, ptei, pten);
vmm 129 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27);
vmm 130 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22);
vmm 131 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[2] >> 10 | tmp[3] << 17);
vmm 132 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[3] >> 15 | 0x40000000);
vmm 137 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, map->dma, ptei, ptes);
vmm 142 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv44_vmm_pgt_pte);
vmm 147 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_unmap(struct nvkm_vmm *vmm,
vmm 153 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, NULL, ptei, pten);
vmm 159 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, 0x00000000);
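
The repeated shift pattern above is the interesting part of vmmnv44.c: four 27-bit PTEs are packed into each 16-byte group of the GART table (4 x 27 = 108 bits, plus a flag in the top word, 0x40000000). Aligned, full groups are written directly; partial groups detour through nv44_vmm_pgt_fill(), which merges the new entries into the group and substitutes vmm->null for slots being cleared (per the "list ? *list++ : vmm->null" reference above):

/* Emit one full group of four packed 27-bit PTEs (tmp[0..3]). */
VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27);
VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22);
VMM_WO032(pt, vmm, ptei++ * 4, tmp[2] >> 10 | tmp[3] << 17);
VMM_WO032(pt, vmm, ptei++ * 4, tmp[3] >> 15 | 0x40000000);
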
vmm 160 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, 0x00000000);
vmm 161 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, 0x00000000);
vmm 162 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, 0x00000000);
vmm 167 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, NULL, ptei, ptes);
vmm 185 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_flush(struct nvkm_vmm *vmm, int level)
vmm 187 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c struct nvkm_device *device = vmm->mmu->subdev.device;
vmm 188 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nvkm_wr32(device, 0x100814, vmm->limit - 4096);
vmm 213 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c struct nvkm_vmm *vmm;
vmm 217 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c argv, argc, key, name, &vmm);
vmm 218 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c *pvmm = vmm;
vmm 222 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c vmm->nullp = dma_alloc_coherent(subdev->device->dev, 16 * 1024,
vmm 223 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c &vmm->null, GFP_KERNEL);
vmm 224 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c if (!vmm->nullp) {
vmm 226 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c vmm->null = 0;
vmm 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 53 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_WO064(pt, vmm, ptei++ * 8, data);
vmm 58 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
vmm 65 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 69 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
vmm 73 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_WO064(pt, vmm, ptei++ * 8, data);
vmm 80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
vmm 84 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
vmm 87 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
vmm 91 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_unmap(struct nvkm_vmm *vmm,
vmm 94 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_FO064(pt, vmm, ptei * 8, 0ULL, ptes);
vmm 106 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pde(struct nvkm_vmm *vmm, struct nvkm_vmm_pt *pgt, u64 *pdata)
vmm 145 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgd_pde(struct nvkm_vmm *vmm, struct nvkm_vmm_pt *pgd, u32 pdei)
vmm 148 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c u32 pdeo = vmm->mmu->func->vmm.pd_offset + (pdei * 8);
vmm 151 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c if (!nv50_vmm_pde(vmm, pgd->pde[pdei], &data))
vmm 154 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c list_for_each_entry(join, &vmm->join, head) {
vmm 181 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_flush(struct nvkm_vmm *vmm, int level)
vmm 183 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c struct nvkm_subdev *subdev = &vmm->mmu->subdev;
vmm 189 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c if (!atomic_read(&vmm->engref[i]))
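
The nv44 ctor references above allocate the scratch page that backs cleared GART slots. Sketched with its surrounding context (line 225 is not indexed, so the failure-path message is elided):

/* 16KiB DMA-coherent scratch buffer; unmapped GART entries point at
 * vmm->null.  On failure, leave vmm->null as 0 and carry on. */
vmm->nullp = dma_alloc_coherent(subdev->device->dev, 16 * 1024,
				&vmm->null, GFP_KERNEL);
if (!vmm->nullp) {
	/* the original source logs a warning here (line 225, not indexed) */
	vmm->null = 0;
}
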
vmm 227 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc,
vmm 235 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c struct nvkm_device *device = vmm->mmu->subdev.device;
vmm 257 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_DEBUG(vmm, "args");
vmm 281 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c kindm = vmm->mmu->func->kind(vmm->mmu, &kindn);
vmm 283 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_DEBUG(vmm, "kind %02x", kind);
vmm 288 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_DEBUG(vmm, "kind %02x bankswz: %d %d", kind,
vmm 296 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_DEBUG(vmm, "comp %d %02x", aper, page->type);
vmm 303 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_DEBUG(vmm, "comp %d", ret);
vmm 325 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_part(struct nvkm_vmm *vmm, struct nvkm_memory *inst)
vmm 329 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c list_for_each_entry(join, &vmm->join, head) {
vmm 339 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst)
vmm 341 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c const u32 pd_offset = vmm->mmu->func->vmm.pd_offset;
vmm 350 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c list_add_tail(&join->head, &vmm->join);
vmm 353 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c for (pdei = vmm->start >> 29; pdei <= (vmm->limit - 1) >> 29; pdei++) {
vmm 354 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c if (!nv50_vmm_pde(vmm, vmm->pd->pde[pdei], &data)) {
vmm 27 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmtu102.c tu102_vmm_flush(struct nvkm_vmm *vmm, int depth)
vmm 29 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmtu102.c struct nvkm_subdev *subdev = &vmm->mmu->subdev;
vmm 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmtu102.c if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))
vmm 39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmtu102.c nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8);
vmm 51 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c ret = nvkm_vmm_get(gsb->vmm, 12, blob->size, &vma);
vmm 57 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c ret = nvkm_memory_map(blob, 0, gsb->vmm, vma, NULL, 0);
vmm 98 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c nvkm_vmm_put(gsb->vmm, &vma);
vmm 118 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c &gsb->vmm);
vmm 122 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c atomic_inc(&gsb->vmm->engref[NVKM_SUBDEV_PMU]);
vmm 123 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c gsb->vmm->debug = gsb->base.subdev.debug;
vmm 125 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c ret = nvkm_vmm_join(gsb->vmm, gsb->inst);
vmm 156 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c nvkm_vmm_part(gsb->vmm, gsb->inst);
vmm 157 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.c nvkm_vmm_unref(&gsb->vmm);
vmm 33 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm200.h struct nvkm_vmm *vmm;
vmm 15 drivers/gpu/drm/vboxvideo/vbox_ttm.c struct drm_vram_mm *vmm;
vmm 19 drivers/gpu/drm/vboxvideo/vbox_ttm.c vmm = drm_vram_helper_alloc_mm(dev, pci_resource_start(dev->pdev, 0),
vmm 22 drivers/gpu/drm/vboxvideo/vbox_ttm.c if (IS_ERR(vmm)) {
vmm 23 drivers/gpu/drm/vboxvideo/vbox_ttm.c ret = PTR_ERR(vmm);
vmm 150 fs/proc/kcore.c struct kcore_list *vmm, *tmp;
vmm 165 fs/proc/kcore.c vmm = kmalloc(sizeof(*vmm), GFP_KERNEL);
vmm 166 fs/proc/kcore.c if (!vmm)
vmm 168 fs/proc/kcore.c vmm->addr = start;
vmm 169 fs/proc/kcore.c vmm->size = end - start;
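
The secboot/gm200.c references above give a complete consumer-side picture of VMM setup and teardown: bump the engine refcount, inherit the subdev debug level, join the instance block, and undo it all in reverse order on fini. Condensed from the indexed call sites (error handling and unrelated steps trimmed):

/* init */
atomic_inc(&gsb->vmm->engref[NVKM_SUBDEV_PMU]);
gsb->vmm->debug = gsb->base.subdev.debug;
ret = nvkm_vmm_join(gsb->vmm, gsb->inst);

/* fini: reverse order */
nvkm_vmm_part(gsb->vmm, gsb->inst);
nvkm_vmm_unref(&gsb->vmm);
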
vmm 170 fs/proc/kcore.c vmm->type = KCORE_VMEMMAP;
vmm 171 fs/proc/kcore.c list_add_tail(&vmm->list, head);
vmm 63 include/drm/drm_vram_mm_helper.h int drm_vram_mm_init(struct drm_vram_mm *vmm, struct drm_device *dev,
vmm 66 include/drm/drm_vram_mm_helper.h void drm_vram_mm_cleanup(struct drm_vram_mm *vmm);
vmm 69 include/drm/drm_vram_mm_helper.h struct drm_vram_mm *vmm);
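
For the drm_vram_mm helper declared in the header entries above, the driver-side pattern is uniform across the drivers in this index. A sketch following the vbox_ttm.c references (vram_size and the funcs pointer are placeholders for the driver's own values):

struct drm_vram_mm *vmm;

/* Create the VRAM manager over BAR 0 and convert an error pointer
 * into a plain return code. */
vmm = drm_vram_helper_alloc_mm(dev, pci_resource_start(dev->pdev, 0),
			       vram_size, funcs /* driver's vram-mm funcs */);
if (IS_ERR(vmm))
	return PTR_ERR(vmm);
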