Searched refs:pgt (Results 1 – 13 of 13) sorted by relevance

/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
nv44.c:38 nv44_vm_fill(struct nvkm_memory *pgt, dma_addr_t null, in nv44_vm_fill() argument
44 tmp[0] = nvkm_ro32(pgt, base + 0x0); in nv44_vm_fill()
45 tmp[1] = nvkm_ro32(pgt, base + 0x4); in nv44_vm_fill()
46 tmp[2] = nvkm_ro32(pgt, base + 0x8); in nv44_vm_fill()
47 tmp[3] = nvkm_ro32(pgt, base + 0xc); in nv44_vm_fill()
77 nvkm_wo32(pgt, base + 0x0, tmp[0]); in nv44_vm_fill()
78 nvkm_wo32(pgt, base + 0x4, tmp[1]); in nv44_vm_fill()
79 nvkm_wo32(pgt, base + 0x8, tmp[2]); in nv44_vm_fill()
80 nvkm_wo32(pgt, base + 0xc, tmp[3] | 0x40000000); in nv44_vm_fill()
84 nv44_vm_map_sg(struct nvkm_vma *vma, struct nvkm_memory *pgt, in nv44_vm_map_sg() argument
[all …]
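
The nv44 lines above show a read-modify-write of a full 16-byte group: the entry being updated is not stored on its own; the surrounding four words are read back with nvkm_ro32(), patched, and rewritten with nvkm_wo32(), with 0x40000000 OR'd into the last word. Below is a minimal standalone model of that grouping; the flat array and the ro32()/wo32() helpers stand in for the nvkm_memory accessors, and the one-word-per-entry update is a simplification of the driver's actual bit packing.

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static uint32_t pgt[16];                       /* stand-in for the nvkm_memory object */

static uint32_t ro32(uint32_t off)             { return pgt[off / 4]; }
static void     wo32(uint32_t off, uint32_t v) { pgt[off / 4] = v; }

static void fill_group(uint32_t pte, uint32_t value)
{
        uint32_t base = (pte << 2) & ~0x0000000fu;  /* 16-byte group holding this PTE */
        uint32_t tmp[4];

        tmp[0] = ro32(base + 0x0);
        tmp[1] = ro32(base + 0x4);
        tmp[2] = ro32(base + 0x8);
        tmp[3] = ro32(base + 0xc);

        tmp[pte & 3] = value;                       /* simplified one-word-per-entry update */

        wo32(base + 0x0, tmp[0]);
        wo32(base + 0x4, tmp[1]);
        wo32(base + 0x8, tmp[2]);
        wo32(base + 0xc, tmp[3] | 0x40000000);      /* marker bit, as on line 80 above */
}

int main(void)
{
        fill_group(5, 0x1234);                      /* PTE 5 lands in the second group */
        printf("group 1 = %08" PRIx32 " %08" PRIx32 " %08" PRIx32 " %08" PRIx32 "\n",
               pgt[4], pgt[5], pgt[6], pgt[7]);
        return 0;
}
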
gf100.c:73 gf100_vm_map_pgt(struct nvkm_gpuobj *pgd, u32 index, struct nvkm_memory *pgt[2]) in gf100_vm_map_pgt()
77 if (pgt[0]) in gf100_vm_map_pgt()
78 pde[1] = 0x00000001 | (nvkm_memory_addr(pgt[0]) >> 8); in gf100_vm_map_pgt()
79 if (pgt[1]) in gf100_vm_map_pgt()
80 pde[0] = 0x00000001 | (nvkm_memory_addr(pgt[1]) >> 8); in gf100_vm_map_pgt()
103 gf100_vm_map(struct nvkm_vma *vma, struct nvkm_memory *pgt, in gf100_vm_map() argument
119 nvkm_kmap(pgt); in gf100_vm_map()
121 nvkm_wo32(pgt, pte + 0, lower_32_bits(phys)); in gf100_vm_map()
122 nvkm_wo32(pgt, pte + 4, upper_32_bits(phys)); in gf100_vm_map()
126 nvkm_done(pgt); in gf100_vm_map()
[all …]
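
gf100's PDE, as written in gf100_vm_map_pgt() above, is a pair of 32-bit words, one per page-table pointer (pgt[0] and pgt[1]), each carrying bit 0 as a present bit plus the table's address in 256-byte units (addr >> 8); gf100_vm_map() then stores each 64-bit PTE as two 32-bit writes, the low half at pte + 0 and the high half at pte + 4, inside an nvkm_kmap()/nvkm_done() pair. A standalone sketch of that packing follows; the struct and function names are illustrative, and phys is assumed to already carry whatever flag bits the driver folds in before the split.

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

struct gf100_pde { uint32_t word[2]; };

static struct gf100_pde pack_pde(uint64_t pgt0_addr, uint64_t pgt1_addr)
{
        struct gf100_pde pde = { { 0, 0 } };

        if (pgt0_addr)                              /* mirrors the pgt[0] branch above */
                pde.word[1] = 0x00000001 | (uint32_t)(pgt0_addr >> 8);
        if (pgt1_addr)                              /* mirrors the pgt[1] branch above */
                pde.word[0] = 0x00000001 | (uint32_t)(pgt1_addr >> 8);
        return pde;
}

static void write_pte(uint32_t *pgt, uint32_t pte, uint64_t phys)
{
        pgt[pte * 2 + 0] = (uint32_t)phys;          /* lower_32_bits(phys) */
        pgt[pte * 2 + 1] = (uint32_t)(phys >> 32);  /* upper_32_bits(phys) */
}

int main(void)
{
        uint32_t pgt[8] = { 0 };
        struct gf100_pde pde = pack_pde(0x200000, 0);

        write_pte(pgt, 1, 0x1234567890ull);
        printf("pde = %08" PRIx32 " %08" PRIx32 ", pte[1] = %08" PRIx32 " %08" PRIx32 "\n",
               pde.word[0], pde.word[1], pgt[2], pgt[3]);
        return 0;
}
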
nv50.c:32 nv50_vm_map_pgt(struct nvkm_gpuobj *pgd, u32 pde, struct nvkm_memory *pgt[2]) in nv50_vm_map_pgt()
37 if (pgt[0]) { in nv50_vm_map_pgt()
39 phys = 0x00000003 | nvkm_memory_addr(pgt[0]); in nv50_vm_map_pgt()
40 coverage = (nvkm_memory_size(pgt[0]) >> 3) << 12; in nv50_vm_map_pgt()
42 if (pgt[1]) { in nv50_vm_map_pgt()
44 phys = 0x00000001 | nvkm_memory_addr(pgt[1]); in nv50_vm_map_pgt()
45 coverage = (nvkm_memory_size(pgt[1]) >> 3) << 16; in nv50_vm_map_pgt()
77 nv50_vm_map(struct nvkm_vma *vma, struct nvkm_memory *pgt, in nv50_vm_map() argument
96 nvkm_kmap(pgt); in nv50_vm_map()
117 nvkm_wo32(pgt, pte + 0, offset_l); in nv50_vm_map()
[all …]
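
The nv50 PDE write above derives a coverage value from the page table's size: nvkm_memory_size(pgt) >> 3 is the number of 8-byte PTEs, and shifting that count by the page shift (12 in the pgt[0] branch, 16 in the pgt[1] branch, i.e. 4 KiB and 64 KiB pages) gives the bytes of address space the table spans. A quick standalone check of that arithmetic:

#include <stdint.h>
#include <stdio.h>

static uint64_t pgt_coverage(uint64_t pgt_bytes, unsigned int page_shift)
{
        return (pgt_bytes >> 3) << page_shift;      /* PTE count * page size */
}

int main(void)
{
        /* A 16 KiB table of 8-byte PTEs covers 8 MiB at 4 KiB per page. */
        printf("%llu MiB\n",
               (unsigned long long)(pgt_coverage(16 << 10, 12) >> 20));
        return 0;
}
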
nv41.c:38 nv41_vm_map_sg(struct nvkm_vma *vma, struct nvkm_memory *pgt, in nv41_vm_map_sg() argument
42 nvkm_kmap(pgt); in nv41_vm_map_sg()
47 nvkm_wo32(pgt, pte, (phys >> 7) | 1); in nv41_vm_map_sg()
53 nvkm_done(pgt); in nv41_vm_map_sg()
57 nv41_vm_unmap(struct nvkm_vma *vma, struct nvkm_memory *pgt, u32 pte, u32 cnt) in nv41_vm_unmap() argument
60 nvkm_kmap(pgt); in nv41_vm_unmap()
62 nvkm_wo32(pgt, pte, 0x00000000); in nv41_vm_unmap()
65 nvkm_done(pgt); in nv41_vm_unmap()
102 &mmu->vm->pgt[0].mem[0]); in nv41_mmu_oneinit()
103 mmu->vm->pgt[0].refcount[0] = 1; in nv41_mmu_oneinit()
[all …]
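
nv41's GART entry, as written on line 47 above, is the physical address shifted right by 7 (so the hardware works in 128-byte units) with bit 0 as a present bit, and unmapping simply stores zero; the oneinit lines show the single table in pgt[0].mem[0] being given a refcount of 1 up front, which appears to pin it for the life of the device. A minimal model of the two stores (the real code brackets them with nvkm_kmap()/nvkm_done()):

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static uint32_t nv41_pte(uint64_t phys)
{
        return (uint32_t)(phys >> 7) | 1;           /* address / 128 + present bit */
}

int main(void)
{
        uint32_t pgt[4] = { 0 };

        pgt[2] = nv41_pte(0x12345000);              /* map one page */
        printf("pte = 0x%08" PRIx32 "\n", pgt[2]);
        pgt[2] = 0x00000000;                        /* unmap, as on line 62 above */
        return 0;
}
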
nv04.c:36 nv04_vm_map_sg(struct nvkm_vma *vma, struct nvkm_memory *pgt, in nv04_vm_map_sg() argument
40 nvkm_kmap(pgt); in nv04_vm_map_sg()
45 nvkm_wo32(pgt, pte, phys | 3); in nv04_vm_map_sg()
51 nvkm_done(pgt); in nv04_vm_map_sg()
55 nv04_vm_unmap(struct nvkm_vma *vma, struct nvkm_memory *pgt, u32 pte, u32 cnt) in nv04_vm_unmap() argument
58 nvkm_kmap(pgt); in nv04_vm_unmap()
60 nvkm_wo32(pgt, pte, 0x00000000); in nv04_vm_unmap()
63 nvkm_done(pgt); in nv04_vm_unmap()
91 mmu->vm->pgt[0].mem[0] = dma; in nv04_mmu_oneinit()
92 mmu->vm->pgt[0].refcount[0] = 1; in nv04_mmu_oneinit()
[all …]
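
nv04 follows the same shape with a simpler encoding: each PTE is the page's bus address with the low two bits set (page alignment leaves them free for flags), unmap clears the entry back to zero, and oneinit hands the pre-allocated DMA table straight to pgt[0].mem[0] with refcount 1. A sketch of the map_sg/unmap loops over a plain array; the array and pointer arguments replace nvkm_memory, nvkm_wo32() and the kmap/done bracketing:

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static void nv04_map_sg(uint32_t *pgt, uint32_t pte,
                        const uint64_t *list, uint32_t cnt)
{
        while (cnt--)
                pgt[pte++] = (uint32_t)(*list++) | 3;   /* phys | 3, as on line 45 */
}

static void nv04_unmap(uint32_t *pgt, uint32_t pte, uint32_t cnt)
{
        while (cnt--)
                pgt[pte++] = 0x00000000;                /* as on line 60 */
}

int main(void)
{
        uint32_t pgt[8] = { 0 };
        uint64_t pages[2] = { 0x10000, 0x2a000 };

        nv04_map_sg(pgt, 1, pages, 2);
        printf("pte[1] = 0x%08" PRIx32 ", pte[2] = 0x%08" PRIx32 "\n", pgt[1], pgt[2]);
        nv04_unmap(pgt, 1, 2);
        return 0;
}
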
base.c:49 struct nvkm_memory *pgt = vm->pgt[pde].mem[big]; in nvkm_vm_map_at() local
56 mmu->func->map(vma, pgt, node, pte, len, phys, delta); in nvkm_vm_map_at()
92 struct nvkm_memory *pgt = vm->pgt[pde].mem[big]; in nvkm_vm_map_sg_table() local
103 mmu->func->map_sg(vma, pgt, mem, pte, 1, &addr); in nvkm_vm_map_sg_table()
118 mmu->func->map_sg(vma, pgt, mem, pte, 1, &addr); in nvkm_vm_map_sg_table()
148 struct nvkm_memory *pgt = vm->pgt[pde].mem[big]; in nvkm_vm_map_sg() local
155 mmu->func->map_sg(vma, pgt, mem, pte, len, list); in nvkm_vm_map_sg()
196 struct nvkm_memory *pgt = vm->pgt[pde].mem[big]; in nvkm_vm_unmap_at() local
203 mmu->func->unmap(vma, pgt, pte, len); in nvkm_vm_unmap_at()
228 struct nvkm_memory *pgt; in nvkm_vm_unmap_pgt() local
[all …]
priv.h:26 struct nvkm_memory *pgt[2]);
32 void (*unmap)(struct nvkm_vma *, struct nvkm_memory *pgt,
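
base.c and priv.h together show the split between the generic VM layer and the per-chipset backends: the generic code resolves vm->pgt[pde].mem[big] (the small- or large-page table backing that PDE) and then dispatches through the mmu->func->map/map_sg/unmap hooks declared in priv.h. A condensed sketch of that dispatch; the type and field names are simplified, and the real walk also splits a request wherever it crosses a page-table boundary, which is omitted here.

#include <stdint.h>
#include <stddef.h>

struct pgt_slot { uint32_t *mem[2]; };           /* [0] = small pages, [1] = big */

struct mmu_func {
        void (*map_sg)(uint32_t *pgt, uint32_t pte,
                       const uint64_t *list, uint32_t cnt);
        void (*unmap)(uint32_t *pgt, uint32_t pte, uint32_t cnt);
};

struct vm {
        const struct mmu_func *func;
        struct pgt_slot *pgt;                    /* one slot per PDE */
};

static void vm_map_sg(struct vm *vm, int big, uint32_t pde, uint32_t pte,
                      const uint64_t *list, uint32_t cnt)
{
        uint32_t *pgt = vm->pgt[pde].mem[big];   /* table backing this PDE */

        if (pgt)
                vm->func->map_sg(pgt, pte, list, cnt);
}

/* Placeholder chipset hook; the real drivers plug in nv04/nv41/nv50/gf100 here. */
static void stub_map_sg(uint32_t *pgt, uint32_t pte,
                        const uint64_t *list, uint32_t cnt)
{
        while (cnt--)
                pgt[pte++] = (uint32_t)(*list++) | 1;
}

int main(void)
{
        static uint32_t small_table[16];
        struct pgt_slot slots[1] = { { { small_table, NULL } } };
        const struct mmu_func func = { stub_map_sg, NULL };
        struct vm vm = { &func, slots };
        uint64_t page = 0x1000;

        vm_map_sg(&vm, 0, 0, 3, &page, 1);
        return 0;
}
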
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/dma/
usernv04.c:53 struct nvkm_memory *pgt = mmu->vm->pgt[0].mem[0]; in nv04_dmaobj_bind() local
55 return nvkm_gpuobj_wrap(pgt, pgpuobj); in nv04_dmaobj_bind()
56 nvkm_kmap(pgt); in nv04_dmaobj_bind()
57 offset = nvkm_ro32(pgt, 8 + (offset >> 10)); in nv04_dmaobj_bind()
59 nvkm_done(pgt); in nv04_dmaobj_bind()
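
The read in nv04_dmaobj_bind() folds two steps into one shift: with 4 KiB pages and 4-byte PTEs, the entry for a byte offset sits at (offset >> 12) * 4 = offset >> 10 into the PTE area, and the + 8 presumably skips a small header at the start of the nv04 table (the header itself is not visible in these results, so treat that as an assumption). A quick standalone check of the shift equivalence:

#include <stdint.h>
#include <assert.h>

static uint32_t pte_byte_offset(uint32_t offset)
{
        return 8 + (offset >> 10);                  /* as on line 57 above */
}

int main(void)
{
        for (uint32_t offset = 0; offset < (1u << 20); offset += 0x1000)
                assert(pte_byte_offset(offset) == 8 + (offset >> 12) * 4);
        return 0;
}
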
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/subdev/
mmu.h:37 struct nvkm_vm_pgt *pgt; member
/linux-4.4.14/arch/x86/platform/efi/
efi_stub_64.S:61 movq efi_scratch+16(%rip), %r15 # EFI pgt
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bar/
gf100.c:148 nvkm_memory_del(&bar->bar[0].vm->pgt[0].mem[0]); in gf100_bar_dtor()
nv50.c:180 nvkm_memory_del(&bar->bar3_vm->pgt[0].mem[0]); in nv50_bar_dtor()
/linux-4.4.14/arch/x86/kernel/cpu/
perf_event_intel_uncore_nhmex.c:870 DEFINE_UNCORE_FORMAT_ATTR(pgt, pgt, "config1:0-31");