Lines matching references to pgt, grouped by function:
In nv50_vm_map_pgt():
  32  nv50_vm_map_pgt(struct nvkm_gpuobj *pgd, u32 pde, struct nvkm_memory *pgt[2])
  37      if (pgt[0]) {
  39          phys = 0x00000003 | nvkm_memory_addr(pgt[0]);
  40          coverage = (nvkm_memory_size(pgt[0]) >> 3) << 12;
  42      if (pgt[1]) {
  44          phys = 0x00000001 | nvkm_memory_addr(pgt[1]);
  45          coverage = (nvkm_memory_size(pgt[1]) >> 3) << 16;
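
The nv50_vm_map_pgt() lines encode a page-directory entry from two pieces of information: the address of the backing page-table object (ORed with low type bits, 0x00000003 for the small-page table and 0x00000001 for the large-page one) and how much virtual space that table covers. Since each PTE is 8 bytes, size >> 3 is the entry count, and shifting by the page shift (12 or 16) turns that into bytes of coverage. Below is a minimal standalone sketch of just that arithmetic; encode_pde, pgt_addr and pgt_size are made-up names standing in for the NVKM helpers, not driver API.

/* Hypothetical model of the PDE encoding shown above; not the NVKM API. */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

struct pde_enc {
    uint64_t phys;     /* page-table address ORed with the low type bits */
    uint32_t coverage; /* bytes of virtual space the table can map */
};

/* size >> 3 gives the PTE count (8-byte entries); shifting by the page
 * shift (12 for small pages, 16 for large pages) gives the coverage. */
static struct pde_enc encode_pde(uint64_t pgt_addr, uint64_t pgt_size,
                                 int large_pages)
{
    struct pde_enc e;

    if (!large_pages) {
        e.phys = 0x00000003ULL | pgt_addr;             /* small-page table */
        e.coverage = (uint32_t)((pgt_size >> 3) << 12);
    } else {
        e.phys = 0x00000001ULL | pgt_addr;             /* large-page table */
        e.coverage = (uint32_t)((pgt_size >> 3) << 16);
    }
    return e;
}

int main(void)
{
    /* e.g. a 32 KiB small-page table: 4096 PTEs -> 16 MiB of coverage */
    struct pde_enc e = encode_pde(0x00100000, 32 << 10, 0);

    printf("phys=0x%" PRIx64 " coverage=0x%x\n", e.phys, e.coverage);
    return 0;
}
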
In nv50_vm_map() (pgt is a function argument):
  77  nv50_vm_map(struct nvkm_vma *vma, struct nvkm_memory *pgt,
  96      nvkm_kmap(pgt);
 117      nvkm_wo32(pgt, pte + 0, offset_l);
 118      nvkm_wo32(pgt, pte + 4, offset_h);
 123      nvkm_done(pgt);
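
nv50_vm_map() stores each 8-byte PTE as two 32-bit words, offset_l at pte + 0 and offset_h at pte + 4, and wraps the stores in nvkm_kmap()/nvkm_done(), which (roughly) acquire CPU access to the page-table object and flush it afterwards. The sketch below models only that bracketing and split write; fake_pgt, kmap, wo32 and done are illustrative stand-ins, not the NVKM calls.

/* Stand-in model of the nvkm_kmap()/nvkm_wo32()/nvkm_done() bracket used
 * above; the "page table" is just a word array in host memory. */
#include <stdint.h>
#include <stdio.h>

struct fake_pgt {
    uint32_t words[4];
    int mapped;        /* tracks the kmap()/done() bracket */
};

static void kmap(struct fake_pgt *pgt) { pgt->mapped = 1; }
static void done(struct fake_pgt *pgt) { pgt->mapped = 0; }

/* 32-bit store at a byte offset, only honoured inside the bracket */
static void wo32(struct fake_pgt *pgt, uint32_t byte_off, uint32_t val)
{
    if (pgt->mapped)
        pgt->words[byte_off / 4] = val;
}

int main(void)
{
    struct fake_pgt pgt = { { 0 }, 0 };
    uint64_t entry = 0x0000000112345001ULL; /* made-up 8-byte PTE value */
    uint32_t pte = 0;                       /* byte offset of the entry */

    kmap(&pgt);
    wo32(&pgt, pte + 0, (uint32_t)entry);         /* offset_l */
    wo32(&pgt, pte + 4, (uint32_t)(entry >> 32)); /* offset_h */
    done(&pgt);

    printf("%08x %08x\n", pgt.words[1], pgt.words[0]);
    return 0;
}
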
In nv50_vm_map_sg() (pgt is a function argument):
 127  nv50_vm_map_sg(struct nvkm_vma *vma, struct nvkm_memory *pgt,
 132      nvkm_kmap(pgt);
 135      nvkm_wo32(pgt, pte + 0, lower_32_bits(phys));
 136      nvkm_wo32(pgt, pte + 4, upper_32_bits(phys));
 139      nvkm_done(pgt);
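
nv50_vm_map_sg() handles system-memory (scatter/gather) pages, so its loop body runs once per page: the per-page physical address is split with lower_32_bits()/upper_32_bits() into the two PTE words and pte advances by 8 for the next entry. A standalone model of that fill loop over a hypothetical list of page addresses (flag bits in the real PTEs are omitted here):

/* Illustrative only: one 8-byte PTE per page address, the way the
 * nv50_vm_map_sg() loop walks its page list; PTE flag bits are omitted. */
#include <stdint.h>
#include <stdio.h>

static void map_sg(uint32_t *pgt, uint32_t pte, const uint64_t *pages,
                   uint32_t cnt)
{
    while (cnt--) {
        uint64_t phys = *pages++;

        pgt[pte / 4 + 0] = (uint32_t)phys;         /* lower_32_bits(phys) */
        pgt[pte / 4 + 1] = (uint32_t)(phys >> 32); /* upper_32_bits(phys) */
        pte += 8;                                  /* next 8-byte entry   */
    }
}

int main(void)
{
    uint32_t pgt[8] = { 0 };
    const uint64_t pages[] = { 0x1000, 0x3000, 0x18000 }; /* hypothetical */

    map_sg(pgt, 0, pages, 3);
    for (int i = 0; i < 3; i++)
        printf("pte%d: %08x %08x\n", i, pgt[2 * i + 1], pgt[2 * i]);
    return 0;
}
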
In nv50_vm_unmap() (pgt is a function argument):
 143  nv50_vm_unmap(struct nvkm_vma *vma, struct nvkm_memory *pgt, u32 pte, u32 cnt)
 146      nvkm_kmap(pgt);
 148      nvkm_wo32(pgt, pte + 0, 0x00000000);
 149      nvkm_wo32(pgt, pte + 4, 0x00000000);
 152      nvkm_done(pgt);
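
nv50_vm_unmap() is the inverse: under the same kmap/done bracket it writes both words of each of the cnt entries back to zero, leaving all-zero (hence invalid) PTEs. A tiny standalone model, again with illustrative names rather than driver API:

/* Illustrative model of clearing cnt consecutive 8-byte PTEs, mirroring
 * the nv50_vm_unmap() loop; pgt is a bare word array here. */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void unmap(uint32_t *pgt, uint32_t pte, uint32_t cnt)
{
    while (cnt--) {
        pgt[pte / 4 + 0] = 0x00000000;
        pgt[pte / 4 + 1] = 0x00000000;
        pte += 8;
    }
}

int main(void)
{
    uint32_t pgt[8];

    memset(pgt, 0xff, sizeof(pgt)); /* pretend all four PTEs were populated */
    unmap(pgt, 8, 2);               /* clear entries 1 and 2 */
    for (int i = 0; i < 4; i++)
        printf("pte%d: %08x %08x\n", i, pgt[2 * i + 1], pgt[2 * i]);
    return 0;
}
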