pgt              1197 arch/powerpc/kvm/book3s_64_mmu_radix.c 	pgd_t *pgt;
pgt              1234 arch/powerpc/kvm/book3s_64_mmu_radix.c 	pgt = NULL;
pgt              1238 arch/powerpc/kvm/book3s_64_mmu_radix.c 			pgt = NULL;
pgt              1248 arch/powerpc/kvm/book3s_64_mmu_radix.c 		if (!pgt) {
pgt              1250 arch/powerpc/kvm/book3s_64_mmu_radix.c 				pgt = kvm->arch.pgtable;
pgt              1257 arch/powerpc/kvm/book3s_64_mmu_radix.c 				pgt = nested->shadow_pgtable;
pgt              1266 arch/powerpc/kvm/book3s_64_mmu_radix.c 				      "pgdir: %lx\n", (unsigned long)pgt);
pgt              1271 arch/powerpc/kvm/book3s_64_mmu_radix.c 		pgdp = pgt + pgd_index(gpa);
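
The book3s_64_mmu_radix.c hits above come from the radix table walker: it first picks which partition-scoped table to traverse (the L1 guest's own table, or a nested guest's shadow table), then indexes the top level with pgd_index(). A minimal sketch of that selection, using the kvm->arch.pgtable and nested->shadow_pgtable fields visible above; walk_below() is a hypothetical stand-in for the rest of the descent:

/* Sketch only: choose which radix tree to walk, as in the hits above.
 * walk_below() is a hypothetical placeholder for the lower levels. */
static void walk_guest_radix(struct kvm *kvm, struct kvm_nested_guest *nested,
			     unsigned long gpa)
{
	pgd_t *pgt, *pgdp;

	if (!nested)
		pgt = kvm->arch.pgtable;	/* L1 partition-scoped table */
	else
		pgt = nested->shadow_pgtable;	/* nested guest's shadow table */

	pgdp = pgt + pgd_index(gpa);		/* index the top level by GPA */
	walk_below(pgdp, gpa);
}
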
pgt               131 arch/s390/include/asm/gmap.h int gmap_shadow_pgt(struct gmap *sg, unsigned long saddr, unsigned long pgt,
pgt               134 arch/s390/include/asm/gmap.h 			   unsigned long *pgt, int *dat_protection, int *fake);
pgt               983 arch/s390/kvm/gaccess.c 				  unsigned long *pgt, int *dat_protection,
pgt              1148 arch/s390/kvm/gaccess.c 	*pgt = ptr;
pgt              1169 arch/s390/kvm/gaccess.c 	unsigned long pgt;
pgt              1181 arch/s390/kvm/gaccess.c 	rc = gmap_shadow_pgt_lookup(sg, saddr, &pgt, &dat_protection, &fake);
pgt              1183 arch/s390/kvm/gaccess.c 		rc = kvm_s390_shadow_tables(sg, saddr, &pgt, &dat_protection,
pgt              1188 arch/s390/kvm/gaccess.c 		pte.val = pgt + vaddr.px * PAGE_SIZE;
pgt              1192 arch/s390/kvm/gaccess.c 		rc = gmap_read_table(sg->parent, pgt + vaddr.px * 8, &pte.val);
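
The gaccess.c hits trace the shadow-fault path: try the cached shadow page-table lookup first, build the shadow tables on a miss, then derive the pte either arithmetically (a "fake" pgt backed by a large frame, where entries are computed rather than stored) or by reading the 8-byte entry from the parent gmap. A condensed sketch of that flow, assuming the union vaddress / union page_table_entry types used in gaccess.c; shadow_pte_for() is a hypothetical wrapper, not a kernel function:

/* Condensed sketch of the lookup-or-build pattern around line 1181;
 * locking and most error handling omitted. */
static int shadow_pte_for(struct gmap *sg, unsigned long saddr,
			  union vaddress vaddr, union page_table_entry *pte)
{
	unsigned long pgt;
	int dat_protection, fake, rc;

	rc = gmap_shadow_pgt_lookup(sg, saddr, &pgt, &dat_protection, &fake);
	if (rc)
		rc = kvm_s390_shadow_tables(sg, saddr, &pgt, &dat_protection,
					    &fake);
	if (rc)
		return rc;
	if (fake) {		/* entry is synthesized, not read */
		pte->val = pgt + vaddr.px * PAGE_SIZE;
		return 0;
	}
	/* real table: read the 8-byte pte from the parent address space */
	return gmap_read_table(sg->parent, pgt + vaddr.px * 8, &pte->val);
}
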
pgt              1307 arch/s390/mm/gmap.c 				unsigned long *pgt)
pgt              1313 arch/s390/mm/gmap.c 		pgt[i] = _PAGE_INVALID;
pgt              1325 arch/s390/mm/gmap.c 	unsigned long sto, *ste, *pgt;
pgt              1335 arch/s390/mm/gmap.c 	pgt = (unsigned long *)(*ste & _SEGMENT_ENTRY_ORIGIN);
pgt              1337 arch/s390/mm/gmap.c 	__gmap_unshadow_pgt(sg, raddr, pgt);
pgt              1339 arch/s390/mm/gmap.c 	page = pfn_to_page(__pa(pgt) >> PAGE_SHIFT);
pgt              1355 arch/s390/mm/gmap.c 	unsigned long *pgt;
pgt              1363 arch/s390/mm/gmap.c 		pgt = (unsigned long *)(sgt[i] & _REGION_ENTRY_ORIGIN);
pgt              1365 arch/s390/mm/gmap.c 		__gmap_unshadow_pgt(sg, raddr, pgt);
pgt              1367 arch/s390/mm/gmap.c 		page = pfn_to_page(__pa(pgt) >> PAGE_SHIFT);
pgt              1987 arch/s390/mm/gmap.c 			   unsigned long *pgt, int *dat_protection,
pgt              2000 arch/s390/mm/gmap.c 		*pgt = page->index & ~GMAP_SHADOW_FAKE_TABLE;
pgt              2026 arch/s390/mm/gmap.c int gmap_shadow_pgt(struct gmap *sg, unsigned long saddr, unsigned long pgt,
pgt              2034 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg) || (pgt & _SEGMENT_ENTRY_LARGE));
pgt              2039 arch/s390/mm/gmap.c 	page->index = pgt & _SEGMENT_ENTRY_ORIGIN;
pgt              2059 arch/s390/mm/gmap.c 		 (pgt & _SEGMENT_ENTRY_PROTECT) | _SEGMENT_ENTRY_INVALID;
pgt              2070 arch/s390/mm/gmap.c 	origin = pgt & _SEGMENT_ENTRY_ORIGIN & PAGE_MASK;
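
The gmap.c hits pair table construction (gmap_shadow_pgt(), which stashes the guest table origin in page->index and installs a protected, invalid segment entry until the table is wired up) with teardown: recover the table origin by masking the segment entry, invalidate every entry, then free the backing page. A sketch of the teardown helper, reconstructed from lines 1307-1339; the _PAGE_ENTRIES bound is an assumption consistent with the other s390 unshadow helpers:

/* Sketch: invalidate all shadow ptes in one page table. */
static void __gmap_unshadow_pgt(struct gmap *sg, unsigned long raddr,
				unsigned long *pgt)
{
	int i;

	for (i = 0; i < _PAGE_ENTRIES; i++, raddr += _PAGE_SIZE)
		pgt[i] = _PAGE_INVALID;
}

/* Caller pattern (cf. lines 1335-1339): mask the origin out of the
 * segment entry, unshadow, then free the page backing the table. */
pgt = (unsigned long *)(*ste & _SEGMENT_ENTRY_ORIGIN);
__gmap_unshadow_pgt(sg, raddr, pgt);
page = pfn_to_page(__pa(pgt) >> PAGE_SHIFT);
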
pgt               877 arch/x86/events/intel/uncore_nhmex.c DEFINE_UNCORE_FORMAT_ATTR(pgt,			pgt,		"config1:0-31");
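
The single x86 hit is unrelated to page tables: here pgt names a perf uncore event format field occupying bits 0-31 of config1. DEFINE_UNCORE_FORMAT_ATTR() emits a read-only sysfs attribute whose show routine just prints the field spec; roughly (the exact expansion varies across kernel versions):

/* Approximate expansion, for illustration only: */
static ssize_t __uncore_pgt_show(struct device *dev,
				 struct device_attribute *attr, char *page)
{
	return sprintf(page, "config1:0-31\n");
}
static struct device_attribute format_attr_pgt =
	__ATTR(pgt, 0444, __uncore_pgt_show, NULL);
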
pgt                52 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c 		struct nvkm_memory *pgt =
pgt                55 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c 			return nvkm_gpuobj_wrap(pgt, pgpuobj);
pgt                56 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c 		nvkm_kmap(pgt);
pgt                57 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c 		offset  = nvkm_ro32(pgt, 8 + (offset >> 10));
pgt                59 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c 		nvkm_done(pgt);
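
The usernv04.c hits show the standard nvkm accessor discipline around a page-table read: nvkm_kmap() to CPU-map the object, nvkm_ro32() for the access, nvkm_done() to finish. With 4-byte PTEs covering 4KiB pages after an 8-byte header, the entry for a given offset sits at byte 8 + (offset >> 10). A small sketch of that lookup as a hypothetical helper:

/* Sketch: read the 32-bit nv04 PTE that maps `offset`. */
static u32 nv04_pte_lookup(struct nvkm_memory *pgt, u64 offset)
{
	u32 pte;

	nvkm_kmap(pgt);				/* CPU-map the object */
	pte = nvkm_ro32(pgt, 8 + (offset >> 10)); /* (offset >> 12) * 4 */
	nvkm_done(pgt);				/* flush and unmap */
	return pte;
}
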
pgt                30 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	struct nvkm_vmm_pt *pgt = *ppgt;
pgt                31 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (pgt) {
pgt                32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		kvfree(pgt->pde);
pgt                33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		kfree(pgt);
pgt                44 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	struct nvkm_vmm_pt *pgt;
pgt                56 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (!(pgt = kzalloc(sizeof(*pgt) + lpte, GFP_KERNEL)))
pgt                58 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgt->page = page ? page->shift : 0;
pgt                59 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgt->sparse = sparse;
pgt                62 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pde = kvcalloc(pten, sizeof(*pgt->pde), GFP_KERNEL);
pgt                63 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (!pgt->pde) {
pgt                64 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			kfree(pgt);
pgt                69 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	return pgt;
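
Lines 30-69 of vmm.c are the allocator/destructor pair for the software page-table node: nvkm_vmm_pt_new() kzalloc()s the node together with a trailing per-large-PTE byte array (lpte), and for levels that have children kvcalloc()s the pde[] pointer array, freeing the node again if that fails. The destructor unwinds in the same order, as in this trimmed sketch:

/* Trimmed sketch of the destructor side (cf. vmm.c:30-33). */
static void nvkm_vmm_pt_del(struct nvkm_vmm_pt **ppgt)
{
	struct nvkm_vmm_pt *pgt = *ppgt;

	if (pgt) {
		kvfree(pgt->pde);	/* child-pointer array (kvcalloc) */
		kfree(pgt);		/* node + trailing pte[] (kzalloc) */
		*ppgt = NULL;
	}
}
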
pgt               143 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	struct nvkm_vmm_pt *pgt = it->pt[it->lvl];
pgt               144 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	struct nvkm_mmu_pt *pt = pgt->pt[type];
pgt               154 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pt[type] = NULL;
pgt               155 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (!pgt->refs[!type]) {
pgt               158 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				if (pgt->sparse) {
pgt               191 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (!pgt->refs[!type])
pgt               192 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		nvkm_vmm_pt_del(&pgt);
pgt               197 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt,
pgt               211 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pte[lpti] -= pten;
pgt               216 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (!pgt->refs[0])
pgt               221 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (pgt->pte[pteb] & NVKM_VMM_PTE_SPTES) {
pgt               223 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				if (!(pgt->pte[ptei] & NVKM_VMM_PTE_SPTES))
pgt               235 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pte[ptei] &= ~NVKM_VMM_PTE_VALID;
pgt               237 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			if (pgt->pte[ptei] & NVKM_VMM_PTE_SPTES)
pgt               239 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pgt->pte[ptei] &= ~NVKM_VMM_PTE_VALID;
pgt               242 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (pgt->pte[pteb] & NVKM_VMM_PTE_SPARSE) {
pgt               244 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pair->func->sparse(vmm, pgt->pt[0], pteb, ptes);
pgt               252 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pair->func->invalid(vmm, pgt->pt[0], pteb, ptes);
pgt               262 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	struct nvkm_vmm_pt *pgt = it->pt[0];
pgt               267 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		dma = desc->func->pfn_clear(it->vmm, pgt->pt[type], ptei, ptes);
pgt               272 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			desc->func->pfn_unmap(it->vmm, pgt->pt[type], ptei, ptes);
pgt               277 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgt->refs[type] -= ptes;
pgt               280 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (desc->type == SPT && (pgt->refs[0] || pgt->refs[1]))
pgt               281 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		nvkm_vmm_unref_sptes(it, pgt, desc, ptei, ptes);
pgt               284 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (!pgt->refs[type]) {
pgt               296 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt,
pgt               310 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pte[lpti] += pten;
pgt               315 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (!pgt->refs[0])
pgt               320 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (pgt->pte[pteb] & NVKM_VMM_PTE_VALID) {
pgt               322 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				if (!(pgt->pte[ptei] & NVKM_VMM_PTE_VALID))
pgt               334 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pte[ptei] |= NVKM_VMM_PTE_VALID;
pgt               336 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			if (pgt->pte[ptei] & NVKM_VMM_PTE_VALID)
pgt               338 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pgt->pte[ptei] |= NVKM_VMM_PTE_VALID;
pgt               341 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (pgt->pte[pteb] & NVKM_VMM_PTE_SPARSE) {
pgt               348 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			desc->func->sparse(vmm, pgt->pt[1], spti, sptc);
pgt               351 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pair->func->unmap(vmm, pgt->pt[0], pteb, ptes);
pgt               358 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pair->func->unmap(vmm, pgt->pt[0], pteb, ptes);
pgt               368 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	struct nvkm_vmm_pt *pgt = it->pt[0];
pgt               371 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgt->refs[type] += ptes;
pgt               375 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		nvkm_vmm_ref_sptes(it, pgt, desc, ptei, ptes);
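
nvkm_vmm_unref_ptes() (lines 262-292) and nvkm_vmm_ref_ptes() (lines 366-376) are mirror images: each adjusts pgt->refs[type] by the number of PTEs touched, and for small-page levels updates the per-large-PTE bookkeeping in pgt->pte[], whose low bits count resident small PTEs while NVKM_VMM_PTE_VALID/SPARSE live in the flag bits. A distilled sketch of that symmetry; ref_ptes()/unref_ptes() here are simplified stand-ins, not the kernel functions, and refs[] is indexed 1 for small-page (SPT) tables and 0 for everything else:

/* Distilled sketch of the reference accounting. */
static void ref_ptes(struct nvkm_vmm_pt *pgt, int type, u32 lpti, u32 pten,
		     u32 ptes)
{
	pgt->refs[type] += ptes;	/* take references */
	if (type == 1)			/* SPT: bump the per-LPTE counter */
		pgt->pte[lpti] += pten;
}

static void unref_ptes(struct nvkm_vmm_pt *pgt, int type, u32 lpti, u32 pten,
		       u32 ptes)
{
	if (type == 1)			/* SPT: drop the per-LPTE counter */
		pgt->pte[lpti] -= pten;
	pgt->refs[type] -= ptes;	/* release; zero triggers teardown */
}
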
pgt               382 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		     struct nvkm_vmm_pt *pgt, u32 ptei, u32 ptes)
pgt               386 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pgt->pde[ptei++] = NVKM_VMM_PDE_SPARSE;
pgt               389 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		memset(&pgt->pte[ptei], NVKM_VMM_PTE_SPARSE, ptes);
pgt               417 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	struct nvkm_vmm_pt *pgt = pgd->pde[pdei];
pgt               418 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	const bool zero = !pgt->sparse && !desc->func->invalid;
pgt               428 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgt->pt[type] = nvkm_mmu_ptc_get(mmu, size, desc->align, zero);
pgt               429 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (!pgt->pt[type]) {
pgt               438 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pt = pgt->pt[type];
pgt               440 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (desc->type == LPT && pgt->refs[1]) {
pgt               448 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			bool spte = pgt->pte[ptei] & NVKM_VMM_PTE_SPTES;
pgt               450 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				bool next = pgt->pte[ptei] & NVKM_VMM_PTE_SPTES;
pgt               456 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				if (pgt->sparse)
pgt               460 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				memset(&pgt->pte[pteb], 0x00, ptes);
pgt               464 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 					pgt->pte[pteb++] |= NVKM_VMM_PTE_VALID;
pgt               468 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (pgt->sparse) {
pgt               469 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			nvkm_vmm_sparse_ptes(desc, pgt, 0, pten);
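
nvkm_vmm_ref_hwpt() (lines 411-470) backs a software node with real table memory. Note the zero-fill decision on line 418: pre-zeroed memory is requested only when the level is neither sparse nor equipped with an invalid() hook, i.e. only when all-zero bytes already encode "empty"; otherwise the entries get rewritten immediately anyway. The allocation step, in brief:

/* Sketch of the allocation step (cf. vmm.c:417-429). */
const bool zero = !pgt->sparse && !desc->func->invalid;
pgt->pt[type] = nvkm_mmu_ptc_get(mmu, size, desc->align, zero);
if (!pgt->pt[type])
	return false;	/* allocation failed; unwind */
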
pgt               487 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	struct nvkm_vmm_pt *pgt = pgd->pde[pdei];
pgt               489 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgt = nvkm_vmm_pt_new(desc, NVKM_VMM_PDE_SPARSED(pgt), it->page);
pgt               490 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (!pgt) {
pgt               496 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgd->pde[pdei] = pgt;
pgt               532 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		struct nvkm_vmm_pt *pgt = it.pt[it.lvl];
pgt               541 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			struct nvkm_vmm_pt *pgd = pgt;
pgt               548 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			it.pt[it.lvl - 1] = pgt = pgd->pde[pdei];
pgt               556 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			if (ref && !pgt->refs[desc[it.lvl - 1].type == SPT]) {
pgt               564 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			struct nvkm_mmu_pt *pt = pgt->pt[type];
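
The iterator (lines 532-564) keeps the chain of nodes for the current address in it.pt[], one slot per level. Descending a level means indexing the parent's pde[] array and, on the referencing path, first materializing a missing child via nvkm_vmm_ref_swpt()/nvkm_vmm_ref_hwpt(). The core of the descent, lightly abridged (the real code also treats the SPARSE sentinel as "missing"):

/* Abridged descent step (cf. vmm.c:541-556). */
struct nvkm_vmm_pt *pgd = pgt;
if (ref && !pgd->pde[pdei]) {		/* child missing on a ref walk */
	if (!nvkm_vmm_ref_swpt(&it, pgd, pdei))
		goto fail;
}
it.pt[it.lvl - 1] = pgt = pgd->pde[pdei];	/* step down one level */
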
pgt               108 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	struct nvkm_vmm_pt *pgt = pgd->pde[pdei];
pgt               113 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	if ((pt = pgt->pt[0])) {
pgt               127 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	if ((pt = pgt->pt[1])) {
pgt               232 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	struct nvkm_vmm_pt *pgt = pgd->pde[pdei];
pgt               236 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	if (pgt->pt[0] && !gp100_vmm_pde(pgt->pt[0], &data[0]))
pgt               238 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	if (pgt->pt[1] && !gp100_vmm_pde(pgt->pt[1], &data[1]))
pgt               272 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	struct nvkm_vmm_pt *pgt = pgd->pde[pdei];
pgt               276 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	if (!gp100_vmm_pde(pgt->pt[0], &data))
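
The vmmgf100.c and vmmgp100.c hits show why each node carries two hardware tables: pt[1] holds the small-page (SPT) table and pt[0] the large-page/default one, and both are folded into a single dual PDE. On gp100 that is a 16-byte entry with one half per table, as sketched here; only the two checks are verbatim from the lines above, the write step is paraphrased in the trailing comment:

/* Sketch of the gp100 dual-PDE update (cf. vmmgp100.c:232-238). */
u64 data[2] = {};

if (pgt->pt[0] && !gp100_vmm_pde(pgt->pt[0], &data[0]))
	return;			/* large-page half */
if (pgt->pt[1] && !gp100_vmm_pde(pgt->pt[1], &data[1]))
	return;			/* small-page half */
/* ...both halves are then written to the 16-byte PDE at pdei * 0x10. */
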
pgt               106 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pde(struct nvkm_vmm *vmm, struct nvkm_vmm_pt *pgt, u64 *pdata)
pgt               110 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	if (pgt && (pt = pgt->pt[0])) {
pgt               111 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		switch (pgt->page) {