pten               43 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	const u32 pten = 1 << desc->bits;
pten               50 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			lpte = pten >> (desc->bits - pair->bits);
pten               52 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			lpte = pten;
pten               62 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pde = kvcalloc(pten, sizeof(*pgt->pde), GFP_KERNEL);
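
The vmm.c hits at file lines 43-62 use pten as the entry count of a single page table: 1 << desc->bits entries, used both to size the pde array (line 62) and, via the shift at line 50, to work out how many per-large-PTE counters a small-page table needs. Below is a minimal standalone sketch of that arithmetic only, with made-up bit widths (desc_bits, pair_bits); it is not the driver's nvkm_vmm_pt_new().

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Made-up example widths, not values taken from the driver. */
	const uint32_t desc_bits = 8;            /* 256 PTEs per table        */
	const uint32_t pair_bits = 5;            /* hypothetical paired level */
	const uint32_t pten = 1u << desc_bits;   /* entries in one table      */

	/* One counter per group of 2^(desc_bits - pair_bits) small PTEs. */
	const uint32_t lpte = pten >> (desc_bits - pair_bits);

	printf("pten = %u, lpte counters = %u\n", pten, lpte);
	return 0;
}
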
pten              210 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		const u32 pten = min(sptn - spti, ptes);
pten              211 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pte[lpti] -= pten;
pten              212 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		ptes -= pten;
pten              309 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		const u32 pten = min(sptn - spti, ptes);
pten              310 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		pgt->pte[lpti] += pten;
pten              311 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		ptes -= pten;
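
Lines 210-212 and 309-311 step through a range of small PTEs in chunks clamped to the current group (pten = min(sptn - spti, ptes)), adjusting one counter per step. The following is a hedged, self-contained sketch of that stepping pattern only; the names (refs, sptb) and the loop structure around the clamped step are guessed from these hits, not copied from the driver.

#include <stdint.h>
#include <stdio.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))

int main(void)
{
	const uint32_t sptb = 3;              /* 8 small PTEs per large PTE */
	const uint32_t sptn = 1u << sptb;
	uint32_t refs[8] = { 0 };             /* one counter per large PTE  */

	uint32_t ptei = 5, ptes = 20;         /* arbitrary small-PTE range  */
	uint32_t spti = ptei & (sptn - 1);
	uint32_t lpti = ptei >> sptb;

	while (ptes) {
		/* Clamp the step to the end of the current group. */
		const uint32_t pten = MIN(sptn - spti, ptes);

		refs[lpti] += pten;
		ptes -= pten;
		spti = 0;                     /* later groups start aligned */
		lpti++;
	}

	for (uint32_t i = 0; i < 4; i++)
		printf("refs[%u] = %u\n", i, refs[i]);
	return 0;
}
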
pten              422 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	u32 pten = 1 << desc->bits;
pten              424 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	u32 size = desc->size * pten;
pten              447 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		for (ptei = pteb = 0; ptei < pten; pteb = ptei) {
pten              449 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			for (ptes = 1, ptei++; ptei < pten; ptes++, ptei++) {
pten              469 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			nvkm_vmm_sparse_ptes(desc, pgt, 0, pten);
pten              470 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			desc->func->sparse(vmm, pt, 0, pten);
pten              472 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			desc->func->invalid(vmm, pt, 0, pten);
pten              534 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		const u32 pten = 1 << desc->bits;
pten              536 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		const u32 ptes = min_t(u64, it.cnt, pten - ptei);
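
In the remaining vmm.c hits (file lines 422-536), pten is the full entry count of a table: a loop bound when scanning spans (447-449), the range for the sparse/invalid fills (469-472), and at line 536 a clamp so one iteration never crosses a table boundary. Below is a small sketch of that last clamp, assuming a hypothetical 256-entry table; the helper name is invented.

#include <stdint.h>
#include <stdio.h>

static uint32_t clamp_to_table(uint64_t remaining, uint32_t pten, uint32_t ptei)
{
	const uint64_t room = pten - ptei;     /* PTEs left in this table */
	return remaining < room ? (uint32_t)remaining : (uint32_t)room;
}

int main(void)
{
	const uint32_t desc_bits = 8;
	const uint32_t pten = 1u << desc_bits; /* 256 PTEs per table */

	printf("%u\n", clamp_to_table(1000, pten, 200)); /* 56: limited by table */
	printf("%u\n", clamp_to_table(10,   pten, 200)); /* 10: limited by count */
	return 0;
}
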
pten               79 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		const u32 pten = min(ptes, 4 - (ptei & 3));
pten               80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		for (i = 0; i < pten; i++, addr += 0x1000)
pten               82 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten);
pten               83 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptei += pten;
pten               84 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= pten;
pten              118 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		const u32 pten = min(ptes, 4 - (ptei & 3));
pten              119 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		nv44_vmm_pgt_fill(vmm, pt, map->dma, ptei, pten);
pten              120 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptei += pten;
pten              121 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= pten;
pten              122 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		map->dma += pten;
pten              152 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		const u32 pten = min(ptes, 4 - (ptei & 3));
pten              153 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		nv44_vmm_pgt_fill(vmm, pt, NULL, ptei, pten);
pten              154 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptei += pten;
pten              155 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= pten;
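
All three vmmnv44.c loops use the same step: pten = min(ptes, 4 - (ptei & 3)), i.e. chunks that never cross a 4-entry group boundary, with each chunk handed to nv44_vmm_pgt_fill(). The sketch below reproduces only that stepping; the printf stands in for whatever per-chunk work the driver actually does.

#include <stdint.h>
#include <stdio.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))

int main(void)
{
	uint32_t ptei = 6, ptes = 11;          /* arbitrary example range */

	while (ptes) {
		/* Stop at the next multiple-of-4 boundary. */
		const uint32_t pten = MIN(ptes, 4 - (ptei & 3));

		printf("fill PTEs [%u, %u)\n", ptei, ptei + pten);
		ptei += pten;
		ptes -= pten;
	}
	return 0;
}
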
pten               36 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	u32 pten;
pten               43 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 			pten = 1 << log2blk;
pten               44 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 			if (ptes >= pten && IS_ALIGNED(ptei, pten))
pten               49 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		next += pten * map->next;
pten               50 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		ptes -= pten;
pten               52 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		while (pten--)
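
The vmmnv50.c hits pick a block size per iteration: pten = 1 << log2blk is accepted only when ptes >= pten and ptei is aligned to it (line 44), after which the address advances by pten * map->next and pten entries are written. Below is a standalone sketch of that selection loop under those assumptions, with an invented candidate list and no actual PTE writes.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool aligned(uint32_t v, uint32_t blk)
{
	return (v & (blk - 1)) == 0;
}

int main(void)
{
	const uint32_t log2blks[] = { 5, 3, 1, 0 };  /* candidate block sizes */
	uint32_t ptei = 8, ptes = 40;                /* arbitrary example     */

	while (ptes) {
		uint32_t pten = 1;

		/* Largest candidate block that both fits and is aligned. */
		for (unsigned i = 0; i < sizeof(log2blks) / sizeof(log2blks[0]); i++) {
			pten = 1u << log2blks[i];
			if (ptes >= pten && aligned(ptei, pten))
				break;
		}

		printf("map block of %u PTEs at index %u\n", pten, ptei);
		ptei += pten;
		ptes -= pten;
	}
	return 0;
}
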