pfn_base 92 arch/riscv/include/asm/page.h extern unsigned long pfn_base; pfn_base 115 arch/riscv/include/asm/page.h (((pfn) >= pfn_base) && (((pfn)-pfn_base) < max_mapnr)) pfn_base 118 arch/riscv/include/asm/page.h #define ARCH_PFN_OFFSET (pfn_base) pfn_base 148 arch/riscv/mm/init.c unsigned long pfn_base; pfn_base 149 arch/riscv/mm/init.c EXPORT_SYMBOL(pfn_base); pfn_base 354 arch/riscv/mm/init.c pfn_base = PFN_DOWN(load_pa); pfn_base 122 arch/sparc/include/asm/page_32.h extern unsigned long pfn_base; pfn_base 130 arch/sparc/include/asm/page_32.h #define ARCH_PFN_OFFSET (pfn_base) pfn_base 133 arch/sparc/include/asm/page_32.h #define pfn_valid(pfn) (((pfn) >= (pfn_base)) && (((pfn)-(pfn_base)) < max_mapnr)) pfn_base 89 arch/sparc/include/asm/pgtable_32.h extern unsigned long pfn_base; pfn_base 354 arch/sparc/kernel/setup_32.c pfn_base = phys_base >> PAGE_SHIFT; pfn_base 47 arch/sparc/mm/init_32.c unsigned long pfn_base; pfn_base 48 arch/sparc/mm/init_32.c EXPORT_SYMBOL(pfn_base); pfn_base 84 arch/sparc/mm/init_32.c unsigned long tmp = pfn_base + (SRMMU_MAXMEM >> PAGE_SHIFT); pfn_base 183 arch/sparc/mm/init_32.c if (max_low_pfn > pfn_base + (SRMMU_MAXMEM >> PAGE_SHIFT)) { pfn_base 184 arch/sparc/mm/init_32.c highstart_pfn = pfn_base + (SRMMU_MAXMEM >> PAGE_SHIFT); pfn_base 276 arch/sparc/mm/init_32.c max_mapnr = last_valid_pfn - pfn_base; pfn_base 990 arch/sparc/mm/srmmu.c npages = max_low_pfn - pfn_base; pfn_base 999 arch/sparc/mm/srmmu.c free_area_init_node(0, zones_size, pfn_base, zholes_size); pfn_base 412 drivers/gpu/drm/gma500/gtt.c unsigned pfn_base; pfn_base 528 drivers/gpu/drm/gma500/gtt.c pfn_base = dev_priv->stolen_base >> PAGE_SHIFT; pfn_base 531 drivers/gpu/drm/gma500/gtt.c num_pages, pfn_base << PAGE_SHIFT, 0); pfn_base 533 drivers/gpu/drm/gma500/gtt.c pte = psb_gtt_mask_pte(pfn_base + i, PSB_MMU_CACHED_MEMORY); pfn_base 541 drivers/gpu/drm/gma500/gtt.c pfn_base = page_to_pfn(dev_priv->scratch_page); pfn_base 542 drivers/gpu/drm/gma500/gtt.c pte = psb_gtt_mask_pte(pfn_base, PSB_MMU_CACHED_MEMORY);
pfn_base 398 drivers/vfio/vfio_iommu_type1.c long npage, unsigned long *pfn_base, pfn_base 410 drivers/vfio/vfio_iommu_type1.c ret = vaddr_get_pfn(current->mm, vaddr, dma->prot, pfn_base); pfn_base 415 drivers/vfio/vfio_iommu_type1.c rsvd = is_invalid_reserved_pfn(*pfn_base); pfn_base 423 drivers/vfio/vfio_iommu_type1.c put_pfn(*pfn_base, dma->prot); pfn_base 441 drivers/vfio/vfio_iommu_type1.c if (pfn != *pfn_base + pinned || pfn_base 466 drivers/vfio/vfio_iommu_type1.c for (pfn = *pfn_base ; pinned ; pfn++, pinned--) pfn_base 498 drivers/vfio/vfio_iommu_type1.c unsigned long *pfn_base, bool do_accounting) pfn_base 507 drivers/vfio/vfio_iommu_type1.c ret = vaddr_get_pfn(mm, vaddr, dma->prot, pfn_base); pfn_base 508 drivers/vfio/vfio_iommu_type1.c if (!ret && do_accounting && !is_invalid_reserved_pfn(*pfn_base)) { pfn_base 511 drivers/vfio/vfio_iommu_type1.c put_pfn(*pfn_base, dma->prot); pfn_base 15 kernel/dma/coherent.c unsigned long pfn_base; pfn_base 35 kernel/dma/coherent.c return (mem->pfn_base - dev->dma_pfn_offset) << PAGE_SHIFT; pfn_base 73 kernel/dma/coherent.c dma_mem->pfn_base = PFN_DOWN(phys_addr); pfn_base 250 kernel/dma/coherent.c unsigned long pfn = mem->pfn_base + start + off;