page_to_phys 928 arch/alpha/kernel/pci_iommu.c ptes[j] = mk_iommu_pte(page_to_phys(pages[i]));
page_to_phys 30 arch/arc/mm/dma.c dma_cache_wback_inv(page_to_phys(page), size);
page_to_phys 148 arch/arm/include/asm/pgalloc.h __pmd_populate(pmdp, page_to_phys(ptep), prot);
page_to_phys 40 arch/arm/kernel/patch.c set_fixmap(fixmap, page_to_phys(page));
page_to_phys 102 arch/arm/mm/dma-mapping-nommu.c dma_addr_t handle = page_to_phys(page) + offset;
page_to_phys 383 arch/arm/mm/dma-mapping.c page_to_phys(page),
page_to_phys 947 arch/arm/mm/dma-mapping.c paddr = page_to_phys(page) + off;
page_to_phys 959 arch/arm/mm/dma-mapping.c phys_addr_t paddr = page_to_phys(page) + off;
page_to_phys 1366 arch/arm/mm/dma-mapping.c phys_addr_t phys = page_to_phys(pages[i]);
page_to_phys 1635 arch/arm/mm/dma-mapping.c phys_addr_t phys = page_to_phys(sg_page(s));
page_to_phys 1851 arch/arm/mm/dma-mapping.c ret = iommu_map(mapping->domain, dma_addr, page_to_phys(page), len, prot);
page_to_phys 117 arch/arm64/include/asm/pgalloc.h __pmd_populate(pmdp, page_to_phys(ptep), PMD_TYPE_TABLE);
page_to_phys 95 arch/arm64/kernel/insn.c return (void *)set_fixmap_offset(fixmap, page_to_phys(page) +
page_to_phys 158 arch/arm64/kernel/machine_kexec.c reboot_code_buffer_phys = page_to_phys(kimage->control_code_page);
page_to_phys 21 arch/csky/mm/dma-mapping.c void *start = __va(page_to_phys(page));
page_to_phys 58 arch/csky/mm/dma-mapping.c cache_op(page_to_phys(page), size, dma_wbinv_set_zero_range);
page_to_phys 133 arch/hexagon/include/asm/page.h #define page_to_virt(page) __va(page_to_phys(page))
page_to_phys 1131 arch/ia64/hp/common/sba_iommu.c *dma_handle = page_to_phys(page);
page_to_phys 105 arch/ia64/include/asm/io.h #define page_to_bus page_to_phys
page_to_phys 77 arch/ia64/include/asm/pgalloc.h pmd_val(*pmd_entry) = page_to_phys(pte);
page_to_phys 76 arch/m68k/emu/nfblock.c page_to_phys(bvec.bv_page) + bvec.bv_offset);
page_to_phys 23 arch/m68k/kernel/dma.c cache_push(page_to_phys(page), size);
page_to_phys 127 arch/m68k/mm/cache.c : : "a" (page_to_phys(page)));
page_to_phys 37 arch/microblaze/include/asm/io.h #define page_to_bus(page) (page_to_phys(page))
page_to_phys 161 arch/microblaze/include/asm/page.h # define page_to_bus(page) (page_to_phys(page))
page_to_phys 20 arch/microblaze/mm/consistent.c phys_addr_t paddr = page_to_phys(page);
page_to_phys 592 arch/mips/jazz/jazzdma.c phys_addr_t phys = page_to_phys(page) + offset;
page_to_phys 81 arch/nds32/kernel/dma.c cache_op(page_to_phys(page), size, cpu_dma_wbinval_range);
page_to_phys 203 arch/nds32/mm/cacheflush.c pto = page_to_phys(to);
page_to_phys 204 arch/nds32/mm/cacheflush.c pfrom = page_to_phys(from);
page_to_phys 230 arch/nds32/mm/cacheflush.c vto = kremap0(vaddr, page_to_phys(page));
page_to_phys 256 arch/nds32/mm/cacheflush.c kto = kremap0(vaddr, page_to_phys(page));
page_to_phys 272 arch/nds32/mm/cacheflush.c vto = kremap0(vaddr, page_to_phys(page));
page_to_phys 293 arch/nds32/mm/cacheflush.c vto = kremap0(vaddr, page_to_phys(page));
page_to_phys 315 arch/nds32/mm/cacheflush.c ktmp = kremap0(vaddr, page_to_phys(page));
page_to_phys 98 arch/parisc/include/asm/cacheflush.h flush_dcache_page_asm(page_to_phys(page), vmaddr);
page_to_phys 367 arch/parisc/kernel/cache.c __flush_cache_page(mpnt, addr, page_to_phys(page));
page_to_phys 78 arch/parisc/kernel/kexec.c unsigned long phys = page_to_phys(image->control_code_page);
page_to_phys 42 arch/parisc/kernel/patch.c set_fixmap(fixmap, page_to_phys(page));
page_to_phys 831 arch/powerpc/include/asm/io.h #define page_to_bus(page) (page_to_phys(page) + PCI_DRAM_OFFSET)
page_to_phys 110 arch/riscv/include/asm/page.h #define page_to_bus(page) (page_to_phys(page))
page_to_phys 1205 arch/s390/include/asm/pgtable.h unsigned long physpage = page_to_phys(page);
page_to_phys 243 arch/s390/kernel/machine_kexec.c reboot_code_buffer = (void *) page_to_phys(image->control_code_page);
page_to_phys 283 arch/s390/kernel/machine_kexec.c data_mover = (relocate_kernel_t) page_to_phys(image->control_code_page);
page_to_phys 76 arch/s390/mm/gmap.c table = (unsigned long *) page_to_phys(page);
page_to_phys 315 arch/s390/mm/gmap.c new = (unsigned long *) page_to_phys(page);
page_to_phys 1750 arch/s390/mm/gmap.c s_r2t = (unsigned long *) page_to_phys(page);
page_to_phys 1834 arch/s390/mm/gmap.c s_r3t = (unsigned long *) page_to_phys(page);
page_to_phys 1918 arch/s390/mm/gmap.c s_sgt = (unsigned long *) page_to_phys(page);
page_to_phys 2042 arch/s390/mm/gmap.c s_pgt = (unsigned long *) page_to_phys(page);
page_to_phys 66 arch/s390/mm/page-states.c : "a" (page_to_phys(page)),
page_to_phys 78 arch/s390/mm/page-states.c : "a" (page_to_phys(page + i)),
page_to_phys 89 arch/s390/mm/page-states.c : "a" (page_to_phys(page + i)),
page_to_phys 100 arch/s390/mm/page-states.c : "a" (page_to_phys(page + i)),
page_to_phys 348 arch/s390/mm/pageattr.c address = page_to_phys(page + i);
page_to_phys 376 arch/s390/mm/pageattr.c addr = page_to_phys(page);
page_to_phys 61 arch/s390/mm/pgalloc.c return (unsigned long *) page_to_phys(page);
page_to_phys 175 arch/s390/mm/pgalloc.c table = (u64 *)page_to_phys(page);
page_to_phys 208 arch/s390/mm/pgalloc.c table = (unsigned long *) page_to_phys(page);
page_to_phys 231 arch/s390/mm/pgalloc.c table = (unsigned long *) page_to_phys(page);
page_to_phys 341 arch/s390/pci/pci_dma.c unsigned long pa = page_to_phys(page) + offset;
page_to_phys 411 arch/s390/pci/pci_dma.c pa = page_to_phys(page);
page_to_phys 458 arch/s390/pci/pci_dma.c pa = page_to_phys(sg_page(s));
page_to_phys 122 arch/sh/mm/cache-sh4.c (addr & shm_align_mask), page_to_phys(page));
page_to_phys 566 arch/sh/mm/cache-sh5.c sh64_dcache_purge_phy_page(page_to_phys((struct page *)page));
page_to_phys 3145 arch/sparc/mm/init_64.c pfrom = page_to_phys(from);
page_to_phys 3146 arch/sparc/mm/init_64.c pto = page_to_phys(to);
page_to_phys 3179 arch/sparc/mm/init_64.c pfrom = page_to_phys(from);
page_to_phys 3180 arch/sparc/mm/init_64.c pto = page_to_phys(to);
page_to_phys 182 arch/sparc/mm/iommu.c phys_addr_t paddr = page_to_phys(page) + offset;
page_to_phys 290 arch/um/include/asm/pgtable.h pte_set_val(pte, page_to_phys(page), (pgprot)); \
page_to_phys 630 arch/x86/events/intel/pt.c TOPA_ENTRY(&tp->topa, 1)->base = page_to_phys(p) >> TOPA_SHIFT;
page_to_phys 724 arch/x86/events/intel/pt.c TOPA_ENTRY(topa, -1)->base = page_to_phys(p) >> TOPA_SHIFT;
page_to_phys 234 arch/x86/kernel/amd_gart_64.c phys_addr_t paddr = page_to_phys(page) + offset;
page_to_phys 1526 arch/x86/kvm/svm.c phys_addr_t bpa = __sme_set(page_to_phys(svm->avic_backing_page));
page_to_phys 1527 arch/x86/kvm/svm.c phys_addr_t lpa = __sme_set(page_to_phys(kvm_svm->avic_logical_id_table_page));
page_to_phys 1528 arch/x86/kvm/svm.c phys_addr_t ppa = __sme_set(page_to_phys(kvm_svm->avic_physical_id_table_page));
page_to_phys 1764 arch/x86/kvm/svm.c new_entry = __sme_set((page_to_phys(svm->avic_backing_page) &
page_to_phys 5305 arch/x86/kvm/svm.c vcpu_info->pi_desc_addr = __sme_set(page_to_phys((*svm)->avic_backing_page));
page_to_phys 5357 arch/x86/kvm/svm.c pi.base = __sme_set(page_to_phys(svm->avic_backing_page) &
page_to_phys 2010 arch/x86/kvm/vmx/nested.c vmcs_write64(PML_ADDRESS, page_to_phys(vmx->pml_pg));
page_to_phys 2953 arch/x86/kvm/vmx/nested.c hpa = page_to_phys(vmx->nested.apic_access_page);
page_to_phys 4239 arch/x86/kvm/vmx/vmx.c vmcs_write64(PML_ADDRESS, page_to_phys(vmx->pml_pg));
page_to_phys 8008 arch/x86/kvm/x86.c kvm_x86_ops->set_apic_access_page_addr(vcpu, page_to_phys(page));
page_to_phys 397 arch/x86/platform/efi/efi_64.c efi_scratch.phys_stack = page_to_phys(page + 1); /* stack grows down */
page_to_phys 34 arch/xtensa/include/asm/highmem.h return DCACHE_ALIAS(page_to_phys(page));
page_to_phys 164 arch/xtensa/kernel/pci-dma.c *handle = phys_to_dma(dev, page_to_phys(page));
page_to_phys 63 arch/xtensa/mm/cache.c if (!DCACHE_ALIAS_EQ(page_to_phys(page), vaddr)) {
page_to_phys 72 arch/xtensa/mm/cache.c (page_to_phys(page) & DCACHE_ALIAS_MASK);
page_to_phys 75 arch/xtensa/mm/cache.c page_to_phys(page));
page_to_phys 83 arch/xtensa/mm/cache.c if (PageHighMem(page) || !DCACHE_ALIAS_EQ(page_to_phys(page), vaddr)) {
page_to_phys 84 arch/xtensa/mm/cache.c *paddr = page_to_phys(page);
page_to_phys 145 arch/xtensa/mm/cache.c unsigned long phys = page_to_phys(page);
page_to_phys 200 arch/xtensa/mm/cache.c unsigned long phys = page_to_phys(pfn_to_page(pfn));
page_to_phys 228 arch/xtensa/mm/cache.c unsigned long phys = page_to_phys(page);
page_to_phys 262 arch/xtensa/mm/cache.c unsigned long phys = page_to_phys(page);
page_to_phys 298 arch/xtensa/mm/cache.c unsigned long phys = page_to_phys(page);
page_to_phys 51 arch/xtensa/mm/highmem.c DCACHE_ALIAS(page_to_phys(page)));
page_to_phys 686 block/bio.c phys_addr_t vec_end_addr = page_to_phys(bv->bv_page) +
page_to_phys 688 block/bio.c phys_addr_t page_addr = page_to_phys(page);
page_to_phys 707 block/bio.c phys_addr_t addr1 = page_to_phys(bv->bv_page) + bv->bv_offset;
page_to_phys 708 block/bio.c phys_addr_t addr2 = page_to_phys(page) + offset + len - 1;
page_to_phys 79 block/blk.h phys_addr_t addr1 = page_to_phys(vec1->bv_page) + vec1->bv_offset;
page_to_phys 80 block/blk.h phys_addr_t addr2 = page_to_phys(vec2->bv_page) + vec2->bv_offset;
page_to_phys 155 drivers/char/agp/ali-agp.c page_to_phys(page)) | ALI_CACHE_FLUSH_EN ));
page_to_phys 183 drivers/char/agp/ali-agp.c page_to_phys(page)) | ALI_CACHE_FLUSH_EN));
page_to_phys 318 drivers/char/agp/amd-k7-agp.c page_to_phys(mem->pages[i]),
page_to_phys 83 drivers/char/agp/amd64-agp.c page_to_phys(mem->pages[i]),
page_to_phys 307 drivers/char/agp/ati-agp.c page_to_phys(mem->pages[i]),
page_to_phys 154 drivers/char/agp/backend.c bridge->scratch_page_dma = page_to_phys(page);
page_to_phys 71 drivers/char/agp/efficeon-agp.c unsigned long addr = page_to_phys(page);
page_to_phys 1099 drivers/char/agp/generic.c page_to_phys(mem->pages[i]),
page_to_phys 363 drivers/char/agp/hp-agp.c paddr = page_to_phys(mem->pages[i]);
page_to_phys 301 drivers/char/agp/i460-agp.c mem, pg_start, type, page_to_phys(mem->pages[0]));
page_to_phys 328 drivers/char/agp/i460-agp.c paddr = page_to_phys(mem->pages[i]);
page_to_phys 385 drivers/char/agp/i460-agp.c lp->paddr = page_to_phys(lp->page);
page_to_phys 272 drivers/char/agp/intel-gtt.c new->physical = page_to_phys(new->pages[0]);
page_to_phys 312 drivers/char/agp/intel-gtt.c intel_private.scratch_page_dma = page_to_phys(page);
page_to_phys 890 drivers/char/agp/intel-gtt.c dma_addr_t addr = page_to_phys(pages[i]);
page_to_phys 228 drivers/char/agp/nvidia-agp.c page_to_phys(mem->pages[i]), mask_type),
page_to_phys 154 drivers/char/agp/parisc-agp.c paddr = page_to_phys(mem->pages[i]);
page_to_phys 354 drivers/char/agp/sworks-agp.c page_to_phys(mem->pages[i]), mem->type),
page_to_phys 143 drivers/char/agp/uninorth-agp.c page_to_phys(agp_bridge->scratch_page_page) >> 12);
page_to_phys 186 drivers/char/agp/uninorth-agp.c gp[i] = (page_to_phys(mem->pages[i]) >> PAGE_SHIFT) | 0x80000000UL;
page_to_phys 188 drivers/char/agp/uninorth-agp.c gp[i] = cpu_to_le32((page_to_phys(mem->pages[i]) & 0xFFFFF000UL) |
page_to_phys 190 drivers/char/agp/uninorth-agp.c flush_dcache_range((unsigned long)__va(page_to_phys(mem->pages[i])),
page_to_phys 191 drivers/char/agp/uninorth-agp.c (unsigned long)__va(page_to_phys(mem->pages[i]))+0x1000);
page_to_phys 432 drivers/char/agp/uninorth-agp.c scratch_value = (page_to_phys(agp_bridge->scratch_page_page) >> PAGE_SHIFT) | 0x80000000UL;
page_to_phys 434 drivers/char/agp/uninorth-agp.c scratch_value = cpu_to_le32((page_to_phys(agp_bridge->scratch_page_page) & 0xFFFFF000UL) |
page_to_phys 913 drivers/crypto/n2_core.c src_paddr = (page_to_phys(walk->src.page) +
page_to_phys 915 drivers/crypto/n2_core.c dest_paddr = (page_to_phys(walk->dst.page) +
page_to_phys 109 drivers/crypto/nx/nx-842.h return page_to_phys(vmalloc_to_page(addr)) + offset_in_page(addr);
page_to_phys 91 drivers/crypto/nx/nx.c sg_addr = page_to_phys(vmalloc_to_page(start_addr))
page_to_phys 121 drivers/crypto/nx/nx.c sg_addr = page_to_phys(vmalloc_to_page(
page_to_phys 916 drivers/crypto/ux500/cryp/cryp_core.c src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
page_to_phys 919 drivers/crypto/ux500/cryp/cryp_core.c dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
page_to_phys 345 drivers/edac/thunderx_edac.c phys = (uintptr_t)page_to_phys(lmc->mem);
page_to_phys 192 drivers/firmware/efi/capsule-loader.c cap_info->phys[cap_info->index] = page_to_phys(page);
page_to_phys 171 drivers/firmware/efi/capsule.c sglist_phys = page_to_phys(sg_pages[0]);
page_to_phys 266 drivers/firmware/efi/capsule.c sglist[j].data = page_to_phys(sg_pages[i + 1]);
page_to_phys 100 drivers/gpu/drm/armada/armada_gem.c obj->phys_addr = page_to_phys(p);
page_to_phys 2266 drivers/gpu/drm/drm_fb_helper.c page_to_phys(virt_to_page(fbi->screen_buffer));
page_to_phys 213 drivers/gpu/drm/etnaviv/etnaviv_dump.c *bomap++ = cpu_to_le64(page_to_phys(*pages++));
page_to_phys 772 drivers/gpu/drm/i915/i915_gem_fence_reg.c char new_bit_17 = page_to_phys(page) >> 17;
page_to_phys 811 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (page_to_phys(page) & (1 << 17))
page_to_phys 83 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c sg_dma_address(sg) = page_to_phys(sg_page(sg));
page_to_phys 929 drivers/gpu/drm/msm/adreno/a6xx_gmu.c page_to_phys(bo->pages[i]), PAGE_SIZE,
page_to_phys 389 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c page_to_phys(pages[n]) : engine->dmm->dummy_pa;
page_to_phys 886 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c omap_dmm->dummy_pa = page_to_phys(omap_dmm->dummy_page);
page_to_phys 300 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c return page_to_phys(viter->pages[viter->i]);
page_to_phys 32 drivers/hv/channel.c paddr = page_to_phys(vmalloc_to_page(addr)) +
page_to_phys 664 drivers/infiniband/core/umem_odp.c if (page_to_phys(local_page_list[j]) != p) {
page_to_phys 683 drivers/infiniband/core/umem_odp.c p = page_to_phys(local_page_list[j]);
page_to_phys 770 drivers/infiniband/hw/hfi1/user_exp_rcv.c __va(page_to_phys(pages[0])),
page_to_phys 781 drivers/infiniband/hw/hfi1/user_exp_rcv.c node->phys = page_to_phys(pages[0]);
page_to_phys 2510 drivers/iommu/amd_iommu.c phys_addr_t paddr = page_to_phys(page) + offset;
page_to_phys 2701 drivers/iommu/amd_iommu.c *dma_addr = page_to_phys(page);
page_to_phys 2726 drivers/iommu/amd_iommu.c *dma_addr = __map_single(dev, dma_dom, page_to_phys(page),
page_to_phys 708 drivers/iommu/dma-iommu.c phys_addr_t phys = page_to_phys(page) + offset;
page_to_phys 1019 drivers/iommu/dma-iommu.c *handle = __iommu_dma_map(dev, page_to_phys(page), size, ioprot);
page_to_phys 1092 drivers/iommu/fsl_pamu.c ppaact_phys = page_to_phys(p);
page_to_phys 3557 drivers/iommu/intel-iommu.c return __intel_map_single(dev, page_to_phys(page) + offset,
page_to_phys 3659 drivers/iommu/intel-iommu.c *dma_handle = __intel_map_single(dev, page_to_phys(page), size,
page_to_phys 3921 drivers/iommu/intel-iommu.c return bounce_map_single(dev, page_to_phys(page) + offset,
page_to_phys 1716 drivers/irqchip/irq-gic-v3-its.c gic_rdists->prop_table_pa = page_to_phys(page);
page_to_phys 2145 drivers/irqchip/irq-gic-v3-its.c paddr = page_to_phys(pend_page);
page_to_phys 2174 drivers/irqchip/irq-gic-v3-its.c val = (page_to_phys(pend_page) |
page_to_phys 2343 drivers/irqchip/irq-gic-v3-its.c table[idx] = cpu_to_le64(page_to_phys(page) | GITS_BASER_VALID);
page_to_phys 31 drivers/misc/mic/host/mic_boot.c void *va = phys_to_virt(page_to_phys(page)) + offset;
page_to_phys 169 drivers/misc/mic/host/mic_boot.c void *va = phys_to_virt(page_to_phys(page)) + offset;
page_to_phys 320 drivers/misc/mic/host/mic_boot.c void *va = phys_to_virt(page_to_phys(page)) + offset;
page_to_phys 112 drivers/misc/mic/scif/scif_map.h *dma_handle = page_to_phys(page);
page_to_phys 588 drivers/misc/mic/scif/scif_rma.c phys_prev = page_to_phys(pin->pages[i]);
page_to_phys 593 drivers/misc/mic/scif/scif_rma.c phys_curr = page_to_phys(pin->pages[k]);
page_to_phys 607 drivers/misc/mic/scif/scif_rma.c phys_to_virt(page_to_phys(
page_to_phys 614 drivers/misc/mic/scif/scif_rma.c window->dma_addr[j] = page_to_phys(pin->pages[i]);
page_to_phys 190 drivers/misc/sgi-gru/grufault.c *paddr = page_to_phys(page);
page_to_phys 2916 drivers/net/ethernet/intel/e1000/e1000_main.c page_to_phys(skb_frag_page(frag));
page_to_phys 9969 drivers/net/ethernet/sun/niu.c return page_to_phys(page) + offset;
page_to_phys 504 drivers/net/fjes/fjes_hw.c (__le64)(page_to_phys(vmalloc_to_page(addr)) +
page_to_phys 514 drivers/net/fjes/fjes_hw.c (__le64)(page_to_phys(vmalloc_to_page(addr)) +
page_to_phys 1204 drivers/net/fjes/fjes_hw.c (__le64)(page_to_phys(vmalloc_to_page(addr)) +
page_to_phys 114 drivers/net/fjes/fjes_trace.h __entry->tx = page_to_phys(vmalloc_to_page(tx)) +
page_to_phys 116 drivers/net/fjes/fjes_trace.h __entry->rx = page_to_phys(vmalloc_to_page(rx)) +
page_to_phys 315 drivers/platform/goldfish/goldfish_pipe.c unsigned long xaddr = page_to_phys(pages[0]);
page_to_phys 325 drivers/platform/goldfish/goldfish_pipe.c xaddr = page_to_phys(pages[i]);
page_to_phys 152 drivers/remoteproc/remoteproc_core.c return page_to_phys(vmalloc_to_page(cpu_addr)) +
page_to_phys 273 drivers/s390/block/dasd_fba.c ccw->cda = (__u32) (addr_t) page_to_phys(ZERO_PAGE(0));
page_to_phys 75 drivers/s390/char/vmcp.c session->response = (char *)page_to_phys(page);
page_to_phys 119 drivers/staging/media/ipu3/ipu3-dmamap.c page_to_phys(pages[i]), PAGE_SIZE);
page_to_phys 307 drivers/staging/media/ipu3/ipu3-mmu.c phys_addr_t phys = page_to_phys(sg_page(s)) + s->offset;
page_to_phys 486 drivers/tee/optee/call.c optee_page = page_to_phys(*pages) +
page_to_phys 504 drivers/tee/optee/call.c optee_page = page_to_phys(*pages);
page_to_phys 27 drivers/tee/optee/shm_pool.c shm->paddr = page_to_phys(page);
page_to_phys 1281 drivers/vfio/vfio_iommu_type1.c ret = iommu_map(domain->domain, 0, page_to_phys(pages), PAGE_SIZE * 2,
page_to_phys 264 drivers/virt/fsl_hypervisor.c sg_list[0].source = page_to_phys(pages[0]) + lb_offset;
page_to_phys 268 drivers/virt/fsl_hypervisor.c sg_list[0].target = page_to_phys(pages[0]) + lb_offset;
page_to_phys 278 drivers/virt/fsl_hypervisor.c sg_list[i].source = page_to_phys(pages[i]);
page_to_phys 283 drivers/virt/fsl_hypervisor.c sg_list[i].target = page_to_phys(pages[i]);
page_to_phys 291 drivers/virt/vboxguest/vboxguest_core.c req->phys_page[i] = page_to_phys(pages[i]);
page_to_phys 330 drivers/virt/vboxguest/vboxguest_core.c req->phys_page[i] = page_to_phys(pages[i]);
page_to_phys 362 drivers/virt/vboxguest/vboxguest_utils.c dst_pg_lst->pages[i] = page_to_phys(page);
page_to_phys 369 drivers/xen/swiotlb-xen.c phys_addr_t map, phys = page_to_phys(page) + offset;
page_to_phys 87 include/asm-generic/page.h #ifndef page_to_phys
page_to_phys 224 include/linux/scatterlist.h return page_to_phys(sg_page(sg)) + sg->offset;
page_to_phys 396 kernel/dma/debug.c return page_to_phys(pfn_to_page(entry->pfn)) + entry->offset;
page_to_phys 102 kernel/dma/direct.c if (page && !dma_coherent_ok(dev, page_to_phys(page), size)) {
page_to_phys 109 kernel/dma/direct.c if (page && !dma_coherent_ok(dev, page_to_phys(page), size)) {
page_to_phys 144 kernel/dma/direct.c *dma_handle = phys_to_dma(dev, page_to_phys(page));
page_to_phys 164 kernel/dma/direct.c *dma_handle = __phys_to_dma(dev, page_to_phys(page));
page_to_phys 166 kernel/dma/direct.c *dma_handle = phys_to_dma(dev, page_to_phys(page));
page_to_phys 337 kernel/dma/direct.c phys_addr_t phys = page_to_phys(page) + offset;
page_to_phys 152 kernel/dma/remap.c page_to_phys(page), atomic_pool_size, -1);
page_to_phys 247 kernel/dma/remap.c *dma_handle = phys_to_dma(dev, page_to_phys(page));
page_to_phys 6548 kernel/events/core.c phys_addr = page_to_phys(p) + virt % PAGE_SIZE;
page_to_phys 320 lib/kfifo.c if (page_to_phys(page) != page_to_phys(npage) - l) {
page_to_phys 2098 mm/percpu.c return page_to_phys(vmalloc_to_page(addr)) +
page_to_phys 2101 mm/percpu.c return page_to_phys(pcpu_addr_to_page(addr)) +
page_to_phys 24 tools/virtio/linux/dma-mapping.h #define dma_map_page(d, p, o, s, dir) (page_to_phys(p) + (o))
page_to_phys 157 tools/virtio/linux/scatterlist.h return page_to_phys(sg_page(sg)) + sg->offset;
page_to_phys 735 virt/kvm/arm/mmu.c return page_to_phys(vmalloc_to_page(kaddr)) +
page_to_phys 2333 virt/kvm/kvm_main.c const void *zero_page = (const void *) __va(page_to_phys(ZERO_PAGE(0)));
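Most of the call sites above reduce to a few recurring patterns: computing a CPU physical address for a sub-page buffer (page_to_phys(page) + offset), translating that into a bus/DMA address (phys_to_dma(), as in kernel/dma/direct.c), and resolving a scatterlist element (page_to_phys(sg_page(sg)) + sg->offset). The sketch below is illustrative only and is not taken from any file in the listing; the *_example helpers are hypothetical, while page_to_phys(), phys_to_dma(), sg_page(), and the headers are the real kernel interfaces shown above.

/*
 * Minimal sketch of the common page_to_phys() patterns (assumed helper
 * names; kernel-internal code, not buildable standalone).
 */
#include <linux/io.h>          /* pulls in asm/io.h, where page_to_phys() lives */
#include <linux/scatterlist.h> /* struct scatterlist, sg_page() */
#include <linux/dma-direct.h>  /* phys_to_dma() */

/* CPU physical address of a buffer held as (struct page *, offset). */
static phys_addr_t buf_phys_example(struct page *page, unsigned long offset)
{
	return page_to_phys(page) + offset;
}

/* Bus/DMA address for a direct mapping, as in kernel/dma/direct.c. */
static dma_addr_t buf_dma_example(struct device *dev, struct page *page,
				  unsigned long offset)
{
	return phys_to_dma(dev, page_to_phys(page) + offset);
}

/* Physical address of a scatterlist element, cf. include/linux/scatterlist.h. */
static phys_addr_t sg_phys_example(struct scatterlist *sg)
{
	return page_to_phys(sg_page(sg)) + sg->offset;
}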