PAGE_KERNEL 58 arch/alpha/mm/init.c  = pte_val(mk_pte(virt_to_page(ret), PAGE_KERNEL));
PAGE_KERNEL 112 arch/alpha/mm/init.c  (newptbr << 32) | pgprot_val(PAGE_KERNEL);
PAGE_KERNEL 224 arch/alpha/mm/init.c  pfn_pte(pfn, PAGE_KERNEL));
PAGE_KERNEL 28 arch/arc/include/asm/highmem.h  #define kmap_prot PAGE_KERNEL
PAGE_KERNEL 13 arch/arm/include/asm/highmem.h  #define kmap_prot PAGE_KERNEL
PAGE_KERNEL 90 arch/arm/mm/copypage-v6.c  set_top_pte(kfrom, mk_pte(from, PAGE_KERNEL));
PAGE_KERNEL 91 arch/arm/mm/copypage-v6.c  set_top_pte(kto, mk_pte(to, PAGE_KERNEL));
PAGE_KERNEL 116 arch/arm/mm/copypage-v6.c  set_top_pte(to, mk_pte(page, PAGE_KERNEL));
PAGE_KERNEL 360 arch/arm/mm/dma-mapping.c  pgprot_t prot = pgprot_dmacoherent(PAGE_KERNEL);
PAGE_KERNEL 587 arch/arm/mm/dma-mapping.c  __dma_remap(page, size, PAGE_KERNEL);
PAGE_KERNEL 774 arch/arm/mm/dma-mapping.c  pgprot_t prot = __get_dma_pgprot(attrs, PAGE_KERNEL);
PAGE_KERNEL 783 arch/arm/mm/dma-mapping.c  return __dma_alloc(dev, size, handle, gfp, PAGE_KERNEL, true,
PAGE_KERNEL 1465 arch/arm/mm/dma-mapping.c  pgprot_t prot = __get_dma_pgprot(attrs, PAGE_KERNEL);
PAGE_KERNEL 2359 arch/arm/mm/dma-mapping.c  __get_dma_pgprot(attrs, PAGE_KERNEL), false,
PAGE_KERNEL 240 arch/arm/mm/fault-armv.c  pgprot_t prot = __pgprot_modify(PAGE_KERNEL,
PAGE_KERNEL 43 arch/arm/mm/flush.c  set_top_pte(to, pfn_pte(pfn, PAGE_KERNEL));
PAGE_KERNEL 58 arch/arm/mm/flush.c  set_top_pte(va, pfn_pte(pfn, PAGE_KERNEL));
PAGE_KERNEL 24 arch/arm64/include/asm/vmap_stack.h  THREADINFO_GFP, PAGE_KERNEL, 0, node,
PAGE_KERNEL 255 arch/arm64/kernel/acpi.c  return PAGE_KERNEL;
PAGE_KERNEL 50 arch/arm64/kernel/efi.c  return pgprot_val(PAGE_KERNEL);
PAGE_KERNEL 93 arch/arm64/kernel/kaslr.c  fdt = fixmap_remap_fdt(dt_phys, &size, PAGE_KERNEL);
PAGE_KERNEL 37 arch/arm64/kernel/module.c  module_alloc_end, gfp_mask, PAGE_KERNEL, 0,
PAGE_KERNEL 53 arch/arm64/kernel/module.c  PAGE_KERNEL, 0, NUMA_NO_NODE,
PAGE_KERNEL 174 arch/arm64/kernel/setup.c  void *dt_virt = fixmap_remap_fdt(dt_phys, &size, PAGE_KERNEL);
PAGE_KERNEL 113 arch/arm64/mm/kasan_init.c  set_pte(ptep, pfn_pte(__phys_to_pfn(page_phys), PAGE_KERNEL));
PAGE_KERNEL 492 arch/arm64/mm/mmu.c  __map_memblock(pgdp, start, end, PAGE_KERNEL, flags);
PAGE_KERNEL 506 arch/arm64/mm/mmu.c  PAGE_KERNEL, NO_CONT_MAPPINGS);
PAGE_KERNEL 517 arch/arm64/mm/mmu.c  PAGE_KERNEL,
PAGE_KERNEL 633 arch/arm64/mm/mmu.c  map_kernel_segment(pgdp, __start_rodata, __inittext_begin, PAGE_KERNEL,
PAGE_KERNEL 637 arch/arm64/mm/mmu.c  map_kernel_segment(pgdp, __initdata_begin, __initdata_end, PAGE_KERNEL,
PAGE_KERNEL 639 arch/arm64/mm/mmu.c  map_kernel_segment(pgdp, _data, _end, PAGE_KERNEL, &vmlinux_data, 0, 0);
PAGE_KERNEL 1062 arch/arm64/mm/mmu.c  size, PAGE_KERNEL, __pgd_pgtable_alloc, flags);
PAGE_KERNEL 971 arch/arm64/net/bpf_jit_comp.c  PAGE_KERNEL, 0, NUMA_NO_NODE,
PAGE_KERNEL 47 arch/csky/include/asm/highmem.h  #define kmap_prot PAGE_KERNEL
PAGE_KERNEL 43 arch/csky/include/asm/io.h  #define ioremap(addr, size) __ioremap((addr), (size), pgprot_noncached(PAGE_KERNEL))
PAGE_KERNEL 44 arch/csky/include/asm/io.h  #define ioremap_wc(addr, size) __ioremap((addr), (size), pgprot_writecombine(PAGE_KERNEL))
PAGE_KERNEL 29 arch/csky/kernel/vdso.c  vdso = vmap(&vdso_page, 1, 0, PAGE_KERNEL);
PAGE_KERNEL 55 arch/csky/mm/highmem.c  set_pte(kmap_pte-idx, mk_pte(page, PAGE_KERNEL));
PAGE_KERNEL 101 arch/csky/mm/highmem.c  set_pte(kmap_pte-idx, pfn_pte(pfn, PAGE_KERNEL));
PAGE_KERNEL 49 arch/csky/mm/ioremap.c  return __ioremap_caller(phys_addr, size, PAGE_KERNEL,
PAGE_KERNEL 28 arch/hexagon/kernel/vdso.c  vdso = vmap(&vdso_page, 1, 0, PAGE_KERNEL);
PAGE_KERNEL 469 arch/ia64/kernel/efi.c  pte_val(pfn_pte(__pa(pal_vaddr) >> PAGE_SHIFT, PAGE_KERNEL)),
PAGE_KERNEL 1871 arch/ia64/kernel/mca.c  pte_val(mk_pte_phys(__pa(cpu_data), PAGE_KERNEL)));
PAGE_KERNEL 1883 arch/ia64/kernel/mca.c  PAGE_KERNEL)));
PAGE_KERNEL 476 arch/ia64/mm/init.c  PAGE_KERNEL));
PAGE_KERNEL 71 arch/ia64/mm/ioremap.c  prot = PAGE_KERNEL;
PAGE_KERNEL 36 arch/m68k/mm/sun3kmap.c  ptep = pfn_pte(phys >> PAGE_SHIFT, PAGE_KERNEL);
PAGE_KERNEL 33 arch/m68k/sun3/dvma.c  ptep = pfn_pte(virt_to_pfn(kaddr), PAGE_KERNEL);
PAGE_KERNEL 126 arch/m68k/sun3x/dvma.c  PAGE_KERNEL));
PAGE_KERNEL 67 arch/microblaze/mm/init.c  kmap_prot = PAGE_KERNEL;
PAGE_KERNEL 62 arch/mips/include/asm/highmem.h  #define kmap_prot PAGE_KERNEL
PAGE_KERNEL 39 arch/mips/kernel/module.c  GFP_KERNEL, PAGE_KERNEL, 0, NUMA_NO_NODE,
PAGE_KERNEL 63 arch/mips/mm/highmem.c  set_pte(kmap_pte-idx, mk_pte(page, PAGE_KERNEL));
PAGE_KERNEL 117 arch/mips/mm/highmem.c  set_pte(kmap_pte-idx, pfn_pte(pfn, PAGE_KERNEL));
PAGE_KERNEL 141 arch/mips/mm/init.c  return __kmap_pgprot(page, addr, PAGE_KERNEL);
PAGE_KERNEL 35 arch/nds32/include/asm/highmem.h  #define kmap_prot PAGE_KERNEL
PAGE_KERNEL 13 arch/nds32/kernel/module.c  GFP_KERNEL, PAGE_KERNEL, 0, NUMA_NO_NODE,
PAGE_KERNEL 92 arch/nds32/mm/cacheflush.c  pte = (pa | PAGE_KERNEL);
PAGE_KERNEL 112 arch/nds32/mm/cacheflush.c  pte = (pa | PAGE_KERNEL);
PAGE_KERNEL 51 arch/nds32/mm/highmem.c  pte = (page_to_pfn(page) << PAGE_SHIFT) | (PAGE_KERNEL);
PAGE_KERNEL 92 arch/nds32/mm/init.c  set_pte(pte, __pte(p + pgprot_val(PAGE_KERNEL)));
PAGE_KERNEL 114 arch/openrisc/mm/init.c  prot = PAGE_KERNEL;
PAGE_KERNEL 69 arch/openrisc/mm/ioremap.c  __pgprot(pgprot_val(PAGE_KERNEL) | _PAGE_CI))) {
PAGE_KERNEL 440 arch/parisc/mm/init.c  prot = PAGE_KERNEL;
PAGE_KERNEL 448 arch/parisc/mm/init.c  PAGE_KERNEL_EXEC : PAGE_KERNEL;
PAGE_KERNEL 451 arch/parisc/mm/init.c  prot = PAGE_KERNEL;
PAGE_KERNEL 497 arch/parisc/mm/init.c  PAGE_KERNEL, 0);
PAGE_KERNEL 511 arch/parisc/mm/init.c  PAGE_KERNEL, 1);
PAGE_KERNEL 665 arch/parisc/mm/init.c  size, PAGE_KERNEL, 0);
PAGE_KERNEL 672 arch/parisc/mm/init.c  initrd_end - initrd_start, PAGE_KERNEL, 0);
PAGE_KERNEL 721 arch/powerpc/include/asm/io.h  ioremap_prot((addr), (size), pgprot_val(PAGE_KERNEL))
PAGE_KERNEL 109 arch/powerpc/kernel/isa-bridge.c  size, pgprot_noncached(PAGE_KERNEL));
PAGE_KERNEL 116 arch/powerpc/kernel/isa-bridge.c  0x10000, pgprot_noncached(PAGE_KERNEL));
PAGE_KERNEL 252 arch/powerpc/kernel/isa-bridge.c  size, pgprot_noncached(PAGE_KERNEL));
PAGE_KERNEL 164 arch/powerpc/kernel/pci_64.c  pgprot_noncached(PAGE_KERNEL)) == NULL)
PAGE_KERNEL 793 arch/powerpc/kvm/e500_mmu.c  virt = vmap(pages, num_pages, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 96 arch/powerpc/lib/code-patching.c  err = map_kernel_page(text_poke_addr, (pfn << PAGE_SHIFT), PAGE_KERNEL);
PAGE_KERNEL 119 arch/powerpc/mm/book3s64/hash_pgtable.c  pgprot_val(PAGE_KERNEL),
PAGE_KERNEL 457 arch/powerpc/mm/book3s64/hash_pgtable.c  pp = htab_convert_pte_flags(pgprot_val(PAGE_KERNEL));
PAGE_KERNEL 802 arch/powerpc/mm/book3s64/hash_utils.c  pgprot_val(PAGE_KERNEL), mmu_linear_psize,
PAGE_KERNEL 916 arch/powerpc/mm/book3s64/hash_utils.c  prot = pgprot_val(PAGE_KERNEL);
PAGE_KERNEL 1876 arch/powerpc/mm/book3s64/hash_utils.c  unsigned long mode = htab_convert_pte_flags(pgprot_val(PAGE_KERNEL));
PAGE_KERNEL 291 arch/powerpc/mm/book3s64/radix_pgtable.c  prot = PAGE_KERNEL;
PAGE_KERNEL 13 arch/powerpc/mm/ioremap.c  pgprot_t prot = pgprot_noncached(PAGE_KERNEL);
PAGE_KERNEL 24 arch/powerpc/mm/ioremap.c  pgprot_t prot = pgprot_noncached_wc(PAGE_KERNEL);
PAGE_KERNEL 35 arch/powerpc/mm/ioremap.c  pgprot_t prot = pgprot_cached(PAGE_KERNEL);
PAGE_KERNEL 11 arch/powerpc/mm/ioremap_32.c  pgprot_t prot = pgprot_cached_wthru(PAGE_KERNEL);
PAGE_KERNEL 37 arch/powerpc/mm/kasan/kasan_init_32.c  pgprot_t prot = slab_is_available() ? kasan_prot_ro() : PAGE_KERNEL;
PAGE_KERNEL 99 arch/powerpc/mm/kasan/kasan_init_32.c  pte_t pte = pfn_pte(PHYS_PFN(__pa(va)), PAGE_KERNEL);
PAGE_KERNEL 212 arch/powerpc/mm/kasan/kasan_init_32.c  kasan_populate_pte(kasan_early_shadow_pte, PAGE_KERNEL);
PAGE_KERNEL 254 arch/powerpc/mm/mem.c  kmap_prot = PAGE_KERNEL;
PAGE_KERNEL 99 arch/powerpc/mm/pgtable_32.c  map_kernel_page(v, p, ktext ? PAGE_KERNEL_TEXT : PAGE_KERNEL);
PAGE_KERNEL 213 arch/powerpc/mm/pgtable_32.c  change_page_attr(page, numpages, PAGE_KERNEL);
PAGE_KERNEL 254 arch/powerpc/mm/pgtable_32.c  change_page_attr(page, numpages, enable ? PAGE_KERNEL : __pgprot(0));
PAGE_KERNEL 33 arch/riscv/include/asm/fixmap.h  #define FIXMAP_PAGE_IO PAGE_KERNEL
PAGE_KERNEL 402 arch/riscv/mm/init.c  PAGE_SIZE, PAGE_KERNEL);
PAGE_KERNEL 68 arch/riscv/mm/ioremap.c  return __ioremap_caller(offset, size, PAGE_KERNEL,
PAGE_KERNEL 316 arch/s390/kernel/setup.c  PAGE_KERNEL, 0, NUMA_NO_NODE,
PAGE_KERNEL 123 arch/s390/mm/pageattr.c  prot = pgprot_val(ro ? PAGE_KERNEL_RO : PAGE_KERNEL);
PAGE_KERNEL 82 arch/s390/mm/vmem.c  pgt_prot = pgprot_val(PAGE_KERNEL);
PAGE_KERNEL 225 arch/s390/mm/vmem.c  pgt_prot = pgprot_val(PAGE_KERNEL);
PAGE_KERNEL 249 arch/s390/pci/pci.c  ioaddr, PAGE_KERNEL)) {
PAGE_KERNEL 342 arch/sh/include/asm/io.h  return __ioremap_mode(offset, size, PAGE_KERNEL);
PAGE_KERNEL 236 arch/sh/kernel/setup.c  PAGE_KERNEL);
PAGE_KERNEL 48 arch/sh/mm/kmap.c  set_pte(kmap_coherent_pte - idx, mk_pte(page, PAGE_KERNEL));
PAGE_KERNEL 37 arch/sh/mm/numa.c  PAGE_KERNEL);
PAGE_KERNEL 218 arch/sparc/include/asm/pgtable_64.h  extern pgprot_t PAGE_KERNEL;
PAGE_KERNEL 33 arch/sparc/kernel/module.c  GFP_KERNEL, PAGE_KERNEL, 0, NUMA_NO_NODE,
PAGE_KERNEL 1906 arch/sparc/mm/init_64.c  PAGE_KERNEL, use_huge);
PAGE_KERNEL 1927 arch/sparc/mm/init_64.c  (enable ? PAGE_KERNEL : __pgprot(0)), false);
PAGE_KERNEL 2575 arch/sparc/mm/init_64.c  pgprot_t PAGE_KERNEL __read_mostly;
PAGE_KERNEL 2576 arch/sparc/mm/init_64.c  EXPORT_SYMBOL(PAGE_KERNEL);
PAGE_KERNEL 2679 arch/sparc/mm/init_64.c  PAGE_KERNEL = __pgprot (_PAGE_PRESENT_4U | _PAGE_VALID |
PAGE_KERNEL 2732 arch/sparc/mm/init_64.c  PAGE_KERNEL = __pgprot (_PAGE_PRESENT_4V | _PAGE_VALID |
PAGE_KERNEL 2736 arch/sparc/mm/init_64.c  PAGE_KERNEL_LOCKED = PAGE_KERNEL;
PAGE_KERNEL 365 arch/x86/events/intel/ds.c  ds_update_cea(cea, buffer, bsiz, PAGE_KERNEL);
PAGE_KERNEL 409 arch/x86/events/intel/ds.c  ds_update_cea(cea, buffer, BTS_BUFFER_SIZE, PAGE_KERNEL);
PAGE_KERNEL 87 arch/x86/hyperv/hv_init.c  PAGE_KERNEL);
PAGE_KERNEL 155 arch/x86/include/asm/fixmap.h  #define kmap_prot PAGE_KERNEL
PAGE_KERNEL 826 arch/x86/kernel/alternative.c  pgprot = __pgprot(pgprot_val(PAGE_KERNEL) & ~_PAGE_GLOBAL);
PAGE_KERNEL 46 arch/x86/kernel/irq_64.c  va = vmap(pages, IRQ_STACK_SIZE / PAGE_SIZE, GFP_KERNEL, PAGE_KERNEL);
PAGE_KERNEL 78 arch/x86/kernel/module.c  PAGE_KERNEL, 0, NUMA_NO_NODE,
PAGE_KERNEL 1855 arch/x86/kvm/svm.c  PAGE_KERNEL);
PAGE_KERNEL 1927 arch/x86/kvm/svm.c  PAGE_KERNEL);
PAGE_KERNEL 6644 arch/x86/kvm/vmx/vmx.c  PAGE_KERNEL);
PAGE_KERNEL 68 arch/x86/mm/cpu_entry_area.c  PAGE_KERNEL);
PAGE_KERNEL 86 arch/x86/mm/cpu_entry_area.c  estacks->name## _stack, npages, PAGE_KERNEL); \
PAGE_KERNEL 134 arch/x86/mm/cpu_entry_area.c  PAGE_KERNEL_RO : PAGE_KERNEL;
PAGE_KERNEL 135 arch/x86/mm/cpu_entry_area.c  pgprot_t tss_prot = PAGE_KERNEL;
PAGE_KERNEL 142 arch/x86/mm/cpu_entry_area.c  PAGE_KERNEL);
PAGE_KERNEL 349 arch/x86/mm/init_32.c  pgprot_t prot = PAGE_KERNEL;
PAGE_KERNEL 485 arch/x86/mm/init_64.c  pfn_pte(paddr >> PAGE_SHIFT, PAGE_KERNEL).pte);
PAGE_KERNEL 598 arch/x86/mm/init_64.c  pgprot_t prot = PAGE_KERNEL;
PAGE_KERNEL 51 arch/x86/mm/kasan_init_64.c  if (p && pmd_set_huge(pmd, __pa(p), PAGE_KERNEL))
PAGE_KERNEL 70 arch/x86/mm/kasan_init_64.c  entry = pfn_pte(PFN_DOWN(__pa(p)), PAGE_KERNEL);
PAGE_KERNEL 88 arch/x86/mm/kasan_init_64.c  if (p && pud_set_huge(pud, __pa(p), PAGE_KERNEL))
PAGE_KERNEL 467 arch/x86/mm/pti.c  *target_pte = pfn_pte(pa >> PAGE_SHIFT, PAGE_KERNEL);
PAGE_KERNEL 430 arch/x86/xen/enlighten_pv.c  set_aliased_prot(ldt + i, PAGE_KERNEL);
PAGE_KERNEL 46 arch/x86/xen/grant-table.c  mfn_pte(frames[i], PAGE_KERNEL));
PAGE_KERNEL 68 arch/x86/xen/grant-table.c  mfn_pte(frames[i], PAGE_KERNEL));
PAGE_KERNEL 907 arch/x86/xen/mmu_pv.c  pfn_pte(pfn, PAGE_KERNEL),
PAGE_KERNEL 1687 arch/x86/xen/mmu_pv.c  __set_pfn_prot(pfn, PAGE_KERNEL);
PAGE_KERNEL 1855 arch/x86/xen/mmu_pv.c  set_page_prot_flags((void *)addr, PAGE_KERNEL, UVMF_INVLPG);
PAGE_KERNEL 1860 arch/x86/xen/mmu_pv.c  set_page_prot_flags((void *)addr, PAGE_KERNEL, UVMF_INVLPG);
PAGE_KERNEL 2110 arch/x86/xen/mmu_pv.c  PAGE_KERNEL);
PAGE_KERNEL 2207 arch/x86/xen/mmu_pv.c  set_page_prot(initial_page_table, PAGE_KERNEL);
PAGE_KERNEL 2208 arch/x86/xen/mmu_pv.c  set_page_prot(initial_kernel_pmd, PAGE_KERNEL);
PAGE_KERNEL 2317 arch/x86/xen/mmu_pv.c  pte = pfn_pte(PFN_DOWN(__pa(dummy_mapping)), PAGE_KERNEL);
PAGE_KERNEL 2327 arch/x86/xen/mmu_pv.c  pte = pfn_pte(PFN_DOWN(__pa(dummy_mapping)), PAGE_KERNEL);
PAGE_KERNEL 2533 arch/x86/xen/mmu_pv.c  mfn_pte(mfn, PAGE_KERNEL), flags);
PAGE_KERNEL 390 arch/x86/xen/p2m.c  pfn_pte(PFN_DOWN(__pa(mfns)), PAGE_KERNEL));
PAGE_KERNEL 610 arch/x86/xen/p2m.c  pfn_pte(PFN_DOWN(__pa(p2m)), PAGE_KERNEL));
PAGE_KERNEL 314 arch/x86/xen/setup.c  mfn_pte(mfn, PAGE_KERNEL), 0)) {
PAGE_KERNEL 354 arch/x86/xen/setup.c  set_pte_mfn(buf, mfn, PAGE_KERNEL);
PAGE_KERNEL 373 arch/x86/xen/setup.c  set_pte_mfn(buf, mfn_save, PAGE_KERNEL);
PAGE_KERNEL 512 arch/x86/xen/setup.c  set_pte_mfn(buf, xen_remap_mfn, PAGE_KERNEL);
PAGE_KERNEL 538 arch/x86/xen/setup.c  set_pte_mfn(buf, mfn_save, PAGE_KERNEL);
PAGE_KERNEL 171 arch/xtensa/kernel/pci-dma.c  pgprot_noncached(PAGE_KERNEL),
PAGE_KERNEL 49 arch/xtensa/mm/ioremap.c  return xtensa_ioremap(addr, size, pgprot_noncached(PAGE_KERNEL));
PAGE_KERNEL 55 arch/xtensa/mm/ioremap.c  return xtensa_ioremap(addr, size, PAGE_KERNEL);
PAGE_KERNEL 29 arch/xtensa/mm/kasan_init.c  PAGE_KERNEL));
PAGE_KERNEL 66 arch/xtensa/mm/kasan_init.c  set_pte(pte + j, pfn_pte(PHYS_PFN(phys), PAGE_KERNEL));
PAGE_KERNEL 401 drivers/block/drbd/drbd_bitmap.c  PAGE_KERNEL);
PAGE_KERNEL 1000 drivers/firewire/ohci.c  ctx->buffer = vmap(pages, ARRAY_SIZE(pages), VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 119 drivers/firmware/efi/capsule-loader.c  VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 59 drivers/firmware/efi/earlycon.c  fb_prot = fb_wb ? PAGE_KERNEL : pgprot_writecombine(PAGE_KERNEL);
PAGE_KERNEL 261 drivers/gpu/drm/drm_gem_shmem_helper.c  VM_MAP, pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 53 drivers/gpu/drm/drm_memory.c  # define PAGE_AGP pgprot_noncached_wc(PAGE_KERNEL)
PAGE_KERNEL 55 drivers/gpu/drm/drm_memory.c  # define PAGE_AGP PAGE_KERNEL
PAGE_KERNEL 49 drivers/gpu/drm/drm_scatter.c  return __vmalloc(size, GFP_KERNEL, pgprot_noncached_wc(PAGE_KERNEL));
PAGE_KERNEL 158 drivers/gpu/drm/etnaviv/etnaviv_dump.c  PAGE_KERNEL);
PAGE_KERNEL 362 drivers/gpu/drm/etnaviv/etnaviv_gem.c  VM_MAP, pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 96 drivers/gpu/drm/exynos/exynos_drm_fbdev.c  VM_MAP, pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 262 drivers/gpu/drm/i915/gem/i915_gem_pages.c  pgprot = PAGE_KERNEL;
PAGE_KERNEL 69 drivers/gpu/drm/i915/gem/selftests/mock_dmabuf.c  return vm_map_ram(mock->pages, mock->npages, 0, PAGE_KERNEL);
PAGE_KERNEL 271 drivers/gpu/drm/mediatek/mtk_drm_gem.c  pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 945 drivers/gpu/drm/msm/adreno/a6xx_gmu.c  pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 581 drivers/gpu/drm/msm/msm_gem.c  VM_MAP, pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 210 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c  pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 137 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c  *pmap = vmap(mem->mem, mem->pages, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 971 drivers/gpu/drm/omapdrm/omap_gem.c  VM_MAP, pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 135 drivers/gpu/drm/rockchip/rockchip_drm_gem.c  pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 548 drivers/gpu/drm/rockchip/rockchip_drm_gem.c  pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 264 drivers/gpu/drm/tegra/fb.c  pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 53 drivers/gpu/drm/tegra/gem.c  pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 78 drivers/gpu/drm/tegra/gem.c  pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 289 drivers/gpu/drm/ttm/ttm_bo_util.c  if (pgprot_val(prot) == pgprot_val(PAGE_KERNEL))
PAGE_KERNEL 305 drivers/gpu/drm/ttm/ttm_bo_util.c  if (pgprot_val(prot) == pgprot_val(PAGE_KERNEL))
PAGE_KERNEL 422 drivers/gpu/drm/ttm/ttm_bo_util.c  PAGE_KERNEL);
PAGE_KERNEL 427 drivers/gpu/drm/ttm/ttm_bo_util.c  PAGE_KERNEL);
PAGE_KERNEL 611 drivers/gpu/drm/ttm/ttm_bo_util.c  prot = ttm_io_prot(mem->placement, PAGE_KERNEL);
PAGE_KERNEL 163 drivers/gpu/drm/udl/udl_gem.c  obj->vmapping = vmap(obj->pages, page_count, 0, PAGE_KERNEL);
PAGE_KERNEL 395 drivers/gpu/drm/vgem/vgem_drv.c  return vmap(pages, n_pages, 0, pgprot_writecombine(PAGE_KERNEL));
PAGE_KERNEL 206 drivers/gpu/drm/vkms/vkms_gem.c  vkms_obj->vaddr = vmap(pages, n_pages, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 486 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c  d.dst_prot = ttm_io_prot(dst->mem.placement, PAGE_KERNEL);
PAGE_KERNEL 487 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c  d.src_prot = ttm_io_prot(src->mem.placement, PAGE_KERNEL);
PAGE_KERNEL 284 drivers/gpu/drm/xen/xen_drm_front_gem.c  VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 214 drivers/hv/ring_buffer.c  vmap(pages_wraparound, page_cnt * 2 - 1, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 277 drivers/hwtracing/coresight/coresight-tmc-etr.c  PAGE_KERNEL);
PAGE_KERNEL 297 drivers/hwtracing/coresight/coresight-tmc-etr.c  PAGE_KERNEL);
PAGE_KERNEL 572 drivers/iommu/dma-iommu.c  pgprot_t prot = dma_pgprot(dev, PAGE_KERNEL, attrs);
PAGE_KERNEL 976 drivers/iommu/dma-iommu.c  pgprot_t prot = dma_pgprot(dev, PAGE_KERNEL, attrs);
PAGE_KERNEL 150 drivers/lightnvm/pblk-init.c  PAGE_KERNEL);
PAGE_KERNEL 403 drivers/md/dm-bufio.c  void *ptr = __vmalloc(c->block_size, gfp_mask, PAGE_KERNEL);
PAGE_KERNEL 409 drivers/md/dm-bufio.c  return __vmalloc(c->block_size, gfp_mask, PAGE_KERNEL);
PAGE_KERNEL 284 drivers/md/dm-writecache.c  wc->memory_map = vmap(pages, p, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 313 drivers/media/common/videobuf2/videobuf2-dma-sg.c  buf->num_pages, -1, PAGE_KERNEL);
PAGE_KERNEL 111 drivers/media/common/videobuf2/videobuf2-vmalloc.c  PAGE_KERNEL);
PAGE_KERNEL 239 drivers/media/v4l2-core/videobuf-dma-sg.c  PAGE_KERNEL);
PAGE_KERNEL 754 drivers/misc/vmw_vmci/vmci_queue_pair.c  produce_q->q_header = vmap(headers, 2, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 1300 drivers/mtd/ubi/io.c  buf1 = __vmalloc(len, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 1364 drivers/mtd/ubi/io.c  buf = __vmalloc(len, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 32 drivers/net/ethernet/google/gve/gve_tx.c  PAGE_KERNEL);
PAGE_KERNEL 3950 drivers/pci/pci.c  pgprot_device(PAGE_KERNEL));
PAGE_KERNEL 220 drivers/pcmcia/electra_cf.c  pgprot_noncached(PAGE_KERNEL)) == NULL)) {
PAGE_KERNEL 851 drivers/perf/arm_spe_pmu.c  buf->base = vmap(pglist, nr_pages, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 37 drivers/staging/android/ion/ion_heap.c  pgprot = PAGE_KERNEL;
PAGE_KERNEL 39 drivers/staging/android/ion/ion_heap.c  pgprot = pgprot_writecombine(PAGE_KERNEL);
PAGE_KERNEL 141 drivers/staging/android/ion/ion_heap.c  pgprot = PAGE_KERNEL;
PAGE_KERNEL 143 drivers/staging/android/ion/ion_heap.c  pgprot = pgprot_writecombine(PAGE_KERNEL);
PAGE_KERNEL 19 drivers/staging/comedi/comedi_buf.c  #define COMEDI_PAGE_PROTECTION PAGE_KERNEL
PAGE_KERNEL 133 drivers/staging/media/ipu3/ipu3-dmamap.c  if (map_vm_area(map->vma, PAGE_KERNEL, pages))
PAGE_KERNEL 2492 drivers/target/target_core_transport.c  cmd->t_data_vmap = vmap(pages, cmd->t_data_nents, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 55 drivers/xen/mem-reservation.c  mfn_pte(frames[i], PAGE_KERNEL),
PAGE_KERNEL 589 drivers/xen/xenbus/xenbus_client.c  PAGE_KERNEL);
PAGE_KERNEL 248 drivers/xen/xlate_mmu.c  vaddr = vmap(pages, nr_pages, 0, PAGE_KERNEL);
PAGE_KERNEL 267 fs/erofs/decompressor.c  dst = vm_map_ram(rq->out, nrpages_out, -1, PAGE_KERNEL);
PAGE_KERNEL 213 fs/ext4/super.c  ret = __vmalloc(size, flags, PAGE_KERNEL);
PAGE_KERNEL 223 fs/ext4/super.c  ret = __vmalloc(size, flags | __GFP_ZERO, PAGE_KERNEL);
PAGE_KERNEL 360 fs/gfs2/dir.c  hc = __vmalloc(hsize, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 1172 fs/gfs2/dir.c  hc2 = __vmalloc(hsize_bytes * 2, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 1333 fs/gfs2/dir.c  ptr = __vmalloc(size, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 1994 fs/gfs2/dir.c  PAGE_KERNEL);
PAGE_KERNEL 1358 fs/gfs2/quota.c  __GFP_ZERO, PAGE_KERNEL);
PAGE_KERNEL 585 fs/nfs/blocklayout/extent_tree.c  start_p = __vmalloc(buffer_size, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 37 fs/ntfs/malloc.h  return __vmalloc(size, gfp_mask, PAGE_KERNEL);
PAGE_KERNEL 415 fs/pstore/ram_core.c  prot = pgprot_noncached(PAGE_KERNEL);
PAGE_KERNEL 417 fs/pstore/ram_core.c  prot = pgprot_writecombine(PAGE_KERNEL);
PAGE_KERNEL 818 fs/ubifs/debug.c  buf = __vmalloc(c->leb_size, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 1098 fs/ubifs/lprops.c  buf = __vmalloc(c->leb_size, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 1599 fs/ubifs/lpt_commit.c  buf = p = __vmalloc(c->leb_size, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 1848 fs/ubifs/lpt_commit.c  buf = p = __vmalloc(c->leb_size, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 975 fs/ubifs/orphan.c  buf = __vmalloc(c->leb_size, GFP_NOFS, PAGE_KERNEL);
PAGE_KERNEL 51 fs/xfs/kmem.c  ptr = __vmalloc(size, lflags, PAGE_KERNEL);
PAGE_KERNEL 474 fs/xfs/xfs_buf.c  -1, PAGE_KERNEL);
PAGE_KERNEL 48 include/asm-generic/fixmap.h  #define FIXMAP_PAGE_NORMAL PAGE_KERNEL
PAGE_KERNEL 1153 include/asm-generic/pgtable.h  # define PAGE_KERNEL_RO PAGE_KERNEL
PAGE_KERNEL 1157 include/asm-generic/pgtable.h  # define PAGE_KERNEL_EXEC PAGE_KERNEL
PAGE_KERNEL 115 include/linux/io-mapping.h  iomap->prot = pgprot_noncached_wc(PAGE_KERNEL);
PAGE_KERNEL 117 include/linux/io-mapping.h  iomap->prot = pgprot_writecombine(PAGE_KERNEL);
PAGE_KERNEL 119 include/linux/io-mapping.h  iomap->prot = pgprot_noncached(PAGE_KERNEL);
PAGE_KERNEL 84 kernel/bpf/core.c  fp = __vmalloc(size, gfp_flags, PAGE_KERNEL);
PAGE_KERNEL 236 kernel/bpf/core.c  fp = __vmalloc(size, gfp_flags, PAGE_KERNEL);
PAGE_KERNEL 1033 kernel/bpf/core.c  fp = __vmalloc(fp_other->pages * PAGE_SIZE, gfp_flags, PAGE_KERNEL);
PAGE_KERNEL 146 kernel/dma/remap.c  pgprot_dmacoherent(PAGE_KERNEL),
PAGE_KERNEL 238 kernel/dma/remap.c  dma_pgprot(dev, PAGE_KERNEL, attrs),
PAGE_KERNEL 243 kernel/fork.c  PAGE_KERNEL,
PAGE_KERNEL 23 kernel/groups.c  gi = __vmalloc(len, GFP_KERNEL_ACCOUNT, PAGE_KERNEL);
PAGE_KERNEL 522 kernel/kexec_core.c  safecopy = vmap(&vmcoreinfo_page, 1, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 2993 kernel/module.c  GFP_KERNEL | __GFP_NOWARN, PAGE_KERNEL);
PAGE_KERNEL 141 kernel/relay.c  mem = vmap(buf->page_array, n_pages, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 97 lib/test_vmalloc.c  PAGE_KERNEL,
PAGE_KERNEL 124 lib/test_vmalloc.c  PAGE_KERNEL,
PAGE_KERNEL 146 lib/test_vmalloc.c  PAGE_KERNEL,
PAGE_KERNEL 610 mm/kasan/common.c  PAGE_KERNEL, VM_NO_GUARD, NUMA_NO_NODE,
PAGE_KERNEL 704 mm/kasan/common.c  PAGE_KERNEL, VM_NO_GUARD,
PAGE_KERNEL 103 mm/kasan/init.c  PAGE_KERNEL);
PAGE_KERNEL 167 mm/memremap.c  pgprot_t pgprot = PAGE_KERNEL;
PAGE_KERNEL 155 mm/nommu.c  return __vmalloc(size, flags, PAGE_KERNEL);
PAGE_KERNEL 162 mm/nommu.c  ret = __vmalloc(size, GFP_KERNEL | __GFP_ZERO, PAGE_KERNEL);
PAGE_KERNEL 222 mm/nommu.c  return __vmalloc(size, GFP_KERNEL | __GFP_HIGHMEM, PAGE_KERNEL);
PAGE_KERNEL 241 mm/nommu.c  PAGE_KERNEL);
PAGE_KERNEL 306 mm/nommu.c  return __vmalloc(size, GFP_KERNEL, PAGE_KERNEL);
PAGE_KERNEL 8154 mm/page_alloc.c  table = __vmalloc(size, gfp_flags, PAGE_KERNEL);
PAGE_KERNEL 196 mm/percpu-vm.c  PAGE_KERNEL, pages);
PAGE_KERNEL 511 mm/percpu.c  return __vmalloc(size, gfp | __GFP_ZERO, PAGE_KERNEL);
PAGE_KERNEL 151 mm/sparse-vmemmap.c  entry = pfn_pte(__pa(p) >> PAGE_SHIFT, PAGE_KERNEL);
PAGE_KERNEL 2415 mm/vmalloc.c  PAGE_KERNEL, node, area->caller);
PAGE_KERNEL 2566 mm/vmalloc.c  return __vmalloc_node(size, 1, flags, PAGE_KERNEL,
PAGE_KERNEL 2574 mm/vmalloc.c  return __vmalloc_node(size, 1, flags, PAGE_KERNEL, node, caller);
PAGE_KERNEL 2628 mm/vmalloc.c  GFP_KERNEL | __GFP_ZERO, PAGE_KERNEL,
PAGE_KERNEL 2649 mm/vmalloc.c  return __vmalloc_node(size, 1, GFP_KERNEL, PAGE_KERNEL,
PAGE_KERNEL 2718 mm/vmalloc.c  return __vmalloc_node(size, 1, GFP_VMALLOC32, PAGE_KERNEL,
PAGE_KERNEL 2735 mm/vmalloc.c  GFP_VMALLOC32 | __GFP_ZERO, PAGE_KERNEL,
PAGE_KERNEL 1141 mm/zsmalloc.c  BUG_ON(map_vm_area(area->vm, PAGE_KERNEL, pages));
PAGE_KERNEL 1099 net/bridge/netfilter/ebtables.c  PAGE_KERNEL);
PAGE_KERNEL 1107 net/bridge/netfilter/ebtables.c  PAGE_KERNEL);
PAGE_KERNEL 198 net/xdp/xdp_umem.c  addr = vmap(&umem->pgs[i], 1, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 186 security/keys/big_key.c  buf->virt = vmap(buf->pages, buf->nr_pages, VM_MAP, PAGE_KERNEL);
PAGE_KERNEL 370 sound/core/pcm_memory.c  runtime->dma_area = __vmalloc(size, gfp_flags, PAGE_KERNEL);
PAGE_KERNEL 65 sound/core/sgbuf.c  pgprot_t prot = PAGE_KERNEL;
PAGE_KERNEL 75 sound/core/sgbuf.c  prot = pgprot_noncached(PAGE_KERNEL);
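The references above cluster around a few recurring patterns: vmap()/vm_map_ram()/__vmalloc() calls that map pages with the normal cached kernel protection, pfn_pte()/mk_pte() constructions used while building kernel page tables, and pgprot_noncached()/pgprot_writecombine()/pgprot_dmacoherent() wrappers that derive uncached, write-combining, or DMA-coherent variants from PAGE_KERNEL. The sketch below illustrates only the first and third of these patterns; it is a minimal example with hypothetical helper names, not code taken from any of the files listed, and it assumes the vmap()/vunmap() API shown in the references.

/*
 * Minimal sketch (hypothetical helpers, not from any file listed above):
 * map a caller-supplied page array into vmalloc space with the normal
 * cached kernel protection, plus a write-combining variant derived from
 * PAGE_KERNEL as the DRM GEM call sites above do.
 */
#include <linux/mm.h>
#include <linux/vmalloc.h>

static void *example_map_cached(struct page **pages, unsigned int count)
{
	/* VM_MAP marks the area as a vmap()ed region; PAGE_KERNEL gives
	 * ordinary cached, non-executable kernel read/write access. */
	return vmap(pages, count, VM_MAP, PAGE_KERNEL);
}

static void *example_map_wc(struct page **pages, unsigned int count)
{
	/* Same mapping, but with a write-combining protection derived
	 * from PAGE_KERNEL, suitable for framebuffer-style memory. */
	return vmap(pages, count, VM_MAP, pgprot_writecombine(PAGE_KERNEL));
}

/* Either mapping is released with vunmap(addr) when no longer needed. */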