
Searched refs:kmap (Results 1 – 200 of 239) sorted by relevance


/linux-4.4.14/arch/arm/mm/
Dhighmem.c37 void *kmap(struct page *page) in kmap() function
44 EXPORT_SYMBOL(kmap);
59 void *kmap; in kmap_atomic() local
73 kmap = NULL; in kmap_atomic()
76 kmap = kmap_high_get(page); in kmap_atomic()
77 if (kmap) in kmap_atomic()
78 return kmap; in kmap_atomic()
/linux-4.4.14/fs/hfsplus/
Dbnode.c31 memcpy(buf, kmap(*pagep) + off, l); in hfs_bnode_read()
37 memcpy(buf, kmap(*++pagep), l); in hfs_bnode_read()
84 memcpy(kmap(*pagep) + off, buf, l); in hfs_bnode_write()
91 memcpy(kmap(*++pagep), buf, l); in hfs_bnode_write()
114 memset(kmap(*pagep) + off, 0, l); in hfs_bnode_clear()
120 memset(kmap(*++pagep), 0, l); in hfs_bnode_clear()
146 memcpy(kmap(*dst_page) + src, kmap(*src_page) + src, l); in hfs_bnode_copy()
153 memcpy(kmap(*++dst_page), kmap(*++src_page), l); in hfs_bnode_copy()
162 src_ptr = kmap(*src_page) + src; in hfs_bnode_copy()
163 dst_ptr = kmap(*dst_page) + dst; in hfs_bnode_copy()
[all …]
Dbitmap.c41 pptr = kmap(page); in hfsplus_block_allocate()
86 curr = pptr = kmap(page); in hfsplus_block_allocate()
137 pptr = kmap(page); in hfsplus_block_allocate()
187 pptr = kmap(page); in hfsplus_block_free()
221 pptr = kmap(page); in hfsplus_block_free()
Dbtree.c165 head = (struct hfs_btree_header_rec *)(kmap(page) + in hfs_btree_open()
293 head = (struct hfs_btree_header_rec *)(kmap(page) + in hfs_btree_write()
384 data = kmap(*pagep); in hfs_bmap_alloc()
408 data = kmap(*++pagep); in hfs_bmap_alloc()
430 data = kmap(*pagep); in hfs_bmap_alloc()
479 data = kmap(page); in hfs_bmap_free()
/linux-4.4.14/drivers/gpu/drm/mgag200/
Dmgag200_cursor.c118 if (!bo->kmap.virtual) { in mga_crtc_cursor_set()
119 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in mga_crtc_cursor_set()
129 this_colour = ioread32(bo->kmap.virtual + i); in mga_crtc_cursor_set()
181 if (!pixels_prev->kmap.virtual) { in mga_crtc_cursor_set()
184 &pixels_prev->kmap); in mga_crtc_cursor_set()
195 this_colour = ioread32(bo->kmap.virtual + 4*(col + 64*row)); in mga_crtc_cursor_set()
213 memcpy_toio(pixels_prev->kmap.virtual + row*48, &this_row[0], 48); in mga_crtc_cursor_set()
239 ttm_bo_kunmap(&pixels_prev->kmap); in mga_crtc_cursor_set()
241 ttm_bo_kunmap(&bo->kmap); in mga_crtc_cursor_set()
Dmgag200_fb.c79 if (!bo->kmap.virtual) { in mga_dirty_update()
80 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in mga_dirty_update()
91 memcpy_toio(bo->kmap.virtual + src_offset, mfbdev->sysram + src_offset, (x2 - x + 1) * bpp); in mga_dirty_update()
95 ttm_bo_kunmap(&bo->kmap); in mga_dirty_update()
Dmgag200_ttm.c406 if (bo->kmap.virtual) in mgag200_bo_push_sysram()
407 ttm_bo_kunmap(&bo->kmap); in mgag200_bo_push_sysram()
Dmgag200_drv.h226 struct ttm_bo_kmap_obj kmap; member
/linux-4.4.14/tools/perf/util/
Dsymbol-elf.c751 static bool ref_reloc_sym_not_found(struct kmap *kmap) in ref_reloc_sym_not_found() argument
753 return kmap && kmap->ref_reloc_sym && kmap->ref_reloc_sym->name && in ref_reloc_sym_not_found()
754 !kmap->ref_reloc_sym->unrelocated_addr; in ref_reloc_sym_not_found()
765 static u64 ref_reloc(struct kmap *kmap) in ref_reloc() argument
767 if (kmap && kmap->ref_reloc_sym && in ref_reloc()
768 kmap->ref_reloc_sym->unrelocated_addr) in ref_reloc()
769 return kmap->ref_reloc_sym->addr - in ref_reloc()
770 kmap->ref_reloc_sym->unrelocated_addr; in ref_reloc()
785 struct kmap *kmap = dso->kernel ? map__kmap(map) : NULL; in dso__load_sym() local
786 struct map_groups *kmaps = kmap ? map__kmaps(map) : NULL; in dso__load_sym()
[all …]
Dmap.c216 (dso->kernel ? sizeof(struct kmap) : 0))); in map__new2()
837 struct kmap *map__kmap(struct map *map) in map__kmap()
843 return (struct kmap *)(map + 1); in map__kmap()
848 struct kmap *kmap = map__kmap(map); in map__kmaps() local
850 if (!kmap || !kmap->kmaps) { in map__kmaps()
854 return kmap->kmaps; in map__kmaps()
Dsymbol.c1074 struct kmap *kmap = map__kmap(map); in validate_kcore_addresses() local
1076 if (!kmap) in validate_kcore_addresses()
1079 if (kmap->ref_reloc_sym && kmap->ref_reloc_sym->name) { in validate_kcore_addresses()
1083 kmap->ref_reloc_sym->name); in validate_kcore_addresses()
1084 if (start != kmap->ref_reloc_sym->addr) in validate_kcore_addresses()
1246 struct kmap *kmap = map__kmap(map); in kallsyms__delta() local
1249 if (!kmap) in kallsyms__delta()
1252 if (!kmap->ref_reloc_sym || !kmap->ref_reloc_sym->name) in kallsyms__delta()
1256 kmap->ref_reloc_sym->name); in kallsyms__delta()
1260 *delta = addr - kmap->ref_reloc_sym->addr; in kallsyms__delta()
Dmachine.c744 struct kmap *kmap; in __machine__create_kernel_maps() local
755 kmap = map__kmap(map); in __machine__create_kernel_maps()
756 if (!kmap) in __machine__create_kernel_maps()
759 kmap->kmaps = &machine->kmaps; in __machine__create_kernel_maps()
771 struct kmap *kmap; in machine__destroy_kernel_maps() local
777 kmap = map__kmap(map); in machine__destroy_kernel_maps()
779 if (kmap && kmap->ref_reloc_sym) { in machine__destroy_kernel_maps()
785 zfree((char **)&kmap->ref_reloc_sym->name); in machine__destroy_kernel_maps()
786 zfree(&kmap->ref_reloc_sym); in machine__destroy_kernel_maps()
788 kmap->ref_reloc_sym = NULL; in machine__destroy_kernel_maps()
Dmap.h57 struct kmap { struct
86 struct kmap *map__kmap(struct map *map);
Devent.c654 struct kmap *kmap; in perf_event__synthesize_kernel_mmap() local
684 kmap = map__kmap(map); in perf_event__synthesize_kernel_mmap()
686 "%s%s", mmap_name, kmap->ref_reloc_sym->name) + 1; in perf_event__synthesize_kernel_mmap()
691 event->mmap.pgoff = kmap->ref_reloc_sym->addr; in perf_event__synthesize_kernel_mmap()
Dprobe-event.c128 struct kmap *kmap; in kernel_get_ref_reloc_sym() local
134 kmap = map__kmap(map); in kernel_get_ref_reloc_sym()
135 if (!kmap) in kernel_get_ref_reloc_sym()
137 return kmap->ref_reloc_sym; in kernel_get_ref_reloc_sym()
Dsession.c1729 struct kmap *kmap = map__kmap(maps[i]); in maps__set_kallsyms_ref_reloc_sym() local
1731 if (!kmap) in maps__set_kallsyms_ref_reloc_sym()
1733 kmap->ref_reloc_sym = ref; in maps__set_kallsyms_ref_reloc_sym()
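
The tools/perf/util hits above (map.c in this listing) show how perf tacks a struct kmap onto the end of a kernel map's allocation and recovers it with (struct kmap *)(map + 1). A small stand-alone sketch of that over-allocation idiom follows; the struct fields and names here are hypothetical stand-ins, since only the layout trick matters:

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-ins for perf's struct map / struct kmap. */
struct map  { unsigned long start, end; };
struct kmap { const char *ref_reloc_sym_name; };

/* Over-allocate so a struct kmap sits right after the struct map,
 * as map__new2() does for kernel maps. */
static struct map *map_new_with_kmap(void)
{
	return calloc(1, sizeof(struct map) + sizeof(struct kmap));
}

/* Recover the trailing struct, as map__kmap() does. */
static struct kmap *map_kmap(struct map *map)
{
	return (struct kmap *)(map + 1);
}

int main(void)
{
	struct map *m = map_new_with_kmap();

	if (!m)
		return 1;
	map_kmap(m)->ref_reloc_sym_name = "_text";
	printf("kmap lives at map + 1: %s\n", map_kmap(m)->ref_reloc_sym_name);
	free(m);
	return 0;
}

The real definitions live in tools/perf/util/map.h and map.c, as the hits above indicate.
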
/linux-4.4.14/fs/btrfs/
Dlzo.c120 data_in = kmap(in_page); in lzo_compress_pages()
131 cpage_out = kmap(out_page); in lzo_compress_pages()
201 cpage_out = kmap(out_page); in lzo_compress_pages()
228 data_in = kmap(in_page); in lzo_compress_pages()
236 cpage_out = kmap(pages[0]); in lzo_compress_pages()
286 data_in = kmap(pages_in[0]); in lzo_decompress_biovec()
346 data_in = kmap(pages_in[++page_in_index]); in lzo_decompress_biovec()
Dzlib.c107 data_in = kmap(in_page); in zlib_compress_pages()
114 cpage_out = kmap(out_page); in zlib_compress_pages()
156 cpage_out = kmap(out_page); in zlib_compress_pages()
178 data_in = kmap(in_page); in zlib_compress_pages()
230 data_in = kmap(pages_in[page_in_index]); in zlib_decompress_biovec()
287 data_in = kmap(pages_in[page_in_index]); in zlib_decompress_biovec()
Draid56.c270 s = kmap(rbio->bio_pages[i]); in cache_rbio_pages()
271 d = kmap(rbio->stripe_pages[i]); in cache_rbio_pages()
1236 pointers[stripe] = kmap(p); in finish_rmw()
1242 pointers[stripe++] = kmap(p); in finish_rmw()
1252 pointers[stripe++] = kmap(p); in finish_rmw()
1853 pointers[stripe] = kmap(page); in __raid_recover_end_io()
2381 pointers[stripe] = kmap(p); in finish_parity_scrub()
2385 pointers[stripe++] = kmap(p_page); in finish_parity_scrub()
2393 pointers[stripe++] = kmap(q_page); in finish_parity_scrub()
2405 parity = kmap(p); in finish_parity_scrub()
Dcheck-integrity.c1710 block_ctx->datav[i] = kmap(block_ctx->pagev[i]); in btrfsic_read_block()
3000 mapped_datav[i] = kmap(bio->bi_io_vec[i].bv_page); in __btrfsic_submit_bio()
/linux-4.4.14/arch/m68k/mm/
DMakefile8 obj-$(CONFIG_MMU_MOTOROLA) += kmap.o memory.o motorola.o hwtest.o
10 obj-$(CONFIG_MMU_COLDFIRE) += kmap.o memory.o mcfmmu.o
/linux-4.4.14/arch/m68k/include/asm/
Dmotorola_pgalloc.h43 pte = kmap(page); in pte_alloc_one()
54 cache_page(kmap(page)); in pte_free()
63 cache_page(kmap(page)); in __pte_free_tlb()
Dmcf_pgalloc.h64 pte = kmap(page); in pte_alloc_one()
/linux-4.4.14/fs/hfs/
Dbnode.c25 memcpy(buf, kmap(page) + off, len); in hfs_bnode_read()
67 memcpy(kmap(page) + off, buf, len); in hfs_bnode_write()
92 memset(kmap(page) + off, 0, len); in hfs_bnode_clear()
112 memcpy(kmap(dst_page) + dst, kmap(src_page) + src, len); in hfs_bnode_copy()
129 ptr = kmap(page); in hfs_bnode_move()
341 desc = (struct hfs_bnode_desc *)(kmap(node->page[0]) + node->page_offset); in hfs_bnode_find()
431 memset(kmap(*pagep) + node->page_offset, 0, in hfs_bnode_create()
436 memset(kmap(*++pagep), 0, PAGE_CACHE_SIZE); in hfs_bnode_create()
Dbtree.c82 head = (struct hfs_btree_header_rec *)(kmap(page) + sizeof(struct hfs_bnode_desc)); in hfs_btree_open()
171 head = (struct hfs_btree_header_rec *)(kmap(page) + sizeof(struct hfs_bnode_desc)); in hfs_btree_write()
261 data = kmap(*pagep); in hfs_bmap_alloc()
284 data = kmap(*++pagep); in hfs_bmap_alloc()
306 data = kmap(*pagep); in hfs_bmap_alloc()
352 data = kmap(page); in hfs_bmap_free()
/linux-4.4.14/arch/frv/mm/
Dhighmem.c14 void *kmap(struct page *page) in kmap() function
22 EXPORT_SYMBOL(kmap);
DMakefile5 obj-y := init.o kmap.o
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bar/
Dbase.c37 if (bar && bar->func->kmap && bar->subdev.oneinit) in nvkm_bar_kmap()
38 return bar->func->kmap(bar); in nvkm_bar_kmap()
Dpriv.h13 struct nvkm_vm *(*kmap)(struct nvkm_bar *); member
Dgf100.c103 if (bar->base.func->kmap) { in gf100_bar_oneinit()
173 .kmap = gf100_bar_kmap,
Dg84.c47 .kmap = nv50_bar_kmap,
Dnv50.c207 .kmap = nv50_bar_kmap,
/linux-4.4.14/arch/mips/mm/
Dhighmem.c13 void *kmap(struct page *page) in kmap() function
25 EXPORT_SYMBOL(kmap);
/linux-4.4.14/arch/metag/mm/
Dhighmem.c13 void *kmap(struct page *page) in kmap() function
20 EXPORT_SYMBOL(kmap);
/linux-4.4.14/arch/x86/mm/
Dhighmem_32.c6 void *kmap(struct page *page) in kmap() function
13 EXPORT_SYMBOL(kmap);
/linux-4.4.14/drivers/gpu/drm/nouveau/
Dnouveau_bo.h17 struct ttm_bo_kmap_obj kmap; member
103 &nvbo->kmap, &is_iomem); in nvbo_kmap_obj_iovirtual()
Dnouveau_gem.c346 ttm_bo_kunmap(&nvbo->kmap); in validate_fini_no_ticket()
629 if (!nvbo->kmap.virtual) { in nouveau_gem_pushbuf_reloc_apply()
631 &nvbo->kmap); in nouveau_gem_pushbuf_reloc_apply()
800 if (!nvbo->kmap.virtual) { in nouveau_gem_ioctl_pushbuf()
804 &nvbo->kmap); in nouveau_gem_ioctl_pushbuf()
Dnouveau_dma.c34 u32 *mem = ttm_kmap_obj_virtual(&chan->push.buffer->kmap, &is_iomem); in OUT_RINGp()
Dnouveau_bo.c433 &nvbo->kmap); in nouveau_bo_map()
450 ttm_bo_kunmap(&nvbo->kmap); in nouveau_bo_unmap()
535 u16 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem); in nouveau_bo_wr16()
549 u32 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem); in nouveau_bo_rd32()
563 u32 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem); in nouveau_bo_wr32()
/linux-4.4.14/fs/nilfs2/
Dalloc.c537 desc_kaddr = kmap(desc_bh->b_page); in nilfs_palloc_prepare_alloc_entry()
549 bitmap_kaddr = kmap(bitmap_bh->b_page); in nilfs_palloc_prepare_alloc_entry()
618 desc_kaddr = kmap(req->pr_desc_bh->b_page); in nilfs_palloc_commit_free_entry()
621 bitmap_kaddr = kmap(req->pr_bitmap_bh->b_page); in nilfs_palloc_commit_free_entry()
659 desc_kaddr = kmap(req->pr_desc_bh->b_page); in nilfs_palloc_abort_alloc_entry()
662 bitmap_kaddr = kmap(req->pr_bitmap_bh->b_page); in nilfs_palloc_abort_alloc_entry()
767 bitmap_kaddr = kmap(bitmap_bh->b_page); in nilfs_palloc_freev()
Difile.h38 void *kaddr = kmap(ibh->b_page); in nilfs_ifile_map_inode()
Ddir.c186 kmap(page); in nilfs_get_page()
Dcpfile.c250 kaddr = kmap(cp_bh->b_page); in nilfs_cpfile_get_checkpoint()
/linux-4.4.14/drivers/gpu/drm/cirrus/
Dcirrus_fbdev.c76 if (!bo->kmap.virtual) { in cirrus_dirty_update()
77 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in cirrus_dirty_update()
88 memcpy_toio(bo->kmap.virtual + src_offset, afbdev->sysram + src_offset, width * bpp); in cirrus_dirty_update()
92 ttm_bo_kunmap(&bo->kmap); in cirrus_dirty_update()
Dcirrus_ttm.c393 if (bo->kmap.virtual) in cirrus_bo_push_sysram()
394 ttm_bo_kunmap(&bo->kmap); in cirrus_bo_push_sysram()
Dcirrus_drv.h166 struct ttm_bo_kmap_obj kmap; member
Dcirrus_mode.c169 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in cirrus_crtc_do_set_base()
/linux-4.4.14/drivers/gpu/drm/ast/
Dast_fb.c103 if (!bo->kmap.virtual) { in ast_dirty_update()
104 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in ast_dirty_update()
115 memcpy_toio(bo->kmap.virtual + src_offset, afbdev->sysram + src_offset, (x2 - x + 1) * bpp); in ast_dirty_update()
119 ttm_bo_kunmap(&bo->kmap); in ast_dirty_update()
Dast_ttm.c409 if (bo->kmap.virtual) in ast_bo_push_sysram()
410 ttm_bo_kunmap(&bo->kmap); in ast_bo_push_sysram()
Dast_drv.h323 struct ttm_bo_kmap_obj kmap; member
Dast_mode.c547 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in ast_crtc_do_set_base()
/linux-4.4.14/drivers/gpu/drm/virtio/
Dvirtgpu_object.c110 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in virtio_gpu_object_kmap()
113 bo->vmap = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in virtio_gpu_object_kmap()
Dvirtgpu_drv.h64 struct ttm_bo_kmap_obj kmap; member
/linux-4.4.14/net/ceph/
Dpagelist.c49 pl->mapped_tail = kmap(page); in ceph_pagelist_addpage()
146 pl->mapped_tail = kmap(page); in ceph_pagelist_truncate()
Dmessenger.c546 kaddr = kmap(page); in ceph_tcp_recvpage()
599 iov.iov_base = kmap(page) + offset; in ceph_tcp_sendpage()
1540 kaddr = kmap(page); in ceph_crc32c_page()
/linux-4.4.14/fs/ecryptfs/
Dread_write.c79 virt = kmap(page_for_lower); in ecryptfs_write_lower_page_segment()
266 virt = kmap(page_for_ecryptfs); in ecryptfs_read_lower_page_segment()
Dcrypto.c520 enc_extent_virt = kmap(enc_extent_page); in ecryptfs_encrypt_page()
569 page_virt = kmap(page); in ecryptfs_decrypt_page()
/linux-4.4.14/fs/ntfs/
Daops.h92 kmap(page); in ntfs_map_page()
Dcompress.c590 kmap(page); in ntfs_read_compressed_block()
Daops.c1119 kaddr = kmap(page); in ntfs_write_mst_block()
/linux-4.4.14/drivers/gpu/drm/omapdrm/
Domap_gem_dmabuf.c127 return kmap(pages[page_num]); in omap_gem_dmabuf_kmap()
163 .kmap = omap_gem_dmabuf_kmap,
/linux-4.4.14/arch/mips/include/asm/
Dhighmem.h47 extern void *kmap(struct page *page);
/linux-4.4.14/fs/freevxfs/
Dvxfs_immed.c72 kaddr = kmap(pp); in vxfs_immed_readpage()
Dvxfs_subr.c75 kmap(pp); in vxfs_get_page()
/linux-4.4.14/drivers/gpu/drm/qxl/
Dqxl_object.c135 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in qxl_bo_kmap()
138 bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in qxl_bo_kmap()
183 ttm_bo_kunmap(&bo->kmap); in qxl_bo_kunmap()
Dqxl_drv.h108 struct ttm_bo_kmap_obj kmap; member
/linux-4.4.14/arch/metag/include/asm/
Dhighmem.h54 extern void *kmap(struct page *page);
/linux-4.4.14/fs/coda/
Dsymlink.c29 char *p = kmap(page); in coda_symlink_filler()
/linux-4.4.14/arch/x86/include/asm/
Dhighmem.h63 void *kmap(struct page *page);
/linux-4.4.14/arch/tile/include/asm/
Dhighmem.h55 void *kmap(struct page *page);
/linux-4.4.14/arch/tile/mm/
Dhighmem.c25 void *kmap(struct page *page) in kmap() function
47 EXPORT_SYMBOL(kmap);
/linux-4.4.14/arch/arc/include/asm/
Dhighmem.h37 extern void *kmap(struct page *page);
/linux-4.4.14/arch/arm/include/asm/
Dhighmem.h66 extern void *kmap(struct page *page);
/linux-4.4.14/arch/microblaze/include/asm/
Dhighmem.h58 static inline void *kmap(struct page *page) in kmap() function
/linux-4.4.14/fs/nfs/
Dsymlink.c59 return kmap(page); in nfs_follow_link()
/linux-4.4.14/arch/sparc/include/asm/
Dhighmem.h55 static inline void *kmap(struct page *page) in kmap() function
/linux-4.4.14/Documentation/frv/
Dmmu-layout.txt66 D8000000-DBFFFFFF various TLB,xAMR1 D-NS??V 64MB kmap() area
88 the kernel calls kmap(), does the access and then calls kunmap(); or it calls kmap_atomic(), does
91 kmap() creates an attachment between an arbitrary inaccessible page and a range of virtual
113 Note that the first three kmap atomic types are really just declared as placeholders. The DAMPR
116 Also note that kmap() itself may sleep, kmap_atomic() may never sleep and both always succeed;
117 furthermore, a driver using kmap() may sleep before calling kunmap(), but may not sleep before
127 allow userspace (by way of page tables) and itself (by way of kmap) to deal with the memory
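
The mmu-layout.txt hits above spell out the two mapping disciplines: kmap() may sleep and the mapping stays valid until the matching kunmap(), while kmap_atomic() never sleeps and must be undone before the code can schedule. A minimal sketch of both patterns, assuming a struct page obtained elsewhere; the helper names are hypothetical, not from the tree:

/*
 * Illustrates the rules quoted above from mmu-layout.txt: kmap() may
 * sleep and persists until kunmap(); kmap_atomic() never sleeps and
 * must be undone before anything that might schedule.
 */
#include <linux/highmem.h>
#include <linux/string.h>

static void fill_page_may_sleep(struct page *page, const void *src, size_t len)
{
	void *vaddr = kmap(page);	/* may sleep; always succeeds */

	memcpy(vaddr, src, len);
	kunmap(page);			/* mapping stays valid until here */
}

static void fill_page_atomic(struct page *page, const void *src, size_t len)
{
	void *vaddr = kmap_atomic(page);	/* never sleeps */

	memcpy(vaddr, src, len);
	kunmap_atomic(vaddr);	/* undo before any potential sleep */
}
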
/linux-4.4.14/arch/powerpc/include/asm/
Dhighmem.h66 static inline void *kmap(struct page *page) in kmap() function
/linux-4.4.14/fs/efs/
Dsymlink.c16 char *link = kmap(page); in efs_symlink_readpage()
/linux-4.4.14/drivers/gpu/drm/bochs/
Dbochs_fbdev.c97 &bo->kmap); in bochsfb_create()
134 info->screen_base = bo->kmap.virtual; in bochsfb_create()
Dbochs.h102 struct ttm_bo_kmap_obj kmap; member
/linux-4.4.14/arch/xtensa/include/asm/
Dhighmem.h69 static inline void *kmap(struct page *page) in kmap() function
/linux-4.4.14/fs/udf/
Dfile.c47 kaddr = kmap(page); in __udf_adinicb_readpage()
73 kaddr = kmap(page); in udf_adinicb_writepage()
Dsymlink.c110 unsigned char *p = kmap(page); in udf_symlink_filler()
Dinode.c288 kaddr = kmap(page); in udf_expand_file_adinicb()
312 kaddr = kmap(page); in udf_expand_file_adinicb()
/linux-4.4.14/include/linux/
Dhost1x.h67 void *(*kmap)(struct host1x_bo *bo, unsigned int pagenum); member
114 return bo->ops->kmap(bo, pagenum); in host1x_bo_kmap()
Ddma-buf.h102 void *(*kmap)(struct dma_buf *, unsigned long); member
Dhighmem.h56 static inline void *kmap(struct page *page) in kmap() function
/linux-4.4.14/arch/mn10300/include/asm/
Dhighmem.h49 static inline unsigned long kmap(struct page *page) in kmap() function
/linux-4.4.14/arch/frv/include/asm/
Dhighmem.h62 extern void *kmap(struct page *page);
/linux-4.4.14/drivers/staging/lustre/lnet/klnds/socklnd/
Dsocklnd_lib.c167 scratchiov[i].iov_base = kmap(kiov[i].kiov_page) + in ksocknal_lib_send_kiov()
342 scratchiov[i].iov_base = kmap(kiov[i].kiov_page) + in ksocknal_lib_recv_kiov()
361 base = kmap(kiov[i].kiov_page) + kiov[i].kiov_offset; in ksocknal_lib_recv_kiov()
401 base = kmap(tx->tx_kiov[i].kiov_page) + in ksocknal_lib_csum_tx()
/linux-4.4.14/fs/ncpfs/
Dmmap.c47 pg_addr = kmap(vmf->page); in ncp_file_mmap_fault()
Dsymlink.c47 char *buf = kmap(page); in ncp_symlink_readpage()
Ddir.c447 ctl.cache = cache = kmap(page); in ncp_readdir()
475 ctl.cache = kmap(ctl.page); in ncp_readdir()
659 ctl.cache = kmap(ctl.page); in ncp_fill_cache()
/linux-4.4.14/drivers/gpu/drm/vmwgfx/
Dvmwgfx_prime.c111 .kmap = vmw_prime_dmabuf_kmap,
/linux-4.4.14/fs/affs/
Dsymlink.c17 char *link = kmap(page); in affs_symlink_readpage()
Dfile.c514 kmap(page); in affs_do_readpage_ofs()
/linux-4.4.14/arch/arc/mm/
Dhighmem.c56 void *kmap(struct page *page) in kmap() function
/linux-4.4.14/fs/qnx6/
Ddir.c31 kmap(page); in qnx6_get_page()
57 kmap(*p = page); in qnx6_longname()
Dinode.c190 kmap(page); in qnx6_checkroot()
555 kmap(page); in qnx6_iget()
/linux-4.4.14/fs/ext4/
Dsymlink.c48 caddr = kmap(cpage); in ext4_encrypted_follow_link()
/linux-4.4.14/net/rds/
Dpage.c63 addr = kmap(page); in rds_page_copy_user()
/linux-4.4.14/fs/jffs2/
Dfile.c94 pg_buf = kmap(pg); in jffs2_do_readpage_nolock()
292 kmap(pg); in jffs2_write_end()
Dfs.c697 return kmap(pg); in jffs2_gc_fetch_page()
/linux-4.4.14/Documentation/
Ddma-buf-sharing.txt277 an api similar to kmap. Accessing a dma_buf is done in aligned chunks of
288 There are also atomic variants of these interfaces. Like for kmap they
296 For importers all the restrictions of using kmap apply, like the limited
300 dma_buf kmap calls outside of the range specified in begin_cpu_access are
301 undefined. If the range is not PAGE_SIZE aligned, kmap needs to succeed on
308 For some cases the overhead of kmap can be too high, a vmap interface
317 runs out of vmalloc space. Fallback to kmap should be implemented. Note that
327 unpinning of any pinned resources). The result of any dma_buf kmap calls
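
The dma-buf-sharing.txt hits above describe the importer rules: CPU access goes through dma_buf kmap in PAGE_SIZE chunks and must be bracketed by begin/end_cpu_access within the declared range. A hedged importer-side sketch of that sequence; the function name is made up, and the begin/end_cpu_access signatures follow the 4.4-era API (later kernels dropped the start/len arguments):

/*
 * Importer-side sketch: bracket CPU access with begin/end_cpu_access
 * and map the buffer one PAGE_SIZE chunk at a time via dma_buf_kmap().
 */
#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/string.h>

static int importer_copy_first_page(struct dma_buf *dmabuf, void *dst)
{
	void *vaddr;
	int ret;

	ret = dma_buf_begin_cpu_access(dmabuf, 0, PAGE_SIZE, DMA_FROM_DEVICE);
	if (ret)
		return ret;

	vaddr = dma_buf_kmap(dmabuf, 0);	/* map page 0 of the buffer */
	if (!vaddr) {
		ret = -ENOMEM;
		goto out;
	}

	memcpy(dst, vaddr, PAGE_SIZE);
	dma_buf_kunmap(dmabuf, 0, vaddr);
out:
	dma_buf_end_cpu_access(dmabuf, 0, PAGE_SIZE, DMA_FROM_DEVICE);
	return ret;
}
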
/linux-4.4.14/arch/sh/mm/
DMakefile18 mmu-$(CONFIG_MMU) := extable_$(BITS).o fault.o gup.o ioremap.o kmap.o \
/linux-4.4.14/arch/parisc/include/asm/
Dcacheflush.h132 static inline void *kmap(struct page *page) in kmap() function
/linux-4.4.14/drivers/gpu/drm/ttm/
Dttm_bo_util.c268 dst = kmap(d); in ttm_copy_io_ttm_page()
304 src = kmap(s); in ttm_copy_ttm_io_page()
554 map->virtual = kmap(map->page); in ttm_bo_kmap_ttm()
/linux-4.4.14/fs/sysv/
Ddir.c59 kmap(page); in dir_get_page()
261 kmap(page); in sysv_make_empty()
/linux-4.4.14/fs/f2fs/
Ddir.c89 dentry_blk = (struct f2fs_dentry_block *)kmap(dentry_page); in find_in_block()
265 dentry_blk = kmap(page); in f2fs_parent_dir()
581 dentry_blk = kmap(dentry_page); in __f2fs_add_link()
861 dentry_blk = kmap(dentry_page); in f2fs_readdir()
Df2fs.h1247 char *src_kaddr = kmap(src); in f2fs_copy_page()
1248 char *dst_kaddr = kmap(dst); in f2fs_copy_page()
Dnamei.c945 caddr = kmap(cpage); in f2fs_encrypted_follow_link()
/linux-4.4.14/drivers/staging/rdma/ipath/
Dipath_user_sdma.c191 mpage = kmap(page); in ipath_user_sdma_coalesce()
308 pages[j], kmap(pages[j]), in ipath_user_sdma_pin_pages()
447 pbc = kmap(page); in ipath_user_sdma_queue_pkts()
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
Damdgpu_object.c322 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in amdgpu_bo_kmap()
326 bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in amdgpu_bo_kmap()
338 ttm_bo_kunmap(&bo->kmap); in amdgpu_bo_kunmap()
Damdgpu_ttm.c1180 ptr = kmap(page); in amdgpu_ttm_gtt_read()
/linux-4.4.14/drivers/gpu/drm/i915/
Di915_gem_render_state.c107 d = kmap(page); in render_state_setup()
Di915_gem_dmabuf.c219 .kmap = i915_gem_dmabuf_kmap,
Di915_gem_fence.c722 vaddr = kmap(page); in i915_gem_swizzle_page()
Dintel_ringbuffer.c698 ring->scratch.cpu_page = kmap(sg_page(ring->scratch.obj->pages->sgl)); in intel_init_pipe_control()
1992 ring->status_page.page_addr = kmap(sg_page(obj->pages->sgl)); in init_status_page()
/linux-4.4.14/drivers/gpu/drm/gma500/
Dmmu.c205 v = kmap(pd->dummy_pt); in psb_mmu_alloc_pd()
211 v = kmap(pd->p); in psb_mmu_alloc_pd()
217 clear_page(kmap(pd->dummy_page)); in psb_mmu_alloc_pd()
Dgma_display.c417 tmp_src = kmap(gt->pages[i]); in gma_crtc_cursor_set()
/linux-4.4.14/drivers/gpu/drm/radeon/
Dradeon_object.c285 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in radeon_bo_kmap()
289 bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in radeon_bo_kmap()
303 ttm_bo_kunmap(&bo->kmap); in radeon_bo_kunmap()
Dradeon_ttm.c1123 ptr = kmap(page); in radeon_ttm_gtt_read()
/linux-4.4.14/fs/cramfs/
Dinode.c231 memcpy(data, kmap(page), PAGE_CACHE_SIZE); in cramfs_read()
500 pgdata = kmap(page); in cramfs_readpage()
/linux-4.4.14/drivers/staging/lustre/lustre/lov/
Dlov_page.c223 addr = kmap(vmpage); in lov_page_init_empty()
/linux-4.4.14/fs/reiserfs/
Dtail_conversion.c244 tail = (char *)kmap(page); /* this can schedule */ in indirect2direct()
Dxattr.c432 kmap(page); in reiserfs_get_page()
Dinode.c399 p = (char *)kmap(bh_result->b_page); in _get_block_create_0()
2395 kmap(bh_result->b_page); in map_block_for_writepage()
/linux-4.4.14/fs/afs/
Dmntpt.c75 buf = kmap(page); in afs_mntpt_check_symlink()
Drxrpc.c303 iov->iov_base = kmap(pages[loop]) + offset; in afs_send_pages()
Ddir.c198 kmap(page); in afs_dir_get_page()
/linux-4.4.14/fs/hostfs/
Dhostfs_kern.c420 buffer = kmap(page); in hostfs_writepage()
448 buffer = kmap(page); in hostfs_readpage()
491 buffer = kmap(page); in hostfs_write_end()
/linux-4.4.14/mm/
Duserfaultfd.c262 page_kaddr = kmap(page); in __mcopy_atomic()
/linux-4.4.14/drivers/gpu/drm/udl/
Dudl_dmabuf.c194 .kmap = udl_dmabuf_kmap,
/linux-4.4.14/drivers/dma-buf/
Ddma-buf.c303 || !exp_info->ops->kmap in dma_buf_export()
634 return dmabuf->ops->kmap(dmabuf, page_num); in dma_buf_kmap()
/linux-4.4.14/drivers/gpu/drm/tegra/
Dgem.c91 .kmap = tegra_bo_kmap,
610 .kmap = tegra_gem_prime_kmap,
/linux-4.4.14/drivers/s390/block/
Dxpram.c207 kmap(bvec.bv_page) + bvec.bv_offset; in xpram_make_request()
/linux-4.4.14/fs/ufs/
Ddir.c192 kmap(page); in ufs_get_page()
575 kmap(page); in ufs_make_empty()
/linux-4.4.14/drivers/staging/lustre/lustre/include/
Dobd_support.h499 memset(kmap(page), val, PAGE_CACHE_SIZE); \
/linux-4.4.14/fs/isofs/
Dcompress.c339 kmap(pages[i]); in zisofs_readpage()
Drock.c697 char *link = kmap(page); in rock_ridge_symlink_readpage()
/linux-4.4.14/fs/jfs/
Djfs_metapage.c115 kmap(page); in insert_metapage()
169 kmap(page); in insert_metapage()
/linux-4.4.14/fs/ubifs/
Dfile.c117 addr = kmap(page); in do_readpage()
622 addr = zaddr = kmap(page); in populate_page()
915 addr = kmap(page); in do_writepage()
/linux-4.4.14/arch/powerpc/mm/
Dmem.c473 maddr = (unsigned long) kmap(page) + (addr & ~PAGE_MASK); in flush_icache_user_range()
/linux-4.4.14/drivers/media/v4l2-core/
Dvideobuf2-vmalloc.c342 .kmap = vb2_vmalloc_dmabuf_ops_kmap,
Dvideobuf2-dma-contig.c353 .kmap = vb2_dc_dmabuf_ops_kmap,
Dvideobuf2-dma-sg.c519 .kmap = vb2_dma_sg_dmabuf_ops_kmap,
/linux-4.4.14/drivers/char/tpm/
Dtpm.h426 buf->data = kmap(buf->data_page); in tpm_buf_init()
/linux-4.4.14/Documentation/DocBook/
Dscsi.xml.db48 API-scsi-kmap-atomic-sg
Ddevice-drivers.xml.db406 API-dma-buf-kmap-atomic
408 API-dma-buf-kmap
/linux-4.4.14/drivers/staging/lustre/lustre/libcfs/
Dtracefile.c710 buf = kmap(tage->page); in cfs_tracefile_dump_all_pages()
1017 buf = kmap(tage->page); in tracefiled()
/linux-4.4.14/fs/cifs/
Dfile.c1849 write_data = kmap(page); in cifs_partialpagewrite()
2231 page_data = kmap(page); in cifs_write_end()
2867 iov.iov_base = kmap(page); in cifs_uncached_read_into_pages()
2874 iov.iov_base = kmap(page); in cifs_uncached_read_into_pages()
3320 iov.iov_base = kmap(page); in cifs_readpages_read_into_pages()
3327 iov.iov_base = kmap(page); in cifs_readpages_read_into_pages()
3577 read_data = kmap(page); in cifs_readpage_worker()
Dtransport.c264 iov->iov_base = kmap(rqst->rq_pages[idx]); in cifs_rqst_page_to_kvec()
/linux-4.4.14/drivers/xen/
Dgntalloc.c184 uint8_t *tmp = kmap(gref->page); in __del_gref()
/linux-4.4.14/drivers/mmc/host/
Dsdricoh_cs.c326 buf = kmap(page) + data->sg->offset + (len * i); in sdricoh_request()
Dusdhi6rol0.c323 host->pg.mapped = kmap(host->pg.page); in usdhi6_blk_bounce()
365 host->pg.mapped = kmap(host->pg.page); in usdhi6_sg_map()
503 host->pg.mapped = kmap(host->pg.page); in usdhi6_sg_advance()
Dmmc_spi.c935 kmap_addr = kmap(sg_page(sg)); in mmc_spi_data_do()
/linux-4.4.14/lib/
Dscatterlist.c589 miter->addr = kmap(miter->page) + miter->__offset; in sg_miter_next()
Diov_iter.c189 kaddr = kmap(page); in copy_page_to_iter_iovec()
270 kaddr = kmap(page); in copy_page_from_iter_iovec()
/linux-4.4.14/drivers/block/
Dnbd.c228 void *kaddr = kmap(bvec->bv_page); in sock_send_bvec()
325 void *kaddr = kmap(bvec->bv_page); in sock_recv_bvec()
/linux-4.4.14/kernel/
Dkexec_core.c741 ptr = kmap(page); in kimage_load_normal_segment()
802 ptr = kmap(page); in kimage_load_crash_segment()
/linux-4.4.14/fs/logfs/
Dsegment.c382 buf = kmap(page); in logfs_segment_write()
667 buf = kmap(page); in logfs_segment_read()
Ddir.c312 dd = kmap(page); in logfs_readdir()
/linux-4.4.14/drivers/staging/lustre/lustre/llite/
Ddir.c219 dp = kmap(page); in ll_dir_filler()
295 dp = kmap(page); in ll_dir_page_locate()
428 (void)kmap(page); in ll_get_dir_page()
/linux-4.4.14/drivers/staging/lustre/lnet/lnet/
Dlib-move.c329 daddr = ((char *)kmap(diov->kiov_page)) + in lnet_copy_kiov2kiov()
332 saddr = ((char *)kmap(siov->kiov_page)) + in lnet_copy_kiov2kiov()
410 addr = ((char *)kmap(kiov->kiov_page)) + in lnet_copy_kiov2iov()
481 addr = ((char *)kmap(kiov->kiov_page)) + in lnet_copy_iov2kiov()
/linux-4.4.14/fs/minix/
Ddir.c71 kmap(page); in dir_get_page()
/linux-4.4.14/fs/romfs/
Dsuper.c109 buf = kmap(page); in romfs_readpage()
/linux-4.4.14/Documentation/vm/
Dhighmem.txt71 (*) kmap(). This permits a short duration mapping of a single page. It needs
/linux-4.4.14/crypto/
Dahash.c51 walk->data = kmap(walk->pg); in hash_walk_next()
/linux-4.4.14/drivers/gpu/drm/
Ddrm_prime.c297 .kmap = drm_gem_dmabuf_kmap,
/linux-4.4.14/drivers/base/
Dfirmware_class.c748 page_data = kmap(buf->pages[page_nr]); in firmware_data_read()
844 page_data = kmap(buf->pages[page_nr]); in firmware_data_write()
/linux-4.4.14/net/core/
Ddatagram.c602 u8 *vaddr = kmap(page); in skb_copy_and_csum_datagram()
/linux-4.4.14/drivers/gpu/drm/armada/
Darmada_gem.c533 .kmap = armada_gem_dmabuf_no_kmap,
/linux-4.4.14/drivers/target/
Dtarget_core_rd.c178 p = kmap(pg); in rd_allocate_sgl_table()
Dtarget_core_transport.c1493 buf = kmap(sg_page(sgl)) + sgl->offset; in target_submit_cmd_map_sgls()
2248 return kmap(sg_page(sg)) + sg->offset; in transport_kmap_data_sg()
/linux-4.4.14/drivers/staging/rts5208/
Drtsx_transport.c104 unsigned char *ptr = kmap(page); in rtsx_stor_access_xfer_buf()
/linux-4.4.14/fs/ext2/
Ddir.c203 kmap(page); in ext2_get_page()
/linux-4.4.14/fs/exofs/
Ddir.c163 kmap(page); in exofs_get_page()
/linux-4.4.14/net/sunrpc/
Dxdr.c1304 c = kmap(*ppages) + base; in xdr_xcode_array2()
1382 c = kmap(*ppages); in xdr_xcode_array2()
/linux-4.4.14/tools/perf/
Dbuiltin-report.c391 struct kmap *kernel_kmap = kernel_map ? map__kmap(kernel_map) : NULL; in report__warn_kptr_restrict()
/linux-4.4.14/fs/cachefiles/
Drdwr.c931 data = kmap(page); in cachefiles_write_page()
/linux-4.4.14/drivers/misc/vmw_vmci/
Dvmci_host.c254 context->notify = kmap(context->notify_page) + (uva & (PAGE_SIZE - 1)); in vmci_host_setup_notify()
Dvmci_queue_pair.c369 va = kmap(kernel_if->u.h.page[page_index]); in __qp_memcpy_to_queue()
429 va = kmap(kernel_if->u.h.page[page_index]); in __qp_memcpy_from_queue()
/linux-4.4.14/fs/
Dbinfmt_elf_fdpic.c729 src = kmap(bprm->page[index]); in elf_fdpic_transfer_args_to_stack()
1533 void *kaddr = kmap(page); in elf_fdpic_dump_segments()
Daio.c1216 ev = kmap(page); in aio_read_events_ring()
Dexec.c528 kaddr = kmap(kmapped_page); in copy_strings()
/linux-4.4.14/fs/hpfs/
Dnamei.c478 char *link = kmap(page); in hpfs_symlink_readpage()
/linux-4.4.14/drivers/scsi/
Dlibiscsi_tcp.c142 segment->sg_mapped = kmap(sg_page(sg)); in iscsi_tcp_segment_map()
/linux-4.4.14/drivers/staging/lustre/lustre/ptlrpc/
Dsec_plain.c164 ptr = kmap(desc->bd_iov[i].kiov_page); in corrupt_bulk_data()
/linux-4.4.14/drivers/staging/lustre/lustre/obdecho/
Decho_client.c1494 addr = kmap(page); in echo_client_page_debug_setup()
1526 addr = kmap(page); in echo_client_page_debug_check()
/linux-4.4.14/drivers/staging/lustre/lustre/osc/
Dosc_request.c1075 ptr = kmap(pga[i]->pg) + in handle_short_read()
1091 ptr = kmap(pga[i]->pg) + (pga[i]->off & ~CFS_PAGE_MASK); in handle_short_read()
1179 unsigned char *ptr = kmap(pga[i]->pg); in osc_checksum_bulk()
/linux-4.4.14/drivers/md/bcache/
Drequest.c46 void *d = kmap(bv.bv_page) + bv.bv_offset; in bio_csum()
/linux-4.4.14/drivers/acpi/
Dosl.c351 return (void __iomem __force *)kmap(pfn_to_page(pfn)); in acpi_map()
/linux-4.4.14/fs/gfs2/
Dbmap.c71 void *kaddr = kmap(page); in gfs2_unstuffer_page()
Dops_fstype.c256 p = kmap(page); in gfs2_read_super()
/linux-4.4.14/drivers/infiniband/hw/qib/
Dqib_user_sdma.c579 mpage = kmap(page); in qib_user_sdma_coalesce()
/linux-4.4.14/drivers/staging/lustre/lustre/mgc/
Dmgc_request.c1414 ptr = kmap(pages[i]); in mgc_process_recover_log()
/linux-4.4.14/fs/ceph/
Ddir.c185 cache_ctl.dentries = kmap(cache_ctl.page); in __dcache_readdir()
/linux-4.4.14/arch/x86/kvm/
Dvmx.c4517 vapic_page = kmap(vmx->nested.virtual_apic_page); in vmx_complete_nested_posted_interrupt()
6676 *(u32 *)kmap(page) != VMCS12_REVISION) { in nested_vmx_check_vmptr()
6931 vmcs12 = kmap(page); in handle_vmclear()
7261 new_vmcs12 = kmap(page); in handle_vmptrld()
9118 (struct pi_desc *)kmap(vmx->nested.pi_desc_page); in nested_get_vmcs12_pages()
9195 msr_bitmap = (unsigned long *)kmap(page); in nested_vmx_merge_msr_bitmap()
/linux-4.4.14/drivers/md/
Draid1.c984 memcpy(kmap(bvecs[i].bv_page) + bvec->bv_offset, in alloc_behind_pages()
985 kmap(bvec->bv_page) + bvec->bv_offset, bvec->bv_len); in alloc_behind_pages()
