vto                82 arch/microblaze/include/asm/page.h # define copy_user_page(vto, vfrom, vaddr, topg) \
vto                83 arch/microblaze/include/asm/page.h 			memcpy((vto), (vfrom), PAGE_SIZE)
vto               174 arch/mips/mm/init.c 	void *vfrom, *vto;
vto               176 arch/mips/mm/init.c 	vto = kmap_atomic(to);
vto               180 arch/mips/mm/init.c 		copy_page(vto, vfrom);
vto               184 arch/mips/mm/init.c 		copy_page(vto, vfrom);
vto               188 arch/mips/mm/init.c 	    pages_do_alias((unsigned long)vto, vaddr & PAGE_MASK))
vto               189 arch/mips/mm/init.c 		flush_data_cache_page((unsigned long)vto);
vto               190 arch/mips/mm/init.c 	kunmap_atomic(vto);
vto               201 arch/mips/mm/init.c 		void *vto = kmap_coherent(page, vaddr) + (vaddr & ~PAGE_MASK);
vto               202 arch/mips/mm/init.c 		memcpy(vto, src, len);
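
For context, the mips hits above all come from copy_user_highpage() in arch/mips/mm/init.c. Because the search only matches lines containing "vto", the vfrom side of the function is missing; the sketch below fills it in from memory of that function and is not a verbatim quote. The guard conditions in particular are paraphrased (the real code also checks source-page mapping/dirty state and a CPU cache feature bit), so treat them as assumptions.

void copy_user_highpage(struct page *to, struct page *from,
			unsigned long vaddr, struct vm_area_struct *vma)
{
	void *vfrom, *vto;

	vto = kmap_atomic(to);
	if (cpu_has_dc_aliases /* plus source-page state checks not shown */) {
		/* colour-matched temporary mapping of the source page */
		vfrom = kmap_coherent(from, vaddr);
		copy_page(vto, vfrom);
		kunmap_coherent();
	} else {
		vfrom = kmap_atomic(from);
		copy_page(vto, vfrom);
		kunmap_atomic(vfrom);
	}
	/* the real guard ANDs in a CPU cache feature test as well */
	if (pages_do_alias((unsigned long)vto, vaddr & PAGE_MASK))
		flush_data_cache_page((unsigned long)vto);
	kunmap_atomic(vto);
}
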
vto                30 arch/nds32/include/asm/page.h void copy_user_page(void *vto, void *vfrom, unsigned long vaddr,
vto               157 arch/nds32/mm/cacheflush.c 	unsigned long vto, flags;
vto               160 arch/nds32/mm/cacheflush.c 	vto = kremap0(addr, pfn << PAGE_SHIFT);
vto               161 arch/nds32/mm/cacheflush.c 	cpu_cache_wbinval_page(vto, vma->vm_flags & VM_EXEC);
vto               162 arch/nds32/mm/cacheflush.c 	kunmap01(vto);
vto               178 arch/nds32/mm/cacheflush.c void copy_user_page(void *vto, void *vfrom, unsigned long vaddr,
vto               183 arch/nds32/mm/cacheflush.c 	copy_page(vto, vfrom);
vto               184 arch/nds32/mm/cacheflush.c 	cpu_dcache_wbinval_page((unsigned long)vto);
vto               185 arch/nds32/mm/cacheflush.c 	cpu_icache_inval_page((unsigned long)vto);
vto               200 arch/nds32/mm/cacheflush.c 	unsigned long vto, vfrom, flags, kto, kfrom, pfrom, pto;
vto               209 arch/nds32/mm/cacheflush.c 	vto = kremap0(vaddr, pto);
vto               211 arch/nds32/mm/cacheflush.c 	copy_page((void *)vto, (void *)vfrom);
vto               213 arch/nds32/mm/cacheflush.c 	kunmap01(vto);
vto               221 arch/nds32/mm/cacheflush.c 	unsigned long vto, flags, kto;
vto               230 arch/nds32/mm/cacheflush.c 	vto = kremap0(vaddr, page_to_phys(page));
vto               231 arch/nds32/mm/cacheflush.c 	clear_page((void *)vto);
vto               232 arch/nds32/mm/cacheflush.c 	kunmap01(vto);
vto               269 arch/nds32/mm/cacheflush.c 	unsigned long line_size, start, end, vto, flags;
vto               272 arch/nds32/mm/cacheflush.c 	vto = kremap0(vaddr, page_to_phys(page));
vto               273 arch/nds32/mm/cacheflush.c 	dst = (void *)(vto | (vaddr & (PAGE_SIZE - 1)));
vto               283 arch/nds32/mm/cacheflush.c 	kunmap01(vto);
vto               290 arch/nds32/mm/cacheflush.c 	unsigned long vto, flags;
vto               293 arch/nds32/mm/cacheflush.c 	vto = kremap0(vaddr, page_to_phys(page));
vto               294 arch/nds32/mm/cacheflush.c 	src = (void *)(vto | (vaddr & (PAGE_SIZE - 1)));
vto               296 arch/nds32/mm/cacheflush.c 	kunmap01(vto);
vto                54 arch/nios2/include/asm/page.h extern void copy_user_page(void *vto, void *vfrom, unsigned long vaddr,
vto               233 arch/nios2/mm/cacheflush.c void copy_user_page(void *vto, void *vfrom, unsigned long vaddr,
vto               238 arch/nios2/mm/cacheflush.c 	copy_page(vto, vfrom);
vto               239 arch/nios2/mm/cacheflush.c 	__flush_dcache((unsigned long)vto, (unsigned long)vto + PAGE_SIZE);
vto               240 arch/nios2/mm/cacheflush.c 	__flush_icache((unsigned long)vto, (unsigned long)vto + PAGE_SIZE);
vto                32 arch/parisc/include/asm/page.h #define clear_user_page(vto, vaddr, page) clear_page_asm(vto)
vto                33 arch/parisc/include/asm/page.h void copy_user_page(void *vto, void *vfrom, unsigned long vaddr,
vto               469 arch/parisc/kernel/cache.c void copy_user_page(void *vto, void *vfrom, unsigned long vaddr,
vto               478 arch/parisc/kernel/cache.c 	copy_page_asm(vto, vfrom);
vto               552 arch/powerpc/mm/mem.c void copy_user_page(void *vto, void *vfrom, unsigned long vaddr,
vto               555 arch/powerpc/mm/mem.c 	copy_page(vto, vfrom);
vto                54 arch/riscv/include/asm/page.h #define copy_user_page(vto, vfrom, vaddr, topg) \
vto                55 arch/riscv/include/asm/page.h 			memcpy((vto), (vfrom), PAGE_SIZE)
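
The microblaze (lines 82-83) and riscv (lines 54-55) hits define copy_user_page() as a bare memcpy, presumably because those ports have no cache aliasing to manage when copying a user page. Below is a minimal user-space sketch of that expansion; the PAGE_SIZE value and the surrounding main() are illustration-only assumptions, not kernel code.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PAGE_SIZE 4096UL	/* assumed page size for the demo */

/* same shape as the microblaze/riscv macro: vaddr and the
 * destination page argument are simply ignored */
#define copy_user_page(vto, vfrom, vaddr, topg) \
		memcpy((vto), (vfrom), PAGE_SIZE)

int main(void)
{
	unsigned char *vfrom = malloc(PAGE_SIZE);
	unsigned char *vto = malloc(PAGE_SIZE);

	if (!vfrom || !vto)
		return 1;

	memset(vfrom, 0xa5, PAGE_SIZE);
	/* the extra arguments are unused by this variant */
	copy_user_page(vto, vfrom, 0, NULL);

	printf("pages match: %s\n",
	       memcmp(vto, vfrom, PAGE_SIZE) == 0 ? "yes" : "no");
	free(vfrom);
	free(vto);
	return 0;
}
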
vto               366 arch/s390/include/asm/vx-insn.h .macro	VLM	vfrom, vto, disp, base, hint=3
vto               376 arch/s390/include/asm/vx-insn.h .macro	VSTM	vfrom, vto, disp, base, hint=3
vto                66 arch/sh/mm/cache.c 		void *vto = kmap_coherent(page, vaddr) + (vaddr & ~PAGE_MASK);
vto                67 arch/sh/mm/cache.c 		memcpy(vto, src, len);
vto                68 arch/sh/mm/cache.c 		kunmap_coherent(vto);
vto                98 arch/sh/mm/cache.c 	void *vfrom, *vto;
vto               100 arch/sh/mm/cache.c 	vto = kmap_atomic(to);
vto               105 arch/sh/mm/cache.c 		copy_page(vto, vfrom);
vto               109 arch/sh/mm/cache.c 		copy_page(vto, vfrom);
vto               113 arch/sh/mm/cache.c 	if (pages_do_alias((unsigned long)vto, vaddr & PAGE_MASK) ||
vto               115 arch/sh/mm/cache.c 		__flush_purge_region(vto, PAGE_SIZE);
vto               117 arch/sh/mm/cache.c 	kunmap_atomic(vto);
vto              3131 arch/sparc/mm/init_64.c 	char *vfrom, *vto;
vto              3134 arch/sparc/mm/init_64.c 	vto = kmap_atomic(to);
vto              3135 arch/sparc/mm/init_64.c 	copy_user_page(vto, vfrom, vaddr, to);
vto              3136 arch/sparc/mm/init_64.c 	kunmap_atomic(vto);
vto              3165 arch/sparc/mm/init_64.c 	char *vfrom, *vto;
vto              3168 arch/sparc/mm/init_64.c 	vto = kmap_atomic(to);
vto              3169 arch/sparc/mm/init_64.c 	copy_page(vto, vfrom);
vto              3170 arch/sparc/mm/init_64.c 	kunmap_atomic(vto);
vto                78 block/bounce.c 	unsigned char *vto;
vto                80 block/bounce.c 	vto = kmap_atomic(to->bv_page);
vto                81 block/bounce.c 	memcpy(vto + to->bv_offset, vfrom, to->bv_len);
vto                82 block/bounce.c 	kunmap_atomic(vto);
vto               331 block/bounce.c 			char *vto, *vfrom;
vto               335 block/bounce.c 			vto = page_address(to->bv_page) + to->bv_offset;
vto               337 block/bounce.c 			memcpy(vto, vfrom, to->bv_len);
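
The two block/bounce.c hits show both flavours of moving data between a bio page and its bounce page: the first maps a possibly-highmem destination with kmap_atomic(), while the second can use page_address() directly because the freshly allocated bounce page is known to be addressable in lowmem. A sketch of the highmem helper, reconstructed around the vto lines above (the function name and bio_vec field usage are from memory, so treat details as assumptions):

static void bounce_copy_vec(struct bio_vec *to, unsigned char *vfrom)
{
	unsigned char *vto;

	/* temporarily map the (possibly highmem) destination page ... */
	vto = kmap_atomic(to->bv_page);
	/* ... copy the segment over from the bounce buffer ... */
	memcpy(vto + to->bv_offset, vfrom, to->bv_len);
	/* ... and drop the temporary mapping again */
	kunmap_atomic(vto);
}
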
vto               687 fs/dax.c       	void *vto, *kaddr;
vto               702 fs/dax.c       	vto = kmap_atomic(to);
vto               703 fs/dax.c       	copy_user_page(vto, (void __force *)kaddr, vaddr, to);
vto               704 fs/dax.c       	kunmap_atomic(vto);
vto               183 fs/erofs/data.c 			void *vsrc, *vto;
vto               196 fs/erofs/data.c 			vto = kmap_atomic(page);
vto               197 fs/erofs/data.c 			memcpy(vto, vsrc + blkoff, map.m_plen);
vto               198 fs/erofs/data.c 			memset(vto + map.m_plen, 0, PAGE_SIZE - map.m_plen);
vto               199 fs/erofs/data.c 			kunmap_atomic(vto);
vto               253 include/linux/highmem.h 	char *vfrom, *vto;
vto               256 include/linux/highmem.h 	vto = kmap_atomic(to);
vto               257 include/linux/highmem.h 	copy_user_page(vto, vfrom, vaddr, to);
vto               258 include/linux/highmem.h 	kunmap_atomic(vto);
vto               268 include/linux/highmem.h 	char *vfrom, *vto;
vto               271 include/linux/highmem.h 	vto = kmap_atomic(to);
vto               272 include/linux/highmem.h 	copy_page(vto, vfrom);
vto               273 include/linux/highmem.h 	kunmap_atomic(vto);
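
The include/linux/highmem.h hits (lines 253-258 and 268-273) are the generic copy_user_highpage() and copy_highpage() fallbacks used when an architecture does not supply its own. The vfrom side is missing above because those lines do not contain "vto"; filled in here as a sketch of the kmap_atomic-era generic code (newer kernels use kmap_local_page() instead). copy_highpage() is identical except that it calls copy_page() rather than copy_user_page().

static inline void copy_user_highpage(struct page *to, struct page *from,
	unsigned long vaddr, struct vm_area_struct *vma)
{
	char *vfrom, *vto;

	/* map source and destination, either of which may be in highmem */
	vfrom = kmap_atomic(from);
	vto = kmap_atomic(to);
	copy_user_page(vto, vfrom, vaddr, to);
	/* atomic kmaps are released in reverse order of acquisition */
	kunmap_atomic(vto);
	kunmap_atomic(vfrom);
}
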
vto               223 net/sunrpc/xdr.c 	char *vfrom, *vto;
vto               256 net/sunrpc/xdr.c 		vto = kmap_atomic(*pgto);
vto               259 net/sunrpc/xdr.c 			memcpy(vto + pgto_base, vfrom + pgfrom_base, copy);
vto               262 net/sunrpc/xdr.c 			memmove(vto + pgto_base, vto + pgfrom_base, copy);
vto               264 net/sunrpc/xdr.c 		kunmap_atomic(vto);
vto               283 net/sunrpc/xdr.c 	char *vto;
vto               294 net/sunrpc/xdr.c 		vto = kmap_atomic(*pgto);
vto               295 net/sunrpc/xdr.c 		memcpy(vto + pgbase, p, copy);
vto               296 net/sunrpc/xdr.c 		kunmap_atomic(vto);
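
The first group of net/sunrpc/xdr.c hits (lines 223-264, in what appears to be _shift_data_right_pages()) always maps the destination page, but only maps the source separately when it is a different page; when source and destination share a page the copy is an overlapping move within one mapping. The per-page step, sketched around the hits with the loop bookkeeping omitted (not the verbatim kernel loop):

	vto = kmap_atomic(*pgto);
	if (*pgto != *pgfrom) {
		/* distinct pages: map both and do a plain copy */
		vfrom = kmap_atomic(*pgfrom);
		memcpy(vto + pgto_base, vfrom + pgfrom_base, copy);
		kunmap_atomic(vfrom);
	} else {
		/* same page: the regions may overlap, so use memmove() */
		memmove(vto + pgto_base, vto + pgfrom_base, copy);
	}
	kunmap_atomic(vto);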