Lines matching refs: page
64 struct page *page; in setup_zero_pages() local
75 page = virt_to_page((void *)empty_zero_page); in setup_zero_pages()
76 split_page(page, order); in setup_zero_pages()
77 for (i = 0; i < (1 << order); i++, page++) in setup_zero_pages()
78 mark_page_reserved(page); in setup_zero_pages()
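The setup_zero_pages() fragments above (lines 75-78) reserve a block of prezeroed pages; these appear to come from the MIPS mm init code. A minimal sketch of the surrounding pattern, assuming empty_zero_page was already allocated elsewhere with __get_free_pages(GFP_KERNEL | __GFP_ZERO, order):

	/* Sketch only: break an order-N allocation into single pages and
	 * reserve each one so the page allocator never reclaims them. */
	struct page *page = virt_to_page((void *)empty_zero_page);
	unsigned int i;

	split_page(page, order);		/* now 1 << order independent order-0 pages */
	for (i = 0; i < (1 << order); i++, page++)
		mark_page_reserved(page);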
83 static void *__kmap_pgprot(struct page *page, unsigned long addr, pgprot_t prot) in __kmap_pgprot() argument
91 BUG_ON(Page_dcache_dirty(page)); in __kmap_pgprot()
97 pte = mk_pte(page, prot); in __kmap_pgprot()
126 void *kmap_coherent(struct page *page, unsigned long addr) in kmap_coherent() argument
128 return __kmap_pgprot(page, addr, PAGE_KERNEL); in kmap_coherent()
131 void *kmap_noncoherent(struct page *page, unsigned long addr) in kmap_noncoherent() argument
133 return __kmap_pgprot(page, addr, PAGE_KERNEL_NC); in kmap_noncoherent()
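kmap_coherent() and kmap_noncoherent() (lines 126-133) are thin wrappers around __kmap_pgprot(): the former maps the page with PAGE_KERNEL at a kernel virtual address chosen to match the d-cache colour of the user address, the latter uses the uncached PAGE_KERNEL_NC protection. A hedged sketch of the usual pairing with kunmap_coherent(), the same pattern the copy helpers below rely on:

	/* Sketch: write to a user page through an alias-safe kernel mapping.
	 * page, vaddr, src and len are assumed to come from the caller. */
	void *vto = kmap_coherent(page, vaddr) + (vaddr & ~PAGE_MASK);

	memcpy(vto, src, len);		/* hits the same cache lines the user mapping uses */
	kunmap_coherent();		/* tear the temporary mapping down again */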
157 void copy_user_highpage(struct page *to, struct page *from, in copy_user_highpage()
182 struct page *page, unsigned long vaddr, void *dst, const void *src, in copy_to_user_page() argument
186 page_mapped(page) && !Page_dcache_dirty(page)) { in copy_to_user_page()
187 void *vto = kmap_coherent(page, vaddr) + (vaddr & ~PAGE_MASK); in copy_to_user_page()
193 SetPageDcacheDirty(page); in copy_to_user_page()
196 flush_cache_page(vma, vaddr, page_to_pfn(page)); in copy_to_user_page()
200 struct page *page, unsigned long vaddr, void *dst, const void *src, in copy_from_user_page() argument
204 page_mapped(page) && !Page_dcache_dirty(page)) { in copy_from_user_page()
205 void *vfrom = kmap_coherent(page, vaddr) + (vaddr & ~PAGE_MASK); in copy_from_user_page()
211 SetPageDcacheDirty(page); in copy_from_user_page()
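Both copy_to_user_page() and copy_from_user_page() gate on the condition seen at lines 186 and 204: only when the page is mapped into user space and not already marked dirty do they go through kmap_coherent(); otherwise they use the direct mapping and record the stale alias with SetPageDcacheDirty(). A sketch of the copy_to_user_page() side, reconstructed from the fragments; the cpu_has_dc_aliases and VM_EXEC tests are assumptions here (they are the customary MIPS checks but are not shown in the listing), and the copy_from side only reverses the memcpy() direction:

	if (cpu_has_dc_aliases &&
	    page_mapped(page) && !Page_dcache_dirty(page)) {
		void *vto = kmap_coherent(page, vaddr) + (vaddr & ~PAGE_MASK);
		memcpy(vto, src, len);
		kunmap_coherent();
	} else {
		memcpy(dst, src, len);
		if (cpu_has_dc_aliases)
			SetPageDcacheDirty(page);	/* the cached alias may now be stale */
	}
	if (vma->vm_flags & VM_EXEC)			/* line 196: keep the i-cache coherent */
		flush_cache_page(vma, vaddr, page_to_pfn(page));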
324 struct page *page = pfn_to_page(tmp); in mem_init_free_highmem() local
327 SetPageReserved(page); in mem_init_free_highmem()
329 free_highmem_page(page); in mem_init_free_highmem()
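mem_init_free_highmem() (lines 324-329) walks every highmem pfn and either reserves the page or hands it to the buddy allocator. A sketch of that loop, assuming the memblock-based "is this pfn real RAM" test used by recent kernels:

	for (tmp = highstart_pfn; tmp < highend_pfn; tmp++) {
		struct page *page = pfn_to_page(tmp);	/* line 324 */

		if (!memblock_is_memory(PFN_PHYS(tmp)))
			SetPageReserved(page);		/* hole or firmware-owned: keep it out of the allocator */
		else
			free_highmem_page(page);	/* real RAM: give it to the buddy allocator */
	}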
399 struct page *page = pfn_to_page(pfn); in free_init_pages() local
403 free_reserved_page(page); in free_init_pages()
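free_init_pages() (lines 399-403) releases the pages behind __init code and data once boot is done: each pfn in the range is returned to the allocator with free_reserved_page(). A hedged sketch of the loop, with the pfn bounds and the poison step assumed rather than taken from the listing:

	unsigned long pfn;

	for (pfn = PFN_UP(begin); pfn < PFN_DOWN(end); pfn++) {
		struct page *page = pfn_to_page(pfn);	/* line 399 */

		/* assumed: poison the freed initmem so stale users are caught */
		memset(phys_to_virt(PFN_PHYS(pfn)), POISON_FREE_INITMEM, PAGE_SIZE);
		free_reserved_page(page);		/* line 403: clears Reserved and frees to the buddy allocator */
	}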