Home
last modified time | relevance | path

Searched refs:new_pmd (Results 1 – 8 of 8) sorted by relevance

/linux-4.4.14/arch/powerpc/mm/
hugepage-hash64.c:28 unsigned long old_pmd, new_pmd; in __hash_page_thp() local
52 new_pmd = old_pmd | _PAGE_BUSY | _PAGE_ACCESSED; in __hash_page_thp()
54 new_pmd |= _PAGE_DIRTY; in __hash_page_thp()
56 old_pmd, new_pmd)); in __hash_page_thp()
61 rflags = new_pmd & _PAGE_USER; in __hash_page_thp()
62 if ((new_pmd & _PAGE_USER) && !((new_pmd & _PAGE_RW) && in __hash_page_thp()
63 (new_pmd & _PAGE_DIRTY))) in __hash_page_thp()
68 rflags |= ((new_pmd & _PAGE_EXEC) ? 0 : HPTE_R_N); in __hash_page_thp()
132 new_pmd |= _PAGE_HASHPTE; in __hash_page_thp()
135 rflags |= (new_pmd & (_PAGE_WRITETHRU | _PAGE_NO_CACHE | in __hash_page_thp()
[all …]
/linux-4.4.14/arch/arm/mm/
pgd.c:37 pmd_t *new_pmd, *init_pmd; in pgd_alloc() local
64 new_pmd = pmd_alloc(mm, new_pud, 0); in pgd_alloc()
65 if (!new_pmd) in pgd_alloc()
79 new_pmd = pmd_alloc(mm, new_pud, 0); in pgd_alloc()
80 if (!new_pmd) in pgd_alloc()
83 new_pte = pte_alloc_map(mm, NULL, new_pmd, 0); in pgd_alloc()
93 pmd_val(*new_pmd) &= ~PMD_DOMAIN_MASK; in pgd_alloc()
94 pmd_val(*new_pmd) |= PMD_DOMAIN(DOMAIN_VECTORS); in pgd_alloc()
109 pmd_free(mm, new_pmd); in pgd_alloc()
/linux-4.4.14/arch/unicore32/mm/
pgd.c:30 pmd_t *new_pmd, *init_pmd; in get_pgd_slow() local
53 new_pmd = pmd_alloc(mm, (pud_t *)new_pgd, 0); in get_pgd_slow()
54 if (!new_pmd) in get_pgd_slow()
57 new_pte = pte_alloc_map(mm, NULL, new_pmd, 0); in get_pgd_slow()
71 pmd_free(mm, new_pmd); in get_pgd_slow()
/linux-4.4.14/mm/
mremap.c:91 struct vm_area_struct *new_vma, pmd_t *new_pmd, in move_ptes() argument
134 new_pte = pte_offset_map(new_pmd, new_addr); in move_ptes()
135 new_ptl = pte_lockptr(mm, new_pmd); in move_ptes()
169 pmd_t *old_pmd, *new_pmd; in move_page_tables() local
191 new_pmd = alloc_new_pmd(vma->vm_mm, vma, new_addr); in move_page_tables()
192 if (!new_pmd) in move_page_tables()
204 old_pmd, new_pmd); in move_page_tables()
216 if (pmd_none(*new_pmd) && __pte_alloc(new_vma->vm_mm, new_vma, in move_page_tables()
217 new_pmd, new_addr)) in move_page_tables()
225 new_vma, new_pmd, new_addr, need_rmap_locks); in move_page_tables()
huge_memory.c:1499 pmd_t *old_pmd, pmd_t *new_pmd) in move_huge_pmd() argument
1517 if (WARN_ON(!pmd_none(*new_pmd))) { in move_huge_pmd()
1518 VM_BUG_ON(pmd_trans_huge(*new_pmd)); in move_huge_pmd()
1528 new_ptl = pmd_lockptr(mm, new_pmd); in move_huge_pmd()
1532 VM_BUG_ON(!pmd_none(*new_pmd)); in move_huge_pmd()
1537 pgtable_trans_huge_deposit(mm, new_pmd, pgtable); in move_huge_pmd()
1539 set_pmd_at(mm, new_addr, new_pmd, pmd_mksoft_dirty(pmd)); in move_huge_pmd()
/linux-4.4.14/arch/arm/include/asm/
kvm_mmu.h:72 static inline void kvm_set_pmd(pmd_t *pmd, pmd_t new_pmd) in kvm_set_pmd() argument
74 *pmd = new_pmd; in kvm_set_pmd()
/linux-4.4.14/arch/arm/kvm/
mmu.c:870 *cache, phys_addr_t addr, const pmd_t *new_pmd) in stage2_set_pmd_huge() argument
886 VM_BUG_ON(pmd_present(*pmd) && pmd_pfn(*pmd) != pmd_pfn(*new_pmd)); in stage2_set_pmd_huge()
896 kvm_set_pmd(pmd, *new_pmd); in stage2_set_pmd_huge()
1318 pmd_t new_pmd = pfn_pmd(pfn, mem_type); in user_mem_abort() local
1319 new_pmd = pmd_mkhuge(new_pmd); in user_mem_abort()
1321 kvm_set_s2pmd_writable(&new_pmd); in user_mem_abort()
1325 ret = stage2_set_pmd_huge(kvm, memcache, fault_ipa, &new_pmd); in user_mem_abort()
/linux-4.4.14/include/linux/
huge_mm.h:32 pmd_t *old_pmd, pmd_t *new_pmd);