P4D_SIZE   232 arch/s390/mm/dump_pagetables.c           addr += P4D_SIZE;
P4D_SIZE   122 arch/s390/mm/kasan_init.c                IS_ALIGNED(address, P4D_SIZE) &&
P4D_SIZE   123 arch/s390/mm/kasan_init.c                end - address >= P4D_SIZE) {
P4D_SIZE   126 arch/s390/mm/kasan_init.c                address = (address + P4D_SIZE) & P4D_MASK;
P4D_SIZE   270 arch/s390/mm/kasan_init.c                BUILD_BUG_ON(!IS_ALIGNED(KASAN_SHADOW_START, P4D_SIZE));
P4D_SIZE   271 arch/s390/mm/kasan_init.c                BUILD_BUG_ON(!IS_ALIGNED(KASAN_SHADOW_END, P4D_SIZE));
P4D_SIZE   174 arch/s390/mm/vmem.c                      address += P4D_SIZE;
P4D_SIZE    65 arch/x86/include/asm/pgtable_64_types.h  #define P4D_MASK (~(P4D_SIZE - 1))
P4D_SIZE    70 arch/x86/mm/ident_map.c                  next = (addr & P4D_MASK) + P4D_SIZE;
P4D_SIZE   688 arch/x86/mm/init_64.c                    vaddr_next = (vaddr & P4D_MASK) + P4D_SIZE;
P4D_SIZE   191 arch/x86/mm/kasan_init_64.c              for (; start < end; start += P4D_SIZE)
P4D_SIZE   263 arch/x86/mm/mem_encrypt_identity.c       entries += (DIV_ROUND_UP(len, P4D_SIZE) + 1) * sizeof(pud_t) * PTRS_PER_PUD;
P4D_SIZE   274 arch/x86/mm/mem_encrypt_identity.c       tables += DIV_ROUND_UP(entries, P4D_SIZE) * sizeof(pud_t) * PTRS_PER_PUD;
P4D_SIZE   118 arch/x86/platform/efi/efi_64.c           addr_p4d = addr_pgd + i * P4D_SIZE;
P4D_SIZE   177 arch/x86/platform/efi/efi_64.c           pgd_idx * PGDIR_SIZE + i * P4D_SIZE);
P4D_SIZE  1190 arch/x86/xen/mmu_pv.c                    xen_free_ro_pages(pa, P4D_SIZE);
P4D_SIZE  2071 arch/x86/xen/mmu_pv.c                    n_pud = roundup(size, P4D_SIZE) >> P4D_SHIFT;
P4D_SIZE    15 include/asm-generic/pgtable-nop4d.h      #define P4D_MASK (~(P4D_SIZE-1))
P4D_SIZE   536 include/asm-generic/pgtable.h            ({ unsigned long __boundary = ((addr) + P4D_SIZE) & P4D_MASK; \
P4D_SIZE   174 lib/ioremap.c                            if ((end - addr) != P4D_SIZE)
P4D_SIZE   177 lib/ioremap.c                            if (!IS_ALIGNED(addr, P4D_SIZE))
P4D_SIZE   180 lib/ioremap.c                            if (!IS_ALIGNED(phys_addr, P4D_SIZE))
P4D_SIZE   191 mm/kasan/init.c                          if (IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) {
P4D_SIZE   441 mm/kasan/init.c                          if (IS_ALIGNED(addr, P4D_SIZE) &&
P4D_SIZE   442 mm/kasan/init.c                          IS_ALIGNED(next, P4D_SIZE))
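Most of these references repeat two idioms: advancing an address to the start of the next P4D-sized region, as in arch/x86/mm/ident_map.c and arch/x86/mm/init_64.c (`(addr & P4D_MASK) + P4D_SIZE`), and clamping a page-table walk to the next P4D boundary or the walk's end, as in the `__boundary` macro at include/asm-generic/pgtable.h:536 (the body of `p4d_addr_end()`). The following is a minimal standalone sketch of that arithmetic, not kernel code: the `P4D_SHIFT` value of 39 is an assumption matching x86-64 with 5-level paging, and `p4d_next()` is a hypothetical helper name introduced here for illustration.

/*
 * Standalone sketch of the P4D boundary arithmetic seen above.
 * Assumption: P4D_SHIFT = 39 (x86-64, 5-level paging), so each
 * P4D entry covers a 512 GiB region. In the kernel these macros
 * come from the per-arch pgtable headers, not from user code.
 */
#include <stdio.h>

#define P4D_SHIFT 39
#define P4D_SIZE  (1UL << P4D_SHIFT)
#define P4D_MASK  (~(P4D_SIZE - 1))

/* Hypothetical helper: start of the next P4D region, as in ident_map.c. */
static unsigned long p4d_next(unsigned long addr)
{
	return (addr & P4D_MASK) + P4D_SIZE;
}

/* Mirrors p4d_addr_end(): next P4D boundary, clamped to `end`.
 * The `- 1` on both sides keeps the comparison correct even when
 * the boundary wraps around to 0 at the top of the address space. */
static unsigned long p4d_addr_end(unsigned long addr, unsigned long end)
{
	unsigned long boundary = (addr + P4D_SIZE) & P4D_MASK;

	return (boundary - 1 < end - 1) ? boundary : end;
}

int main(void)
{
	unsigned long addr = 0x0000123456789000UL;

	printf("next P4D region starts at %#lx\n", p4d_next(addr));
	printf("walk step within 1 GiB:   %#lx\n",
	       p4d_addr_end(addr, addr + (1UL << 30)));
	return 0;
}

The clamped form is what walk loops such as the one in mm/kasan/init.c rely on: each iteration handles at most one P4D entry, taking the large-mapping fast path only when `IS_ALIGNED(addr, P4D_SIZE)` holds and the remaining range covers a full P4D_SIZE.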