t_end  352 arch/nds32/mm/proc.c     unsigned long line_size, t_start, t_end;
t_end  369 arch/nds32/mm/proc.c     t_end = ((end - 1) & PAGE_MASK);
t_end  371 arch/nds32/mm/proc.c     if ((start & PAGE_MASK) == t_end) {
t_end  390 arch/nds32/mm/proc.c     cpu_dcache_wbinval_range(t_end, end);
t_end  392 arch/nds32/mm/proc.c     cpu_icache_inval_range(t_end, end);
t_end  395 arch/nds32/mm/proc.c     while (t_start < t_end) {
t_end  131 arch/nios2/mm/ioremap.c  char *t_addr, *t_end;
t_end  135 arch/nios2/mm/ioremap.c  t_end = t_addr + (size - 1);
t_end  137 arch/nios2/mm/ioremap.c  page <= virt_to_page(t_end); page++)
t_end   54 arch/parisc/mm/ioremap.c char *t_addr, *t_end;
t_end   58 arch/parisc/mm/ioremap.c t_end = t_addr + (size - 1);
t_end   61 arch/parisc/mm/ioremap.c page <= virt_to_page(t_end); page++) {
t_end  104 arch/s390/numa/numa.c    unsigned long t_start, t_end;
t_end  109 arch/s390/numa/numa.c    for_each_mem_pfn_range(i, nid, &t_start, &t_end, NULL) {
t_end  112 arch/s390/numa/numa.c    if (t_end > end_pfn)
t_end  113 arch/s390/numa/numa.c    end_pfn = t_end;
t_end  438 arch/x86/mm/pageattr.c   unsigned long t_end = (unsigned long)_etext - 1;
t_end  441 arch/x86/mm/pageattr.c   if (overlaps(start, end, t_start, t_end))
t_end  459 arch/x86/mm/pageattr.c   unsigned long t_end = (unsigned long)__end_rodata_hpage_align - 1;
t_end  463 arch/x86/mm/pageattr.c   if (!kernel_set_to_readonly || !overlaps(start, end, t_start, t_end))
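In the nds32 proc.c hits, t_end is a page boundary used to split an arbitrary [start, end) range for cache maintenance: the unaligned tail above the last page boundary is written back and invalidated separately, and a while loop then walks the whole pages below it. A rough, hedged sketch of that shape follows; the line_size-based cache-line alignment declared in the snippets and the handling of the unaligned head are omitted, and the loop body is an assumption, not shown above.

	unsigned long t_start, t_end;

	t_start = (start + PAGE_SIZE - 1) & PAGE_MASK; /* first page boundary at or above start */
	t_end = ((end - 1) & PAGE_MASK);               /* page boundary holding the last byte */

	if ((start & PAGE_MASK) == t_end) {
		/* The whole range sits inside a single page: flush it in one go. */
		cpu_dcache_wbinval_range(start, end);
		cpu_icache_inval_range(start, end);
		return;
	}

	/* Unaligned tail above the last page boundary. */
	cpu_dcache_wbinval_range(t_end, end);
	cpu_icache_inval_range(t_end, end);

	/* Whole pages between the first and last boundaries (assumed body). */
	while (t_start < t_end) {
		cpu_dcache_wbinval_range(t_start, t_start + PAGE_SIZE);
		cpu_icache_inval_range(t_start, t_start + PAGE_SIZE);
		t_start += PAGE_SIZE;
	}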
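The nios2 and parisc ioremap.c hits share one pattern: t_addr/t_end bound the kernel-virtual alias of the physical range being remapped, and the loop refuses to remap normal RAM unless every page in the range is reserved. A minimal sketch of that check, assuming the standard kernel helpers __va(), virt_to_page() and PageReserved() and a surrounding ioremap() implementation with phys_addr and size in scope:

	/* Reject attempts to ioremap() RAM that is in normal use: every
	 * page covering the range must be marked reserved. */
	char *t_addr, *t_end;
	struct page *page;

	t_addr = __va(phys_addr);        /* kernel-virtual alias of the range */
	t_end = t_addr + (size - 1);     /* last byte, so the loop is inclusive */

	for (page = virt_to_page(t_addr);
	     page <= virt_to_page(t_end); page++)
		if (!PageReserved(page))
			return NULL;     /* plain RAM in use, refuse to remap */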
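The s390 numa.c hits use for_each_mem_pfn_range() to walk a node's memblock ranges; t_start/t_end receive each range's PFN bounds and the loop keeps the largest end seen. A hedged sketch of that accumulation, assuming start_pfn/end_pfn are initialised to ULONG_MAX and 0 beforehand (an assumption, not visible in the snippets):

	unsigned long t_start, t_end;
	int i;

	/* Track the lowest start and highest end PFN covered by this node. */
	for_each_mem_pfn_range(i, nid, &t_start, &t_end, NULL) {
		if (t_start < start_pfn)
			start_pfn = t_start;
		if (t_end > end_pfn)
			end_pfn = t_end;
	}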
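In arch/x86/mm/pageattr.c, t_start/t_end mark protected kernel ranges (the text section ending at _etext, and the read-only data mapped up to __end_rodata_hpage_align), and overlaps() decides whether the range whose attributes are being changed touches them. The listing does not show overlaps() itself; a plausible inclusive-range check consistent with how it is called would be:

	/* True if the inclusive ranges [r1_start, r1_end] and
	 * [r2_start, r2_end] share at least one address. */
	static inline bool overlaps(unsigned long r1_start, unsigned long r1_end,
				    unsigned long r2_start, unsigned long r2_end)
	{
		return r1_start <= r2_end && r1_end >= r2_start;
	}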