text_end           81 arch/arm/mm/idmap.c 				 const char *text_end, unsigned long prot)
text_end           87 arch/arm/mm/idmap.c 	end = virt_to_idmap(text_end);
text_end           81 arch/x86/include/asm/alternative.h 					void *text, void *text_end);
text_end           89 arch/x86/include/asm/alternative.h 					       void *text, void *text_end) {}
text_end          441 arch/x86/kernel/alternative.c 				  u8 *text, u8 *text_end)
text_end          448 arch/x86/kernel/alternative.c 		if (!*poff || ptr < text || ptr >= text_end)
text_end          457 arch/x86/kernel/alternative.c 				    u8 *text, u8 *text_end)
text_end          464 arch/x86/kernel/alternative.c 		if (!*poff || ptr < text || ptr >= text_end)
text_end          483 arch/x86/kernel/alternative.c 	u8		*text_end;
text_end          493 arch/x86/kernel/alternative.c 						  void *text,  void *text_end)
text_end          515 arch/x86/kernel/alternative.c 	smp->text_end	= text_end;
text_end          518 arch/x86/kernel/alternative.c 		smp->text, smp->text_end, smp->name);
text_end          522 arch/x86/kernel/alternative.c 	alternatives_smp_unlock(locks, locks_end, text, text_end);
text_end          558 arch/x86/kernel/alternative.c 					      mod->text, mod->text_end);
text_end          573 arch/x86/kernel/alternative.c 	u8 *text_end = end;
text_end          578 arch/x86/kernel/alternative.c 		if (mod->text > text_end || mod->text_end < text_start)
text_end          583 arch/x86/kernel/alternative.c 			if (text_start <= ptr && text_end > ptr)
text_end         1302 arch/x86/mm/init_64.c 	unsigned long text_end = PFN_ALIGN(&__stop___ex_table);
text_end         1325 arch/x86/mm/init_64.c 	set_memory_nx(text_end, (all_end - text_end) >> PAGE_SHIFT);
text_end         1335 arch/x86/mm/init_64.c 	free_kernel_image_pages((void *)text_end, (void *)rodata_start);
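
The arch/x86/kernel/alternative.c hits above (alternatives_smp_lock(), alternatives_smp_unlock(), alternatives_text_reserved()) all test candidate pointers against the half-open interval [text, text_end) via "ptr < text || ptr >= text_end". Below is a minimal user-space sketch of that idiom only; text_region and addr_in_text are made-up names for illustration, not kernel identifiers.

/*
 * Sketch of the half-open [text, text_end) range check seen in the
 * alternatives SMP lock/unlock paths listed above.
 */
#include <stdio.h>
#include <stdbool.h>

struct text_region {
	unsigned char *text;		/* first byte of the .text range */
	unsigned char *text_end;	/* one past the last byte */
};

/* Inverse of the kernel's "ptr < text || ptr >= text_end" rejection test. */
static bool addr_in_text(const struct text_region *r, const unsigned char *ptr)
{
	return ptr >= r->text && ptr < r->text_end;
}

int main(void)
{
	static unsigned char fake_text[64];
	struct text_region r = {
		.text	  = fake_text,
		.text_end = fake_text + sizeof(fake_text),
	};

	printf("first byte in range: %d\n", addr_in_text(&r, fake_text));
	printf("text_end in range:   %d\n", addr_in_text(&r, r.text_end));
	return 0;
}

Because text_end is exclusive, the byte at text_end itself reports as outside the range, matching the >= comparison in the listing.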
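
The arch/x86/mm/init_64.c hits (lines 1302, 1325 and 1335 of that file) page-align text_end with PFN_ALIGN() and hand set_memory_nx() a page count computed as (all_end - text_end) >> PAGE_SHIFT. The following standalone sketch only illustrates that arithmetic; the PAGE_SHIFT value, the local PFN_ALIGN definition and the sample addresses are assumptions for the example, not values taken from a real kernel.

/*
 * Sketch of the page-count arithmetic behind
 * set_memory_nx(text_end, (all_end - text_end) >> PAGE_SHIFT).
 */
#include <stdio.h>

#define PAGE_SHIFT	12		/* 4 KiB pages, as on x86 */
#define PFN_ALIGN(x)	(((x) + (1UL << PAGE_SHIFT) - 1) & ~((1UL << PAGE_SHIFT) - 1))

int main(void)
{
	unsigned long text_end = PFN_ALIGN(0x1e00123UL);	/* made-up end-of-text address */
	unsigned long all_end  = PFN_ALIGN(0x2000000UL);	/* made-up end of the mapping */

	/* Whole pages between the two page-aligned addresses. */
	unsigned long pages = (all_end - text_end) >> PAGE_SHIFT;

	printf("pages to mark NX: %lu\n", pages);
	return 0;
}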