_stext            129 arch/alpha/kernel/traps.c 		extern char _stext[], _etext[];
_stext            132 arch/alpha/kernel/traps.c 		if (tmp < (unsigned long) &_stext)
_stext            483 arch/arc/kernel/setup.c 	return addr >= (unsigned long)_stext && addr <= (unsigned long)_end;
_stext            116 arch/arm/include/asm/memory.h #define KERNEL_START		_stext
_stext            524 arch/arm/mm/init.c 		.end	= (unsigned long)_stext,
_stext            550 arch/arm/mm/init.c 		.start  = (unsigned long)_stext,
_stext            149 arch/arm64/kernel/kaslr.c 		module_range = SZ_2G - (u64)(_end - _stext);
_stext            160 arch/arm64/kernel/kaslr.c 		module_range = MODULES_VSIZE - (u64)(_etext - _stext);
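
The two arm64 KASLR hits above size the randomizable module region by subtracting whatever must stay reachable: the whole image (_end - _stext) when modules may land anywhere in a 2 GiB window, or only the text (_etext - _stext) when they are confined to the MODULES_VSIZE region next to the kernel. A minimal sketch of that arithmetic, not the real kaslr.c logic; choose_module_base, kernel_start, and seed are hypothetical names (MODULES_VSIZE comes from the arm64 headers):

    #include <linux/sizes.h>	/* SZ_2G */
    #include <linux/types.h>

    extern char _stext[], _etext[], _end[];

    /* Hypothetical sketch: shrink the window by the image (or text)
     * size, then scale a 16-bit random seed into what is left. */
    static u64 choose_module_base(u64 kernel_start, u16 seed, bool full)
    {
    	u64 module_range;

    	if (full)	/* CONFIG_RANDOMIZE_MODULE_REGION_FULL */
    		module_range = SZ_2G - (u64)(_end - _stext);
    	else
    		module_range = MODULES_VSIZE - (u64)(_etext - _stext);

    	return kernel_start + ((module_range * seed) >> 16);
    }
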
_stext            341 arch/c6x/kernel/setup.c 	init_mm.start_code = (unsigned long) &_stext;
_stext             32 arch/csky/kernel/setup.c 	memblock_reserve(__pa(_stext), _end - _stext);
_stext             84 arch/csky/kernel/setup.c 	init_mm.start_code = (unsigned long) _stext;
_stext             91 arch/h8300/kernel/setup.c 	memblock_reserve(__pa(_stext), _end - _stext);
_stext            103 arch/h8300/kernel/setup.c 	init_mm.start_code = (unsigned long) _stext;
_stext             91 arch/m68k/kernel/setup_no.c 	init_mm.start_code = (unsigned long) &_stext;
_stext            138 arch/m68k/kernel/setup_no.c 		 _stext, _etext, _sdata, _edata, __bss_start, __bss_stop);
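
The csky, h8300, and m68k setup hits above (and the openrisc ones further down) all follow the same early-boot pattern: reserve the kernel image so the page allocator cannot hand it out, then record the section bounds in init_mm. A minimal sketch of that pattern, assuming the usual linker-script symbols; setup_kernel_bounds is a hypothetical name:

    #include <linux/memblock.h>
    #include <linux/mm.h>

    extern char _stext[], _etext[], _sdata[], _edata[], _end[];

    static void __init setup_kernel_bounds(void)
    {
    	/* keep the allocator away from the kernel image [_stext, _end) */
    	memblock_reserve(__pa(_stext), _end - _stext);

    	init_mm.start_code = (unsigned long)_stext;
    	init_mm.end_code   = (unsigned long)_etext;
    	init_mm.start_data = (unsigned long)_sdata;
    	init_mm.end_data   = (unsigned long)_edata;
    	init_mm.brk        = (unsigned long)_end;
    }
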
_stext            168 arch/microblaze/mm/pgtable.c 		if ((char *) v < _stext || (char *) v >= _etext)
_stext           1517 arch/mips/mm/c-r4k.c 	begin = (unsigned long) &_stext;
_stext            191 arch/mips/mm/sc-rm7k.c 	begin = (unsigned long) &_stext;
_stext             77 arch/mips/sgi-ip27/ip27-klnuma.c 	source_start = (unsigned long) _stext;
_stext            301 arch/nds32/kernel/setup.c 	init_mm.start_code = (unsigned long)&_stext;
_stext            155 arch/nios2/kernel/setup.c 	init_mm.start_code = (unsigned long) _stext;
_stext             96 arch/nios2/kernel/traps.c 		if (((addr >= (unsigned long) _stext) &&
_stext             82 arch/openrisc/kernel/setup.c 	memblock_reserve(__pa(_stext), _end - _stext);
_stext            290 arch/openrisc/kernel/setup.c 	init_mm.start_code = (unsigned long)_stext;
_stext             43 arch/powerpc/include/asm/sections.h 	if (addr >= (unsigned long)_stext && addr < (unsigned long)__init_end)
_stext             65 arch/powerpc/include/asm/sections.h 	real_start = __start_interrupts - _stext;
_stext             66 arch/powerpc/include/asm/sections.h 	real_end = __end_interrupts - _stext;
_stext             75 arch/powerpc/include/asm/sections.h 		(unsigned long)_stext < end;
_stext             37 arch/powerpc/kernel/kprobes.c 		(addr >= (unsigned long)_stext &&
_stext            678 arch/powerpc/kernel/kvm.c 	start = (void*)_stext;
_stext            170 arch/powerpc/kernel/machine_kexec.c 	if (overlaps_crashkernel(__pa(_stext), _end - _stext)) {
_stext            190 arch/powerpc/kernel/paca.c 	new_paca->kernelbase = (unsigned long) _stext;
_stext            918 arch/powerpc/kernel/setup-common.c 	init_mm.start_code = (unsigned long)_stext;
_stext            947 arch/powerpc/kernel/setup-common.c 	if (IS_ENABLED(CONFIG_PPC64) && (unsigned long)_stext & 0xffff)
_stext            949 arch/powerpc/kernel/setup-common.c 		      (unsigned long)_stext);
_stext            400 arch/powerpc/kernel/traps.c 	unsigned long kbase = (unsigned long)_stext;
_stext            399 arch/powerpc/lib/feature-fixups.c 	length = (__end_interrupts - _stext) / sizeof(int);
_stext            188 arch/powerpc/mm/book3s32/mmu.c 	unsigned long base = (unsigned long)_stext - PAGE_OFFSET;
_stext            444 arch/powerpc/mm/book3s64/hash_pgtable.c 	start = (unsigned long)_stext;
_stext            215 arch/powerpc/mm/book3s64/radix_pgtable.c 	start = (unsigned long)_stext;
_stext             35 arch/powerpc/mm/pgtable_32.c extern char etext[], _stext[], _sinittext[], _einittext[];
_stext             97 arch/powerpc/mm/pgtable_32.c 		ktext = ((char *)v >= _stext && (char *)v < etext) ||
_stext            210 arch/powerpc/mm/pgtable_32.c 	if (v_block_mapped((unsigned long)_stext + 1))
_stext            228 arch/powerpc/mm/pgtable_32.c 	page = virt_to_page(_stext);
_stext            230 arch/powerpc/mm/pgtable_32.c 		   PFN_DOWN((unsigned long)_stext);
_stext            570 arch/powerpc/platforms/powermac/bootx_init.c 		for (ptr = ((unsigned long) &_stext) & PAGE_MASK;
_stext             58 arch/riscv/kernel/setup.c 	init_mm.start_code = (unsigned long) _stext;
_stext            835 arch/s390/kernel/setup.c 	memblock_reserve((unsigned long)_stext, PFN_PHYS(start_pfn)
_stext            836 arch/s390/kernel/setup.c 			 - (unsigned long)_stext);
_stext            157 arch/s390/kernel/suspend.c 	unsigned long stext_pfn = PFN_DOWN(__pa(_stext));
_stext             33 arch/s390/mm/dump_pagetables.c 	[KERNEL_START_NR]	= {(unsigned long)_stext, "Kernel Image Start"},
_stext            407 arch/s390/mm/vmem.c 	__set_memory((unsigned long)_stext,
_stext            408 arch/s390/mm/vmem.c 		     (unsigned long)(_etext - _stext) >> PAGE_SHIFT,
_stext            423 arch/s390/mm/vmem.c 		(unsigned long)(__end_rodata - _stext) >> 10);
_stext            230 arch/sh/kernel/smp.c 	mp_ops->start_cpu(cpu, (unsigned long)_stext);
_stext            197 arch/unicore32/kernel/setup.c 	kernel_code.start   = virt_to_phys(_stext);
_stext            244 arch/unicore32/kernel/setup.c 	init_mm.start_code = (unsigned long) _stext;
_stext             29 arch/x86/include/asm/orc_lookup.h #define LOOKUP_START_IP		(unsigned long)_stext
_stext            529 arch/x86/mm/fault.c 	if ((address >= (u64)_stext && address <= (u64)_etext) ||
_stext            365 arch/x86/mm/kasan_init_64.c 	kasan_populate_shadow((unsigned long)kasan_mem_to_shadow(_stext),
_stext            367 arch/x86/mm/kasan_init_64.c 			      early_pfn_to_nid(__pa(_stext)));
_stext            276 arch/xtensa/kernel/setup.c extern char _stext[];
_stext            341 arch/xtensa/kernel/setup.c 	mem_reserve(__pa(_stext), __pa(_end));
_stext             35 include/asm-generic/sections.h extern char _text[], _stext[], _etext[];
_stext             33 include/linux/kallsyms.h 	if ((addr >= (unsigned long)_stext && addr <= (unsigned long)_etext) ||
_stext             41 include/linux/kallsyms.h 	if (addr >= (unsigned long)_stext && addr <= (unsigned long)_end)
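
The two kallsyms.h hits above are the classic range tests: an address belongs to kernel text if it falls in [_stext, _etext], and to the kernel image at all if it falls in [_stext, _end]. A reduced sketch of those checks, dropping the arch_is_kernel_text() and gate-area fallbacks the real header also consults; the helper names here are hypothetical:

    #include <asm/sections.h>	/* _stext, _etext, _end */

    static inline int is_kernel_text_addr(unsigned long addr)
    {
    	return addr >= (unsigned long)_stext &&
    	       addr <= (unsigned long)_etext;
    }

    static inline int is_kernel_addr(unsigned long addr)
    {
    	return addr >= (unsigned long)_stext &&
    	       addr <= (unsigned long)_end;
    }
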
_stext             26 include/trace/events/preemptirq.h 		__entry->caller_offs = (s32)(ip - (unsigned long)_stext);
_stext             27 include/trace/events/preemptirq.h 		__entry->parent_offs = (s32)(parent_ip - (unsigned long)_stext);
_stext             31 include/trace/events/preemptirq.h 		  (void *)((unsigned long)(_stext) + __entry->caller_offs),
_stext             32 include/trace/events/preemptirq.h 		  (void *)((unsigned long)(_stext) + __entry->parent_offs))
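
The preemptirq event hits above show a compact encoding trick: instead of storing two full pointers per trace entry, the event records each IP as a signed 32-bit offset from _stext and rebuilds the pointer at print time. A minimal sketch of that round trip, outside the tracepoint machinery; the helper names are hypothetical:

    #include <linux/types.h>

    extern char _stext[];

    /* encode a kernel text address as a 32-bit offset from _stext */
    static inline s32 text_offs_encode(unsigned long ip)
    {
    	return (s32)(ip - (unsigned long)_stext);
    }

    /* decode the offset back into a printable pointer */
    static inline void *text_offs_decode(s32 offs)
    {
    	return (void *)((unsigned long)_stext + offs);
    }
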
_stext            404 kernel/crash_core.c 	VMCOREINFO_SYMBOL(_stext);
_stext           1133 kernel/dma/debug.c 	if (overlap(addr, len, _stext, _etext) ||
_stext             72 kernel/extable.c 	if (addr >= (unsigned long)_stext &&
_stext            738 kernel/locking/lockdep.c 	unsigned long start = (unsigned long) &_stext,
_stext            110 kernel/profile.c 	prof_len = (_etext - _stext) >> prof_shift;
_stext            291 kernel/profile.c 	pc = min((pc - (unsigned long)_stext) >> prof_shift, prof_len - 1);
_stext            389 kernel/profile.c 	pc = ((unsigned long)__pc - (unsigned long)_stext) >> prof_shift;
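
The three profile.c hits above are one mechanism: kernel text is split into 2^prof_shift-byte buckets, prof_len counts those buckets, and a sampled program counter maps to a bucket index clamped to the last slot. A sketch of that arithmetic; profile_pc_to_index and profile_setup_len are hypothetical names:

    #include <linux/kernel.h>	/* min() */

    extern char _stext[], _etext[];

    static unsigned long prof_shift;	/* set from the profile= boot arg */
    static unsigned long prof_len;

    static void profile_setup_len(void)
    {
    	prof_len = (_etext - _stext) >> prof_shift;
    }

    /* map a program counter to a bucket, clamped to the last one */
    static unsigned long profile_pc_to_index(unsigned long pc)
    {
    	return min((pc - (unsigned long)_stext) >> prof_shift,
    		   prof_len - 1);
    }
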
_stext            194 mm/kasan/report.c 	if (addr >= (void *)_stext && addr < (void *)_end)
_stext           7542 mm/page_alloc.c 	codesize = _etext - _stext;
_stext           7564 mm/page_alloc.c 	adj_init_size(_stext, _etext, codesize, _sinittext, init_code_size);
_stext           7566 mm/page_alloc.c 	adj_init_size(_stext, _etext, codesize, __start_rodata, rosize);
_stext            120 mm/usercopy.c  	unsigned long textlow = (unsigned long)_stext;
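
The final hit, mm/usercopy.c:120, anchors the hardened-usercopy text check: a copy window that overlaps [_stext, _etext) is rejected, since copying kernel text to or from user space is never legitimate. A reduced sketch of the overlap test; touches_kernel_text is a hypothetical name, and the real check in that file also tests the linear-map alias of the text region:

    #include <linux/types.h>

    extern char _stext[], _etext[];

    /* true if the copy window [ptr, ptr + n) overlaps kernel text */
    static bool touches_kernel_text(unsigned long ptr, unsigned long n)
    {
    	unsigned long textlow  = (unsigned long)_stext;
    	unsigned long texthigh = (unsigned long)_etext;

    	return ptr < texthigh && ptr + n > textlow;
    }
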