vdso_base 346 arch/arm64/kernel/signal32.c void *vdso_base = current->mm->context.vdso;
vdso_base 351 arch/arm64/kernel/signal32.c vdso_trampoline = VDSO_SYMBOL(vdso_base,
vdso_base 354 arch/arm64/kernel/signal32.c vdso_trampoline = VDSO_SYMBOL(vdso_base,
vdso_base 359 arch/arm64/kernel/signal32.c vdso_trampoline = VDSO_SYMBOL(vdso_base,
vdso_base 362 arch/arm64/kernel/signal32.c vdso_trampoline = VDSO_SYMBOL(vdso_base,
vdso_base 144 arch/arm64/kernel/vdso.c unsigned long vdso_base, vdso_text_len, vdso_mapping_len;
vdso_base 151 arch/arm64/kernel/vdso.c vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);
vdso_base 152 arch/arm64/kernel/vdso.c if (IS_ERR_VALUE(vdso_base)) {
vdso_base 153 arch/arm64/kernel/vdso.c ret = ERR_PTR(vdso_base);
vdso_base 157 arch/arm64/kernel/vdso.c ret = _install_special_mapping(mm, vdso_base, PAGE_SIZE,
vdso_base 163 arch/arm64/kernel/vdso.c vdso_base += PAGE_SIZE;
vdso_base 164 arch/arm64/kernel/vdso.c mm->context.vdso = (void *)vdso_base;
vdso_base 165 arch/arm64/kernel/vdso.c ret = _install_special_mapping(mm, vdso_base, vdso_text_len,
vdso_base 52 arch/hexagon/kernel/vdso.c unsigned long vdso_base;
vdso_base 59 arch/hexagon/kernel/vdso.c vdso_base = STACK_TOP;
vdso_base 61 arch/hexagon/kernel/vdso.c vdso_base = get_unmapped_area(NULL, vdso_base, PAGE_SIZE, 0, 0);
vdso_base 62 arch/hexagon/kernel/vdso.c if (IS_ERR_VALUE(vdso_base)) {
vdso_base 63 arch/hexagon/kernel/vdso.c ret = vdso_base;
vdso_base 68 arch/hexagon/kernel/vdso.c ret = install_special_mapping(mm, vdso_base, PAGE_SIZE,
vdso_base 76 arch/hexagon/kernel/vdso.c mm->context.vdso = (void *)vdso_base;
vdso_base 127 arch/mips/kernel/vdso.c base = get_unmapped_area(NULL, vdso_base(), size, 0, 0);
vdso_base 117 arch/nds32/kernel/vdso.c unsigned long vdso_base, vdso_text_len, vdso_mapping_len;
vdso_base 137 arch/nds32/kernel/vdso.c vdso_base = get_unmapped_area(NULL, addr, vdso_mapping_len, 0, 0);
vdso_base 138 arch/nds32/kernel/vdso.c if (IS_ERR_VALUE(vdso_base)) {
vdso_base 139 arch/nds32/kernel/vdso.c ret = vdso_base;
vdso_base 149 arch/nds32/kernel/vdso.c (vdso_base & aliasing_mask);
vdso_base 150 arch/nds32/kernel/vdso.c vdso_base += page_colour_ofs & aliasing_mask;
vdso_base 154 arch/nds32/kernel/vdso.c vma = _install_special_mapping(mm, vdso_base, vvar_page_num * PAGE_SIZE,
vdso_base 162 arch/nds32/kernel/vdso.c ret = io_remap_pfn_range(vma, vdso_base,
vdso_base 169 arch/nds32/kernel/vdso.c vdso_base += PAGE_SIZE;
vdso_base 171 arch/nds32/kernel/vdso.c ret = io_remap_pfn_range(vma, vdso_base, timer_mapping_base >> PAGE_SHIFT,
vdso_base 177 arch/nds32/kernel/vdso.c vdso_base += PAGE_SIZE;
vdso_base 178 arch/nds32/kernel/vdso.c mm->context.vdso = (void *)vdso_base;
vdso_base 179 arch/nds32/kernel/vdso.c vma = _install_special_mapping(mm, vdso_base, vdso_text_len,
vdso_base 93 arch/powerpc/include/asm/book3s/32/mmu-hash.h unsigned long vdso_base;
vdso_base 121 arch/powerpc/include/asm/book3s/64/mmu.h unsigned long vdso_base;
vdso_base 174 arch/powerpc/include/asm/elf.h VDSO_AUX_ENT(AT_SYSINFO_EHDR, current->mm->context.vdso_base); \
vdso_base 20 arch/powerpc/include/asm/mm-arch-hooks.h if (old_start == mm->context.vdso_base)
vdso_base 21 arch/powerpc/include/asm/mm-arch-hooks.h mm->context.vdso_base = new_start;
vdso_base 237 arch/powerpc/include/asm/mmu_context.h if (start <= mm->context.vdso_base && mm->context.vdso_base < end)
vdso_base 238 arch/powerpc/include/asm/mmu_context.h mm->context.vdso_base = 0;
vdso_base 60 arch/powerpc/include/asm/nohash/32/mmu-40x.h unsigned long vdso_base;
vdso_base 111 arch/powerpc/include/asm/nohash/32/mmu-44x.h unsigned long vdso_base;
vdso_base 212 arch/powerpc/include/asm/nohash/32/mmu-8xx.h unsigned long vdso_base;
vdso_base 232 arch/powerpc/include/asm/nohash/mmu-book3e.h unsigned long vdso_base;
vdso_base 933 arch/powerpc/kernel/signal_32.c if (vdso32_rt_sigtramp && tsk->mm->context.vdso_base) {
vdso_base 935 arch/powerpc/kernel/signal_32.c tramp = tsk->mm->context.vdso_base + vdso32_rt_sigtramp;
vdso_base 1395 arch/powerpc/kernel/signal_32.c if (vdso32_sigtramp && tsk->mm->context.vdso_base) {
vdso_base 1397 arch/powerpc/kernel/signal_32.c tramp = tsk->mm->context.vdso_base + vdso32_sigtramp;
vdso_base 869 arch/powerpc/kernel/signal_64.c if (vdso64_rt_sigtramp && tsk->mm->context.vdso_base) {
vdso_base 870 arch/powerpc/kernel/signal_64.c regs->link = tsk->mm->context.vdso_base + vdso64_rt_sigtramp;
vdso_base 131 arch/powerpc/kernel/vdso.c unsigned long vdso_base;
vdso_base 141 arch/powerpc/kernel/vdso.c vdso_base = VDSO32_MBASE;
vdso_base 150 arch/powerpc/kernel/vdso.c vdso_base = 0;
vdso_base 155 arch/powerpc/kernel/vdso.c vdso_base = VDSO32_MBASE;
vdso_base 158 arch/powerpc/kernel/vdso.c current->mm->context.vdso_base = 0;
vdso_base 176 arch/powerpc/kernel/vdso.c vdso_base = get_unmapped_area(NULL, vdso_base,
vdso_base 180 arch/powerpc/kernel/vdso.c if (IS_ERR_VALUE(vdso_base)) {
vdso_base 181 arch/powerpc/kernel/vdso.c rc = vdso_base;
vdso_base 186 arch/powerpc/kernel/vdso.c vdso_base = ALIGN(vdso_base, VDSO_ALIGNMENT);
vdso_base 193 arch/powerpc/kernel/vdso.c current->mm->context.vdso_base = vdso_base;
vdso_base 205 arch/powerpc/kernel/vdso.c rc = install_special_mapping(mm, vdso_base, vdso_pages << PAGE_SHIFT,
vdso_base 210 arch/powerpc/kernel/vdso.c current->mm->context.vdso_base = 0;
vdso_base 224 arch/powerpc/kernel/vdso.c if (vma->vm_mm && vma->vm_start == vma->vm_mm->context.vdso_base)
vdso_base 209 arch/powerpc/perf/callchain.c if (vdso64_rt_sigtramp && current->mm->context.vdso_base &&
vdso_base 210 arch/powerpc/perf/callchain.c nip == current->mm->context.vdso_base + vdso64_rt_sigtramp)
vdso_base 368 arch/powerpc/perf/callchain.c if (vdso32_sigtramp && current->mm->context.vdso_base &&
vdso_base 369 arch/powerpc/perf/callchain.c nip == current->mm->context.vdso_base + vdso32_sigtramp)
vdso_base 379 arch/powerpc/perf/callchain.c if (vdso32_rt_sigtramp && current->mm->context.vdso_base &&
vdso_base 380 arch/powerpc/perf/callchain.c nip == current->mm->context.vdso_base + vdso32_rt_sigtramp)
vdso_base 59 arch/riscv/kernel/vdso.c unsigned long vdso_base, vdso_len;
vdso_base 65 arch/riscv/kernel/vdso.c vdso_base = get_unmapped_area(NULL, 0, vdso_len, 0, 0);
vdso_base 66 arch/riscv/kernel/vdso.c if (IS_ERR_VALUE(vdso_base)) {
vdso_base 67 arch/riscv/kernel/vdso.c ret = vdso_base;
vdso_base 76 arch/riscv/kernel/vdso.c mm->context.vdso = (void *)vdso_base;
vdso_base 78 arch/riscv/kernel/vdso.c ret = install_special_mapping(mm, vdso_base, vdso_len,
vdso_base 276 arch/s390/include/asm/elf.h (unsigned long)current->mm->context.vdso_base); \
vdso_base 18 arch/s390/include/asm/mmu.h unsigned long vdso_base;
vdso_base 90 arch/s390/kernel/vdso.c current->mm->context.vdso_base = vma->vm_start;
vdso_base 206 arch/s390/kernel/vdso.c unsigned long vdso_base;
vdso_base 232 arch/s390/kernel/vdso.c vdso_base = get_unmapped_area(NULL, 0, vdso_pages << PAGE_SHIFT, 0, 0);
vdso_base 233 arch/s390/kernel/vdso.c if (IS_ERR_VALUE(vdso_base)) {
vdso_base 234 arch/s390/kernel/vdso.c rc = vdso_base;
vdso_base 248 arch/s390/kernel/vdso.c vma = _install_special_mapping(mm, vdso_base, vdso_pages << PAGE_SHIFT,
vdso_base 257 arch/s390/kernel/vdso.c current->mm->context.vdso_base = vdso_base;
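The per-architecture vdso.c entries above (arm64, hexagon, nds32, riscv, s390, powerpc) all share the same shape: find a free user address range, install it as a special mapping backed by the prebuilt vDSO pages, and record the chosen base in the mm context so signal delivery and AT_SYSINFO_EHDR can find it. The following is only a minimal sketch of that common pattern, not any one architecture's exact code; names such as map_vdso_sketch, vdso_pages, and vdso_pagelist are placeholders assumed for illustration.

	/* Sketch: map the vDSO into a new process's address space. */
	static int map_vdso_sketch(struct mm_struct *mm)
	{
		unsigned long vdso_base;
		unsigned long vdso_len = vdso_pages << PAGE_SHIFT;
		int ret;

		/* Pick an unused range in the user address space. */
		vdso_base = get_unmapped_area(NULL, 0, vdso_len, 0, 0);
		if (IS_ERR_VALUE(vdso_base))
			return (int)vdso_base;

		/* Back the range with the prebuilt vDSO pages. */
		ret = install_special_mapping(mm, vdso_base, vdso_len,
					      VM_READ | VM_EXEC |
					      VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC,
					      vdso_pagelist);
		if (ret)
			return ret;

		/*
		 * Remember the base: signal trampoline setup and the
		 * AT_SYSINFO_EHDR aux vector entry read it from here.
		 */
		mm->context.vdso = (void *)vdso_base;
		return 0;
	}

The per-architecture code differs mainly in bookkeeping: arm64 maps a vvar page ahead of the text, nds32 adjusts the base for cache aliasing and remaps a timer page, and powerpc/s390 store the address as the integer field context.vdso_base instead of a context.vdso pointer.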