vm86               10 arch/x86/include/asm/processor.h struct vm86;
vm86              480 arch/x86/include/asm/processor.h 	struct vm86		*vm86;
vm86               56 arch/x86/include/asm/vm86.h 	if (__t->vm86 != NULL) {			\
vm86               57 arch/x86/include/asm/vm86.h 		kfree(__t->vm86);			\
vm86               58 arch/x86/include/asm/vm86.h 		__t->vm86 = NULL;			\
vm86              101 arch/x86/kernel/process.c 	dst->thread.vm86 = NULL;
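
The hits above trace the lifetime of the per-task vm86 state: thread_struct holds only a pointer (processor.h), free_vm86() kfree()s and clears it (asm/vm86.h), and arch_dup_task_struct() NULLs it so a child never inherits the parent's allocation (process.c). A minimal userspace sketch of the same lazy-allocate/free pattern, with illustrative names (not the kernel code):

#include <stdlib.h>

/* Illustrative stand-in for the kernel's struct vm86; the real layout
 * lives in arch/x86/include/asm/vm86.h. */
struct vm86_state {
	unsigned long saved_sp0;
	unsigned long flags;
};

struct thread_state {
	struct vm86_state *vm86;	/* NULL until first use, as in thread_struct */
};

/* Lazily allocate on first use, mirroring the kzalloc() in do_sys_vm86(). */
static struct vm86_state *thread_get_vm86(struct thread_state *t)
{
	if (!t->vm86)
		t->vm86 = calloc(1, sizeof(*t->vm86));	/* kzalloc() in the kernel */
	return t->vm86;
}

/* Counterpart of the free_vm86() macro: release and clear the pointer. */
static void thread_free_vm86(struct thread_state *t)
{
	free(t->vm86);
	t->vm86 = NULL;
}

/* On fork, the child starts with no vm86 state (the process.c hit). */
static void thread_init_child(struct thread_state *child)
{
	child->vm86 = NULL;
}
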
vm86               87 arch/x86/kernel/vm86_32.c #define VFLAGS	(*(unsigned short *)&(current->thread.vm86->veflags))
vm86               88 arch/x86/kernel/vm86_32.c #define VEFLAGS	(current->thread.vm86->veflags)
vm86              100 arch/x86/kernel/vm86_32.c 	struct vm86 *vm86 = current->thread.vm86;
vm86              110 arch/x86/kernel/vm86_32.c 	if (!vm86 || !vm86->user_vm86) {
vm86              114 arch/x86/kernel/vm86_32.c 	set_flags(regs->pt.flags, VEFLAGS, X86_EFLAGS_VIF | vm86->veflags_mask);
vm86              115 arch/x86/kernel/vm86_32.c 	user = vm86->user_vm86;
vm86              117 arch/x86/kernel/vm86_32.c 	if (!access_ok(user, vm86->vm86plus.is_vm86pus ?
vm86              142 arch/x86/kernel/vm86_32.c 		put_user_ex(vm86->screen_bitmap, &user->screen_bitmap);
vm86              150 arch/x86/kernel/vm86_32.c 	tsk->thread.sp0 = vm86->saved_sp0;
vm86              154 arch/x86/kernel/vm86_32.c 	vm86->saved_sp0 = 0;
vm86              157 arch/x86/kernel/vm86_32.c 	memcpy(&regs->pt, &vm86->regs32, sizeof(struct pt_regs));
vm86              159 arch/x86/kernel/vm86_32.c 	lazy_load_gs(vm86->regs32.gs);
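
The vm86_32.c hits from 87 to 159 fall in save_v86_state(), which runs when the task leaves virtual-8086 mode: the guest's flags are merged under veflags_mask, the registers and screen_bitmap are written back to the user's structure, and the saved sp0, 32-bit pt_regs and gs are restored. Seen from the caller, the structure passed to vm86() is updated in place; a hedged caller-side sketch (illustrative, not the kernel code):

#include <stdio.h>
#include <sys/vm86.h>

/* Caller-side illustration: after vm86(VM86_ENTER, &v86) returns,
 * save_v86_state() has copied the guest's final register state back
 * into the structure that was passed in. */
static void dump_guest_exit_state(const struct vm86plus_struct *v86)
{
	printf("guest left vm86 mode at %04x:%04lx, ax=%04lx, flags=%08lx\n",
	       (unsigned int)v86->regs.cs,
	       (unsigned long)(v86->regs.eip & 0xffff),
	       (unsigned long)(v86->regs.eax & 0xffff),
	       (unsigned long)v86->regs.eflags);
}
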
vm86              216 arch/x86/kernel/vm86_32.c SYSCALL_DEFINE2(vm86, unsigned long, cmd, unsigned long, arg)
vm86              242 arch/x86/kernel/vm86_32.c 	struct vm86 *vm86 = tsk->thread.vm86;
vm86              273 arch/x86/kernel/vm86_32.c 	if (!vm86) {
vm86              274 arch/x86/kernel/vm86_32.c 		if (!(vm86 = kzalloc(sizeof(*vm86), GFP_KERNEL)))
vm86              276 arch/x86/kernel/vm86_32.c 		tsk->thread.vm86 = vm86;
vm86              278 arch/x86/kernel/vm86_32.c 	if (vm86->saved_sp0)
vm86              308 arch/x86/kernel/vm86_32.c 		get_user_ex(vm86->flags, &user_vm86->flags);
vm86              309 arch/x86/kernel/vm86_32.c 		get_user_ex(vm86->screen_bitmap, &user_vm86->screen_bitmap);
vm86              310 arch/x86/kernel/vm86_32.c 		get_user_ex(vm86->cpu_type, &user_vm86->cpu_type);
vm86              315 arch/x86/kernel/vm86_32.c 	if (copy_from_user(&vm86->int_revectored,
vm86              319 arch/x86/kernel/vm86_32.c 	if (copy_from_user(&vm86->int21_revectored,
vm86              324 arch/x86/kernel/vm86_32.c 		if (copy_from_user(&vm86->vm86plus, &user_vm86->vm86plus,
vm86              327 arch/x86/kernel/vm86_32.c 		vm86->vm86plus.is_vm86pus = 1;
vm86              329 arch/x86/kernel/vm86_32.c 		memset(&vm86->vm86plus, 0,
vm86              332 arch/x86/kernel/vm86_32.c 	memcpy(&vm86->regs32, regs, sizeof(struct pt_regs));
vm86              333 arch/x86/kernel/vm86_32.c 	vm86->user_vm86 = user_vm86;
vm86              347 arch/x86/kernel/vm86_32.c 	switch (vm86->cpu_type) {
vm86              349 arch/x86/kernel/vm86_32.c 		vm86->veflags_mask = 0;
vm86              352 arch/x86/kernel/vm86_32.c 		vm86->veflags_mask = X86_EFLAGS_NT | X86_EFLAGS_IOPL;
vm86              355 arch/x86/kernel/vm86_32.c 		vm86->veflags_mask = X86_EFLAGS_AC | X86_EFLAGS_NT | X86_EFLAGS_IOPL;
vm86              358 arch/x86/kernel/vm86_32.c 		vm86->veflags_mask = X86_EFLAGS_ID | X86_EFLAGS_AC | X86_EFLAGS_NT | X86_EFLAGS_IOPL;
vm86              365 arch/x86/kernel/vm86_32.c 	vm86->saved_sp0 = tsk->thread.sp0;
vm86              366 arch/x86/kernel/vm86_32.c 	lazy_save_gs(vm86->regs32.gs);
vm86              380 arch/x86/kernel/vm86_32.c 	if (vm86->flags & VM86_SCREEN_BITMAP)
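
The hits from 216 to 380 cover the vm86 syscall and do_sys_vm86(): the struct vm86 is kzalloc()ed on first use, the user's vm86plus_struct is copied in, veflags_mask is chosen from cpu_type, and sp0/gs are saved before the switch into virtual-8086 mode. A minimal sketch of what a 32-bit caller sets up before that path runs (field values are illustrative):

/* Build as a 32-bit x86 program; the vm86 syscall only exists there. */
#include <string.h>
#include <sys/vm86.h>	/* struct vm86plus_struct, CPU_*, VM86_ENTER, vm86() */

static int enter_v86(unsigned short cs, unsigned short ip,
		     unsigned short ss, unsigned short sp)
{
	struct vm86plus_struct v86;

	memset(&v86, 0, sizeof(v86));
	v86.regs.cs  = cs;		/* real-mode code segment */
	v86.regs.eip = ip;
	v86.regs.ss  = ss;		/* real-mode stack */
	v86.regs.esp = sp;
	v86.cpu_type = CPU_586;		/* falls through to the default veflags_mask above */

	/* Runs the guest until it traps out; returns the encoded exit reason. */
	return vm86(VM86_ENTER, &v86);
}
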
vm86              422 arch/x86/kernel/vm86_32.c 	set_flags(VEFLAGS, flags, current->thread.vm86->veflags_mask);
vm86              432 arch/x86/kernel/vm86_32.c 	set_flags(VFLAGS, flags, current->thread.vm86->veflags_mask);
vm86              447 arch/x86/kernel/vm86_32.c 	return flags | (VEFLAGS & current->thread.vm86->veflags_mask);
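
The hits at 422-447 are the virtual-flag helpers: they combine the guest-visible EFLAGS bits, selected by veflags_mask, with the bits the monitor keeps for itself. The underlying merge is the set_flags() pattern; restated as a standalone sketch of the idea (not the kernel macro):

static unsigned long merge_flags(unsigned long dst, unsigned long src,
				 unsigned long mask)
{
	/* Keep dst's bits outside mask, take src's bits inside mask; this is
	 * how VEFLAGS/VFLAGS are combined with the per-cpu_type veflags_mask
	 * in the lines above. */
	return (dst & ~mask) | (src & mask);
}
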
vm86              542 arch/x86/kernel/vm86_32.c 	struct vm86 *vm86 = current->thread.vm86;
vm86              546 arch/x86/kernel/vm86_32.c 	if (is_revectored(i, &vm86->int_revectored))
vm86              548 arch/x86/kernel/vm86_32.c 	if (i == 0x21 && is_revectored(AH(regs), &vm86->int21_revectored))
vm86              572 arch/x86/kernel/vm86_32.c 	struct vm86 *vm86 = current->thread.vm86;
vm86              574 arch/x86/kernel/vm86_32.c 	if (vm86->vm86plus.is_vm86pus) {
vm86              597 arch/x86/kernel/vm86_32.c 	struct vm86plus_info_struct *vmpi = &current->thread.vm86->vm86plus;
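
The hits from 542 to 597 are in the interrupt and fault handlers: do_int() consults the int_revectored bitmap (plus a separate int21_revectored bitmap keyed on AH for INT 21h) to decide whether a software interrupt is reflected through the real-mode vector table or bounced back to the 32-bit monitor, and the vm86plus info selects the vm86plus behaviour. A hedged sketch of such a 256-bit per-vector bitmap test (names are illustrative):

#include <stdint.h>

/* Illustrative 256-bit per-vector bitmap in the spirit of
 * struct revectored_struct: one bit per INT vector.  If the bit is set,
 * the interrupt is handed back to the 32-bit monitor instead of being
 * reflected through the real-mode vector table. */
struct int_bitmap {
	uint32_t map[8];		/* 8 * 32 = 256 vectors */
};

static int vector_is_revectored(const struct int_bitmap *b, unsigned int nr)
{
	return (b->map[nr >> 5] >> (nr & 31)) & 1;
}
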
vm86              277 arch/x86/mm/fault.c 	if (!v8086_mode(regs) || !tsk->thread.vm86)
vm86              282 arch/x86/mm/fault.c 		tsk->thread.vm86->screen_bitmap |= 1 << bit;
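
The mm/fault.c hits are from the page-fault path: when a fault happens in virtual-8086 mode, a bit is set in screen_bitmap so the monitor knows which page of the legacy video window to repaint. A rough sketch of that bookkeeping; the 0xA0000 base and 32-page window are assumptions, not shown in the hits above:

#include <stdint.h>

#define VGA_WINDOW_BASE	0xA0000u	/* assumed legacy video window base */
#define PAGE_SHIFT	12

/* Sketch: a fault on a page in the video window marks that page in the
 * bitmap so the vm86 monitor can repaint it later. */
static void mark_screen_page(uint32_t *screen_bitmap, uint32_t fault_addr)
{
	uint32_t bit = (fault_addr - VGA_WINDOW_BASE) >> PAGE_SHIFT;

	if (fault_addr >= VGA_WINDOW_BASE && bit < 32)
		*screen_bitmap |= (uint32_t)1 << bit;
}
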
vm86              374 kernel/sys_ni.c COND_SYSCALL(vm86);
vm86              142 tools/testing/selftests/x86/entry_from_vm86.c 	ret = vm86(VM86_ENTER, v86);
vm86              188 tools/testing/selftests/x86/entry_from_vm86.c void do_umip_tests(struct vm86plus_struct *vm86, unsigned char *test_mem)
vm86              203 tools/testing/selftests/x86/entry_from_vm86.c 	do_test(vm86, vmcode_umip - vmcode, VM86_TRAP, 3, "UMIP tests");
vm86              240 tools/testing/selftests/x86/entry_from_vm86.c 	do_test(vm86, vmcode_umip_str - vmcode, VM86_SIGNAL, 0,
vm86              245 tools/testing/selftests/x86/entry_from_vm86.c 	do_test(vm86, vmcode_umip_sldt - vmcode, VM86_SIGNAL, 0,
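
The selftest hits show entry_from_vm86.c driving the API: do_test() enters vm86 mode at an offset inside the test code blob and compares the encoded return value against an expected exit type (VM86_TRAP with trap number 3, VM86_SIGNAL, and so on). A small sketch of decoding that return value with the uapi VM86_TYPE()/VM86_ARG() macros:

#include <stdio.h>
#include <sys/vm86.h>	/* VM86_TYPE(), VM86_ARG(), VM86_SIGNAL, VM86_TRAP, VM86_INTx */

/* Decode the value returned by vm86(VM86_ENTER, ...), the same value the
 * selftest's do_test() checks in the lines above. */
static void report_vm86_exit(int ret)
{
	switch (VM86_TYPE(ret)) {
	case VM86_SIGNAL:
		printf("returned because a signal was pending\n");
		break;
	case VM86_TRAP:
		printf("guest hit trap %d (3 = int3 breakpoint)\n", VM86_ARG(ret));
		break;
	case VM86_INTx:
		printf("guest executed INT 0x%02x\n", (unsigned int)VM86_ARG(ret));
		break;
	default:
		printf("other exit, type %d\n", VM86_TYPE(ret));
		break;
	}
}
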