per_event         129 arch/s390/include/asm/processor.h 	struct per_event per_event;	/* Cause of the last PER trap */
per_event          34 arch/s390/kernel/asm-offsets.c 	OFFSET(__THREAD_per_cause, thread_struct, per_event.cause);
per_event          35 arch/s390/kernel/asm-offsets.c 	OFFSET(__THREAD_per_address, thread_struct, per_event.address);
per_event          36 arch/s390/kernel/asm-offsets.c 	OFFSET(__THREAD_per_paid, thread_struct, per_event.paid);
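
The asm-offsets.c hits above name the three fields of struct per_event (cause, address, paid). The following is only an illustrative sketch of the layout those offsets imply, with field widths inferred from the 16-bit and 8-bit shifts in the ptrace.c hits below; it is not quoted from arch/s390/include/asm/processor.h:

	/* Sketch (assumption): record of the last PER trap, per thread. */
	struct per_event {
		unsigned short cause;	/* 16-bit PER cause code            */
		unsigned long address;	/* address at which the event fired */
		unsigned char paid;	/* 8-bit PER access identification  */
	};
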
per_event          99 arch/s390/kernel/process.c 	memset(&p->thread.per_event, 0, sizeof(p->thread.per_event));
per_event         143 arch/s390/kernel/ptrace.c 	memset(&task->thread.per_event, 0, sizeof(task->thread.per_event));
per_event         181 arch/s390/kernel/ptrace.c 			child->thread.per_event.cause << (BITS_PER_LONG - 16);
per_event         184 arch/s390/kernel/ptrace.c 		return child->thread.per_event.address;
per_event         188 arch/s390/kernel/ptrace.c 			child->thread.per_event.paid << (BITS_PER_LONG - 8);
per_event         565 arch/s390/kernel/ptrace.c 		return (__u32) child->thread.per_event.cause << 16;
per_event         568 arch/s390/kernel/ptrace.c 		return (__u32) child->thread.per_event.address;
per_event         571 arch/s390/kernel/ptrace.c 		return (__u32) child->thread.per_event.paid << 24;
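
The ptrace.c hits read these fields back left-aligned: the 16-bit cause and the 8-bit paid are shifted into the most significant bits of the word a debugger peeks, with separate native and 32-bit compat variants. A self-contained sketch of that readout, assuming the field widths above (the struct and helper names here are hypothetical stand-ins, not kernel symbols):

	#include <stdint.h>

	#define BITS_PER_LONG ((int)(8 * sizeof(unsigned long)))

	struct per_event_sketch {	/* hypothetical stand-in */
		unsigned short cause;
		unsigned long address;
		unsigned char paid;
	};

	/* Native readout: left-align the 16-bit cause in a full word. */
	static unsigned long peek_cause(const struct per_event_sketch *ev)
	{
		return (unsigned long) ev->cause << (BITS_PER_LONG - 16);
	}

	/* Native readout: left-align the 8-bit access id. */
	static unsigned long peek_paid(const struct per_event_sketch *ev)
	{
		return (unsigned long) ev->paid << (BITS_PER_LONG - 8);
	}

	/* 32-bit compat readout: same idea in a 32-bit word. */
	static uint32_t peek_cause_compat(const struct per_event_sketch *ev)
	{
		return (uint32_t) ev->cause << 16;
	}

	static uint32_t peek_paid_compat(const struct per_event_sketch *ev)
	{
		return (uint32_t) ev->paid << 24;
	}
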
per_event          82 arch/s390/kernel/traps.c 		(void __force __user *) current->thread.per_event.address);
per_event         101 arch/s390/kernel/uprobes.c 	if (check_per_event(current->thread.per_event.cause,
per_event         104 arch/s390/kernel/uprobes.c 		current->thread.per_event.address = utask->vaddr;
per_event         140 arch/s390/kernel/uprobes.c 	current->thread.per_event.address = current->utask->vaddr;
per_event         260 arch/s390/kernel/uprobes.c 	current->thread.per_event.address = regs->psw.addr;
per_event         261 arch/s390/kernel/uprobes.c 	current->thread.per_event.cause = PER_EVENT_STORE >> 16;
per_event         234 arch/s390/kvm/intercept.c 	if (guestdbg_enabled(vcpu) && per_event(vcpu)) {