PERF_REG_X86_XMM0   56 arch/x86/include/uapi/asm/perf_regs.h #define PERF_REG_EXTENDED_MASK	(~((1ULL << PERF_REG_X86_XMM0) - 1))
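The first hit is the uapi definition of PERF_REG_EXTENDED_MASK (mirrored verbatim under tools/arch/ further down the list). A minimal standalone sketch of the bit arithmetic, assuming the enum value PERF_REG_X86_XMM0 == 32 from that header: the subtraction builds a mask of bits 0..31 (the GPR/segment indices) and the inversion keeps everything from XMM0 upward.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Value mirrored from the x86 uapi enum for illustration only. */
#define PERF_REG_X86_XMM0	32

/* Same expression as the quoted #define: all bits at or above XMM0. */
#define PERF_REG_EXTENDED_MASK	(~((1ULL << PERF_REG_X86_XMM0) - 1))

int main(void)
{
	/* (1 << 32) - 1 covers bits 0..31 (the legacy register range);
	 * inverting it leaves bits 32..63, i.e. the XMM sample slots. */
	printf("PERF_REG_EXTENDED_MASK = 0x%016" PRIx64 "\n",
	       (uint64_t)PERF_REG_EXTENDED_MASK);
	return 0;
}

Run as-is this prints 0xffffffff00000000, which is why the constant is used to split "extended" (XMM) register requests from the legacy register set.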
PERF_REG_X86_XMM0   64 arch/x86/kernel/perf_regs.c 	if (idx >= PERF_REG_X86_XMM0 && idx < PERF_REG_X86_XMM_MAX) {
PERF_REG_X86_XMM0   68 arch/x86/kernel/perf_regs.c 		return perf_regs->xmm_regs[idx - PERF_REG_X86_XMM0];
PERF_REG_X86_XMM0   77 arch/x86/kernel/perf_regs.c #define PERF_REG_X86_RESERVED	(((1ULL << PERF_REG_X86_XMM0) - 1) & \
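The perf_regs.c hits are the consumer side: perf_reg_value() redirects any index at or above PERF_REG_X86_XMM0 to the xmm_regs[] array carried alongside pt_regs, and PERF_REG_X86_RESERVED uses the same constant to mark the unused index range between the last architectural register and XMM0 as invalid. Below is a hedged, self-contained sketch of that dispatch; the *_stub types and the simplified GPR path are illustrative stand-ins, not the kernel's actual structures.

#include <stdint.h>

/* Illustrative values; the real ones come from the uapi enum. */
#define PERF_REG_X86_XMM0	32
#define PERF_REG_X86_XMM_MAX	64

/* Stand-ins for the kernel structures: x86 wraps the register snapshot
 * so that an optional xmm_regs[] array can ride along with it. */
struct pt_regs_stub { uint64_t gpr[24]; };

struct x86_perf_regs_stub {
	struct pt_regs_stub regs;
	uint64_t *xmm_regs;		/* NULL if XMM state wasn't captured */
};

/* Sketch of the dispatch in perf_reg_value(): indices at or above
 * PERF_REG_X86_XMM0 are served from xmm_regs[], everything else from
 * the ordinary register fields. */
static uint64_t perf_reg_value_sketch(struct x86_perf_regs_stub *perf_regs,
				      int idx)
{
	if (idx >= PERF_REG_X86_XMM0 && idx < PERF_REG_X86_XMM_MAX) {
		if (!perf_regs->xmm_regs)
			return 0;
		return perf_regs->xmm_regs[idx - PERF_REG_X86_XMM0];
	}
	return perf_regs->regs.gpr[idx];	/* GPR path, heavily simplified */
}

int main(void)
{
	uint64_t xmm[PERF_REG_X86_XMM_MAX - PERF_REG_X86_XMM0] = { 0 };
	struct x86_perf_regs_stub pr = { .xmm_regs = xmm };

	xmm[0] = 0x1122334455667788ULL;	/* slot backing index 32 */

	/* Index PERF_REG_X86_XMM0 comes back from the XMM array. */
	return perf_reg_value_sketch(&pr, PERF_REG_X86_XMM0) == xmm[0] ? 0 : 1;
}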
PERF_REG_X86_XMM0   56 tools/arch/x86/include/uapi/asm/perf_regs.h #define PERF_REG_EXTENDED_MASK	(~((1ULL << PERF_REG_X86_XMM0) - 1))
PERF_REG_X86_XMM0   36 tools/perf/arch/x86/util/perf_regs.c 	SMPL_REG2(XMM0, PERF_REG_X86_XMM0),
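In the perf tool's sample-register table, XMM0 is declared with SMPL_REG2 rather than SMPL_REG: a 128-bit XMM register occupies two 64-bit sample slots, so its mask needs two adjacent bits. A small sketch re-creating that convention; the macro bodies here are illustrative re-implementations, again assuming PERF_REG_X86_XMM0 == 32.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed value from the x86 uapi enum. */
#define PERF_REG_X86_XMM0	32

/* Minimal re-creation of the table macros: SMPL_REG maps a name to one
 * mask bit, SMPL_REG2 to two adjacent bits, because a 128-bit XMM
 * register spans two 64-bit sample slots. */
struct sample_reg { const char *name; uint64_t mask; };

#define SMPL_REG(n, b)	{ .name = #n, .mask = 1ULL << (b) }
#define SMPL_REG2(n, b)	{ .name = #n, .mask = 3ULL << (b) }

static const struct sample_reg regs[] = {
	SMPL_REG2(XMM0, PERF_REG_X86_XMM0),
};

int main(void)
{
	/* Prints "XMM0 mask 0x0000000300000000": bits 32 and 33. */
	printf("%s mask 0x%016" PRIx64 "\n", regs[0].name, regs[0].mask);
	return 0;
}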
PERF_REG_X86_XMM0 1623 tools/perf/util/intel-pt.c #ifndef PERF_REG_X86_XMM0
PERF_REG_X86_XMM0 1631 tools/perf/util/intel-pt.c 	u32 mask = items->has_xmm & (regs_mask >> PERF_REG_X86_XMM0);
PERF_REG_X86_XMM0 1640 tools/perf/util/intel-pt.c 	intr_regs->mask |= (u64)mask << PERF_REG_X86_XMM0;