ss1                19 arch/csky/abiv1/inc/abi/entry.h #define usp ss1
ss1                55 arch/csky/abiv1/inc/abi/entry.h 	mfcr	lr, ss1
ss1                92 arch/csky/abiv1/inc/abi/entry.h 	mtcr	a0, ss1
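The three C-SKY ABIv1 hits show the ss1 control register being repurposed as scratch storage for the user stack pointer: entry.h aliases it as usp and moves values in and out with mfcr/mtcr. A minimal C-level sketch of that pair, assuming GCC inline-asm syntax for the csky target; the helper names below are hypothetical, not kernel API, and the real entry path does this inside assembly macros.

/* Hypothetical helpers mirroring the mfcr/mtcr uses listed above. */
static inline unsigned long read_usp_scratch(void)
{
	unsigned long v;

	/* mfcr: move from control register ss1 into a general register. */
	asm volatile("mfcr %0, ss1" : "=r" (v));
	return v;
}

static inline void write_usp_scratch(unsigned long v)
{
	/* mtcr: move a general register back into control register ss1. */
	asm volatile("mtcr %0, ss1" : : "r" (v));
}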
ss1               272 arch/x86/include/asm/processor.h 	unsigned short		ss1;	/* MSR_IA32_SYSENTER_CS */
ss1                79 arch/x86/include/asm/switch_to.h 	if (unlikely(this_cpu_read(cpu_tss_rw.x86_tss.ss1) == thread->sysenter_cs))
ss1                82 arch/x86/include/asm/switch_to.h 	this_cpu_write(cpu_tss_rw.x86_tss.ss1, thread->sysenter_cs);
ss1              1608 arch/x86/kernel/cpu/common.c 	tss->x86_tss.ss1 = __KERNEL_CS;
ss1              1609 arch/x86/kernel/cpu/common.c 	wrmsr(MSR_IA32_SYSENTER_CS, tss->x86_tss.ss1, 0);
ss1                74 arch/x86/kernel/process.c 		.ss1 = __KERNEL_CS,
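On x86, ss1 in the hardware TSS doubles as a software cache of MSR_IA32_SYSENTER_CS: cpu/common.c and process.c seed both with __KERNEL_CS, and switch_to.h compares the cached value against the incoming task before touching the MSR. A sketch of that pattern, simplified from the lines above and assuming the usual kernel per-CPU and MSR helpers are available:

static void refresh_sysenter_cs_sketch(struct thread_struct *thread)
{
	/* Cached copy in the TSS already matches the next task: skip the MSR write. */
	if (unlikely(this_cpu_read(cpu_tss_rw.x86_tss.ss1) == thread->sysenter_cs))
		return;

	/* Update the cache first, then perform the (expensive) wrmsr. */
	this_cpu_write(cpu_tss_rw.x86_tss.ss1, thread->sysenter_cs);
	wrmsr(MSR_IA32_SYSENTER_CS, thread->sysenter_cs, 0);
}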
ss1                10 arch/x86/kvm/tss.h 	u32 ss1;
ss1                40 arch/x86/kvm/tss.h 	u16 ss1;
ss1                74 crypto/sm3_generic.c 	u32 ss1;
ss1                92 crypto/sm3_generic.c 		ss1 = rol32((rol32(a, 12) + e + rol32(t(i), i & 31)), 7);
ss1                94 crypto/sm3_generic.c 		ss2 = ss1 ^ rol32(a, 12);
ss1                99 crypto/sm3_generic.c 		tt2 = gg(i, e, f, g) + h + ss1 + *w;
ss1               121 crypto/sm3_generic.c 	a = b = c = d = e = f = g = h = ss1 = ss2 = tt1 = tt2 = 0;
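In crypto/sm3_generic.c, ss1 is one of the per-round temporaries of the SM3 compression function, and it is zeroed with the rest of the working state once the block has been processed. A self-contained sketch of how those temporaries are derived in one round, following the SM3 specification; the helpers are re-declared locally so the snippet compiles on its own, and the tt1/wt parts come from the spec rather than the lines shown.

#include <stdint.h>

static inline uint32_t rol32(uint32_t v, unsigned int s)
{
	s &= 31;
	return s ? (v << s) | (v >> (32 - s)) : v;
}

/* Round constant T_j from the SM3 specification. */
static inline uint32_t t(unsigned int i)
{
	return (i < 16) ? 0x79cc4519u : 0x7a879d8au;
}

/* Boolean functions FF_j / GG_j: plain XOR for the first 16 rounds,
 * majority / choice afterwards. */
static inline uint32_t ff(unsigned int i, uint32_t x, uint32_t y, uint32_t z)
{
	return (i < 16) ? (x ^ y ^ z) : ((x & y) | (x & z) | (y & z));
}

static inline uint32_t gg(unsigned int i, uint32_t x, uint32_t y, uint32_t z)
{
	return (i < 16) ? (x ^ y ^ z) : ((x & y) | (~x & z));
}

/* Derive the round temporaries; rotating the working variables is left
 * to the caller. w is W_j, wt is the expanded W'_j = W_j ^ W_{j+4}. */
static void sm3_round_temporaries(unsigned int i,
				  uint32_t a, uint32_t b, uint32_t c, uint32_t d,
				  uint32_t e, uint32_t f, uint32_t g, uint32_t h,
				  uint32_t w, uint32_t wt,
				  uint32_t *tt1, uint32_t *tt2)
{
	uint32_t ss1, ss2;

	ss1 = rol32(rol32(a, 12) + e + rol32(t(i), i & 31), 7);
	ss2 = ss1 ^ rol32(a, 12);
	*tt1 = ff(i, a, b, c) + d + ss2 + wt;
	*tt2 = gg(i, e, f, g) + h + ss1 + w;
}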
ss1              2156 include/net/sctp/structs.h int sctp_cmp_addr_exact(const union sctp_addr *ss1,
ss1               890 net/sctp/associola.c int sctp_cmp_addr_exact(const union sctp_addr *ss1,
ss1               895 net/sctp/associola.c 	af = sctp_get_af_specific(ss1->sa.sa_family);
ss1               899 net/sctp/associola.c 	return af->cmp_addr(ss1, ss2);
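sctp_cmp_addr_exact() is declared in include/net/sctp/structs.h and implemented in net/sctp/associola.c as a thin per-address-family dispatch: it looks up the struct sctp_af ops for ss1's family and defers to that family's cmp_addr() method. A hedged reconstruction of the body from the lines above; the guard on the lookup result is an assumption, not shown in the listing.

int sctp_cmp_addr_exact(const union sctp_addr *ss1,
			const union sctp_addr *ss2)
{
	struct sctp_af *af;

	/* Resolve the address-family-specific operations (IPv4 or IPv6). */
	af = sctp_get_af_specific(ss1->sa.sa_family);
	if (!af)	/* assumed guard for an unknown family */
		return 0;

	/* Exact comparison is delegated to the per-family cmp_addr(). */
	return af->cmp_addr(ss1, ss2);
}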