spin_wait          40 arch/s390/lib/spinlock.c 	struct spin_wait *next, *prev;
spin_wait          44 arch/s390/lib/spinlock.c static DEFINE_PER_CPU_ALIGNED(struct spin_wait, spin_wait[4]);
spin_wait          62 arch/s390/lib/spinlock.c 	struct spin_wait *node;
spin_wait          65 arch/s390/lib/spinlock.c 	node = per_cpu_ptr(&spin_wait[0], cpu);
spin_wait          97 arch/s390/lib/spinlock.c static inline struct spin_wait *arch_spin_decode_tail(int lock)
spin_wait         103 arch/s390/lib/spinlock.c 	return per_cpu_ptr(&spin_wait[ix], cpu - 1);
spin_wait         106 arch/s390/lib/spinlock.c static inline int arch_spin_yield_target(int lock, struct spin_wait *node)
spin_wait         119 arch/s390/lib/spinlock.c 	struct spin_wait *node, *next;
spin_wait         125 arch/s390/lib/spinlock.c 	node = this_cpu_ptr(&spin_wait[ix]);
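
The hits above trace the spin_wait queue node through arch/s390/lib/spinlock.c: each CPU carries an array of four nodes (DEFINE_PER_CPU_ALIGNED(struct spin_wait, spin_wait[4])), one per lock-nesting context, and arch_spin_decode_tail() recovers a waiter's node from index and CPU bits packed into the lock word. Below is a minimal userspace sketch of that encode/decode round trip only. The _Q_TAIL_* offsets and masks, NR_CPUS, and the encode_tail() helper are assumptions for illustration, and a plain 2D array stands in for the kernel's per-CPU storage; this is not the kernel's implementation.

	/*
	 * Sketch of the tail encoding suggested by the hits above.
	 * The _Q_TAIL_* layout and NR_CPUS are assumed values; a 2D
	 * array simulates DEFINE_PER_CPU_ALIGNED for the demo.
	 */
	#include <assert.h>
	#include <stdio.h>

	#define NR_CPUS			16		/* assumed, demo only */
	#define _Q_TAIL_IDX_OFFSET	18		/* assumed field layout */
	#define _Q_TAIL_CPU_OFFSET	20
	#define _Q_TAIL_IDX_MASK	0x000c0000
	#define _Q_TAIL_CPU_MASK	0xfff00000

	struct spin_wait {
		struct spin_wait *next, *prev;
		int node_id;
	};

	/* Four nodes per CPU: one per nesting level that can spin. */
	static struct spin_wait spin_wait[NR_CPUS][4];

	/*
	 * Pack "CPU cpu, slot ix" into the tail bits of the lock word.
	 * The CPU field stores cpu + 1 so an all-zero tail means "empty".
	 */
	static int encode_tail(int cpu, int ix)
	{
		return ((cpu + 1) << _Q_TAIL_CPU_OFFSET) |
		       (ix << _Q_TAIL_IDX_OFFSET);
	}

	/*
	 * Inverse of encode_tail(): recover the waiter node from the lock
	 * word, mirroring what arch_spin_decode_tail() does with
	 * per_cpu_ptr(&spin_wait[ix], cpu - 1).
	 */
	static struct spin_wait *decode_tail(int lock)
	{
		int ix  = (lock & _Q_TAIL_IDX_MASK) >> _Q_TAIL_IDX_OFFSET;
		int cpu = (lock & _Q_TAIL_CPU_MASK) >> _Q_TAIL_CPU_OFFSET;

		return &spin_wait[cpu - 1][ix];
	}

	int main(void)
	{
		int tail = encode_tail(3, 2);

		assert(decode_tail(tail) == &spin_wait[3][2]);
		printf("tail 0x%08x -> cpu 3, slot 2: round trip OK\n", tail);
		return 0;
	}

Storing cpu + 1 rather than cpu lets an all-zero tail field mean "no waiters queued", which is why the decode subtracts one before indexing, matching the per_cpu_ptr(&spin_wait[ix], cpu - 1) hit above.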
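
The last two hits (struct spin_wait *node, *next; and node = this_cpu_ptr(&spin_wait[ix]);) come from arch_spin_lock_queued(), where each waiter enqueues its own per-CPU node and spins on that node rather than on the shared lock word. As a hedged sketch of the underlying idea only, here is the textbook MCS handoff written with C11 atomics; the real s390 code additionally keeps prev links (used by arch_spin_yield_target() to find a CPU to yield to), packs the tail into the lock word itself, and none of that is shown here.

	/*
	 * Textbook MCS queue lock: the idea behind arch_spin_lock_queued(),
	 * not the s390 lock-word protocol.  C11 atomics stand in for the
	 * kernel's primitives.
	 */
	#include <stdatomic.h>
	#include <stdbool.h>
	#include <stddef.h>

	struct mcs_node {
		_Atomic(struct mcs_node *) next;
		atomic_bool locked;	/* true while this waiter must spin */
	};

	static _Atomic(struct mcs_node *) tail;	/* analog of the tail bits */

	static void mcs_lock(struct mcs_node *node)
	{
		struct mcs_node *prev;

		atomic_store(&node->next, NULL);
		atomic_store(&node->locked, true);
		/* Publish ourselves as the new tail; the old tail, if any,
		 * is our predecessor in the queue. */
		prev = atomic_exchange(&tail, node);
		if (!prev)
			return;			/* queue was empty: acquired */
		atomic_store(&prev->next, node);/* link in behind old tail  */
		while (atomic_load(&node->locked))
			;			/* spin on our own node     */
	}

	static void mcs_unlock(struct mcs_node *node)
	{
		struct mcs_node *next = atomic_load(&node->next);

		if (!next) {
			/* No visible successor: try to swing tail to empty. */
			struct mcs_node *expected = node;

			if (atomic_compare_exchange_strong(&tail, &expected,
							   NULL))
				return;
			/* A waiter is mid-enqueue; wait for its link. */
			while (!(next = atomic_load(&node->next)))
				;
		}
		atomic_store(&next->locked, false);	/* hand off the lock */
	}

The point of the queue, and of giving each CPU its own spin_wait nodes, is that every waiter busy-waits on a cache line it owns, so a lock release invalidates one waiter's line instead of all of them. The four-slot array in the hits above plausibly exists so that nested contexts on the same CPU (e.g. an interrupt arriving while the task level is already queued) each get a distinct node; the exact context mapping is not visible in this listing.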