serve              45 arch/ia64/include/asm/spinlock.h 	int	*p = (int *)&lock->lock, ticket, serve;
serve              55 arch/ia64/include/asm/spinlock.h 		asm volatile ("ld4.c.nc %0=[%1]" : "=r"(serve) : "r"(p) : "memory");
serve              57 arch/ia64/include/asm/spinlock.h 		if (!(((serve >> TICKET_SHIFT) ^ ticket) & TICKET_MASK))
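
The three spinlock.h hits above all sit inside the ia64 ticket-lock acquire path. A minimal sketch of how they fit together is shown below; the function name __ticket_spin_lock(), the early-return fast path, and the comments are reconstructed from context rather than quoted from the listing, and TICKET_SHIFT/TICKET_MASK, ia64_fetchadd(), ia64_invala() and cpu_relax() are taken as defined elsewhere in the kernel headers.

static __always_inline void __ticket_spin_lock(arch_spinlock_t *lock)
{
	int	*p = (int *)&lock->lock, ticket, serve;

	/* Take a ticket: atomically bump the "next ticket" half of the word. */
	ticket = ia64_fetchadd(1, p, acq);

	/* Fast path: our ticket already matches the "now serving" half. */
	if (!(((ticket >> TICKET_SHIFT) ^ ticket) & TICKET_MASK))
		return;

	/* Drop any stale ALAT entries before entering the check-load loop. */
	ia64_invala();

	for (;;) {
		/*
		 * ld4.c.nc is an ALAT check load: after the first pass it
		 * re-reads the lock word from memory only once another CPU's
		 * store has invalidated the ALAT entry, so the wait loop does
		 * not hammer the bus while the owner holds the lock.
		 */
		asm volatile ("ld4.c.nc %0=[%1]" : "=r"(serve) : "r"(p) : "memory");

		if (!(((serve >> TICKET_SHIFT) ^ ticket) & TICKET_MASK))
			return;
		cpu_relax();
	}
}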
serve             114 arch/ia64/mm/tlb.c 	unsigned long	serve;
serve             120 arch/ia64/mm/tlb.c 	ss->serve = val;
serve             125 arch/ia64/mm/tlb.c 	unsigned long t = ia64_fetchadd(1, &ss->ticket, acq), serve;
serve             127 arch/ia64/mm/tlb.c 	if (time_before(t, ss->serve))
serve             133 arch/ia64/mm/tlb.c 		asm volatile ("ld8.c.nc %0=[%1]" : "=r"(serve) : "r"(&ss->serve) : "memory");
serve             134 arch/ia64/mm/tlb.c 		if (time_before(t, serve))
serve             142 arch/ia64/mm/tlb.c 	ia64_fetchadd(1, &ss->serve, rel);
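
The tlb.c hits belong to the "spinaphore" that throttles concurrent global TLB purges. The sketch below pieces the fragments together; the struct layout is inferred from the ss->ticket/ss->serve accesses above, and the helper names (spinaphore_init(), down_spin(), up_spin()) follow the kernel's naming in this file.

struct spinaphore {
	unsigned long	ticket;		/* next ticket to hand out */
	unsigned long	serve;		/* first ticket not yet admitted */
};

static inline void spinaphore_init(struct spinaphore *ss, int val)
{
	ss->ticket = 0;
	ss->serve = val;	/* val = number of holders allowed at once */
}

static inline void down_spin(struct spinaphore *ss)
{
	unsigned long t = ia64_fetchadd(1, &ss->ticket, acq), serve;

	/* Our ticket is already below the serve counter: a slot is free. */
	if (time_before(t, ss->serve))
		return;

	ia64_invala();

	for (;;) {
		/* ld8.c.nc: re-read serve only after another CPU stores to it. */
		asm volatile ("ld8.c.nc %0=[%1]" : "=r"(serve) : "r"(&ss->serve) : "memory");
		if (time_before(t, serve))
			return;
		cpu_relax();
	}
}

static inline void up_spin(struct spinaphore *ss)
{
	/* Release a slot: advancing serve lets the next ticket holder in. */
	ia64_fetchadd(1, &ss->serve, rel);
}

Note that time_before() is used instead of a plain '<' so the ticket/serve comparison stays correct across counter wraparound, exactly as it does for jiffies.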