spin_needbreak   2528 arch/x86/kvm/mmu.c 		if (need_resched() || spin_needbreak(&vcpu->kvm->mmu_lock)) {
spin_needbreak   5703 arch/x86/kvm/mmu.c 		if (need_resched() || spin_needbreak(&kvm->mmu_lock)) {
spin_needbreak   6449 arch/x86/kvm/mmu.c 		if (!--to_zap || need_resched() || spin_needbreak(&kvm->mmu_lock)) {
spin_needbreak    320 fs/jbd2/checkpoint.c 		    spin_needbreak(&journal->j_list_lock))
spin_needbreak    170 kernel/cgroup/rstat.c 				  spin_needbreak(&cgroup_rstat_lock))) {
spin_needbreak   5635 kernel/sched/core.c 	if (spin_needbreak(lock) || resched) {
spin_needbreak    816 mm/memory.c    			    spin_needbreak(src_ptl) || spin_needbreak(dst_ptl))
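
All of these call sites share one cooperative lock-breaking idiom: inside a long loop run under a spinlock, check need_resched() (a reschedule is pending on this CPU) and spin_needbreak() (another CPU is spinning on the lock; it returns spin_is_contended() under CONFIG_PREEMPTION and 0 otherwise), and if either fires, drop the lock, yield, and reacquire. Below is a minimal sketch of that idiom; struct my_ctx, struct my_item, and process_one() are hypothetical stand-ins, and only the need_resched()/spin_needbreak() check is taken from the listing above.

#include <linux/list.h>
#include <linux/sched.h>
#include <linux/spinlock.h>

/* Hypothetical work item and container, for illustration only. */
struct my_item {
	struct list_head node;
	/* payload elided */
};

struct my_ctx {
	spinlock_t lock;
	struct list_head items;
};

static void process_one(struct my_item *item)
{
	/* hypothetical per-item work, done under ctx->lock */
}

static void process_all(struct my_ctx *ctx)
{
	spin_lock(&ctx->lock);
	while (!list_empty(&ctx->items)) {
		struct my_item *item;

		/* Detach before any lock break so the walk stays valid. */
		item = list_first_entry(&ctx->items, struct my_item, node);
		list_del(&item->node);
		process_one(item);

		/*
		 * Break the lock voluntarily if this CPU should
		 * reschedule or another CPU is contending the lock.
		 */
		if (need_resched() || spin_needbreak(&ctx->lock)) {
			spin_unlock(&ctx->lock);
			cond_resched();
			spin_lock(&ctx->lock);
		}
	}
	spin_unlock(&ctx->lock);
}

Note that the kernel/sched/core.c hit is __cond_resched_lock() itself, which packages exactly this unlock/resched/relock sequence behind cond_resched_lock(); the KVM, jbd2, rstat, and mm sites open-code variants of it, typically because they combine the check with extra conditions (such as the !--to_zap budget in the KVM hit) or must re-validate state after reacquiring the lock.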