Home
last modified time | relevance | path

Searched refs:LOCK_PREFIX (Results 1 – 16 of 16) sorted by relevance

/linux-4.4.14/arch/x86/include/asm/
atomic.h:51 asm volatile(LOCK_PREFIX "addl %1,%0" in atomic_add()
65 asm volatile(LOCK_PREFIX "subl %1,%0" in atomic_sub()
81 GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e"); in atomic_sub_and_test()
92 asm volatile(LOCK_PREFIX "incl %0" in atomic_inc()
104 asm volatile(LOCK_PREFIX "decl %0" in atomic_dec()
118 GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e"); in atomic_dec_and_test()
131 GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e"); in atomic_inc_and_test()
145 GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s"); in atomic_add_negative()
188 asm volatile(LOCK_PREFIX #op"l %1,%0" \
233 asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v)); in atomic_inc_short()
rwsem.h:66 LOCK_PREFIX _ASM_INC "(%1)\n\t" in __down_read()
89 LOCK_PREFIX " cmpxchg %2,%0\n\t" in __down_read_trylock()
106 LOCK_PREFIX " xadd %1,(%2)\n\t" in __down_write_nested()
138 LOCK_PREFIX " cmpxchg %2,%0\n\t" in __down_write_trylock()
157 LOCK_PREFIX " xadd %1,(%2)\n\t" in __up_read()
175 LOCK_PREFIX " xadd %1,(%2)\n\t" in __up_write()
192 LOCK_PREFIX _ASM_ADD "%2,(%1)\n\t" in __downgrade_write()
211 asm volatile(LOCK_PREFIX _ASM_ADD "%1,%0" in rwsem_atomic_add()
atomic64_64.h:45 asm volatile(LOCK_PREFIX "addq %1,%0" in atomic64_add()
59 asm volatile(LOCK_PREFIX "subq %1,%0" in atomic64_sub()
75 GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", "e"); in atomic64_sub_and_test()
86 asm volatile(LOCK_PREFIX "incq %0" in atomic64_inc()
99 asm volatile(LOCK_PREFIX "decq %0" in atomic64_dec()
114 GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", "e"); in atomic64_dec_and_test()
127 GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", "e"); in atomic64_inc_and_test()
141 GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", "s"); in atomic64_add_negative()
226 asm volatile(LOCK_PREFIX #op"q %1,%0" \
bitops.h:75 asm volatile(LOCK_PREFIX "orb %1,%0" in set_bit()
80 asm volatile(LOCK_PREFIX "bts %1,%0" in set_bit()
113 asm volatile(LOCK_PREFIX "andb %1,%0" in clear_bit()
117 asm volatile(LOCK_PREFIX "btr %1,%0" in clear_bit()
186 asm volatile(LOCK_PREFIX "xorb %1,%0" in change_bit()
190 asm volatile(LOCK_PREFIX "btc %1,%0" in change_bit()
206 GEN_BINARY_RMWcc(LOCK_PREFIX "bts", *addr, "Ir", nr, "%0", "c"); in test_and_set_bit()
252 GEN_BINARY_RMWcc(LOCK_PREFIX "btr", *addr, "Ir", nr, "%0", "c"); in test_and_clear_bit()
305 GEN_BINARY_RMWcc(LOCK_PREFIX "btc", *addr, "Ir", nr, "%0", "c"); in test_and_change_bit()
mutex_64.h:23 asm_volatile_goto(LOCK_PREFIX " decl %0\n" in __mutex_fastpath_lock()
40 asm volatile(LOCK_PREFIX " decl (%%rdi)\n" \
78 asm_volatile_goto(LOCK_PREFIX " incl %0\n" in __mutex_fastpath_unlock()
95 asm volatile(LOCK_PREFIX " incl (%%rdi)\n" \
mutex_32.h:31 asm volatile(LOCK_PREFIX " decl (%%eax)\n" \
77 asm volatile(LOCK_PREFIX " incl (%%eax)\n" \
cmpxchg.h:132 __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
164 #define xadd(ptr, inc) __xadd((ptr), (inc), LOCK_PREFIX)
205 #define add_smp(ptr, inc) __add((ptr), (inc), LOCK_PREFIX)
226 __cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)
cmpxchg_32.h:30 LOCK_PREFIX "cmpxchg8b %0\n\t" in set_64bit()
49 asm volatile(LOCK_PREFIX "cmpxchg8b %1" in __cmpxchg64()
alternative-asm.h:9 .macro LOCK_PREFIX
17 .macro LOCK_PREFIX
futex.h:31 "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n" \
65 __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval, in futex_atomic_op_inuser()
uaccess.h:554 "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n" \
570 "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n" \
586 "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n" \
605 "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n" \
alternative.h:39 #define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; " macro
43 #define LOCK_PREFIX "" macro
spinlock.h:34 # define UNLOCK_LOCK_PREFIX LOCK_PREFIX
/linux-4.4.14/arch/x86/lib/
atomic64_cx8_32.S:19 LOCK_PREFIX
40 LOCK_PREFIX
64 LOCK_PREFIX
92 LOCK_PREFIX
117 LOCK_PREFIX
147 LOCK_PREFIX
176 LOCK_PREFIX
/linux-4.4.14/tools/arch/x86/include/asm/
atomic.h:8 #define LOCK_PREFIX "\n\tlock; " macro
48 asm volatile(LOCK_PREFIX "incl %0" in atomic_inc()
62 GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e"); in atomic_dec_and_test()
/linux-4.4.14/arch/x86/include/asm/uv/
uv_bau.h:763 asm volatile(LOCK_PREFIX "xaddw %0, %1" in atom_asr()