ALTERNATIVE 28 arch/arm64/include/asm/barrier.h #define spec_bar() asm volatile(ALTERNATIVE("dsb nsh\nisb\n", \
ALTERNATIVE 52 arch/arm64/include/asm/io.h asm volatile(ALTERNATIVE("ldrb %w0, [%1]",
ALTERNATIVE 64 arch/arm64/include/asm/io.h asm volatile(ALTERNATIVE("ldrh %w0, [%1]",
ALTERNATIVE 75 arch/arm64/include/asm/io.h asm volatile(ALTERNATIVE("ldr %w0, [%1]",
ALTERNATIVE 86 arch/arm64/include/asm/io.h asm volatile(ALTERNATIVE("ldr %0, [%1]",
ALTERNATIVE 36 arch/arm64/include/asm/irqflags.h asm volatile(ALTERNATIVE(
ALTERNATIVE 55 arch/arm64/include/asm/irqflags.h asm volatile(ALTERNATIVE(
ALTERNATIVE 71 arch/arm64/include/asm/irqflags.h asm volatile(ALTERNATIVE(
ALTERNATIVE 86 arch/arm64/include/asm/irqflags.h asm volatile(ALTERNATIVE(
ALTERNATIVE 118 arch/arm64/include/asm/irqflags.h asm volatile(ALTERNATIVE(
ALTERNATIVE 21 arch/arm64/include/asm/kvm_hyp.h asm volatile(ALTERNATIVE(__mrs_s("%0", r##nvh), \
ALTERNATIVE 31 arch/arm64/include/asm/kvm_hyp.h asm volatile(ALTERNATIVE(__msr_s(r##nvh, "%x0"), \
ALTERNATIVE 98 arch/arm64/include/asm/kvm_hyp.h asm(ALTERNATIVE("nop", "isb", ARM64_WORKAROUND_1165522));
ALTERNATIVE 37 arch/arm64/include/asm/lse.h ALTERNATIVE(llsc, __LSE_PREAMBLE lse, ARM64_HAS_LSE_ATOMICS)
ALTERNATIVE 16 arch/arm64/include/asm/percpu.h asm volatile(ALTERNATIVE("msr tpidr_el1, %0",
ALTERNATIVE 30 arch/arm64/include/asm/percpu.h asm(ALTERNATIVE("mrs %0, tpidr_el1",
ALTERNATIVE 31 arch/arm64/include/asm/tlbflush.h ALTERNATIVE("nop\n nop", \
ALTERNATIVE 38 arch/arm64/include/asm/tlbflush.h ALTERNATIVE("nop\n nop", \
ALTERNATIVE 46 arch/arm64/include/asm/uaccess.h asm(ALTERNATIVE("nop", SET_PSTATE_UAO(1), ARM64_HAS_UAO));
ALTERNATIVE 48 arch/arm64/include/asm/uaccess.h asm(ALTERNATIVE("nop", SET_PSTATE_UAO(0), ARM64_HAS_UAO,
ALTERNATIVE 178 arch/arm64/include/asm/uaccess.h asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), ARM64_HAS_PAN,
ALTERNATIVE 184 arch/arm64/include/asm/uaccess.h asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), ARM64_HAS_PAN,
ALTERNATIVE 191 arch/arm64/include/asm/uaccess.h asm(ALTERNATIVE("nop", SET_PSTATE_PAN(1), alt, \
ALTERNATIVE 198 arch/arm64/include/asm/uaccess.h asm(ALTERNATIVE("nop", SET_PSTATE_PAN(0), alt, \
ALTERNATIVE 257 arch/arm64/include/asm/uaccess.h "1:"ALTERNATIVE(instr " " reg "1, [%2]\n", \
ALTERNATIVE 322 arch/arm64/include/asm/uaccess.h "1:"ALTERNATIVE(instr " " reg "1, [%2]\n", \
ALTERNATIVE 448 arch/arm64/kernel/process.c asm(ALTERNATIVE("nop", SET_PSTATE_UAO(1), ARM64_HAS_UAO));
ALTERNATIVE 450 arch/arm64/kernel/process.c asm(ALTERNATIVE("nop", SET_PSTATE_UAO(0), ARM64_HAS_UAO));
ALTERNATIVE 159 arch/arm64/kvm/hyp/switch.c asm(ALTERNATIVE("nop", "isb", ARM64_WORKAROUND_1165522));
ALTERNATIVE 243 arch/arm64/mm/context.c asm(ALTERNATIVE("nop; nop; nop",
ALTERNATIVE 43 arch/parisc/include/asm/cache.h ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
ALTERNATIVE 46 arch/parisc/include/asm/cache.h ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
ALTERNATIVE 47 arch/parisc/include/asm/cache.h ALTERNATIVE(ALT_COND_NO_SPLIT_TLB, INSN_NOP) \
ALTERNATIVE 50 arch/parisc/include/asm/cache.h ALTERNATIVE(ALT_COND_NO_SMP, INSN_PxTLB) \
ALTERNATIVE 54 arch/parisc/include/asm/cache.h ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
ALTERNATIVE 55 arch/parisc/include/asm/cache.h ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) \
ALTERNATIVE 58 arch/parisc/include/asm/cache.h ALTERNATIVE(ALT_COND_NO_DCACHE, INSN_NOP) \
ALTERNATIVE 59 arch/parisc/include/asm/cache.h ALTERNATIVE(ALT_COND_NO_IOC_FDC, INSN_NOP) :::"memory")
ALTERNATIVE 67 arch/s390/include/asm/alternative-asm.h .macro ALTERNATIVE oldinstr, newinstr, feature
ALTERNATIVE 142 arch/s390/include/asm/alternative.h asm volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
ALTERNATIVE 111 arch/s390/include/asm/nospec-insn.h # The ALTERNATIVE replacement code has a .+10 which targets
ALTERNATIVE 118 arch/s390/include/asm/nospec-insn.h ALTERNATIVE "ex %r0,_LC_BR_R1", ".insn ril,0xc60000000000,0,.+10", 35
ALTERNATIVE 89 arch/s390/include/asm/spinlock.h ALTERNATIVE("", ".long 0xb2fa0070", 49) /* NIAI 7 */
ALTERNATIVE 78 arch/s390/lib/spinlock.c ALTERNATIVE("", ".long 0xb2fa0040", 49) /* NIAI 4 */
ALTERNATIVE 89 arch/s390/lib/spinlock.c ALTERNATIVE("", ".long 0xb2fa0080", 49) /* NIAI 8 */
ALTERNATIVE 194 arch/x86/entry/calling.h ALTERNATIVE "", "SET_NOFLUSH_BIT \reg", X86_FEATURE_PCID
ALTERNATIVE 200 arch/x86/entry/calling.h ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
ALTERNATIVE 211 arch/x86/entry/calling.h ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
ALTERNATIVE 214 arch/x86/entry/calling.h ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID
ALTERNATIVE 251 arch/x86/entry/calling.h ALTERNATIVE "jmp .Ldone_\@", "", X86_FEATURE_PTI
ALTERNATIVE 269 arch/x86/entry/calling.h ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
ALTERNATIVE 271 arch/x86/entry/calling.h ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID
ALTERNATIVE 330 arch/x86/entry/calling.h ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_USER
ALTERNATIVE 333 arch/x86/entry/calling.h ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_KERNEL
ALTERNATIVE 54 arch/x86/include/asm/alternative-asm.h .macro ALTERNATIVE oldinstr, newinstr, feature
ALTERNATIVE 204 arch/x86/include/asm/alternative.h asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
ALTERNATIVE 221 arch/x86/include/asm/alternative.h asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
ALTERNATIVE 240 arch/x86/include/asm/alternative.h asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
ALTERNATIVE 245 arch/x86/include/asm/alternative.h asm_inline volatile (ALTERNATIVE("call %P[old]", "call %P[new]", feature) \
ALTERNATIVE 19 arch/x86/include/asm/arch_hweight.h asm (ALTERNATIVE("call __sw_hweight32", "popcntl %1, %0", X86_FEATURE_POPCNT)
ALTERNATIVE 47 arch/x86/include/asm/arch_hweight.h asm (ALTERNATIVE("call __sw_hweight64", "popcntq %1, %0", X86_FEATURE_POPCNT)
ALTERNATIVE 15 arch/x86/include/asm/barrier.h #define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
ALTERNATIVE 17 arch/x86/include/asm/barrier.h #define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
ALTERNATIVE 19 arch/x86/include/asm/barrier.h #define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
ALTERNATIVE 261 arch/x86/include/asm/fpu/internal.h asm volatile(ALTERNATIVE(XRSTOR, \
ALTERNATIVE 21 arch/x86/include/asm/kvm_para.h ALTERNATIVE(".byte 0x0f,0x01,0xc1", ".byte 0x0f,0x01,0xd9", X86_FEATURE_VMMCALL)
ALTERNATIVE 142 arch/x86/include/asm/nospec-branch.h ALTERNATIVE "jmp .Lskip_rsb_\@", \
ALTERNATIVE 253 arch/x86/include/asm/nospec-branch.h ALTERNATIVE("jmp 910f",
ALTERNATIVE 265 arch/x86/include/asm/nospec-branch.h asm volatile(ALTERNATIVE("", "wrmsr", %c[feature])
ALTERNATIVE 26 arch/x86/include/asm/smap.h ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP
ALTERNATIVE 29 arch/x86/include/asm/smap.h ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP
ALTERNATIVE 60 arch/x86/include/asm/smap.h asm volatile (ALTERNATIVE("", "pushf; pop %0; " __ASM_CLAC,
ALTERNATIVE 69 arch/x86/include/asm/smap.h asm volatile (ALTERNATIVE("", "push %0; popf", X86_FEATURE_SMAP)
ALTERNATIVE 75 arch/x86/include/asm/smap.h ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
ALTERNATIVE 77 arch/x86/include/asm/smap.h ALTERNATIVE("", __ASM_STAC, X86_FEATURE_SMAP)
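All of the hits above follow the same runtime-patching pattern: ALTERNATIVE(oldinstr, newinstr, feature) emits oldinstr inline and records the site (in a section such as .altinstructions) so the kernel can overwrite it with newinstr at boot when the named CPU feature or workaround is detected. A minimal sketch of the C inline-asm form, modeled on the arch/x86/include/asm/barrier.h entries; the wrapper name my_rmb() is hypothetical, and the X86_FEATURE_XMM2 condition is assumed here purely for illustration:

/*
 * Sketch only: default sequence is a LOCK ADD to the stack (works on
 * all CPUs); when the assumed feature bit is set, the patcher replaces
 * it with LFENCE. my_rmb() is not a real kernel symbol.
 */
#include <asm/alternative.h>
#include <asm/cpufeatures.h>

static __always_inline void my_rmb(void)
{
	asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence",
				 X86_FEATURE_XMM2) : : : "memory");
}

The assembler-macro hits (for example arch/x86/entry/calling.h and arch/s390/include/asm/nospec-insn.h) use the .macro ALTERNATIVE form of the same mechanism, taking the old sequence, the replacement, and the feature number as macro arguments instead of C string literals.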