
Searched refs:xmm (Results 1 – 11 of 11) sorted by relevance

/linux-4.4.14/arch/x86/include/asm/
inst.h
125 .macro XMM_NUM opd xmm
127 .ifc \xmm,%xmm0
130 .ifc \xmm,%xmm1
133 .ifc \xmm,%xmm2
136 .ifc \xmm,%xmm3
139 .ifc \xmm,%xmm4
142 .ifc \xmm,%xmm5
145 .ifc \xmm,%xmm6
148 .ifc \xmm,%xmm7
151 .ifc \xmm,%xmm8
[all …]
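
inst.h's XMM_NUM macro walks the .ifc chain above to turn a register operand such as %xmm3 into its numeric encoding 3, which the other macros in the file then use when hand-emitting instruction bytes. A rough C analogue of that name-to-number mapping, purely for illustration (xmm_num below is a hypothetical helper, not kernel code):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical C analogue of XMM_NUM: map "%xmmN" to N. */
    static int xmm_num(const char *reg)
    {
            if (strncmp(reg, "%xmm", 4) != 0)
                    return -1;              /* not an xmm register */
            return atoi(reg + 4);           /* "%xmm12" -> 12 */
    }

    int main(void)
    {
            printf("%d\n", xmm_num("%xmm7"));   /* prints 7 */
            return 0;
    }
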
kvm_emulate.h
243 unsigned xmm; member
/linux-4.4.14/arch/x86/crypto/
aesni-intel_asm.S
232 pxor %xmm\i, %xmm\i
237 psrldq $4, %xmm\i
238 pxor \TMP1, %xmm\i
248 psrldq $4, %xmm\i
254 PSHUFB_XMM %xmm14, %xmm\i # byte-reflect the AAD data
269 movdqa \XMM0, %xmm\index
270 PSHUFB_XMM %xmm14, %xmm\index # perform a 16 byte swap
271 pxor \TMP2, %xmm\index
281 AESENC \TMP1, %xmm\index
289 AESENCLAST \TMP1, %xmm\index # Last Round
[all …]
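
The pxor/AESENC/AESENCLAST sequence in the matches above is the usual AES round structure applied to one 16-byte block held in an xmm register. A minimal sketch of the same operations using the AES-NI intrinsics instead of the kernel's asm macros (the 11-entry round-key array is assumed to come from a separate key-expansion step):

    #include <immintrin.h>          /* AES-NI intrinsics; build with -maes */

    /* AES-128 encryption of one 16-byte block kept in an xmm register:
     * initial whitening (pxor), nine AESENC rounds, one AESENCLAST. */
    static __m128i aes128_encrypt_block(__m128i block, const __m128i rk[11])
    {
            block = _mm_xor_si128(block, rk[0]);
            for (int i = 1; i < 10; i++)
                    block = _mm_aesenc_si128(block, rk[i]);
            return _mm_aesenclast_si128(block, rk[10]);
    }
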
sha256-ssse3-asm.S
65 # COPY_XMM_AND_BSWAP xmm, [mem], byte_flip_mask
66 # Load xmm with mem and byte swap each dword
sha256-avx-asm.S
72 # COPY_XMM_AND_BSWAP xmm, [mem], byte_flip_mask
73 # Load xmm with mem and byte swap each dword
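
Both the SSSE3 and AVX SHA-256 files use COPY_XMM_AND_BSWAP to load a 16-byte message block and byte-swap each 32-bit dword into big-endian order. A sketch of the equivalent operation with intrinsics, assuming SSSE3 is available (build with -mssse3):

    #include <tmmintrin.h>          /* PSHUFB intrinsic */

    /* Load 16 bytes and byte-swap each 32-bit dword, as COPY_XMM_AND_BSWAP
     * does with an unaligned load, pshufb and a byte_flip_mask constant. */
    static __m128i load_and_bswap_dwords(const void *mem)
    {
            const __m128i byte_flip_mask =
                    _mm_set_epi8(12, 13, 14, 15,  8,  9, 10, 11,
                                  4,  5,  6,  7,  0,  1,  2,  3);
            __m128i v = _mm_loadu_si128((const __m128i *)mem);
            return _mm_shuffle_epi8(v, byte_flip_mask);
    }
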
aes_ctrby8_avx-x86_64.S
96 #define XMM(i) CONCAT(%xmm, i)
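
Because the .S file is fed through the C preprocessor, XMM(i) is plain token pasting that builds register names such as %xmm4 at preprocessing time. The same pattern in standard C, with the leading % dropped only so the example stays valid C:

    #define CONCAT(a, b)    a##b
    #define XMM(i)          CONCAT(xmm, i)  /* XMM(4) expands to xmm4 */

    int XMM(4);             /* declares an int named xmm4 */
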
aesni-intel_avx-x86_64.S
78 ## padded AAD in xmm register = {A1 A0 0 0}
94 ## padded AAD in xmm register = {A2 A1 A0 0}
202 reg_\r = %xmm\n
1145 # clobbering all xmm registers
2422 # clobbering all xmm registers
/linux-4.4.14/arch/x86/include/uapi/asm/
kvm.h
166 __u8 xmm[16][16]; member
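
In the KVM userspace ABI each guest XMM register is exposed as 16 raw bytes, so xmm[16][16] covers %xmm0 through %xmm15. A small, hypothetical helper that prints one register from a struct kvm_fpu that has already been filled in:

    #include <stdio.h>
    #include <linux/kvm.h>

    /* Hypothetical helper: dump guest register %xmm<n>, most significant
     * byte first, from an already-populated struct kvm_fpu. */
    static void print_xmm(const struct kvm_fpu *fpu, int n)
    {
            printf("xmm%d = ", n);
            for (int i = 15; i >= 0; i--)
                    printf("%02x", fpu->xmm[n][i]);
            printf("\n");
    }
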
/linux-4.4.14/arch/x86/kvm/
emulate.c
1131 op->addr.xmm = reg; in decode_register_operand()
1182 op->addr.xmm = ctxt->modrm_rm; in decode_modrm()
1736 write_sse_reg(ctxt, &op->vec_val, op->addr.xmm); in writeback()
x86.c
7177 memcpy(fpu->xmm, fxsave->xmm_space, sizeof fxsave->xmm_space); in kvm_arch_vcpu_ioctl_get_fpu()
7194 memcpy(fxsave->xmm_space, fpu->xmm, sizeof fxsave->xmm_space); in kvm_arch_vcpu_ioctl_set_fpu()
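
Both directions copy the same 256 bytes: the uapi view is 16 registers of 16 bytes, and the host FXSAVE image stores them in its xmm_space array. A sketch of that symmetric copy, with simplified, hypothetical struct definitions standing in for the kernel's kvm_fpu and fxregs_state:

    #include <stdint.h>
    #include <string.h>

    /* Hypothetical, simplified stand-ins for the real structures. */
    struct fxsave_image { uint32_t xmm_space[64]; };    /* 256 bytes */
    struct fpu_regs     { uint8_t  xmm[16][16];   };    /* 256 bytes */

    static void get_xmm(struct fpu_regs *fpu, const struct fxsave_image *fx)
    {
            memcpy(fpu->xmm, fx->xmm_space, sizeof(fx->xmm_space));
    }

    static void set_xmm(struct fxsave_image *fx, const struct fpu_regs *fpu)
    {
            memcpy(fx->xmm_space, fpu->xmm, sizeof(fx->xmm_space));
    }
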
/linux-4.4.14/Documentation/virtual/kvm/
api.txt
586 __u8 xmm[16][16];
612 __u8 xmm[16][16];
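
The struct shown here is what the KVM_GET_FPU and KVM_SET_FPU vcpu ioctls exchange. A minimal sketch of reading guest %xmm0 through KVM_GET_FPU, assuming vcpu_fd is an already-created vcpu file descriptor:

    #include <sys/ioctl.h>
    #include <linux/kvm.h>

    /* Fetch guest FPU/SSE state and copy out the raw bytes of %xmm0.
     * Returns 0 on success, -1 with errno set on failure. */
    static int read_guest_xmm0(int vcpu_fd, unsigned char out[16])
    {
            struct kvm_fpu fpu;

            if (ioctl(vcpu_fd, KVM_GET_FPU, &fpu) < 0)
                    return -1;
            for (int i = 0; i < 16; i++)
                    out[i] = fpu.xmm[0][i];
            return 0;
    }
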