Searched refs:xmm (Results 1 – 11 of 11) sorted by relevance
/linux-4.4.14/arch/x86/include/asm/
inst.h
    125  .macro XMM_NUM opd xmm
    127  .ifc \xmm,%xmm0
    130  .ifc \xmm,%xmm1
    133  .ifc \xmm,%xmm2
    136  .ifc \xmm,%xmm3
    139  .ifc \xmm,%xmm4
    142  .ifc \xmm,%xmm5
    145  .ifc \xmm,%xmm6
    148  .ifc \xmm,%xmm7
    151  .ifc \xmm,%xmm8
    [all …]
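These inst.h hits are a gas macro that recovers the numeric index of an XMM register from its name via .ifc string comparisons, so the header can emit instructions as raw opcode bytes for assemblers too old to know them. A rough C analogue of the same name-to-number dispatch, purely illustrative and not kernel code:

    #include <string.h>

    /* Illustrative analogue of inst.h's XMM_NUM macro: gas's .ifc
     * directive compares the register name as a string at assembly
     * time, much like a strcmp() chain, and yields the register's
     * numeric encoding. */
    static int xmm_num(const char *reg)
    {
        static const char *names[] = {
            "%xmm0", "%xmm1", "%xmm2", "%xmm3",
            "%xmm4", "%xmm5", "%xmm6", "%xmm7",
            "%xmm8",
        };
        for (int i = 0; i < 9; i++)
            if (strcmp(reg, names[i]) == 0)
                return i;   /* index used to build the opcode bytes */
        return -1;          /* unknown register name */
    }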
kvm_emulate.h
    243  unsigned xmm;    (member)
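This is the xmm field of the instruction emulator's operand-address union: when a decoded operand is an SSE register, the union holds its register index rather than a pointer or memory address, which is what the emulate.c hits further down store into it. A trimmed-down sketch of that layout (the field subset shown is illustrative, not the full kernel definition):

    /* Abridged sketch of the operand description from kvm_emulate.h:
     * for an OP_XMM operand, addr.xmm records which SSE register
     * the operand refers to. */
    struct operand_sketch {
        enum { OP_REG, OP_MEM, OP_XMM, OP_NONE } type;
        union {
            unsigned long *reg;  /* general-purpose register */
            unsigned xmm;        /* SSE register index */
        } addr;
    };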
/linux-4.4.14/arch/x86/crypto/
aesni-intel_asm.S
    232  pxor %xmm\i, %xmm\i
    237  psrldq $4, %xmm\i
    238  pxor \TMP1, %xmm\i
    248  psrldq $4, %xmm\i
    254  PSHUFB_XMM %xmm14, %xmm\i       # byte-reflect the AAD data
    269  movdqa \XMM0, %xmm\index
    270  PSHUFB_XMM %xmm14, %xmm\index   # perform a 16 byte swap
    271  pxor \TMP2, %xmm\index
    281  AESENC \TMP1, %xmm\index
    289  AESENCLAST \TMP1, %xmm\index    # Last Round
    [all …]
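These lines are from the AES-GCM assembly, where PSHUFB with a fixed shuffle mask kept in %xmm14 byte-reflects each 16-byte block of AAD before it enters the GHASH computation. In C terms, the "byte-reflect" step amounts to reversing the block, roughly:

    #include <stdint.h>

    /* What "byte-reflect the AAD data" amounts to: reverse the 16
     * bytes of a block, as PSHUFB does with a descending mask. */
    static void byte_reflect16(uint8_t block[16])
    {
        for (int i = 0; i < 8; i++) {
            uint8_t tmp = block[i];
            block[i] = block[15 - i];
            block[15 - i] = tmp;
        }
    }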
sha256-ssse3-asm.S
    65  # COPY_XMM_AND_BSWAP xmm, [mem], byte_flip_mask
    66  # Load xmm with mem and byte swap each dword
sha256-avx-asm.S
    72  # COPY_XMM_AND_BSWAP xmm, [mem], byte_flip_mask
    73  # Load xmm with mem and byte swap each dword
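Both SHA-256 variants define the same helper: load 16 bytes into an XMM register and byte-swap each 32-bit dword, since SHA-256 consumes its message words big-endian. A plain C rendering of the macro's effect, using the GCC/Clang byte-swap builtin:

    #include <stdint.h>
    #include <string.h>

    /* Effect of COPY_XMM_AND_BSWAP: fetch four dwords from memory and
     * flip the byte order of each one. */
    static void copy_and_bswap(uint32_t dst[4], const void *mem)
    {
        uint32_t w[4];

        memcpy(w, mem, 16);  /* unaligned-safe 16-byte load */
        for (int i = 0; i < 4; i++)
            dst[i] = __builtin_bswap32(w[i]);  /* per-dword byte swap */
    }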
aes_ctrby8_avx-x86_64.S
    96  #define XMM(i) CONCAT(%xmm, i)
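Because .S files run through the C preprocessor, this file can build register names by token pasting: XMM(3) expands to %xmm3, letting other macros index registers numerically. The underlying trick is ordinary cpp ## concatenation, shown here building C identifiers instead:

    /* The same token-pasting idiom in plain C: CONCAT glues two
     * preprocessor tokens together, so XMM(3) becomes %xmm3 in the
     * assembly source. */
    #define CONCAT(a, b) a##b
    #define VAR(i) CONCAT(value, i)

    int value3 = 42;
    int read_it(void) { return VAR(3); }  /* expands to: return value3; */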
aesni-intel_avx-x86_64.S
    78    ## padded AAD in xmm register = {A1 A0 0 0}
    94    ## padded AAD in xmm register = {A2 A1 A0 0}
    202   reg_\r = %xmm\n
    1145  # clobbering all xmm registers
    2422  # clobbering all xmm registers
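The {A1 A0 0 0} and {A2 A1 A0 0} comments describe how a partial AAD fragment is placed in an XMM register with the unused lanes zeroed before hashing. Reading them simply as "copy the remaining bytes, zero the rest", a C sketch of that preparation would be:

    #include <stdint.h>
    #include <string.h>

    /* Padding a short final AAD fragment into a full 16-byte block,
     * as the {A1 A0 0 0} comments describe: unused bytes are zero. */
    static void pad_aad_block(uint8_t block[16],
                              const uint8_t *aad, size_t len)
    {
        memset(block, 0, 16);     /* zero the whole block first */
        memcpy(block, aad, len);  /* copy the remaining bytes, len < 16 */
    }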
/linux-4.4.14/arch/x86/include/uapi/asm/
kvm.h
    166  __u8 xmm[16][16];    (member)
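In the KVM userspace ABI, struct kvm_fpu carries the guest FPU state, and this xmm[16][16] member holds the sixteen 128-bit XMM registers at 16 bytes each; the x86.c hits below copy it to and from the kernel's fxsave area. A minimal sketch of reading it from userspace with the KVM_GET_FPU ioctl (error handling and vcpu setup omitted; vcpu_fd is assumed to be an open vcpu file descriptor):

    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <linux/kvm.h>

    /* Minimal sketch: fetch the guest FPU state and dump XMM0.
     * vcpu_fd is assumed to come from KVM_CREATE_VCPU. */
    static void dump_xmm0(int vcpu_fd)
    {
        struct kvm_fpu fpu;

        if (ioctl(vcpu_fd, KVM_GET_FPU, &fpu) < 0)
            return;
        for (int i = 0; i < 16; i++)
            printf("%02x", fpu.xmm[0][i]);  /* 16 bytes of XMM0 */
        printf("\n");
    }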
/linux-4.4.14/arch/x86/kvm/
emulate.c
    1131  op->addr.xmm = reg;                               in decode_register_operand()
    1182  op->addr.xmm = ctxt->modrm_rm;                    in decode_modrm()
    1736  write_sse_reg(ctxt, &op->vec_val, op->addr.xmm);  in writeback()
x86.c
    7177  memcpy(fpu->xmm, fxsave->xmm_space, sizeof fxsave->xmm_space);  in kvm_arch_vcpu_ioctl_get_fpu()
    7194  memcpy(fxsave->xmm_space, fpu->xmm, sizeof fxsave->xmm_space);  in kvm_arch_vcpu_ioctl_set_fpu()
/linux-4.4.14/Documentation/virtual/kvm/
api.txt
    586  __u8 xmm[16][16];
    612  __u8 xmm[16][16];