Lines matching refs: XMM2

357 .macro INITIAL_BLOCKS_AVX num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 …
491 vmovdqa \CTR, \XMM2
492 vpshufb SHUF_MASK(%rip), \XMM2, \XMM2 # perform a 16Byte swap
520 vpxor \T_key, \XMM2, \XMM2
533 vaesenc \T_key, \XMM2, \XMM2
547 vaesenclast \T_key, \XMM2, \XMM2
563 vpxor \T1, \XMM2, \XMM2
564 vmovdqu \XMM2, 16*1(arg2 , %r11)
566 vmovdqa \T1, \XMM2
615 vpshufb SHUF_MASK(%rip), \XMM2, \XMM2 # perform a 16Byte swap
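
The INITIAL_BLOCKS_AVX matches above trace XMM2 through one full AES-CTR block: copy the counter (491), byte-swap it into AES input order (492), whiten with round key 0 (520), run the vaesenc rounds (533) and vaesenclast (547), XOR with the input block and store the result (563-564), keep the original input in the decrypt path (566), then byte-swap for hashing (615). A minimal C-intrinsics sketch of that per-block flow, assuming AES-128 (11 round keys); key_sched, shuf_mask and input are hypothetical parameters, not the kernel's interface:

#include <immintrin.h>   /* AES-NI + SSSE3 intrinsics; build with e.g. gcc -maes -mssse3 */

/* Sketch of the per-block CTR path INITIAL_BLOCKS_AVX applies to \XMM2.
 * key_sched: assumed expanded AES-128 key (11 round keys);
 * shuf_mask: the 16-byte reversal constant corresponding to SHUF_MASK. */
static __m128i ctr_block_encrypt(__m128i ctr, const __m128i key_sched[11],
                                 __m128i shuf_mask, __m128i input)
{
    __m128i blk = ctr;                               /* 491: vmovdqa  \CTR, \XMM2         */
    blk = _mm_shuffle_epi8(blk, shuf_mask);          /* 492: vpshufb  SHUF_MASK(%rip)     */
    blk = _mm_xor_si128(blk, key_sched[0]);          /* 520: vpxor    \T_key, \XMM2       */
    for (int r = 1; r < 10; r++)                     /* 533: vaesenc  rounds 1..9         */
        blk = _mm_aesenc_si128(blk, key_sched[r]);
    blk = _mm_aesenclast_si128(blk, key_sched[10]);  /* 547: vaesenclast                  */
    return _mm_xor_si128(blk, input);                /* 563: vpxor \T1, \XMM2; 564: store */
}

The decrypt-only line 566 (vmovdqa \T1, \XMM2) reloads the register with the original ciphertext so that GHASH always hashes ciphertext; the INITIAL_BLOCKS_AVX2 matches at 1751-1876 show the same sequence for the AVX2 path.
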
633 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8…
636 vmovdqa \XMM2, TMP2(%rsp)
646 vpaddd ONE(%rip), \XMM1, \XMM2
647 vpaddd ONE(%rip), \XMM2, \XMM3
656 vpshufb SHUF_MASK(%rip), \XMM2, \XMM2 # perform a 16Byte swap
665 vpaddd ONEf(%rip), \XMM1, \XMM2
666 vpaddd ONEf(%rip), \XMM2, \XMM3
680 vpxor \T1, \XMM2, \XMM2
696 vaesenc \T1, \XMM2, \XMM2
706 vaesenc \T1, \XMM2, \XMM2
729 vaesenc \T1, \XMM2, \XMM2
752 vaesenc \T1, \XMM2, \XMM2
777 vaesenc \T1, \XMM2, \XMM2
800 vaesenc \T1, \XMM2, \XMM2
824 vaesenc \T1, \XMM2, \XMM2
848 vaesenc \T1, \XMM2, \XMM2
873 vaesenc \T5, \XMM2, \XMM2
942 vmovdqu \XMM2, 16*1(arg2,%r11) # Write to the Ciphertext buffer
965 vpshufb SHUF_MASK(%rip), \XMM2, \XMM2 # perform a 16Byte swap
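
Within GHASH_8_ENCRYPT_8_PARALLEL_AVX, XMM2 is first spilled for the GHASH pass (636) and then becomes the second of eight fresh counter blocks. Lines 646-647 and 665-666 are the two increment modes: with ONE the counters are kept byte-reversed, incremented as a normal 32-bit add, and shuffled back before encryption (656); with ONEf the increment is applied directly in AES byte order, which only works while the low counter byte cannot wrap across the batch. A hedged C-intrinsics sketch of the two increments; the constant values are assumptions consistent with the usual GCM counter layout, not copied from the source:

#include <immintrin.h>   /* SSE2 + SSSE3 intrinsics; build with e.g. gcc -mssse3 */

/* Next counter block (the \XMM1 -> \XMM2 step) in the two modes used by
 * GHASH_8_ENCRYPT_8_PARALLEL_AVX.  Function names are illustrative. */
static __m128i next_ctr_swapped(__m128i ctr_swapped, __m128i shuf_mask)
{
    /* 646: vpaddd ONE(%rip) - counter kept byte-reversed, so the 32-bit
     * counter sits in lane 0 and a plain dword add increments it */
    __m128i next = _mm_add_epi32(ctr_swapped, _mm_set_epi32(0, 0, 0, 1));
    /* 656: vpshufb SHUF_MASK(%rip) - swap back into AES input order */
    return _mm_shuffle_epi8(next, shuf_mask);
}

static __m128i next_ctr_direct(__m128i ctr_aes_order)
{
    /* 665: vpaddd ONEf(%rip) - add 1 to the top register byte (the big-endian
     * counter's least significant byte); safe only while that byte won't wrap */
    return _mm_add_epi32(ctr_aes_order, _mm_set_epi32(0x01000000, 0, 0, 0));
}

The block spilled at 636 is hashed while the new counters run through the interleaved vaesenc chain (680-873); the result block is then written out (942) and the register byte-swapped (965) for the next GHASH pass. The AVX2 variant (1900-2225) follows the same structure.
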
982 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
998 vpshufd $0b01001110, \XMM2, \T2
999 vpxor \XMM2, \T2, \T2
1001 vpclmulqdq $0x11, \T5, \XMM2, \T4
1004 vpclmulqdq $0x00, \T5, \XMM2, \T4
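
In GHASH_LAST_8_AVX the remaining references are the Karatsuba-style split of one 128x128-bit carry-less multiply: vpshufd $0b01001110 plus vpxor (998-999) fold the two qword halves of the block into \T2, and the two vpclmulqdq instructions (1001, 1004) form the high and low partial products against the matching hash-key power held in \T5; the middle product works on the folded copy in \T2 against a precomputed folded hash key, so it does not show up in this XMM2 listing. A C-intrinsics sketch of the decomposition with illustrative names; the hash-key fold is done inline here, where the macro reads a precomputed HashKey_i_k value instead:

#include <immintrin.h>   /* PCLMUL + SSE2 intrinsics; build with e.g. gcc -mpclmul */

/* One-block Karatsuba carry-less multiply mirroring lines 998-1004.
 * x stands in for \XMM2, h for the hash-key power in \T5. */
static void ghash_karatsuba(__m128i x, __m128i h,
                            __m128i *hi, __m128i *lo, __m128i *mid)
{
    __m128i x_fold = _mm_shuffle_epi32(x, 0x4E);         /*  998: vpshufd $0b01001110   */
    x_fold = _mm_xor_si128(x_fold, x);                   /*  999: vpxor -> x.hi ^ x.lo  */

    __m128i h_fold = _mm_shuffle_epi32(h, 0x4E);         /* the macro loads HashKey_i_k */
    h_fold = _mm_xor_si128(h_fold, h);                   /* instead of folding inline   */

    *hi  = _mm_clmulepi64_si128(x, h, 0x11);             /* 1001: x.hi * h.hi           */
    *lo  = _mm_clmulepi64_si128(x, h, 0x00);             /* 1004: x.lo * h.lo           */
    *mid = _mm_clmulepi64_si128(x_fold, h_fold, 0x00);   /* (x.hi^x.lo) * (h.hi^h.lo)   */
    *mid = _mm_xor_si128(*mid, _mm_xor_si128(*hi, *lo)); /* Karatsuba middle term       */
}

The accumulated hi:lo result, with mid added across the 64-bit boundary, still has to be reduced modulo the GHASH polynomial; that later step works on the accumulators, which is why no further XMM2 references appear in this macro. The GHASH_LAST_8_AVX2 matches at 2261-2269 show the same folding for the AVX2 path.
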
1616 .macro INITIAL_BLOCKS_AVX2 num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7…
1751 vmovdqa \CTR, \XMM2
1752 vpshufb SHUF_MASK(%rip), \XMM2, \XMM2 # perform a 16Byte swap
1780 vpxor \T_key, \XMM2, \XMM2
1793 vaesenc \T_key, \XMM2, \XMM2
1807 vaesenclast \T_key, \XMM2, \XMM2
1823 vpxor \T1, \XMM2, \XMM2
1824 vmovdqu \XMM2, 16*1(arg2 , %r11)
1826 vmovdqa \T1, \XMM2
1876 vpshufb SHUF_MASK(%rip), \XMM2, \XMM2 # perform a 16Byte swap
1897 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM…
1900 vmovdqa \XMM2, TMP2(%rsp)
1910 vpaddd ONE(%rip), \XMM1, \XMM2
1911 vpaddd ONE(%rip), \XMM2, \XMM3
1920 vpshufb SHUF_MASK(%rip), \XMM2, \XMM2 # perform a 16Byte swap
1929 vpaddd ONEf(%rip), \XMM1, \XMM2
1930 vpaddd ONEf(%rip), \XMM2, \XMM3
1944 vpxor \T1, \XMM2, \XMM2
1960 vaesenc \T1, \XMM2, \XMM2
1970 vaesenc \T1, \XMM2, \XMM2
1990 vaesenc \T1, \XMM2, \XMM2
2014 vaesenc \T1, \XMM2, \XMM2
2040 vaesenc \T1, \XMM2, \XMM2
2064 vaesenc \T1, \XMM2, \XMM2
2089 vaesenc \T1, \XMM2, \XMM2
2113 vaesenc \T1, \XMM2, \XMM2
2140 vaesenc \T5, \XMM2, \XMM2
2203 vmovdqu \XMM2, 16*1(arg2,%r11) # Write to the Ciphertext buffer
2225 vpshufb SHUF_MASK(%rip), \XMM2, \XMM2 # perform a 16Byte swap
2242 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
2261 vpshufd $0b01001110, \XMM2, \T2
2263 vpxor \XMM2, \T2, \T2
2266 vpclmulqdq $0x11, \T5, \XMM2, \T4
2269 vpclmulqdq $0x00, \T5, \XMM2, \T4