Lines Matching refs:T3

251 .macro  GHASH_MUL_AVX GH HK T1 T2 T3 T4 T5
254 vpshufd $0b01001110, \HK, \T3
256 vpxor \HK, \T3, \T3 # T3 = (b1+b0)
260 vpclmulqdq $0x00, \T3, \T2, \T2 # T2 = (a1+a0)*(b1+b0)
264 vpslldq $8, \T2, \T3 # shift-L T3 2 DWs
266 vpxor \T3, \GH, \GH
271 vpslld $30, \GH, \T3 # packed right shift << 30
274 vpxor \T3, \T2, \T2 # xor the shifted versions
285 vpsrld $2, \GH, \T3 # packed left shift >> 2
287 vpxor \T3, \T2, \T2 # xor the shifted versions
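
Note: the \T3 lines in GHASH_MUL_AVX above carry the Karatsuba middle term. \T3 first holds (b1+b0), it is carry-less-multiplied against (a1+a0), and lines that do not mention \T3 (so not listed here) XOR a1*b1 and a0*b0 back out so that only the cross terms remain before the result is shifted into position. A minimal C sketch of that identity, not the kernel's code; clmul64 and the a1/a0/b1/b0 operands are illustrative names for one VPCLMULQDQ lane and the two 128-bit inputs:

    #include <assert.h>
    #include <stdint.h>

    /* 64x64 -> 128-bit carry-less multiply (one VPCLMULQDQ lane), plain bit loop. */
    static void clmul64(uint64_t a, uint64_t b, uint64_t *hi, uint64_t *lo)
    {
            uint64_t h = 0, l = 0;
            for (int i = 0; i < 64; i++) {
                    if ((b >> i) & 1) {
                            l ^= a << i;
                            if (i)
                                    h ^= a >> (64 - i);
                    }
            }
            *hi = h;
            *lo = l;
    }

    int main(void)
    {
            /* arbitrary test operands; a = a1:a0 and b = b1:b0 are the 128-bit inputs */
            uint64_t a1 = 0x0123456789abcdefULL, a0 = 0xfedcba9876543210ULL;
            uint64_t b1 = 0x0f1e2d3c4b5a6978ULL, b0 = 0x1122334455667788ULL;
            uint64_t hh, hl, lh, ll, mh, ml, xh, xl, yh, yl;

            clmul64(a1, b1, &hh, &hl);             /* vpclmulqdq $0x11: a1*b1      */
            clmul64(a0, b0, &lh, &ll);             /* vpclmulqdq $0x00: a0*b0      */
            clmul64(a1 ^ a0, b1 ^ b0, &mh, &ml);   /* the T3 path: (a1+a0)*(b1+b0) */

            /* Karatsuba: middle term = (a1+a0)(b1+b0) + a1b1 + a0b0 (XOR in GF(2)) */
            mh ^= hh ^ lh;
            ml ^= hl ^ ll;

            /* check against the two cross products computed directly */
            clmul64(a1, b0, &xh, &xl);
            clmul64(a0, b1, &yh, &yl);
            assert(mh == (xh ^ yh) && ml == (xl ^ yl));
            return 0;
    }

The AVX2 8-way path later in this listing skips the Karatsuba fold and issues all four products ($0x11, $0x00, $0x01, $0x10) per block instead.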
297 .macro PRECOMPUTE_AVX HK T1 T2 T3 T4 T5 T6
306 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
312 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
318 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
324 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
330 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
336 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
342 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
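
Note: PRECOMPUTE_AVX above just iterates GHASH_MUL_AVX to build the table HashKey^2 .. HashKey^8 consumed by the 8-way code (stored, per the comments, in the "<<1 mod poly" form the reduction expects). A rough C model of the math, assuming the textbook bit-loop multiply from the GCM spec and ignoring the shifted storage format; be128, gf128_mul and precompute_powers are illustrative names:

    #include <stdint.h>

    typedef struct { uint64_t hi, lo; } be128;   /* hi holds the first 8 bytes of the block */

    /* Textbook GF(2^128) multiply from the GCM spec (slow bit loop, reflected convention). */
    static be128 gf128_mul(be128 x, be128 y)
    {
            be128 z = { 0, 0 };
            be128 v = y;

            for (int i = 0; i < 128; i++) {
                    /* bit i of x, counted from the most significant bit of the block */
                    uint64_t bit = (i < 64) ? (x.hi >> (63 - i)) & 1
                                            : (x.lo >> (127 - i)) & 1;
                    if (bit) {
                            z.hi ^= v.hi;
                            z.lo ^= v.lo;
                    }
                    uint64_t lsb = v.lo & 1;
                    v.lo = (v.lo >> 1) | (v.hi << 63);
                    v.hi >>= 1;
                    if (lsb)
                            v.hi ^= 0xE100000000000000ULL;  /* GCM's R constant (reduction poly, reflected bit order) */
            }
            return z;
    }

    /* What the precompute step amounts to: successive powers of the hash key. */
    static void precompute_powers(be128 h, be128 h_pow[9])   /* h_pow[k] = H^k, k = 1..8; index 0 unused */
    {
            h_pow[1] = h;
            for (int k = 2; k <= 8; k++)
                    h_pow[k] = gf128_mul(h_pow[k - 1], h);
    }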
357 .macro INITIAL_BLOCKS_AVX num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 …
467 GHASH_MUL_AVX reg_i, \T2, \T1, \T3, \T4, \T5, \T6
471 … GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
479 vmovdqa \XMM8, \T3
633 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8…
739 vpclmulqdq $0x11, \T5, \T1, \T3
740 vpxor \T3, \T4, \T4
741 vpclmulqdq $0x00, \T5, \T1, \T3
742 vpxor \T3, \T7, \T7
744 vpshufd $0b01001110, \T1, \T3
745 vpxor \T1, \T3, \T3
747 vpclmulqdq $0x10, \T5, \T3, \T3
748 vpxor \T3, \T6, \T6
764 vpclmulqdq $0x11, \T5, \T1, \T3
765 vpxor \T3, \T4, \T4
766 vpclmulqdq $0x00, \T5, \T1, \T3
767 vpxor \T3, \T7, \T7
769 vpshufd $0b01001110, \T1, \T3
770 vpxor \T1, \T3, \T3
772 vpclmulqdq $0x10, \T5, \T3, \T3
773 vpxor \T3, \T6, \T6
787 vpclmulqdq $0x11, \T5, \T1, \T3
788 vpxor \T3, \T4, \T4
789 vpclmulqdq $0x00, \T5, \T1, \T3
790 vpxor \T3, \T7, \T7
792 vpshufd $0b01001110, \T1, \T3
793 vpxor \T1, \T3, \T3
795 vpclmulqdq $0x10, \T5, \T3, \T3
796 vpxor \T3, \T6, \T6
811 vpclmulqdq $0x11, \T5, \T1, \T3
812 vpxor \T3, \T4, \T4
813 vpclmulqdq $0x00, \T5, \T1, \T3
814 vpxor \T3, \T7, \T7
816 vpshufd $0b01001110, \T1, \T3
817 vpxor \T1, \T3, \T3
819 vpclmulqdq $0x10, \T5, \T3, \T3
820 vpxor \T3, \T6, \T6
834 vpclmulqdq $0x11, \T5, \T1, \T3
835 vpxor \T3, \T4, \T4
836 vpclmulqdq $0x00, \T5, \T1, \T3
837 vpxor \T3, \T7, \T7
839 vpshufd $0b01001110, \T1, \T3
840 vpxor \T1, \T3, \T3
842 vpclmulqdq $0x10, \T5, \T3, \T3
843 vpxor \T3, \T6, \T6
858 vpclmulqdq $0x11, \T5, \T1, \T3
859 vpxor \T3, \T4, \T4
860 vpclmulqdq $0x00, \T5, \T1, \T3
861 vpxor \T3, \T7, \T7
863 vpshufd $0b01001110, \T1, \T3
864 vpxor \T1, \T3, \T3
866 vpclmulqdq $0x10, \T5, \T3, \T3
867 vpxor \T3, \T6, \T6
883 vpclmulqdq $0x11, \T5, \T1, \T3
884 vpxor \T3, \T4, \T4
885 vpclmulqdq $0x00, \T5, \T1, \T3
886 vpxor \T3, \T7, \T7
888 vpshufd $0b01001110, \T1, \T3
889 vpxor \T1, \T3, \T3
891 vpclmulqdq $0x10, \T5, \T3, \T3
892 vpxor \T3, \T6, \T6
907 vaesenclast \T2, reg_j, \T3
909 vmovdqu \T3, 16*i(arg2, %r11)
918 vpslldq $8, \T6, \T3 # shift-L T3 2 DWs
920 vpxor \T3, \T7, \T7
929 vpslld $30, \T7, \T3 # packed right shift << 30
932 vpxor \T3, \T2, \T2 # xor the shifted versions
954 vpsrld $2, \T7, \T3 # packed left shift >> 2
956 vpxor \T3, \T2, \T2 # xor the shifted versions
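
Note: the vpslld/vpsrld lines above (and the matching ones in GHASH_MUL_AVX and GHASH_LAST_8_AVX) use \T3 while folding the high 128 bits of the carry-less product back into the low 128 bits, i.e. reducing modulo the GHASH polynomial x^128 + x^7 + x^2 + x + 1 with shifts and XORs. The assembly works on bit-reflected data in 32-bit lanes, so its shift counts and directions are mirrored relative to the plain polynomial view sketched below; illustrative C only, assuming a compiler with unsigned __int128, and ghash_reduce is a made-up name:

    typedef unsigned __int128 u128;    /* GCC/Clang extension */

    /*
     * Reduce a 256-bit carry-less product hi:lo modulo x^128 + x^7 + x^2 + x + 1
     * (plain, non-reflected view): x^128 == x^7 + x^2 + x + 1, so the high half
     * is folded down as three shifted copies of itself plus itself, and the few
     * bits that spill past bit 127 in that fold are folded down once more.
     */
    static u128 ghash_reduce(u128 hi, u128 lo)
    {
            u128 fold  = hi ^ (hi << 1) ^ (hi << 2) ^ (hi << 7);
            u128 spill = (hi >> 127) ^ (hi >> 126) ^ (hi >> 121);

            fold ^= spill ^ (spill << 1) ^ (spill << 2) ^ (spill << 7);
            return lo ^ fold;
    }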
982 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
993 vmovdqa HashKey_8_k(arg1), \T3
994 vpclmulqdq $0x00, \T3, \T2, \XMM1
1007 vmovdqa HashKey_7_k(arg1), \T3
1008 vpclmulqdq $0x00, \T3, \T2, \T2
1022 vmovdqa HashKey_6_k(arg1), \T3
1023 vpclmulqdq $0x00, \T3, \T2, \T2
1037 vmovdqa HashKey_5_k(arg1), \T3
1038 vpclmulqdq $0x00, \T3, \T2, \T2
1052 vmovdqa HashKey_4_k(arg1), \T3
1053 vpclmulqdq $0x00, \T3, \T2, \T2
1067 vmovdqa HashKey_3_k(arg1), \T3
1068 vpclmulqdq $0x00, \T3, \T2, \T2
1082 vmovdqa HashKey_2_k(arg1), \T3
1083 vpclmulqdq $0x00, \T3, \T2, \T2
1097 vmovdqa HashKey_k(arg1), \T3
1098 vpclmulqdq $0x00, \T3, \T2, \T2
1117 vpslld $30, \T7, \T3 # packed right shift << 30
1120 vpxor \T3, \T2, \T2 # xor the shifted versions
1132 vpsrld $2, \T7, \T3 # packed left shift >> 2
1134 vpxor \T3, \T2, \T2 # xor the shifted versions
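
Note: GHASH_LAST_8_AVX multiplies each accumulated block by the matching precomputed power (HashKey_8 against XMM1 down to HashKey against XMM8, with the HashKey_*_k values feeding the Karatsuba middle term through \T3) and XORs all products together before the final reduction. That works because the GHASH recurrence S_i = (S_{i-1} xor C_i) * H unrolls over eight blocks to the standard aggregation identity (block and state names here are illustrative; the running state is assumed to have been folded into the first block earlier):

    S_8 = (S_0 xor C_1)*H^8 xor C_2*H^7 xor C_3*H^6 xor ... xor C_8*H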
1543 .macro GHASH_MUL_AVX2 GH HK T1 T2 T3 T4 T5
1547 vpclmulqdq $0x01, \HK, \GH, \T3 # T3 = a1*b0
1549 vpxor \T3, \GH, \GH
1552 vpsrldq $8, \GH, \T3 # shift-R GH 2 DWs
1555 vpxor \T3, \T1, \T1
1560 vmovdqa POLY2(%rip), \T3
1562 vpclmulqdq $0x01, \GH, \T3, \T2
1568 vpclmulqdq $0x00, \GH, \T3, \T2
1571 vpclmulqdq $0x10, \GH, \T3, \GH
1581 .macro PRECOMPUTE_AVX2 HK T1 T2 T3 T4 T5 T6
1585 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
1588 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
1591 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
1594 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
1597 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
1600 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
1603 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
1616 .macro INITIAL_BLOCKS_AVX2 num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7…
1727 GHASH_MUL_AVX2 reg_i, \T2, \T1, \T3, \T4, \T5, \T6
1731 …GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks bloc…
1739 vmovdqa \XMM8, \T3
1897 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM…
2000 vpclmulqdq $0x11, \T5, \T1, \T3
2001 vpxor \T3, \T4, \T4
2003 vpclmulqdq $0x00, \T5, \T1, \T3
2004 vpxor \T3, \T7, \T7
2006 vpclmulqdq $0x01, \T5, \T1, \T3
2007 vpxor \T3, \T6, \T6
2009 vpclmulqdq $0x10, \T5, \T1, \T3
2010 vpxor \T3, \T6, \T6
2026 vpclmulqdq $0x11, \T5, \T1, \T3
2027 vpxor \T3, \T4, \T4
2029 vpclmulqdq $0x00, \T5, \T1, \T3
2030 vpxor \T3, \T7, \T7
2032 vpclmulqdq $0x01, \T5, \T1, \T3
2033 vpxor \T3, \T6, \T6
2035 vpclmulqdq $0x10, \T5, \T1, \T3
2036 vpxor \T3, \T6, \T6
2050 vpclmulqdq $0x11, \T5, \T1, \T3
2051 vpxor \T3, \T4, \T4
2053 vpclmulqdq $0x00, \T5, \T1, \T3
2054 vpxor \T3, \T7, \T7
2056 vpclmulqdq $0x01, \T5, \T1, \T3
2057 vpxor \T3, \T6, \T6
2059 vpclmulqdq $0x10, \T5, \T1, \T3
2060 vpxor \T3, \T6, \T6
2075 vpclmulqdq $0x11, \T5, \T1, \T3
2076 vpxor \T3, \T4, \T4
2078 vpclmulqdq $0x00, \T5, \T1, \T3
2079 vpxor \T3, \T7, \T7
2081 vpclmulqdq $0x01, \T5, \T1, \T3
2082 vpxor \T3, \T6, \T6
2084 vpclmulqdq $0x10, \T5, \T1, \T3
2085 vpxor \T3, \T6, \T6
2099 vpclmulqdq $0x11, \T5, \T1, \T3
2100 vpxor \T3, \T4, \T4
2102 vpclmulqdq $0x00, \T5, \T1, \T3
2103 vpxor \T3, \T7, \T7
2105 vpclmulqdq $0x01, \T5, \T1, \T3
2106 vpxor \T3, \T6, \T6
2108 vpclmulqdq $0x10, \T5, \T1, \T3
2109 vpxor \T3, \T6, \T6
2123 vpclmulqdq $0x11, \T5, \T1, \T3
2124 vpxor \T3, \T4, \T4
2126 vpclmulqdq $0x00, \T5, \T1, \T3
2127 vpxor \T3, \T7, \T7
2129 vpclmulqdq $0x01, \T5, \T1, \T3
2130 vpxor \T3, \T6, \T6
2132 vpclmulqdq $0x10, \T5, \T1, \T3
2133 vpxor \T3, \T6, \T6
2151 vpclmulqdq $0x00, \T5, \T1, \T3
2152 vpxor \T3, \T7, \T7
2154 vpclmulqdq $0x01, \T5, \T1, \T3
2155 vpxor \T3, \T6, \T6
2157 vpclmulqdq $0x10, \T5, \T1, \T3
2158 vpxor \T3, \T6, \T6
2160 vpclmulqdq $0x11, \T5, \T1, \T3
2161 vpxor \T3, \T4, \T1
2174 vaesenclast \T2, reg_j, \T3
2176 vmovdqu \T3, 16*i(arg2, %r11)
2185 vpslldq $8, \T6, \T3 # shift-L T3 2 DWs
2187 vpxor \T3, \T7, \T7
2194 vmovdqa POLY2(%rip), \T3
2196 vpclmulqdq $0x01, \T7, \T3, \T2
2214 vpclmulqdq $0x00, \T7, \T3, \T2
2217 vpclmulqdq $0x10, \T7, \T3, \T4
2242 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
2249 vpshufd $0b01001110, \T5, \T3
2251 vpxor \T5, \T3, \T3
2256 vpclmulqdq $0x00, \T3, \T2, \XMM1
2262 vpshufd $0b01001110, \T5, \T3
2264 vpxor \T5, \T3, \T3
2272 vpclmulqdq $0x00, \T3, \T2, \T2
2280 vpshufd $0b01001110, \T5, \T3
2282 vpxor \T5, \T3, \T3
2290 vpclmulqdq $0x00, \T3, \T2, \T2
2298 vpshufd $0b01001110, \T5, \T3
2300 vpxor \T5, \T3, \T3
2308 vpclmulqdq $0x00, \T3, \T2, \T2
2316 vpshufd $0b01001110, \T5, \T3
2318 vpxor \T5, \T3, \T3
2326 vpclmulqdq $0x00, \T3, \T2, \T2
2334 vpshufd $0b01001110, \T5, \T3
2336 vpxor \T5, \T3, \T3
2344 vpclmulqdq $0x00, \T3, \T2, \T2
2352 vpshufd $0b01001110, \T5, \T3
2354 vpxor \T5, \T3, \T3
2362 vpclmulqdq $0x00, \T3, \T2, \T2
2370 vpshufd $0b01001110, \T5, \T3
2372 vpxor \T5, \T3, \T3
2380 vpclmulqdq $0x00, \T3, \T2, \T2
2398 vmovdqa POLY2(%rip), \T3
2400 vpclmulqdq $0x01, \T7, \T3, \T2
2408 vpclmulqdq $0x00, \T7, \T3, \T2
2411 vpclmulqdq $0x10, \T7, \T3, \T4