Lines Matching refs:TMP1

163 .macro GHASH_MUL GH HK TMP1 TMP2 TMP3 TMP4 TMP5
164 movdqa \GH, \TMP1
169 PCLMULQDQ 0x11, \HK, \TMP1 # TMP1 = a1*b1
173 pxor \TMP1, \TMP2 # TMP2 = (a1*b0)+(a0*b1)
178 pxor \TMP2, \TMP1 # TMP1:GH holds the result of GH*HK
211 pxor \TMP1, \GH # result is in GH
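
The GHASH_MUL lines above are one Karatsuba carry-less multiply: three PCLMULQDQs produce a1*b1, a0*b0 and the middle term, which are then folded into a 256-bit product before the reduction (the reduction lines do not reference TMP1 and are not listed). Below is a minimal C sketch of that product step using the PCLMUL intrinsics; the names and layout are illustrative rather than the kernel's, and the final reduction modulo the GHASH polynomial is left out. Build with -mpclmul.

#include <immintrin.h>

/* 256-bit carry-less product gh*hk, built the same way as TMP1:GH above (hi:lo). */
static void clmul_karatsuba(__m128i gh, __m128i hk, __m128i *hi, __m128i *lo)
{
        __m128i t1  = _mm_clmulepi64_si128(gh, hk, 0x11);   /* a1*b1 */
        __m128i t0  = _mm_clmulepi64_si128(gh, hk, 0x00);   /* a0*b0 */
        /* pshufd $78 swaps the 64-bit halves; xor gives (a1^a0) and (b1^b0) */
        __m128i a   = _mm_xor_si128(gh, _mm_shuffle_epi32(gh, 78));
        __m128i b   = _mm_xor_si128(hk, _mm_shuffle_epi32(hk, 78));
        __m128i mid = _mm_clmulepi64_si128(a, b, 0x00);     /* (a1^a0)*(b1^b0) */
        mid = _mm_xor_si128(mid, _mm_xor_si128(t0, t1));    /* a1*b0 ^ a0*b1 */
        /* slide the middle term into the middle of t1:t0 (the pslldq/psrldq $8 pair) */
        *lo = _mm_xor_si128(t0, _mm_slli_si128(mid, 8));
        *hi = _mm_xor_si128(t1, _mm_srli_si128(mid, 8));
        /* the macro then reduces hi:lo modulo the GHASH polynomial */
}
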
226 .macro INITIAL_BLOCKS_DEC num_initial_blocks TMP1 TMP2 TMP3 TMP4 TMP5 XMM0 XMM1 \
235 movd (%r10), \TMP1
236 pslldq $12, \TMP1
238 pxor \TMP1, %xmm\i
265 MOVADQ ONE(%RIP),\TMP1
268 paddd \TMP1, \XMM0 # INCR Y0
279 MOVADQ (%r10),\TMP1
281 AESENC \TMP1, %xmm\index
287 MOVADQ (%r10), \TMP1
289 AESENCLAST \TMP1, %xmm\index # Last Round
292 movdqu (%arg3 , %r11, 1), \TMP1
293 pxor \TMP1, %xmm\index
298 movdqa \TMP1, %xmm\index
303 GHASH_MUL %xmm\i, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
308 GHASH_MUL %xmm6, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
310 GHASH_MUL %xmm7, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
312 GHASH_MUL %xmm8, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
315 GHASH_MUL %xmm7, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
317 GHASH_MUL %xmm8, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
320 GHASH_MUL %xmm8, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
330 MOVADQ ONE(%rip), \TMP1
331 paddd \TMP1, \XMM0 # INCR Y0
335 paddd \TMP1, \XMM0 # INCR Y0
339 paddd \TMP1, \XMM0 # INCR Y0
343 paddd \TMP1, \XMM0 # INCR Y0
347 MOVADQ 0(%arg1),\TMP1
348 pxor \TMP1, \XMM1
349 pxor \TMP1, \XMM2
350 pxor \TMP1, \XMM3
351 pxor \TMP1, \XMM4
353 pshufd $78, \TMP3, \TMP1
354 pxor \TMP3, \TMP1
355 movdqa \TMP1, HashKey_k(%rsp)
356 GHASH_MUL \TMP5, \TMP3, \TMP1, \TMP2, \TMP4, \TMP6, \TMP7
360 pshufd $78, \TMP5, \TMP1
361 pxor \TMP5, \TMP1
362 movdqa \TMP1, HashKey_2_k(%rsp)
364 movaps 0x10*\index(%arg1), \TMP1
365 AESENC \TMP1, \XMM1
366 AESENC \TMP1, \XMM2
367 AESENC \TMP1, \XMM3
368 AESENC \TMP1, \XMM4
370 GHASH_MUL \TMP5, \TMP3, \TMP1, \TMP2, \TMP4, \TMP6, \TMP7
373 pshufd $78, \TMP5, \TMP1
374 pxor \TMP5, \TMP1
375 movdqa \TMP1, HashKey_3_k(%rsp)
377 movaps 0x10*\index(%arg1), \TMP1
378 AESENC \TMP1, \XMM1
379 AESENC \TMP1, \XMM2
380 AESENC \TMP1, \XMM3
381 AESENC \TMP1, \XMM4
383 GHASH_MUL \TMP5, \TMP3, \TMP1, \TMP2, \TMP4, \TMP6, \TMP7
386 pshufd $78, \TMP5, \TMP1
387 pxor \TMP5, \TMP1
388 movdqa \TMP1, HashKey_4_k(%rsp)
410 movdqu 16*0(%arg3 , %r11 , 1), \TMP1
411 pxor \TMP1, \XMM1
413 movdqa \TMP1, \XMM1
414 movdqu 16*1(%arg3 , %r11 , 1), \TMP1
415 pxor \TMP1, \XMM2
417 movdqa \TMP1, \XMM2
418 movdqu 16*2(%arg3 , %r11 , 1), \TMP1
419 pxor \TMP1, \XMM3
421 movdqa \TMP1, \XMM3
422 movdqu 16*3(%arg3 , %r11 , 1), \TMP1
423 pxor \TMP1, \XMM4
425 movdqa \TMP1, \XMM4
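
Inside INITIAL_BLOCKS_DEC, the pshufd $78 / pxor / movdqa triples cache, for each power of the hash key, the xor of its two 64-bit halves (HashKey_k, HashKey_2_k, ...), so later Karatsuba multiplies can simply load their middle-term operand. A short hedged sketch of that precompute, assuming the powers H^1..H^4 have already been produced by the interleaved GHASH_MUL calls; the array names are illustrative, not the kernel's.

#include <immintrin.h>

/* For each H^i, store hi64(H^i) ^ lo64(H^i), as the movdqa ... HashKey_i_k(%rsp) stores do. */
static void precompute_karatsuba_keys(const __m128i h_pow[4], __m128i h_k[4])
{
        for (int i = 0; i < 4; i++)
                /* pshufd $78 swaps the 64-bit halves of the key power */
                h_k[i] = _mm_xor_si128(h_pow[i], _mm_shuffle_epi32(h_pow[i], 78));
}
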
451 .macro INITIAL_BLOCKS_ENC num_initial_blocks TMP1 TMP2 TMP3 TMP4 TMP5 XMM0 XMM1 \
459 movd (%r10), \TMP1
460 pslldq $12, \TMP1
462 pxor \TMP1, %xmm\i
487 MOVADQ ONE(%RIP),\TMP1
490 paddd \TMP1, \XMM0 # INCR Y0
501 MOVADQ (%r10),\TMP1
503 AESENC \TMP1, %xmm\index
509 MOVADQ (%r10), \TMP1
511 AESENCLAST \TMP1, %xmm\index # Last Round
514 movdqu (%arg3 , %r11, 1), \TMP1
515 pxor \TMP1, %xmm\index
524 GHASH_MUL %xmm\i, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
529 GHASH_MUL %xmm6, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
531 GHASH_MUL %xmm7, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
533 GHASH_MUL %xmm8, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
536 GHASH_MUL %xmm7, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
538 GHASH_MUL %xmm8, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
541 GHASH_MUL %xmm8, \TMP3, \TMP1, \TMP2, \TMP4, \TMP5, \XMM1
551 MOVADQ ONE(%RIP),\TMP1
552 paddd \TMP1, \XMM0 # INCR Y0
556 paddd \TMP1, \XMM0 # INCR Y0
560 paddd \TMP1, \XMM0 # INCR Y0
564 paddd \TMP1, \XMM0 # INCR Y0
568 MOVADQ 0(%arg1),\TMP1
569 pxor \TMP1, \XMM1
570 pxor \TMP1, \XMM2
571 pxor \TMP1, \XMM3
572 pxor \TMP1, \XMM4
574 pshufd $78, \TMP3, \TMP1
575 pxor \TMP3, \TMP1
576 movdqa \TMP1, HashKey_k(%rsp)
577 GHASH_MUL \TMP5, \TMP3, \TMP1, \TMP2, \TMP4, \TMP6, \TMP7
581 pshufd $78, \TMP5, \TMP1
582 pxor \TMP5, \TMP1
583 movdqa \TMP1, HashKey_2_k(%rsp)
585 movaps 0x10*\index(%arg1), \TMP1
586 AESENC \TMP1, \XMM1
587 AESENC \TMP1, \XMM2
588 AESENC \TMP1, \XMM3
589 AESENC \TMP1, \XMM4
591 GHASH_MUL \TMP5, \TMP3, \TMP1, \TMP2, \TMP4, \TMP6, \TMP7
594 pshufd $78, \TMP5, \TMP1
595 pxor \TMP5, \TMP1
596 movdqa \TMP1, HashKey_3_k(%rsp)
598 movaps 0x10*\index(%arg1), \TMP1
599 AESENC \TMP1, \XMM1
600 AESENC \TMP1, \XMM2
601 AESENC \TMP1, \XMM3
602 AESENC \TMP1, \XMM4
604 GHASH_MUL \TMP5, \TMP3, \TMP1, \TMP2, \TMP4, \TMP6, \TMP7
607 pshufd $78, \TMP5, \TMP1
608 pxor \TMP5, \TMP1
609 movdqa \TMP1, HashKey_4_k(%rsp)
631 movdqu 16*0(%arg3 , %r11 , 1), \TMP1
632 pxor \TMP1, \XMM1
633 movdqu 16*1(%arg3 , %r11 , 1), \TMP1
634 pxor \TMP1, \XMM2
635 movdqu 16*2(%arg3 , %r11 , 1), \TMP1
636 pxor \TMP1, \XMM3
637 movdqu 16*3(%arg3 , %r11 , 1), \TMP1
638 pxor \TMP1, \XMM4
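
Both INITIAL_BLOCKS macros build up to four counter blocks the same way: paddd ONE(%rip) bumps Y0 once per block, and pxor with 0(%arg1) applies AES round key 0 before the per-round AESENC loop. A rough C sketch of that setup, assuming round_keys[0] is the whitening key; the byte-order shuffle the macro applies to the counter does not reference TMP1 and is omitted here.

#include <immintrin.h>

static void ctr_setup4(__m128i y0, const __m128i *round_keys, __m128i blk[4])
{
        const __m128i one = _mm_set_epi32(0, 0, 0, 1);      /* the ONE constant */

        for (int i = 0; i < 4; i++) {
                y0 = _mm_add_epi32(y0, one);                /* paddd \TMP1, \XMM0  # INCR Y0 */
                blk[i] = _mm_xor_si128(y0, round_keys[0]);  /* pxor \TMP1, \XMMi (round key 0) */
        }
}
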
662 .macro GHASH_4_ENCRYPT_4_PARALLEL_ENC TMP1 TMP2 TMP3 TMP4 TMP5 \
698 movaps 0x10(%arg1), \TMP1
699 AESENC \TMP1, \XMM1 # Round 1
700 AESENC \TMP1, \XMM2
701 AESENC \TMP1, \XMM3
702 AESENC \TMP1, \XMM4
703 movaps 0x20(%arg1), \TMP1
704 AESENC \TMP1, \XMM1 # Round 2
705 AESENC \TMP1, \XMM2
706 AESENC \TMP1, \XMM3
707 AESENC \TMP1, \XMM4
708 movdqa \XMM6, \TMP1
712 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1 * b1
731 pxor \TMP1, \TMP4
735 movdqa \XMM7, \TMP1
742 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1*b1
761 pxor \TMP1, \TMP4
769 movdqa \XMM8, \TMP1
773 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1*b1
820 pxor \TMP4, \TMP1
823 pxor \TMP1, \TMP2
829 pxor \TMP2, \TMP1 # accumulate the results in TMP1:XMM5
859 pxor \TMP1, \XMM5 # result is in XMM5
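
GHASH_4_ENCRYPT_4_PARALLEL_ENC applies each round key to four counter blocks back to back (the movaps 0x10/0x20(%arg1) groups above) so that the PCLMULQDQs hashing the previous four ciphertext blocks can be scheduled between them and hide each other's latency. A minimal sketch of the four-wide round step with AES-NI intrinsics (build with -maes); the interleaving itself is left implicit here.

#include <immintrin.h>

/* One AES round applied to four blocks, as the AESENC \TMP1, \XMM1..\XMM4 groups do. */
static void aes_round_x4(__m128i blk[4], __m128i rk)
{
        blk[0] = _mm_aesenc_si128(blk[0], rk);
        blk[1] = _mm_aesenc_si128(blk[1], rk);
        blk[2] = _mm_aesenc_si128(blk[2], rk);
        blk[3] = _mm_aesenc_si128(blk[3], rk);
}
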
870 .macro GHASH_4_ENCRYPT_4_PARALLEL_DEC TMP1 TMP2 TMP3 TMP4 TMP5 \
906 movaps 0x10(%arg1), \TMP1
907 AESENC \TMP1, \XMM1 # Round 1
908 AESENC \TMP1, \XMM2
909 AESENC \TMP1, \XMM3
910 AESENC \TMP1, \XMM4
911 movaps 0x20(%arg1), \TMP1
912 AESENC \TMP1, \XMM1 # Round 2
913 AESENC \TMP1, \XMM2
914 AESENC \TMP1, \XMM3
915 AESENC \TMP1, \XMM4
916 movdqa \XMM6, \TMP1
920 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1 * b1
939 pxor \TMP1, \TMP4
943 movdqa \XMM7, \TMP1
950 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1*b1
969 pxor \TMP1, \TMP4
977 movdqa \XMM8, \TMP1
981 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1*b1
1032 pxor \TMP4, \TMP1
1035 pxor \TMP1, \TMP2
1041 pxor \TMP2, \TMP1 # accumulate the results in TMP1:XMM5
1071 pxor \TMP1, \XMM5 # result is in XMM5
1077 .macro GHASH_LAST_4 TMP1 TMP2 TMP3 TMP4 TMP5 TMP6 \
1093 # Multiply XMM2 * HashKey (using Karatsuba)
1095 movdqa \XMM2, \TMP1
1099 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1*b1
1103 pxor \TMP1, \TMP6
1108 # Multiply XMM3 * HashKey (using Karatsuba)
1110 movdqa \XMM3, \TMP1
1114 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1*b1
1118 pxor \TMP1, \TMP6
1122 # Multiply XMM4 * HashKey (using Karatsuba)
1123 movdqa \XMM4, \TMP1
1127 PCLMULQDQ 0x11, \TMP5, \TMP1 # TMP1 = a1*b1
1131 pxor \TMP1, \TMP6
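
GHASH_LAST_4 folds the four running hash blocks into one value by multiplying each block by a different hash-key power and xoring the Karatsuba partial products into shared accumulators, so a single reduction suffices. A hedged C sketch of that aggregation; the pairing of the first block with the highest power follows the usual aggregated-reduction scheme and is an assumption, since the HashKey-power loads do not reference TMP1 and are not listed. The kernel uses the cached HashKey_i_k values where this sketch recomputes them. Build with -mpclmul.

#include <immintrin.h>

static void ghash_last_4(const __m128i x[4], const __m128i h_pow[4],
                         __m128i *hi, __m128i *lo, __m128i *mid)
{
        *hi = *lo = *mid = _mm_setzero_si128();
        for (int i = 0; i < 4; i++) {
                __m128i h  = h_pow[3 - i];   /* assumed: x[0] pairs with the highest power */
                __m128i xk = _mm_xor_si128(x[i], _mm_shuffle_epi32(x[i], 78));
                __m128i hk = _mm_xor_si128(h,    _mm_shuffle_epi32(h,    78));

                *hi  = _mm_xor_si128(*hi,  _mm_clmulepi64_si128(x[i], h, 0x11)); /* a1*b1 */
                *lo  = _mm_xor_si128(*lo,  _mm_clmulepi64_si128(x[i], h, 0x00)); /* a0*b0 */
                *mid = _mm_xor_si128(*mid, _mm_clmulepi64_si128(xk, hk, 0x00));  /* (a1^a0)*(b1^b0) */
        }
        /* caller still xors hi and lo into mid and reduces hi:lo mod the GHASH polynomial */
}
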
1178 .macro ENCRYPT_SINGLE_BLOCK XMM0 TMP1
1187 MOVADQ (%r10),\TMP1
1188 AESENC \TMP1,\XMM0
1193 MOVADQ (%r10),\TMP1
1194 AESENCLAST \TMP1,\XMM0
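
ENCRYPT_SINGLE_BLOCK walks %r10 along the expanded key schedule, issuing AESENC for every middle round and AESENCLAST for the final one. The equivalent with intrinsics, as a sketch: nrounds is 10/12/14 for AES-128/192/256, the initial round-key-0 xor belongs to the same macro but does not reference TMP1 and so is not listed above, and rk must hold nrounds + 1 round keys. Build with -maes.

#include <immintrin.h>

static __m128i encrypt_single_block(__m128i blk, const __m128i *rk, int nrounds)
{
        blk = _mm_xor_si128(blk, rk[0]);                  /* whitening with round key 0 */
        for (int i = 1; i < nrounds; i++)
                blk = _mm_aesenc_si128(blk, rk[i]);       /* AESENC \TMP1, \XMM0 */
        return _mm_aesenclast_si128(blk, rk[nrounds]);    /* AESENCLAST \TMP1, \XMM0 */
}
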