Lines Matching refs:r11
137 #define T2 %r11
231 mov %r12, %r11
243 cmp $16, %r11
250 cmp %r11, %r12
256 xor %r11, %r11 # initialise the data pointer offset as zero
292 movdqu (%arg3 , %r11, 1), \TMP1
294 movdqu %xmm\index, (%arg2 , %r11, 1)
296 add $16, %r11
410 movdqu 16*0(%arg3 , %r11 , 1), \TMP1
412 movdqu \XMM1, 16*0(%arg2 , %r11 , 1)
414 movdqu 16*1(%arg3 , %r11 , 1), \TMP1
416 movdqu \XMM2, 16*1(%arg2 , %r11 , 1)
418 movdqu 16*2(%arg3 , %r11 , 1), \TMP1
420 movdqu \XMM3, 16*2(%arg2 , %r11 , 1)
422 movdqu 16*3(%arg3 , %r11 , 1), \TMP1
424 movdqu \XMM4, 16*3(%arg2 , %r11 , 1)
426 add $64, %r11
456 mov %r12, %r11
466 cmp $16, %r11
472 cmp %r11, %r12
477 xor %r11, %r11 # initialise the data pointer offset as zero
514 movdqu (%arg3 , %r11, 1), \TMP1
516 movdqu %xmm\index, (%arg2 , %r11, 1)
518 add $16, %r11
631 movdqu 16*0(%arg3 , %r11 , 1), \TMP1
633 movdqu 16*1(%arg3 , %r11 , 1), \TMP1
635 movdqu 16*2(%arg3 , %r11 , 1), \TMP1
637 movdqu 16*3(%arg3 , %r11 , 1), \TMP1
639 movdqu \XMM1, 16*0(%arg2 , %r11 , 1)
640 movdqu \XMM2, 16*1(%arg2 , %r11 , 1)
641 movdqu \XMM3, 16*2(%arg2 , %r11 , 1)
642 movdqu \XMM4, 16*3(%arg2 , %r11 , 1)
644 add $64, %r11
803 movdqu (%arg3,%r11,1), \TMP3
805 movdqu 16(%arg3,%r11,1), \TMP3
807 movdqu 32(%arg3,%r11,1), \TMP3
809 movdqu 48(%arg3,%r11,1), \TMP3
811 movdqu \XMM1, (%arg2,%r11,1) # Write to the ciphertext buffer
812 movdqu \XMM2, 16(%arg2,%r11,1) # Write to the ciphertext buffer
813 movdqu \XMM3, 32(%arg2,%r11,1) # Write to the ciphertext buffer
814 movdqu \XMM4, 48(%arg2,%r11,1) # Write to the ciphertext buffer
1011 movdqu (%arg3,%r11,1), \TMP3
1013 movdqu \XMM1, (%arg2,%r11,1) # Write to plaintext buffer
1015 movdqu 16(%arg3,%r11,1), \TMP3
1017 movdqu \XMM2, 16(%arg2,%r11,1) # Write to plaintext buffer
1019 movdqu 32(%arg3,%r11,1), \TMP3
1021 movdqu \XMM3, 32(%arg2,%r11,1) # Write to plaintext buffer
1023 movdqu 48(%arg3,%r11,1), \TMP3
1025 movdqu \XMM4, 48(%arg2,%r11,1) # Write to plaintext buffer
1354 add $64, %r11
1372 sub $16, %r11
1373 add %r13, %r11
1374 movdqu (%arg3,%r11,1), %xmm1 # receive the last <16 byte block
1394 sub %r13, %r11
1395 add $16, %r11
1401 mov %rax, (%arg2 , %r11, 1)
1402 add $8, %r11
1407 mov %al, (%arg2, %r11, 1)
1408 add $1, %r11
1432 mov arg10, %r11 # %r11 = auth_tag_len
1433 cmp $16, %r11
1435 cmp $12, %r11
1616 add $64, %r11
1634 sub $16, %r11
1635 add %r13, %r11
1636 movdqu (%arg3,%r11,1), %xmm1 # receive the last <16 byte blocks
1653 sub %r13, %r11
1654 add $16, %r11
1665 mov %rax, (%arg2 , %r11, 1)
1666 add $8, %r11
1671 mov %al, (%arg2, %r11, 1)
1672 add $1, %r11
1696 mov arg10, %r11 # %r11 = auth_tag_len
1697 cmp $16, %r11
1699 cmp $12, %r11
2683 leaq _aesni_enc4, %r11
2686 cmoveq %rax, %r11
2717 call *%r11
2762 call *%r11
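
Apart from the last four hits (leaq _aesni_enc4, %r11 ... call *%r11, where %r11 carries an indirect-call target), every match uses %r11 as the running byte offset into the source (%arg3) and destination (%arg2) buffers: it is zeroed before the loop, used with (base, %r11, 1) addressing, and advanced by 16 per block or by 64 per unrolled four-block batch. Below is a minimal standalone sketch of that addressing pattern only, assuming the SysV calling convention and a hypothetical copy_blocks_sketch routine; it is not taken from aesni-intel_asm.S.

	# Hypothetical sketch (not kernel code): copy len/16 whole 16-byte blocks
	# from src to dst, using %r11 as the running byte offset the same way the
	# hits above index %arg2/%arg3.
	# void copy_blocks_sketch(u8 *dst /*%rdi*/, const u8 *src /*%rsi*/, u64 len /*%rdx*/)
		.text
		.globl	copy_blocks_sketch
	copy_blocks_sketch:
		xor	%r11, %r11		# data pointer offset starts at zero
		shr	$4, %rdx		# number of whole 16-byte blocks
		jz	2f			# nothing to copy
	1:
		movdqu	(%rsi, %r11, 1), %xmm0	# load one block from src + offset
		movdqu	%xmm0, (%rdi, %r11, 1)	# store it to dst + offset
		add	$16, %r11		# advance the offset by one block
		dec	%rdx
		jnz	1b
	2:
		ret

Because the scale factor in (base, %r11, 1) is 1, %r11 is a plain byte count, which is why the partial-block paths above can rewind and re-aim it with ordinary sub $16 / add %r13 arithmetic before reading the final <16-byte block.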