Searched refs:rT0 (Results 1 – 6 of 6) sorted by relevance
/linux-4.4.14/arch/powerpc/crypto/
sha256-spe-asm.S
     43  #define rT0 r22            /* 64 bit temporaries */       macro
    108  rotrwi rT0,e,6;            /* 1: S1 = e rotr 6 */         \
    111  xor rT0,rT0,rT1;           /* 1: S1 = S1 xor S1' */       \
    113  xor rT0,rT0,rT2;           /* 1: S1 = S1 xor S1" */       \
    117  add h,h,rT0;               /* 1: temp1 = h + S1 */        \
    119  rotrwi rT0,a,2;            /* 1: S0 = a rotr 2 */         \
    124  xor rT0,rT0,rT1;           /* 1: S0 = S0 xor S0' */       \
    126  xor rT3,rT0,rT3;           /* 1: S0 = S0 xor S0" */       \
    133  rotrwi rT0,d,6;            /* 2: S1 = e rotr 6 */         \
    138  xor rT0,rT0,rT1;           /* 2: S1 = S1 xor S1' */       \
    [all …]
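For orientation, a rough C sketch of what the rotrwi/xor chains above compute per round: the SHA-256 S1(e) and S0(a) terms (rotr by 6/11/25 and 2/13/22). The helper names rotr32, Sigma1 and Sigma0 are mine, not identifiers from sha256-spe-asm.S.

    #include <stdint.h>

    /* Illustrative sketch only; the macro interleaves two rounds and
     * keeps intermediate terms in rT0..rT3. */
    static uint32_t rotr32(uint32_t x, unsigned n)
    {
        return (x >> n) | (x << ((32 - n) & 31));
    }

    static uint32_t Sigma1(uint32_t e)  /* S1 = e rotr 6 ^ e rotr 11 ^ e rotr 25 */
    {
        return rotr32(e, 6) ^ rotr32(e, 11) ^ rotr32(e, 25);
    }

    static uint32_t Sigma0(uint32_t a)  /* S0 = a rotr 2 ^ a rotr 13 ^ a rotr 22 */
    {
        return rotr32(a, 2) ^ rotr32(a, 13) ^ rotr32(a, 22);
    }

    /* "add h,h,rT0" matches temp1 accumulating h + S1 before the
     * Ch(e,f,g), K[i] and W[i] terms are added elsewhere in the macro. */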
md5-asm.S
     40  #define rT0 r25                                           macro
     71  and rT0,b,c;               /* 1: f = b and c */           \
     75  or rT0,rT0,rT1;            /* 1: f = f or f' */           \
     77  add a,a,rT0;               /* 1: a = a + f */             \
     85  and rT0,a,b;               /* 2: f = b and c */           \
     87  or rT0,rT0,rT1;            /* 2: f = f or f' */           \
     88  add d,d,rT0;               /* 2: a = a + f */             \
     94  andc rT0,c,d;              /* 1: f = c and ~d */          \
     97  or rT0,rT0,rT1;            /* 1: f = f or f' */           \
     99  add a,a,rT0;               /* 1: a = a + f */             \
    [all …]
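The and/andc/or pairs above build the MD5 auxiliary functions; the hits only show the rT0 half of each, rT1 presumably carrying the complementary term. A hedged C sketch (md5_F and md5_G are my names):

    #include <stdint.h>

    static uint32_t md5_F(uint32_t b, uint32_t c, uint32_t d)
    {
        return (b & c) | (~b & d);  /* "f = b and c" or "f' = ~b and d" */
    }

    static uint32_t md5_G(uint32_t b, uint32_t c, uint32_t d)
    {
        return (b & d) | (c & ~d);  /* "andc rT0,c,d" is the c and ~d half */
    }

    /* "add a,a,rT0" then matches the a += f step of each MD5 round. */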
sha1-spe-asm.S
     40  #define rT0 r22            /* 64 bit temporary */         macro
    116  rotrwi rT0,a,27;           /* 1: A' = A rotl 5 */         \
    118  add e,e,rT0;               /* 1: E = E + A' */            \
    128  rotrwi rT0,e,27;           /* 2: A' = A rotl 5 */         \
    131  add d,d,rT0;               /* 2: E = E + A' */            \
    137  evmergelohi rT0,w7,w6;     /* W[-3] */                    \
    139  evxor w0,w0,rT0;           /* W = W[-16] xor W[-3] */     \
    147  evaddw rT0,w0,rK;          /* WK = W + K */               \
    150  evmergehi rT1,rT1,rT0;     /* WK1/WK2 */                  \
    151  add e,e,rT0;               /* 1: E = E + WK */            \
    [all …]
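Note that "rotrwi rT0,a,27" is simply a rotate left by 5 expressed as a rotate right by 27. A scalar C sketch of the round update the SPE macro computes two-at-a-time (rotl32 and sha1_e_update are my names, not from the kernel source):

    #include <stdint.h>

    static uint32_t rotl32(uint32_t x, unsigned n)
    {
        return (x << n) | (x >> ((32 - n) & 31));
    }

    /* One SHA-1 round: E += rotl(A,5) + f(B,C,D) + K + W[i]; the macro
     * adds the A' and W+K pieces separately ("add e,e,rT0" twice). */
    static uint32_t sha1_e_update(uint32_t a, uint32_t e, uint32_t f,
                                  uint32_t k, uint32_t w)
    {
        return e + rotl32(a, 5) + f + k + w;
    }

    /* Message schedule: W[i] = rotl(W[i-3] ^ W[i-8] ^ W[i-14] ^ W[i-16], 1);
     * the "W = W[-16] xor W[-3]" hit is the first xor of that chain. */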
aes-spe-modes.S
     87  lis rT0,tab@h;             /* en-/decryption table pointer */  \
     89  ori rT0,rT0,tab@l;                                             \
    200  addi rT1,rT0,4096
    264  addi rT1,rT0,4096
    356  addi rT1,rT0,4096
    575  addi rT1,rT0,4096
    581  subi rT0,rT0,4096
    589  addi rT0,rT0,4096
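The lis/ori pair at lines 87/89 materialises the 32-bit table address from two 16-bit immediates (the tab@h/tab@l relocations). A plain-C equivalent of that pair (load_const32 is my own name):

    #include <stdint.h>

    static uint32_t load_const32(uint16_t hi, uint16_t lo)
    {
        uint32_t r = (uint32_t)hi << 16;   /* lis rT0,tab@h      */
        r |= lo;                           /* ori rT0,rT0,tab@l  */
        return r;
    }

    /* "addi rT1,rT0,4096" then derives a second pointer 4 KiB past the
     * first -- presumably a second lookup table; the exact layout is
     * not visible in these hits. */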
aes-spe-core.S
     22  rlwimi rT0,in,28-((bpos+3)%4)*8,20,27;
     28  evlwwsplat out,off(rT0);   /* load word high */
     31  lwz out,off(rT0);          /* load word low */
     46  LBZ(out, rT0, 8)
     49  LBZ(out, rT0, 8)           /* load enc byte */
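rlwimi rotates the source and inserts it into the destination under a mask given in IBM bit numbering (bit 0 is the MSB); in the hit at line 22 that drops one byte of "in", pre-scaled, into rT0 to form a table offset. A rough C model of the instruction (ppc_mask and rlwimi as helper names are mine):

    #include <stdint.h>

    /* Mask covering IBM bits mb..me inclusive (wrapping if mb > me). */
    static uint32_t ppc_mask(int mb, int me)
    {
        uint32_t m = 0;
        for (int b = mb; ; b = (b + 1) & 31) {
            m |= 1u << (31 - b);       /* IBM bit b == value bit 31-b */
            if (b == me)
                break;
        }
        return m;
    }

    /* rlwimi rA,rS,SH,MB,ME: rotate rS left by SH, insert under mask. */
    static uint32_t rlwimi(uint32_t ra, uint32_t rs, int sh, int mb, int me)
    {
        uint32_t rot = (rs << (sh & 31)) | (rs >> ((32 - sh) & 31));
        uint32_t m = ppc_mask(mb, me);
        return (rot & m) | (ra & ~m);
    }

    /* The evlwwsplat/lwz/LBZ hits are then simple loads from off(rT0),
     * i.e. indexed reads from the table rT0 points at. */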
aes-spe-regs.h
     21  #define rT0 r11            /* pointers to en-/decrpytion tables */  macro