Lines matching refs:src (a cross-reference listing of the macros that read from src).
/* load_8way: load eight consecutive 16-byte blocks from src into the
 * registers x0..x7; vmovdqu permits unaligned source addresses. */
#define load_8way(src, x0, x1, x2, x3, x4, x5, x6, x7) \
        vmovdqu (0*16)(src), x0; \
        vmovdqu (1*16)(src), x1; \
        vmovdqu (2*16)(src), x2; \
        vmovdqu (3*16)(src), x3; \
        vmovdqu (4*16)(src), x4; \
        vmovdqu (5*16)(src), x5; \
        vmovdqu (6*16)(src), x6; \
        vmovdqu (7*16)(src), x7;
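
Functionally, load_8way is just eight unaligned 16-byte loads from
consecutive offsets of src. A minimal C-intrinsics sketch of the same
access pattern (load_8way_c is an illustrative name, not kernel API):

#include <immintrin.h>

/* Eight unaligned 128-bit loads from consecutive 16-byte offsets,
 * mirroring the vmovdqu sequence above. */
static inline void load_8way_c(const void *src, __m128i x[8])
{
        const __m128i *s = (const __m128i *)src;
        for (int i = 0; i < 8; i++)
                x[i] = _mm_loadu_si128(s + i);
}
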
/* store_cbc_8way: CBC-mode chaining for an 8-block batch. Each
 * decrypted block x1..x7 is XORed with the previous ciphertext block
 * still in memory at src; x0's XOR with the IV (or the preceding
 * batch's last ciphertext block) is left to the caller. store_8way,
 * defined alongside these macros, then writes x0..x7 to dst. */
#define store_cbc_8way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7) \
        vpxor (0*16)(src), x1, x1; \
        vpxor (1*16)(src), x2, x2; \
        vpxor (2*16)(src), x3, x3; \
        vpxor (3*16)(src), x4, x4; \
        vpxor (4*16)(src), x5, x5; \
        vpxor (5*16)(src), x6, x6; \
        vpxor (6*16)(src), x7, x7; \
        store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7);
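
The offset skew (block i is XORed with src block i-1) is the CBC
decryption identity P[i] = D(C[i]) ^ C[i-1]. A hedged C sketch of the
same chaining step, assuming block 0 was already XORed with the IV by
the caller (store_cbc_8way_c is an illustrative name):

#include <immintrin.h>

/* XOR decrypted blocks 1..7 with ciphertext blocks 0..6, then store
 * all eight results, mirroring store_cbc_8way plus its final store. */
static inline void store_cbc_8way_c(const void *src, void *dst, __m128i x[8])
{
        const __m128i *c = (const __m128i *)src;
        __m128i *d = (__m128i *)dst;
        for (int i = 1; i < 8; i++)
                x[i] = _mm_xor_si128(x[i], _mm_loadu_si128(c + i - 1));
        for (int i = 0; i < 8; i++)
                _mm_storeu_si128(d + i, x[i]);
}
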
/* store_ctr_8way: CTR mode for an 8-block batch. x0..x7 hold the
 * encrypted counter (keystream) blocks; XOR them with the input
 * blocks at src, then write the result to dst via store_8way. */
#define store_ctr_8way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7) \
        vpxor (0*16)(src), x0, x0; \
        vpxor (1*16)(src), x1, x1; \
        vpxor (2*16)(src), x2, x2; \
        vpxor (3*16)(src), x3, x3; \
        vpxor (4*16)(src), x4, x4; \
        vpxor (5*16)(src), x5, x5; \
        vpxor (6*16)(src), x6, x6; \
        vpxor (7*16)(src), x7, x7; \
        store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7);
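
Unlike the CBC case, every block participates here: XORing the
keystream with the data at src computes C[i] = P[i] ^ E(ctr + i), and
the same operation decrypts. An equivalent C sketch (illustrative
name, not kernel API):

#include <immintrin.h>

/* XOR eight keystream blocks with eight input blocks and store,
 * mirroring store_ctr_8way plus its final store to dst. */
static inline void store_ctr_8way_c(const void *src, void *dst, __m128i x[8])
{
        const __m128i *s = (const __m128i *)src;
        __m128i *d = (__m128i *)dst;
        for (int i = 0; i < 8; i++)
                _mm_storeu_si128(d + i,
                                 _mm_xor_si128(x[i], _mm_loadu_si128(s + i)));
}
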
/* load_xts_8way: XTS pre-whitening for an 8-block batch. Each input
 * block at src is XORed with the running tweak tiv before encryption.
 * Only the lines referencing src are shown in this listing; between
 * the vpxor instructions it elides the lines that store the current
 * tweak to dst for the post-whitening pass and advance tiv by a
 * GF(2^128) doubling using t0, t1 and xts_gf128mul_and_shl1_mask. */
#define load_xts_8way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, t0, \
                      t1, xts_gf128mul_and_shl1_mask) \
        vpxor (0*16)(src), tiv, x0; \
        vpxor (1*16)(src), tiv, x1; \
        vpxor (2*16)(src), tiv, x2; \
        vpxor (3*16)(src), tiv, x3; \
        vpxor (4*16)(src), tiv, x4; \
        vpxor (5*16)(src), tiv, x5; \
        vpxor (6*16)(src), tiv, x6; \
        vpxor (7*16)(src), tiv, x7; \
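
In XTS, block i is whitened with tweak T[i] before (and after) the
block cipher, where T[i+1] = T[i] * x in GF(2^128). A sketch of the
pre-whitening visible above, with the elided tweak update abstracted
behind a caller-supplied function (load_xts_8way_c and gf128mul_x are
both illustrative names, not the kernel's helpers):

#include <immintrin.h>

/* XOR each input block with the current tweak, then advance the
 * tweak; mirrors the vpxor lines of load_xts_8way. The real macro
 * also stores each tweak for the post-whitening pass, omitted here. */
static inline void load_xts_8way_c(const void *src, __m128i *tweak,
                                   __m128i x[8],
                                   __m128i (*gf128mul_x)(__m128i))
{
        const __m128i *s = (const __m128i *)src;
        for (int i = 0; i < 8; i++) {
                x[i] = _mm_xor_si128(_mm_loadu_si128(s + i), *tweak);
                *tweak = gf128mul_x(*tweak);
        }
}
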