/*
 * Copyright 2012 Xyratex Technology Limited
 *
 * Accelerated CRC32 calculation using the hardware PCLMULQDQ
 * (carry-less multiply) instruction.
 * CRC32 polynomial: 0x04c11db7 (BE) / 0xEDB88320 (LE)
 * PCLMULQDQ was introduced with Intel SSE4.2-era CPUs; see the Intel 64
 * and IA-32 Architectures Software Developer's Manual, Volume 2, for
 * the instruction reference.
 */

#include <linux/linkage.h>
#include <asm/inst.h>


.section .rodata
.align 16
/*
 * [(x^(4*128+32) mod P(x) << 32)]' << 1 = 0x154442bd4
 * #define CONSTANT_R1  0x154442bd4LL
 *
 * [(x^(4*128-32) mod P(x) << 32)]' << 1 = 0x1c6e41596
 * #define CONSTANT_R2  0x1c6e41596LL
 */
.Lconstant_R2R1:
	.octa 0x00000001c6e415960000000154442bd4
/*
 * [(x^(128+32) mod P(x) << 32)]' << 1 = 0x1751997d0
 * #define CONSTANT_R3  0x1751997d0LL
 *
 * [(x^(128-32) mod P(x) << 32)]' << 1 = 0x0ccaa009e
 * #define CONSTANT_R4  0x0ccaa009eLL
 */
.Lconstant_R4R3:
	.octa 0x00000000ccaa009e00000001751997d0
/*
 * [(x^64 mod P(x) << 32)]' << 1 = 0x163cd6124
 * #define CONSTANT_R5  0x163cd6124LL
 */
.Lconstant_R5:
	.octa 0x00000000000000000000000163cd6124
.Lconstant_mask32:
	.octa 0x000000000000000000000000FFFFFFFF
/*
 * #define CRCPOLY_TRUE_LE_FULL 0x1DB710641LL
 *
 * Barrett reduction constant (u64') = u' = (x^64 / P(x))' = 0x1F7011641LL
 * #define CONSTANT_RU  0x1F7011641LL
 */
.Lconstant_RUpoly:
	.octa 0x00000001F701164100000001DB710641
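
/*
 * Overview (added commentary, not from the original sources): the
 * constants above drive a standard PCLMULQDQ CRC pipeline. R2:R1 fold
 * four interleaved 128-bit streams forward by 64 bytes per loop
 * iteration, R4:R3 fold by 16 bytes (merging the streams and consuming
 * the tail), R5 reduces the 96-bit remainder to 64 bits, and
 * .Lconstant_RUpoly pairs the reflected polynomial P' with the Barrett
 * constant u' for the final 64 -> 32 bit reduction.
 */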

#define CONSTANT %xmm0

#ifdef __x86_64__
#define BUF	%rdi
#define LEN	%rsi
#define CRC	%edx
#else
#define BUF	%eax
#define LEN	%edx
#define CRC	%ecx
#endif


.text
/**
 *	Calculate crc32
 *	BUF - buffer (16 bytes aligned)
 *	LEN - sizeof buffer (16 bytes aligned), LEN should be greater than 63
 *	CRC - initial crc32
 *	return %eax crc32
 *	uint crc32_pclmul_le_16(unsigned char const *buffer,
 *				size_t len, uint crc32)
 */
ENTRY(crc32_pclmul_le_16) /* buffer and buffer size are 16 bytes aligned */
	movdqa	(BUF), %xmm1
	movdqa	0x10(BUF), %xmm2
	movdqa	0x20(BUF), %xmm3
	movdqa	0x30(BUF), %xmm4
	movd	CRC, CONSTANT
	pxor	CONSTANT, %xmm1
	sub	$0x40, LEN
	add	$0x40, BUF
	cmp	$0x40, LEN
	jb	less_64
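
/*
 * Added note: the four movdqa loads start four independent 128-bit
 * folding streams, and movd/pxor XOR the caller's CRC into the low 32
 * bits of the first 16 input bytes. Seeding this way is valid because
 * CRC over GF(2) is linear, so the seed simply folds along with the
 * data from here on.
 */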

#ifdef __x86_64__
	movdqa	.Lconstant_R2R1(%rip), CONSTANT
#else
	movdqa	.Lconstant_R2R1, CONSTANT
#endif

loop_64:/* 64 bytes (a full cache line) folded per iteration */
	prefetchnta	0x40(BUF)
	movdqa	%xmm1, %xmm5
	movdqa	%xmm2, %xmm6
	movdqa	%xmm3, %xmm7
#ifdef __x86_64__
	movdqa	%xmm4, %xmm8
#endif
	PCLMULQDQ 0x00, CONSTANT, %xmm1
	PCLMULQDQ 0x00, CONSTANT, %xmm2
	PCLMULQDQ 0x00, CONSTANT, %xmm3
#ifdef __x86_64__
	PCLMULQDQ 0x00, CONSTANT, %xmm4
#endif
	PCLMULQDQ 0x11, CONSTANT, %xmm5
	PCLMULQDQ 0x11, CONSTANT, %xmm6
	PCLMULQDQ 0x11, CONSTANT, %xmm7
#ifdef __x86_64__
	PCLMULQDQ 0x11, CONSTANT, %xmm8
#endif
	pxor	%xmm5, %xmm1
	pxor	%xmm6, %xmm2
	pxor	%xmm7, %xmm3
#ifdef __x86_64__
	pxor	%xmm8, %xmm4
#else
	/* %xmm8 is unavailable in 32-bit mode, so reuse %xmm5 */
	movdqa	%xmm4, %xmm5
	PCLMULQDQ 0x00, CONSTANT, %xmm4
	PCLMULQDQ 0x11, CONSTANT, %xmm5
	pxor	%xmm5, %xmm4
#endif

	pxor	(BUF), %xmm1
	pxor	0x10(BUF), %xmm2
	pxor	0x20(BUF), %xmm3
	pxor	0x30(BUF), %xmm4

	sub	$0x40, LEN
	add	$0x40, BUF
	cmp	$0x40, LEN
	jge	loop_64
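
/*
 * Added note: four accumulators are used so the PCLMULQDQ multiplies of
 * one iteration can overlap in the pipeline instead of serializing on a
 * single register, and prefetchnta pulls in the next 64-byte chunk
 * while the current one is being folded.
 */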
less_64:/* fold the four accumulators down into 128 bits */
#ifdef __x86_64__
	movdqa	.Lconstant_R4R3(%rip), CONSTANT
#else
	movdqa	.Lconstant_R4R3, CONSTANT
#endif
	prefetchnta	(BUF)

	movdqa	%xmm1, %xmm5
	PCLMULQDQ 0x00, CONSTANT, %xmm1
	PCLMULQDQ 0x11, CONSTANT, %xmm5
	pxor	%xmm5, %xmm1
	pxor	%xmm2, %xmm1

	movdqa	%xmm1, %xmm5
	PCLMULQDQ 0x00, CONSTANT, %xmm1
	PCLMULQDQ 0x11, CONSTANT, %xmm5
	pxor	%xmm5, %xmm1
	pxor	%xmm3, %xmm1

	movdqa	%xmm1, %xmm5
	PCLMULQDQ 0x00, CONSTANT, %xmm1
	PCLMULQDQ 0x11, CONSTANT, %xmm5
	pxor	%xmm5, %xmm1
	pxor	%xmm4, %xmm1
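
/*
 * Added note: each movdqa/PCLMULQDQ/pxor group above is one 16-byte
 * fold with CONSTANT = R4:R3, i.e.
 *
 *	xmm1 = (xmm1.qword0 * R3) xor (xmm1.qword1 * R4) xor xmmNext
 *
 * so three folds collapse %xmm2..%xmm4 into %xmm1.
 */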

	cmp	$0x10, LEN
	jb	fold_64
loop_16:/* fold the remaining 16-byte blocks */
	movdqa	%xmm1, %xmm5
	PCLMULQDQ 0x00, CONSTANT, %xmm1
	PCLMULQDQ 0x11, CONSTANT, %xmm5
	pxor	%xmm5, %xmm1
	pxor	(BUF), %xmm1
	sub	$0x10, LEN
	add	$0x10, BUF
	cmp	$0x10, LEN
	jge	loop_16
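
/*
 * Added note: given the documented precondition that LEN is a multiple
 * of 16, LEN is exactly 0 when this loop falls through; any sub-16-byte
 * tail is the caller's responsibility.
 */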

fold_64:
	/* perform the last 64-bit fold, which also appends 32 zero bits
	 * to the input stream */
	PCLMULQDQ 0x01, %xmm1, CONSTANT /* R4 * xmm1.low */
	psrldq	$0x08, %xmm1
	pxor	CONSTANT, %xmm1
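
/*
 * Added note: PCLMULQDQ 0x01 multiplies xmm1.qword0 by R4
 * (CONSTANT.qword1); the psrldq/pxor pair folds that product into the
 * high qword, leaving at most 96 significant bits in %xmm1.
 */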

	/* final 32-bit fold: reduce 96 bits to 64 bits using R5 */
	movdqa	%xmm1, %xmm2
#ifdef __x86_64__
	movdqa	.Lconstant_R5(%rip), CONSTANT
	movdqa	.Lconstant_mask32(%rip), %xmm3
#else
	movdqa	.Lconstant_R5, CONSTANT
	movdqa	.Lconstant_mask32, %xmm3
#endif
	psrldq	$0x04, %xmm2
	pand	%xmm3, %xmm1
	PCLMULQDQ 0x00, CONSTANT, %xmm1
	pxor	%xmm2, %xmm1

	/* Finish up with the bit-reversed Barrett reduction, 64 -> 32 bits */
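
/*
 * Sketch of the reduction below (added commentary; reflected-domain
 * Barrett reduction as in Intel's PCLMULQDQ CRC white paper):
 *
 *	T1  = (xmm1 & mask32) * RU	; PCLMULQDQ 0x10
 *	T2  = (T1   & mask32) * P'	; PCLMULQDQ 0x00
 *	crc = dword 1 of (xmm1 ^ T2)	; PEXTRD 0x01
 *
 * with RU = (x^64 / P(x))' and P' the full reflected polynomial, both
 * held in .Lconstant_RUpoly.
 */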
#ifdef __x86_64__
	movdqa	.Lconstant_RUpoly(%rip), CONSTANT
#else
	movdqa	.Lconstant_RUpoly, CONSTANT
#endif
	movdqa	%xmm1, %xmm2
	pand	%xmm3, %xmm1
	PCLMULQDQ 0x10, CONSTANT, %xmm1
	pand	%xmm3, %xmm1
	PCLMULQDQ 0x00, CONSTANT, %xmm1
	pxor	%xmm2, %xmm1
	PEXTRD	0x01, %xmm1, %eax

	ret
ENDPROC(crc32_pclmul_le_16)
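
/*
 * Usage sketch from C (illustrative; the in-kernel caller is the
 * crc32-pclmul glue code, and the details there may differ):
 *
 *	u32 crc32_pclmul_le_16(unsigned char const *buffer, size_t len,
 *			       u32 crc32);
 *
 *	if (len >= 64 && IS_ALIGNED((unsigned long)buf, 16))
 *		crc = crc32_pclmul_le_16(buf, len & ~15UL, crc);
 */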