This source file includes the following definitions:
- __serpent_crypt_ctr
- serpent_xts_enc
- serpent_xts_dec
- serpent_setkey_skcipher
- xts_serpent_setkey
- ecb_encrypt
- ecb_decrypt
- cbc_encrypt
- cbc_decrypt
- ctr_crypt
- xts_encrypt
- xts_decrypt
- serpent_init
- serpent_exit
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/serpent.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>
#include <asm/crypto/serpent-avx.h>

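/*
 * Assembler implementations that process SERPENT_PARALLEL_BLOCKS (eight
 * 16-byte blocks) at a time using AVX. They are exported so that other
 * glue modules (e.g. the AVX2 variant) can fall back to them.
 */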
asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);

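/*
 * Single-block CTR helper: the counter is kept little-endian in *iv,
 * converted to a big-endian counter block, encrypted, and XORed into
 * the source block. Used when fewer than SERPENT_PARALLEL_BLOCKS remain.
 */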
void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}
EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);

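/*
 * Single-block XTS helpers: glue_xts_crypt_128bit_one() XORs the block
 * with the current tweak before and after the cipher call and then
 * advances the tweak by multiplication with x in GF(2^128).
 */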
void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_encrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);

void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_decrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);

static int serpent_setkey_skcipher(struct crypto_skcipher *tfm,
				   const u8 *key, unsigned int keylen)
{
	return __serpent_setkey(crypto_skcipher_ctx(tfm), key, keylen);
}

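/*
 * XTS takes a double-length key: one half keys the data cipher, the
 * other keys the tweak cipher. xts_verify_key() rejects odd key lengths
 * (and identical halves when running in FIPS mode).
 */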
int xts_serpent_setkey(struct crypto_skcipher *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	/* first half of xts-key is for crypt */
	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}
EXPORT_SYMBOL_GPL(xts_serpent_setkey);

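/*
 * Dispatch tables for the glue helper. Entries are ordered by descending
 * num_blocks; the helper uses the widest routine that still fits the
 * remaining data, falling back to the one-block routine for the tail,
 * and only enables the FPU for batches of at least fpu_blocks_limit
 * blocks.
 */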
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};

static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};

static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};

static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};

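/*
 * skcipher request handlers: thin wrappers that hand the request and the
 * matching dispatch table (or per-block function) to the glue helper.
 * CBC encryption is inherently serial, so it always runs one block at a
 * time via __serpent_encrypt.
 */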
static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__serpent_encrypt),
					   req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&serpent_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&serpent_ctr, req);
}

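/*
 * For XTS the initial tweak is derived by encrypting the IV with the
 * separate tweak key (hence __serpent_encrypt for both directions); the
 * final bool argument selects decryption.
 */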
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&serpent_enc_xts, req,
				   XTS_TWEAK_CAST(__serpent_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&serpent_dec_xts, req,
				   XTS_TWEAK_CAST(__serpent_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
}

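/*
 * The "__"-prefixed algorithms are marked CRYPTO_ALG_INTERNAL: they may
 * only run in a context where the FPU is usable and are therefore not
 * exposed directly. simd_register_skciphers_compat() wraps each one in a
 * simd skcipher that defers to cryptd when the FPU is unavailable.
 */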
static struct skcipher_alg serpent_algs[] = {
	{
		.base.cra_name		= "__ecb(serpent)",
		.base.cra_driver_name	= "__ecb-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(serpent)",
		.base.cra_driver_name	= "__cbc-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "__ctr(serpent)",
		.base.cra_driver_name	= "__ctr-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.chunksize		= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		.base.cra_name		= "__xts(serpent)",
		.base.cra_driver_name	= "__xts-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= 2 * SERPENT_MIN_KEY_SIZE,
		.max_keysize		= 2 * SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= xts_serpent_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};

static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];

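/*
 * Registration: bail out early when the CPU (or the OS via XSAVE) does
 * not support the SSE and YMM state needed by the AVX implementation.
 */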
static int __init serpent_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(serpent_algs,
					      ARRAY_SIZE(serpent_algs),
					      serpent_simd_algs);
}

static void __exit serpent_exit(void)
{
	simd_unregister_skciphers(serpent_algs, ARRAY_SIZE(serpent_algs),
				  serpent_simd_algs);
}

module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("serpent");
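/*
 * Usage sketch, not part of this file: how a caller might reach this
 * implementation through the generic crypto API. The "__"-prefixed
 * algorithms above are internal; a user requests plain "xts(serpent)"
 * and the simd wrapper registered by this module services it. Assumes
 * the caller provides key, keylen, buf, and buflen; error handling is
 * elided.
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("xts(serpent)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist sg;
 *	u8 iv[SERPENT_BLOCK_SIZE] = { 0 };
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	sg_init_one(&sg, buf, buflen);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */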