This source file includes the following definitions:
- spe_begin
- spe_end
- ppc_aes_setkey
- ppc_xts_setkey
- ppc_aes_encrypt
- ppc_aes_decrypt
- ppc_ecb_encrypt
- ppc_ecb_decrypt
- ppc_cbc_encrypt
- ppc_cbc_decrypt
- ppc_ctr_crypt
- ppc_xts_encrypt
- ppc_xts_decrypt
- ppc_aes_mod_init
- ppc_aes_mod_fini
#include <crypto/aes.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/crypto.h>
#include <asm/byteorder.h>
#include <asm/switch_to.h>
#include <crypto/algapi.h>
#include <crypto/xts.h>
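
/*
 * Upper bound on the number of bytes handed to the assembly in one call.
 * Each chunk is processed between spe_begin() and spe_end(), so this limit
 * presumably keeps the preemption-disabled window short.
 */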
#define MAX_BYTES 768

struct ppc_aes_ctx {
	u32 key_enc[AES_MAX_KEYLENGTH_U32];
	u32 key_dec[AES_MAX_KEYLENGTH_U32];
	u32 rounds;
};

struct ppc_xts_ctx {
	u32 key_enc[AES_MAX_KEYLENGTH_U32];
	u32 key_dec[AES_MAX_KEYLENGTH_U32];
	u32 key_twk[AES_MAX_KEYLENGTH_U32];
	u32 rounds;
};

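/*
 * The primitives below are implemented in the SPE assembly sources built
 * together with this glue code: raw block encrypt/decrypt, the bulk
 * ECB/CBC/CTR/XTS helpers, and the key expansion routines.
 */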
extern void ppc_encrypt_aes(u8 *out, const u8 *in, u32 *key_enc, u32 rounds);
extern void ppc_decrypt_aes(u8 *out, const u8 *in, u32 *key_dec, u32 rounds);
extern void ppc_encrypt_ecb(u8 *out, const u8 *in, u32 *key_enc, u32 rounds,
			    u32 bytes);
extern void ppc_decrypt_ecb(u8 *out, const u8 *in, u32 *key_dec, u32 rounds,
			    u32 bytes);
extern void ppc_encrypt_cbc(u8 *out, const u8 *in, u32 *key_enc, u32 rounds,
			    u32 bytes, u8 *iv);
extern void ppc_decrypt_cbc(u8 *out, const u8 *in, u32 *key_dec, u32 rounds,
			    u32 bytes, u8 *iv);
extern void ppc_crypt_ctr(u8 *out, const u8 *in, u32 *key_enc, u32 rounds,
			  u32 bytes, u8 *iv);
extern void ppc_encrypt_xts(u8 *out, const u8 *in, u32 *key_enc, u32 rounds,
			    u32 bytes, u8 *iv, u32 *key_twk);
extern void ppc_decrypt_xts(u8 *out, const u8 *in, u32 *key_dec, u32 rounds,
			    u32 bytes, u8 *iv, u32 *key_twk);

extern void ppc_expand_key_128(u32 *key_enc, const u8 *key);
extern void ppc_expand_key_192(u32 *key_enc, const u8 *key);
extern void ppc_expand_key_256(u32 *key_enc, const u8 *key);

extern void ppc_generate_decrypt_key(u32 *key_dec, u32 *key_enc,
				     unsigned int key_len);

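/*
 * The SPE unit normally belongs to user space. enable_kernel_spe() makes it
 * usable from kernel code, and preemption stays disabled until spe_end() so
 * the SPE register state cannot be lost to a context switch while the
 * assembly is using it.
 */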
static void spe_begin(void)
{
	preempt_disable();
	enable_kernel_spe();
}

static void spe_end(void)
{
	disable_kernel_spe();
	preempt_enable();
}

static int ppc_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			  unsigned int key_len)
{
	struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

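	/*
	 * Note: ctx->rounds is not the standard AES round count (10/12/14);
	 * it is presumably the loop count expected by the assembly routines
	 * for 128/192/256-bit keys.
	 */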
	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->rounds = 4;
		ppc_expand_key_128(ctx->key_enc, in_key);
		break;
	case AES_KEYSIZE_192:
		ctx->rounds = 5;
		ppc_expand_key_192(ctx->key_enc, in_key);
		break;
	case AES_KEYSIZE_256:
		ctx->rounds = 6;
		ppc_expand_key_256(ctx->key_enc, in_key);
		break;
	}

	ppc_generate_decrypt_key(ctx->key_dec, ctx->key_enc, key_len);

	return 0;
}

static int ppc_xts_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			  unsigned int key_len)
{
	struct ppc_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = xts_check_key(tfm, in_key, key_len);
	if (err)
		return err;

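	/*
	 * An XTS key is two equally sized AES keys concatenated (data key and
	 * tweak key); halve key_len to get the size of each half.
	 */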
	key_len >>= 1;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->rounds = 4;
		ppc_expand_key_128(ctx->key_enc, in_key);
		ppc_expand_key_128(ctx->key_twk, in_key + AES_KEYSIZE_128);
		break;
	case AES_KEYSIZE_192:
		ctx->rounds = 5;
		ppc_expand_key_192(ctx->key_enc, in_key);
		ppc_expand_key_192(ctx->key_twk, in_key + AES_KEYSIZE_192);
		break;
	case AES_KEYSIZE_256:
		ctx->rounds = 6;
		ppc_expand_key_256(ctx->key_enc, in_key);
		ppc_expand_key_256(ctx->key_twk, in_key + AES_KEYSIZE_256);
		break;
	}

	ppc_generate_decrypt_key(ctx->key_dec, ctx->key_enc, key_len);

	return 0;
}

static void ppc_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	spe_begin();
	ppc_encrypt_aes(out, in, ctx->key_enc, ctx->rounds);
	spe_end();
}

static void ppc_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	spe_begin();
	ppc_decrypt_aes(out, in, ctx->key_dec, ctx->rounds);
	spe_end();
}

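/*
 * The blkcipher handlers below share one pattern: walk the scatterlists and
 * hand the assembly at most MAX_BYTES per pass; whatever exceeds that limit,
 * or a trailing partial block, is reported back to blkcipher_walk_done() as
 * the unprocessed remainder and picked up on the next iteration.
 * CRYPTO_TFM_REQ_MAY_SLEEP is cleared up front, presumably so the walk
 * helpers never try to sleep on behalf of this preemption-sensitive path.
 */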
static int ppc_ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
			   struct scatterlist *src, unsigned int nbytes)
{
	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int ubytes;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		ubytes = nbytes > MAX_BYTES ?
			 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);
		nbytes -= ubytes;

		spe_begin();
		ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, ctx->rounds, nbytes);
		spe_end();

		err = blkcipher_walk_done(desc, &walk, ubytes);
	}

	return err;
}

static int ppc_ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
			   struct scatterlist *src, unsigned int nbytes)
{
	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int ubytes;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		ubytes = nbytes > MAX_BYTES ?
			 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);
		nbytes -= ubytes;

		spe_begin();
		ppc_decrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, ctx->rounds, nbytes);
		spe_end();

		err = blkcipher_walk_done(desc, &walk, ubytes);
	}

	return err;
}

static int ppc_cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
			   struct scatterlist *src, unsigned int nbytes)
{
	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int ubytes;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		ubytes = nbytes > MAX_BYTES ?
			 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);
		nbytes -= ubytes;

		spe_begin();
		ppc_encrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, ctx->rounds, nbytes, walk.iv);
		spe_end();

		err = blkcipher_walk_done(desc, &walk, ubytes);
	}

	return err;
}

static int ppc_cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
			   struct scatterlist *src, unsigned int nbytes)
{
	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int ubytes;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		ubytes = nbytes > MAX_BYTES ?
			 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);
		nbytes -= ubytes;

		spe_begin();
		ppc_decrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, ctx->rounds, nbytes, walk.iv);
		spe_end();

		err = blkcipher_walk_done(desc, &walk, ubytes);
	}

	return err;
}

static int ppc_ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
			 struct scatterlist *src, unsigned int nbytes)
{
	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int pbytes, ubytes;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

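	/*
	 * pbytes is the number of bytes processed this pass: at most
	 * MAX_BYTES, rounded down to whole blocks unless this is the final
	 * chunk, which may end with a partial block (CTR is a stream mode,
	 * so no padding is needed). ubytes is what remains for the walk.
	 */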
	while ((pbytes = walk.nbytes)) {
		pbytes = pbytes > MAX_BYTES ? MAX_BYTES : pbytes;
		pbytes = pbytes == nbytes ?
			 nbytes : pbytes & ~(AES_BLOCK_SIZE - 1);
		ubytes = walk.nbytes - pbytes;

		spe_begin();
		ppc_crypt_ctr(walk.dst.virt.addr, walk.src.virt.addr,
			      ctx->key_enc, ctx->rounds, pbytes, walk.iv);
		spe_end();

		nbytes -= pbytes;
		err = blkcipher_walk_done(desc, &walk, ubytes);
	}

	return err;
}

static int ppc_xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
			   struct scatterlist *src, unsigned int nbytes)
{
	struct ppc_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int ubytes;
	int err;
	u32 *twk;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	twk = ctx->key_twk;

	while ((nbytes = walk.nbytes)) {
		ubytes = nbytes > MAX_BYTES ?
			 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);
		nbytes -= ubytes;

		spe_begin();
		ppc_encrypt_xts(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, ctx->rounds, nbytes, walk.iv, twk);
		spe_end();

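		/*
		 * The tweak key is only needed for the first chunk, to derive
		 * the initial tweak from the IV; afterwards the assembly
		 * presumably continues from the tweak value kept in walk.iv,
		 * so the key is not passed again.
		 */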
		twk = NULL;
		err = blkcipher_walk_done(desc, &walk, ubytes);
	}

	return err;
}

static int ppc_xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
			   struct scatterlist *src, unsigned int nbytes)
{
	struct ppc_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int ubytes;
	int err;
	u32 *twk;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	twk = ctx->key_twk;

	while ((nbytes = walk.nbytes)) {
		ubytes = nbytes > MAX_BYTES ?
			 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);
		nbytes -= ubytes;

		spe_begin();
		ppc_decrypt_xts(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, ctx->rounds, nbytes, walk.iv, twk);
		spe_end();

		twk = NULL;
		err = blkcipher_walk_done(desc, &walk, ubytes);
	}

	return err;
}
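
/*
 * Algorithm registrations: the bare AES cipher plus blkcipher wrappers for
 * the ECB, CBC, CTR and XTS modes. Priority 300 makes these SPE-assisted
 * implementations preferred over the generic C ones.
 */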
static struct crypto_alg aes_algs[] = { {
	.cra_name = "aes",
	.cra_driver_name = "aes-ppc-spe",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct ppc_aes_ctx),
	.cra_alignmask = 0,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = ppc_aes_setkey,
			.cia_encrypt = ppc_aes_encrypt,
			.cia_decrypt = ppc_aes_decrypt
		}
	}
}, {
	.cra_name = "ecb(aes)",
	.cra_driver_name = "ecb-ppc-spe",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct ppc_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ppc_aes_setkey,
			.encrypt = ppc_ecb_encrypt,
			.decrypt = ppc_ecb_decrypt,
		}
	}
}, {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "cbc-ppc-spe",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct ppc_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ppc_aes_setkey,
			.encrypt = ppc_cbc_encrypt,
			.decrypt = ppc_cbc_decrypt,
		}
	}
}, {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "ctr-ppc-spe",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct ppc_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ppc_aes_setkey,
			.encrypt = ppc_ctr_crypt,
			.decrypt = ppc_ctr_crypt,
		}
	}
}, {
	.cra_name = "xts(aes)",
	.cra_driver_name = "xts-ppc-spe",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct ppc_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ppc_xts_setkey,
			.encrypt = ppc_xts_encrypt,
			.decrypt = ppc_xts_decrypt,
		}
	}
} };

static int __init ppc_aes_mod_init(void)
{
	return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit ppc_aes_mod_fini(void)
{
	crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(ppc_aes_mod_init);
module_exit(ppc_aes_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS, SPE optimized");

MODULE_ALIAS_CRYPTO("aes");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("aes-ppc-spe");