1 /*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35
36 #include "internal.h"
37
38 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
39
40 /* a perfect nop */
41 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
42 {
43 return 0;
44 }
45
46 #else
47
48 #include "testmgr.h"
49
50 /*
51 * Number of pages of scratch memory needed for testing.
52 */
53 #define XBUFSIZE 8
54
55 /*
56 * Indexes into the xbuf to simulate cross-page access.
57 */
58 #define IDX1 32
59 #define IDX2 32400
60 #define IDX3 1
61 #define IDX4 8193
62 #define IDX5 22222
63 #define IDX6 17101
64 #define IDX7 27333
65 #define IDX8 3000
66
67 /*
68 * Used by test_cipher()
69 */
70 #define ENCRYPT 1
71 #define DECRYPT 0
72
73 struct tcrypt_result {
74 struct completion completion;
75 int err;
76 };
77
78 struct aead_test_suite {
79 struct {
80 struct aead_testvec *vecs;
81 unsigned int count;
82 } enc, dec;
83 };
84
85 struct cipher_test_suite {
86 struct {
87 struct cipher_testvec *vecs;
88 unsigned int count;
89 } enc, dec;
90 };
91
92 struct comp_test_suite {
93 struct {
94 struct comp_testvec *vecs;
95 unsigned int count;
96 } comp, decomp;
97 };
98
99 struct pcomp_test_suite {
100 struct {
101 struct pcomp_testvec *vecs;
102 unsigned int count;
103 } comp, decomp;
104 };
105
106 struct hash_test_suite {
107 struct hash_testvec *vecs;
108 unsigned int count;
109 };
110
111 struct cprng_test_suite {
112 struct cprng_testvec *vecs;
113 unsigned int count;
114 };
115
116 struct drbg_test_suite {
117 struct drbg_testvec *vecs;
118 unsigned int count;
119 };
120
121 struct akcipher_test_suite {
122 struct akcipher_testvec *vecs;
123 unsigned int count;
124 };
125
126 struct alg_test_desc {
127 const char *alg;
128 int (*test)(const struct alg_test_desc *desc, const char *driver,
129 u32 type, u32 mask);
130 int fips_allowed; /* set if alg is allowed in fips mode */
131
132 union {
133 struct aead_test_suite aead;
134 struct cipher_test_suite cipher;
135 struct comp_test_suite comp;
136 struct pcomp_test_suite pcomp;
137 struct hash_test_suite hash;
138 struct cprng_test_suite cprng;
139 struct drbg_test_suite drbg;
140 struct akcipher_test_suite akcipher;
141 } suite;
142 };
143
144 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
145
146 static void hexdump(unsigned char *buf, unsigned int len)
147 {
148 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
149 16, 1,
150 buf, len, false);
151 }
152
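/*
 * Completion callback shared by all asynchronous test requests: ignore
 * -EINPROGRESS notifications, otherwise record the final status and wake
 * up the thread waiting on the completion (see wait_async_op()).
 */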
153 static void tcrypt_complete(struct crypto_async_request *req, int err)
154 {
155 struct tcrypt_result *res = req->data;
156
157 if (err == -EINPROGRESS)
158 return;
159
160 res->err = err;
161 complete(&res->completion);
162 }
163
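/*
 * Allocate the XBUFSIZE single pages used as scatter-gather test buffers.
 * If any allocation fails, the pages obtained so far are freed and
 * -ENOMEM is returned.
 */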
164 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
165 {
166 int i;
167
168 for (i = 0; i < XBUFSIZE; i++) {
169 buf[i] = (void *)__get_free_page(GFP_KERNEL);
170 if (!buf[i])
171 goto err_free_buf;
172 }
173
174 return 0;
175
176 err_free_buf:
177 while (i-- > 0)
178 free_page((unsigned long)buf[i]);
179
180 return -ENOMEM;
181 }
182
183 static void testmgr_free_buf(char *buf[XBUFSIZE])
184 {
185 int i;
186
187 for (i = 0; i < XBUFSIZE; i++)
188 free_page((unsigned long)buf[i]);
189 }
190
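/*
 * Helper for synchronous-style use of an async primitive: if the request
 * was queued (-EINPROGRESS or -EBUSY), sleep until tcrypt_complete() fires
 * and return the status it recorded; otherwise return @ret unchanged.
 */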
191 static int wait_async_op(struct tcrypt_result *tr, int ret)
192 {
193 if (ret == -EINPROGRESS || ret == -EBUSY) {
194 wait_for_completion(&tr->completion);
195 reinit_completion(&tr->completion);
196 ret = tr->err;
197 }
198 return ret;
199 }
200
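/*
 * Run the hash vectors in @template against @tfm.  Vectors without ->np
 * are hashed from one linear buffer (shifted by @align_offset to exercise
 * misaligned input); vectors with ->np set are split across pages
 * according to ->tap[] so the scatterlist walking code is covered too.
 */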
201 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
202 unsigned int tcount, bool use_digest,
203 const int align_offset)
204 {
205 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
206 unsigned int i, j, k, temp;
207 struct scatterlist sg[8];
208 char *result;
209 char *key;
210 struct ahash_request *req;
211 struct tcrypt_result tresult;
212 void *hash_buff;
213 char *xbuf[XBUFSIZE];
214 int ret = -ENOMEM;
215
216 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
217 if (!result)
218 return ret;
219 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
220 if (!key)
221 goto out_nobuf;
222 if (testmgr_alloc_buf(xbuf))
223 goto out_nobuf;
224
225 init_completion(&tresult.completion);
226
227 req = ahash_request_alloc(tfm, GFP_KERNEL);
228 if (!req) {
229 printk(KERN_ERR "alg: hash: Failed to allocate request for "
230 "%s\n", algo);
231 goto out_noreq;
232 }
233 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
234 tcrypt_complete, &tresult);
235
236 j = 0;
237 for (i = 0; i < tcount; i++) {
238 if (template[i].np)
239 continue;
240
241 ret = -EINVAL;
242 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
243 goto out;
244
245 j++;
246 memset(result, 0, MAX_DIGEST_SIZE);
247
248 hash_buff = xbuf[0];
249 hash_buff += align_offset;
250
251 memcpy(hash_buff, template[i].plaintext, template[i].psize);
252 sg_init_one(&sg[0], hash_buff, template[i].psize);
253
254 if (template[i].ksize) {
255 crypto_ahash_clear_flags(tfm, ~0);
256 if (template[i].ksize > MAX_KEYLEN) {
257 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
258 j, algo, template[i].ksize, MAX_KEYLEN);
259 ret = -EINVAL;
260 goto out;
261 }
262 memcpy(key, template[i].key, template[i].ksize);
263 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
264 if (ret) {
265 printk(KERN_ERR "alg: hash: setkey failed on "
266 "test %d for %s: ret=%d\n", j, algo,
267 -ret);
268 goto out;
269 }
270 }
271
272 ahash_request_set_crypt(req, sg, result, template[i].psize);
273 if (use_digest) {
274 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
275 if (ret) {
276 pr_err("alg: hash: digest failed on test %d "
277 "for %s: ret=%d\n", j, algo, -ret);
278 goto out;
279 }
280 } else {
281 ret = wait_async_op(&tresult, crypto_ahash_init(req));
282 if (ret) {
283 pr_err("alt: hash: init failed on test %d "
284 "for %s: ret=%d\n", j, algo, -ret);
285 goto out;
286 }
287 ret = wait_async_op(&tresult, crypto_ahash_update(req));
288 if (ret) {
289 pr_err("alt: hash: update failed on test %d "
290 "for %s: ret=%d\n", j, algo, -ret);
291 goto out;
292 }
293 ret = wait_async_op(&tresult, crypto_ahash_final(req));
294 if (ret) {
295 pr_err("alt: hash: final failed on test %d "
296 "for %s: ret=%d\n", j, algo, -ret);
297 goto out;
298 }
299 }
300
301 if (memcmp(result, template[i].digest,
302 crypto_ahash_digestsize(tfm))) {
303 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
304 j, algo);
305 hexdump(result, crypto_ahash_digestsize(tfm));
306 ret = -EINVAL;
307 goto out;
308 }
309 }
310
311 j = 0;
312 for (i = 0; i < tcount; i++) {
313 /* alignment tests are only done with contiguous buffers */
314 if (align_offset != 0)
315 break;
316
317 if (!template[i].np)
318 continue;
319
320 j++;
321 memset(result, 0, MAX_DIGEST_SIZE);
322
323 temp = 0;
324 sg_init_table(sg, template[i].np);
325 ret = -EINVAL;
326 for (k = 0; k < template[i].np; k++) {
327 if (WARN_ON(offset_in_page(IDX[k]) +
328 template[i].tap[k] > PAGE_SIZE))
329 goto out;
330 sg_set_buf(&sg[k],
331 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
332 offset_in_page(IDX[k]),
333 template[i].plaintext + temp,
334 template[i].tap[k]),
335 template[i].tap[k]);
336 temp += template[i].tap[k];
337 }
338
339 if (template[i].ksize) {
340 if (template[i].ksize > MAX_KEYLEN) {
341 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
342 j, algo, template[i].ksize, MAX_KEYLEN);
343 ret = -EINVAL;
344 goto out;
345 }
346 crypto_ahash_clear_flags(tfm, ~0);
347 memcpy(key, template[i].key, template[i].ksize);
348 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
349
350 if (ret) {
351 printk(KERN_ERR "alg: hash: setkey "
352 "failed on chunking test %d "
353 "for %s: ret=%d\n", j, algo, -ret);
354 goto out;
355 }
356 }
357
358 ahash_request_set_crypt(req, sg, result, template[i].psize);
359 ret = crypto_ahash_digest(req);
360 switch (ret) {
361 case 0:
362 break;
363 case -EINPROGRESS:
364 case -EBUSY:
365 wait_for_completion(&tresult.completion);
366 reinit_completion(&tresult.completion);
367 ret = tresult.err;
368 if (!ret)
369 break;
370 /* fall through */
371 default:
372 printk(KERN_ERR "alg: hash: digest failed "
373 "on chunking test %d for %s: "
374 "ret=%d\n", j, algo, -ret);
375 goto out;
376 }
377
378 if (memcmp(result, template[i].digest,
379 crypto_ahash_digestsize(tfm))) {
380 printk(KERN_ERR "alg: hash: Chunking test %d "
381 "failed for %s\n", j, algo);
382 hexdump(result, crypto_ahash_digestsize(tfm));
383 ret = -EINVAL;
384 goto out;
385 }
386 }
387
388 ret = 0;
389
390 out:
391 ahash_request_free(req);
392 out_noreq:
393 testmgr_free_buf(xbuf);
394 out_nobuf:
395 kfree(key);
396 kfree(result);
397 return ret;
398 }
399
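/*
 * Top-level hash test: run the vectors with no offset, with a one byte
 * offset, and, if the transform reports an alignment mask, with an offset
 * of alignmask + 1 so a wrongly advertised mask is caught.
 */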
400 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
401 unsigned int tcount, bool use_digest)
402 {
403 unsigned int alignmask;
404 int ret;
405
406 ret = __test_hash(tfm, template, tcount, use_digest, 0);
407 if (ret)
408 return ret;
409
410 /* test unaligned buffers, check with one byte offset */
411 ret = __test_hash(tfm, template, tcount, use_digest, 1);
412 if (ret)
413 return ret;
414
415 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
416 if (alignmask) {
417 /* Check if alignment mask for tfm is correctly set. */
418 ret = __test_hash(tfm, template, tcount, use_digest,
419 alignmask + 1);
420 if (ret)
421 return ret;
422 }
423
424 return 0;
425 }
426
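/*
 * Run the AEAD vectors in @template against @tfm for the given direction.
 * The first pass uses linear buffers (optionally misaligned and with a
 * separate destination when @diff_dst is set); the second pass feeds the
 * associated data and payload through page-crossing scatterlists built
 * from ->atap[]/->tap[] and also checks for writes past the result.
 */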
427 static int __test_aead(struct crypto_aead *tfm, int enc,
428 struct aead_testvec *template, unsigned int tcount,
429 const bool diff_dst, const int align_offset)
430 {
431 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
432 unsigned int i, j, k, n, temp;
433 int ret = -ENOMEM;
434 char *q;
435 char *key;
436 struct aead_request *req;
437 struct scatterlist *sg;
438 struct scatterlist *sgout;
439 const char *e, *d;
440 struct tcrypt_result result;
441 unsigned int authsize, iv_len;
442 void *input;
443 void *output;
444 void *assoc;
445 char *iv;
446 char *xbuf[XBUFSIZE];
447 char *xoutbuf[XBUFSIZE];
448 char *axbuf[XBUFSIZE];
449
450 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
451 if (!iv)
452 return ret;
453 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
454 if (!key)
455 goto out_noxbuf;
456 if (testmgr_alloc_buf(xbuf))
457 goto out_noxbuf;
458 if (testmgr_alloc_buf(axbuf))
459 goto out_noaxbuf;
460 if (diff_dst && testmgr_alloc_buf(xoutbuf))
461 goto out_nooutbuf;
462
463 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
464 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
465 if (!sg)
466 goto out_nosg;
467 sgout = &sg[16];
468
469 if (diff_dst)
470 d = "-ddst";
471 else
472 d = "";
473
474 if (enc == ENCRYPT)
475 e = "encryption";
476 else
477 e = "decryption";
478
479 init_completion(&result.completion);
480
481 req = aead_request_alloc(tfm, GFP_KERNEL);
482 if (!req) {
483 pr_err("alg: aead%s: Failed to allocate request for %s\n",
484 d, algo);
485 goto out;
486 }
487
488 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
489 tcrypt_complete, &result);
490
491 for (i = 0, j = 0; i < tcount; i++) {
492 if (template[i].np)
493 continue;
494
495 j++;
496
497 /* some templates have no input data but they still
498 * touch the input buffer
499 */
500 input = xbuf[0];
501 input += align_offset;
502 assoc = axbuf[0];
503
504 ret = -EINVAL;
505 if (WARN_ON(align_offset + template[i].ilen >
506 PAGE_SIZE || template[i].alen > PAGE_SIZE))
507 goto out;
508
509 memcpy(input, template[i].input, template[i].ilen);
510 memcpy(assoc, template[i].assoc, template[i].alen);
511 iv_len = crypto_aead_ivsize(tfm);
512 if (template[i].iv)
513 memcpy(iv, template[i].iv, iv_len);
514 else
515 memset(iv, 0, iv_len);
516
517 crypto_aead_clear_flags(tfm, ~0);
518 if (template[i].wk)
519 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
520
521 if (template[i].klen > MAX_KEYLEN) {
522 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
523 d, j, algo, template[i].klen,
524 MAX_KEYLEN);
525 ret = -EINVAL;
526 goto out;
527 }
528 memcpy(key, template[i].key, template[i].klen);
529
530 ret = crypto_aead_setkey(tfm, key, template[i].klen);
531 if (!ret == template[i].fail) {
532 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
533 d, j, algo, crypto_aead_get_flags(tfm));
534 goto out;
535 } else if (ret)
536 continue;
537
538 authsize = abs(template[i].rlen - template[i].ilen);
539 ret = crypto_aead_setauthsize(tfm, authsize);
540 if (ret) {
541 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
542 d, authsize, j, algo);
543 goto out;
544 }
545
546 k = !!template[i].alen;
547 sg_init_table(sg, k + 1);
548 sg_set_buf(&sg[0], assoc, template[i].alen);
549 sg_set_buf(&sg[k], input,
550 template[i].ilen + (enc ? authsize : 0));
551 output = input;
552
553 if (diff_dst) {
554 sg_init_table(sgout, k + 1);
555 sg_set_buf(&sgout[0], assoc, template[i].alen);
556
557 output = xoutbuf[0];
558 output += align_offset;
559 sg_set_buf(&sgout[k], output,
560 template[i].rlen + (enc ? 0 : authsize));
561 }
562
563 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
564 template[i].ilen, iv);
565
566 aead_request_set_ad(req, template[i].alen);
567
568 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
569
570 switch (ret) {
571 case 0:
572 if (template[i].novrfy) {
573 /* verification was supposed to fail */
574 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
575 d, e, j, algo);
576 /* so really, we got a bad message */
577 ret = -EBADMSG;
578 goto out;
579 }
580 break;
581 case -EINPROGRESS:
582 case -EBUSY:
583 wait_for_completion(&result.completion);
584 reinit_completion(&result.completion);
585 ret = result.err;
586 if (!ret)
587 break;
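/* fall through */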
588 case -EBADMSG:
589 if (template[i].novrfy)
590 /* verification failure was expected */
591 continue;
592 /* fall through */
593 default:
594 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
595 d, e, j, algo, -ret);
596 goto out;
597 }
598
599 q = output;
600 if (memcmp(q, template[i].result, template[i].rlen)) {
601 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
602 d, j, e, algo);
603 hexdump(q, template[i].rlen);
604 ret = -EINVAL;
605 goto out;
606 }
607 }
608
609 for (i = 0, j = 0; i < tcount; i++) {
610 /* alignment tests are only done with contiguous buffers */
611 if (align_offset != 0)
612 break;
613
614 if (!template[i].np)
615 continue;
616
617 j++;
618
619 if (template[i].iv)
620 memcpy(iv, template[i].iv, MAX_IVLEN);
621 else
622 memset(iv, 0, MAX_IVLEN);
623
624 crypto_aead_clear_flags(tfm, ~0);
625 if (template[i].wk)
626 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
627 if (template[i].klen > MAX_KEYLEN) {
628 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
629 d, j, algo, template[i].klen, MAX_KEYLEN);
630 ret = -EINVAL;
631 goto out;
632 }
633 memcpy(key, template[i].key, template[i].klen);
634
635 ret = crypto_aead_setkey(tfm, key, template[i].klen);
636 if (!ret == template[i].fail) {
637 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
638 d, j, algo, crypto_aead_get_flags(tfm));
639 goto out;
640 } else if (ret)
641 continue;
642
643 authsize = abs(template[i].rlen - template[i].ilen);
644
645 ret = -EINVAL;
646 sg_init_table(sg, template[i].anp + template[i].np);
647 if (diff_dst)
648 sg_init_table(sgout, template[i].anp + template[i].np);
649
650 ret = -EINVAL;
651 for (k = 0, temp = 0; k < template[i].anp; k++) {
652 if (WARN_ON(offset_in_page(IDX[k]) +
653 template[i].atap[k] > PAGE_SIZE))
654 goto out;
655 sg_set_buf(&sg[k],
656 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
657 offset_in_page(IDX[k]),
658 template[i].assoc + temp,
659 template[i].atap[k]),
660 template[i].atap[k]);
661 if (diff_dst)
662 sg_set_buf(&sgout[k],
663 axbuf[IDX[k] >> PAGE_SHIFT] +
664 offset_in_page(IDX[k]),
665 template[i].atap[k]);
666 temp += template[i].atap[k];
667 }
668
669 for (k = 0, temp = 0; k < template[i].np; k++) {
670 if (WARN_ON(offset_in_page(IDX[k]) +
671 template[i].tap[k] > PAGE_SIZE))
672 goto out;
673
674 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
675 memcpy(q, template[i].input + temp, template[i].tap[k]);
676 sg_set_buf(&sg[template[i].anp + k],
677 q, template[i].tap[k]);
678
679 if (diff_dst) {
680 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
681 offset_in_page(IDX[k]);
682
683 memset(q, 0, template[i].tap[k]);
684
685 sg_set_buf(&sgout[template[i].anp + k],
686 q, template[i].tap[k]);
687 }
688
689 n = template[i].tap[k];
690 if (k == template[i].np - 1 && enc)
691 n += authsize;
692 if (offset_in_page(q) + n < PAGE_SIZE)
693 q[n] = 0;
694
695 temp += template[i].tap[k];
696 }
697
698 ret = crypto_aead_setauthsize(tfm, authsize);
699 if (ret) {
700 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
701 d, authsize, j, algo);
702 goto out;
703 }
704
705 if (enc) {
706 if (WARN_ON(sg[template[i].anp + k - 1].offset +
707 sg[template[i].anp + k - 1].length +
708 authsize > PAGE_SIZE)) {
709 ret = -EINVAL;
710 goto out;
711 }
712
713 if (diff_dst)
714 sgout[template[i].anp + k - 1].length +=
715 authsize;
716 sg[template[i].anp + k - 1].length += authsize;
717 }
718
719 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
720 template[i].ilen,
721 iv);
722
723 aead_request_set_ad(req, template[i].alen);
724
725 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
726
727 switch (ret) {
728 case 0:
729 if (template[i].novrfy) {
730 /* verification was supposed to fail */
731 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
732 d, e, j, algo);
733 /* so really, we got a bad message */
734 ret = -EBADMSG;
735 goto out;
736 }
737 break;
738 case -EINPROGRESS:
739 case -EBUSY:
740 wait_for_completion(&result.completion);
741 reinit_completion(&result.completion);
742 ret = result.err;
743 if (!ret)
744 break;
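/* fall through */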
745 case -EBADMSG:
746 if (template[i].novrfy)
747 /* verification failure was expected */
748 continue;
749 /* fall through */
750 default:
751 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
752 d, e, j, algo, -ret);
753 goto out;
754 }
755
756 ret = -EINVAL;
757 for (k = 0, temp = 0; k < template[i].np; k++) {
758 if (diff_dst)
759 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
760 offset_in_page(IDX[k]);
761 else
762 q = xbuf[IDX[k] >> PAGE_SHIFT] +
763 offset_in_page(IDX[k]);
764
765 n = template[i].tap[k];
766 if (k == template[i].np - 1)
767 n += enc ? authsize : -authsize;
768
769 if (memcmp(q, template[i].result + temp, n)) {
770 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
771 d, j, e, k, algo);
772 hexdump(q, n);
773 goto out;
774 }
775
776 q += n;
777 if (k == template[i].np - 1 && !enc) {
778 if (!diff_dst &&
779 memcmp(q, template[i].input +
780 temp + n, authsize))
781 n = authsize;
782 else
783 n = 0;
784 } else {
785 for (n = 0; offset_in_page(q + n) && q[n]; n++)
786 ;
787 }
788 if (n) {
789 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
790 d, j, e, k, algo, n);
791 hexdump(q, n);
792 goto out;
793 }
794
795 temp += template[i].tap[k];
796 }
797 }
798
799 ret = 0;
800
801 out:
802 aead_request_free(req);
803 kfree(sg);
804 out_nosg:
805 if (diff_dst)
806 testmgr_free_buf(xoutbuf);
807 out_nooutbuf:
808 testmgr_free_buf(axbuf);
809 out_noaxbuf:
810 testmgr_free_buf(xbuf);
811 out_noxbuf:
812 kfree(key);
813 kfree(iv);
814 return ret;
815 }
816
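/*
 * Top-level AEAD test: cover dst == src, dst != src, a one byte buffer
 * offset and, when an alignment mask is advertised, an offset of
 * alignmask + 1.
 */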
817 static int test_aead(struct crypto_aead *tfm, int enc,
818 struct aead_testvec *template, unsigned int tcount)
819 {
820 unsigned int alignmask;
821 int ret;
822
823 /* test 'dst == src' case */
824 ret = __test_aead(tfm, enc, template, tcount, false, 0);
825 if (ret)
826 return ret;
827
828 /* test 'dst != src' case */
829 ret = __test_aead(tfm, enc, template, tcount, true, 0);
830 if (ret)
831 return ret;
832
833 /* test unaligned buffers, check with one byte offset */
834 ret = __test_aead(tfm, enc, template, tcount, true, 1);
835 if (ret)
836 return ret;
837
838 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
839 if (alignmask) {
840 /* Check if alignment mask for tfm is correctly set. */
841 ret = __test_aead(tfm, enc, template, tcount, true,
842 alignmask + 1);
843 if (ret)
844 return ret;
845 }
846
847 return 0;
848 }
849
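/*
 * Test a single-block cipher: each vector is encrypted or decrypted one
 * block at a time with crypto_cipher_{en,de}crypt_one() and the result is
 * compared against the expected output.
 */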
850 static int test_cipher(struct crypto_cipher *tfm, int enc,
851 struct cipher_testvec *template, unsigned int tcount)
852 {
853 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
854 unsigned int i, j, k;
855 char *q;
856 const char *e;
857 void *data;
858 char *xbuf[XBUFSIZE];
859 int ret = -ENOMEM;
860
861 if (testmgr_alloc_buf(xbuf))
862 goto out_nobuf;
863
864 if (enc == ENCRYPT)
865 e = "encryption";
866 else
867 e = "decryption";
868
869 j = 0;
870 for (i = 0; i < tcount; i++) {
871 if (template[i].np)
872 continue;
873
874 j++;
875
876 ret = -EINVAL;
877 if (WARN_ON(template[i].ilen > PAGE_SIZE))
878 goto out;
879
880 data = xbuf[0];
881 memcpy(data, template[i].input, template[i].ilen);
882
883 crypto_cipher_clear_flags(tfm, ~0);
884 if (template[i].wk)
885 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
886
887 ret = crypto_cipher_setkey(tfm, template[i].key,
888 template[i].klen);
889 if (!ret == template[i].fail) {
890 printk(KERN_ERR "alg: cipher: setkey failed "
891 "on test %d for %s: flags=%x\n", j,
892 algo, crypto_cipher_get_flags(tfm));
893 goto out;
894 } else if (ret)
895 continue;
896
897 for (k = 0; k < template[i].ilen;
898 k += crypto_cipher_blocksize(tfm)) {
899 if (enc)
900 crypto_cipher_encrypt_one(tfm, data + k,
901 data + k);
902 else
903 crypto_cipher_decrypt_one(tfm, data + k,
904 data + k);
905 }
906
907 q = data;
908 if (memcmp(q, template[i].result, template[i].rlen)) {
909 printk(KERN_ERR "alg: cipher: Test %d failed "
910 "on %s for %s\n", j, e, algo);
911 hexdump(q, template[i].rlen);
912 ret = -EINVAL;
913 goto out;
914 }
915 }
916
917 ret = 0;
918
919 out:
920 testmgr_free_buf(xbuf);
921 out_nobuf:
922 return ret;
923 }
924
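/*
 * Run the skcipher vectors in @template against @tfm.  As with the AEAD
 * and hash tests, one pass uses linear (optionally misaligned) buffers and
 * a second pass uses page-crossing scatterlists driven by ->tap[]; the
 * output IV is also checked when the vector supplies ->iv_out.
 */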
925 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
926 struct cipher_testvec *template, unsigned int tcount,
927 const bool diff_dst, const int align_offset)
928 {
929 const char *algo =
930 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
931 unsigned int i, j, k, n, temp;
932 char *q;
933 struct skcipher_request *req;
934 struct scatterlist sg[8];
935 struct scatterlist sgout[8];
936 const char *e, *d;
937 struct tcrypt_result result;
938 void *data;
939 char iv[MAX_IVLEN];
940 char *xbuf[XBUFSIZE];
941 char *xoutbuf[XBUFSIZE];
942 int ret = -ENOMEM;
943 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
944
945 if (testmgr_alloc_buf(xbuf))
946 goto out_nobuf;
947
948 if (diff_dst && testmgr_alloc_buf(xoutbuf))
949 goto out_nooutbuf;
950
951 if (diff_dst)
952 d = "-ddst";
953 else
954 d = "";
955
956 if (enc == ENCRYPT)
957 e = "encryption";
958 else
959 e = "decryption";
960
961 init_completion(&result.completion);
962
963 req = skcipher_request_alloc(tfm, GFP_KERNEL);
964 if (!req) {
965 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
966 d, algo);
967 goto out;
968 }
969
970 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
971 tcrypt_complete, &result);
972
973 j = 0;
974 for (i = 0; i < tcount; i++) {
975 if (template[i].np && !template[i].also_non_np)
976 continue;
977
978 if (template[i].iv)
979 memcpy(iv, template[i].iv, ivsize);
980 else
981 memset(iv, 0, MAX_IVLEN);
982
983 j++;
984 ret = -EINVAL;
985 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
986 goto out;
987
988 data = xbuf[0];
989 data += align_offset;
990 memcpy(data, template[i].input, template[i].ilen);
991
992 crypto_skcipher_clear_flags(tfm, ~0);
993 if (template[i].wk)
994 crypto_skcipher_set_flags(tfm,
995 CRYPTO_TFM_REQ_WEAK_KEY);
996
997 ret = crypto_skcipher_setkey(tfm, template[i].key,
998 template[i].klen);
999 if (!ret == template[i].fail) {
1000 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1001 d, j, algo, crypto_skcipher_get_flags(tfm));
1002 goto out;
1003 } else if (ret)
1004 continue;
1005
1006 sg_init_one(&sg[0], data, template[i].ilen);
1007 if (diff_dst) {
1008 data = xoutbuf[0];
1009 data += align_offset;
1010 sg_init_one(&sgout[0], data, template[i].ilen);
1011 }
1012
1013 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1014 template[i].ilen, iv);
1015 ret = enc ? crypto_skcipher_encrypt(req) :
1016 crypto_skcipher_decrypt(req);
1017
1018 switch (ret) {
1019 case 0:
1020 break;
1021 case -EINPROGRESS:
1022 case -EBUSY:
1023 wait_for_completion(&result.completion);
1024 reinit_completion(&result.completion);
1025 ret = result.err;
1026 if (!ret)
1027 break;
1028 /* fall through */
1029 default:
1030 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1031 d, e, j, algo, -ret);
1032 goto out;
1033 }
1034
1035 q = data;
1036 if (memcmp(q, template[i].result, template[i].rlen)) {
1037 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1038 d, j, e, algo);
1039 hexdump(q, template[i].rlen);
1040 ret = -EINVAL;
1041 goto out;
1042 }
1043
1044 if (template[i].iv_out &&
1045 memcmp(iv, template[i].iv_out,
1046 crypto_skcipher_ivsize(tfm))) {
1047 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1048 d, j, e, algo);
1049 hexdump(iv, crypto_skcipher_ivsize(tfm));
1050 ret = -EINVAL;
1051 goto out;
1052 }
1053 }
1054
1055 j = 0;
1056 for (i = 0; i < tcount; i++) {
1057 /* alignment tests are only done with contiguous buffers */
1058 if (align_offset != 0)
1059 break;
1060
1061 if (!template[i].np)
1062 continue;
1063
1064 if (template[i].iv)
1065 memcpy(iv, template[i].iv, ivsize);
1066 else
1067 memset(iv, 0, MAX_IVLEN);
1068
1069 j++;
1070 crypto_skcipher_clear_flags(tfm, ~0);
1071 if (template[i].wk)
1072 crypto_skcipher_set_flags(tfm,
1073 CRYPTO_TFM_REQ_WEAK_KEY);
1074
1075 ret = crypto_skcipher_setkey(tfm, template[i].key,
1076 template[i].klen);
1077 if (!ret == template[i].fail) {
1078 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1079 d, j, algo, crypto_skcipher_get_flags(tfm));
1080 goto out;
1081 } else if (ret)
1082 continue;
1083
1084 temp = 0;
1085 ret = -EINVAL;
1086 sg_init_table(sg, template[i].np);
1087 if (diff_dst)
1088 sg_init_table(sgout, template[i].np);
1089 for (k = 0; k < template[i].np; k++) {
1090 if (WARN_ON(offset_in_page(IDX[k]) +
1091 template[i].tap[k] > PAGE_SIZE))
1092 goto out;
1093
1094 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1095
1096 memcpy(q, template[i].input + temp, template[i].tap[k]);
1097
1098 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1099 q[template[i].tap[k]] = 0;
1100
1101 sg_set_buf(&sg[k], q, template[i].tap[k]);
1102 if (diff_dst) {
1103 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1104 offset_in_page(IDX[k]);
1105
1106 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1107
1108 memset(q, 0, template[i].tap[k]);
1109 if (offset_in_page(q) +
1110 template[i].tap[k] < PAGE_SIZE)
1111 q[template[i].tap[k]] = 0;
1112 }
1113
1114 temp += template[i].tap[k];
1115 }
1116
1117 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1118 template[i].ilen, iv);
1119
1120 ret = enc ? crypto_skcipher_encrypt(req) :
1121 crypto_skcipher_decrypt(req);
1122
1123 switch (ret) {
1124 case 0:
1125 break;
1126 case -EINPROGRESS:
1127 case -EBUSY:
1128 wait_for_completion(&result.completion);
1129 reinit_completion(&result.completion);
1130 ret = result.err;
1131 if (!ret)
1132 break;
1133 /* fall through */
1134 default:
1135 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1136 d, e, j, algo, -ret);
1137 goto out;
1138 }
1139
1140 temp = 0;
1141 ret = -EINVAL;
1142 for (k = 0; k < template[i].np; k++) {
1143 if (diff_dst)
1144 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1145 offset_in_page(IDX[k]);
1146 else
1147 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1148 offset_in_page(IDX[k]);
1149
1150 if (memcmp(q, template[i].result + temp,
1151 template[i].tap[k])) {
1152 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1153 d, j, e, k, algo);
1154 hexdump(q, template[i].tap[k]);
1155 goto out;
1156 }
1157
1158 q += template[i].tap[k];
1159 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1160 ;
1161 if (n) {
1162 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1163 d, j, e, k, algo, n);
1164 hexdump(q, n);
1165 goto out;
1166 }
1167 temp += template[i].tap[k];
1168 }
1169 }
1170
1171 ret = 0;
1172
1173 out:
1174 skcipher_request_free(req);
1175 if (diff_dst)
1176 testmgr_free_buf(xoutbuf);
1177 out_nooutbuf:
1178 testmgr_free_buf(xbuf);
1179 out_nobuf:
1180 return ret;
1181 }
1182
1183 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1184 struct cipher_testvec *template, unsigned int tcount)
1185 {
1186 unsigned int alignmask;
1187 int ret;
1188
1189 /* test 'dst == src' case */
1190 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1191 if (ret)
1192 return ret;
1193
1194 /* test 'dst != src' case */
1195 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1196 if (ret)
1197 return ret;
1198
1199 /* test unaligned buffers, check with one byte offset */
1200 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1201 if (ret)
1202 return ret;
1203
1204 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1205 if (alignmask) {
1206 /* Check if alignment mask for tfm is correctly set. */
1207 ret = __test_skcipher(tfm, enc, template, tcount, true,
1208 alignmask + 1);
1209 if (ret)
1210 return ret;
1211 }
1212
1213 return 0;
1214 }
1215
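/*
 * Test a synchronous compressor: compress and decompress each vector into
 * a stack buffer and verify both the output length and the output bytes.
 */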
1216 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1217 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1218 {
1219 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1220 unsigned int i;
1221 char result[COMP_BUF_SIZE];
1222 int ret;
1223
1224 for (i = 0; i < ctcount; i++) {
1225 int ilen;
1226 unsigned int dlen = COMP_BUF_SIZE;
1227
1228 memset(result, 0, sizeof (result));
1229
1230 ilen = ctemplate[i].inlen;
1231 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1232 ilen, result, &dlen);
1233 if (ret) {
1234 printk(KERN_ERR "alg: comp: compression failed "
1235 "on test %d for %s: ret=%d\n", i + 1, algo,
1236 -ret);
1237 goto out;
1238 }
1239
1240 if (dlen != ctemplate[i].outlen) {
1241 printk(KERN_ERR "alg: comp: Compression test %d "
1242 "failed for %s: output len = %d\n", i + 1, algo,
1243 dlen);
1244 ret = -EINVAL;
1245 goto out;
1246 }
1247
1248 if (memcmp(result, ctemplate[i].output, dlen)) {
1249 printk(KERN_ERR "alg: comp: Compression test %d "
1250 "failed for %s\n", i + 1, algo);
1251 hexdump(result, dlen);
1252 ret = -EINVAL;
1253 goto out;
1254 }
1255 }
1256
1257 for (i = 0; i < dtcount; i++) {
1258 int ilen;
1259 unsigned int dlen = COMP_BUF_SIZE;
1260
1261 memset(result, 0, sizeof (result));
1262
1263 ilen = dtemplate[i].inlen;
1264 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1265 ilen, result, &dlen);
1266 if (ret) {
1267 printk(KERN_ERR "alg: comp: decompression failed "
1268 "on test %d for %s: ret=%d\n", i + 1, algo,
1269 -ret);
1270 goto out;
1271 }
1272
1273 if (dlen != dtemplate[i].outlen) {
1274 printk(KERN_ERR "alg: comp: Decompression test %d "
1275 "failed for %s: output len = %d\n", i + 1, algo,
1276 dlen);
1277 ret = -EINVAL;
1278 goto out;
1279 }
1280
1281 if (memcmp(result, dtemplate[i].output, dlen)) {
1282 printk(KERN_ERR "alg: comp: Decompression test %d "
1283 "failed for %s\n", i + 1, algo);
1284 hexdump(result, dlen);
1285 ret = -EINVAL;
1286 goto out;
1287 }
1288 }
1289
1290 ret = 0;
1291
1292 out:
1293 return ret;
1294 }
1295
1296 static int test_pcomp(struct crypto_pcomp *tfm,
1297 struct pcomp_testvec *ctemplate,
1298 struct pcomp_testvec *dtemplate, int ctcount,
1299 int dtcount)
1300 {
1301 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1302 unsigned int i;
1303 char result[COMP_BUF_SIZE];
1304 int res;
1305
1306 for (i = 0; i < ctcount; i++) {
1307 struct comp_request req;
1308 unsigned int produced = 0;
1309
1310 res = crypto_compress_setup(tfm, ctemplate[i].params,
1311 ctemplate[i].paramsize);
1312 if (res) {
1313 pr_err("alg: pcomp: compression setup failed on test "
1314 "%d for %s: error=%d\n", i + 1, algo, res);
1315 return res;
1316 }
1317
1318 res = crypto_compress_init(tfm);
1319 if (res) {
1320 pr_err("alg: pcomp: compression init failed on test "
1321 "%d for %s: error=%d\n", i + 1, algo, res);
1322 return res;
1323 }
1324
1325 memset(result, 0, sizeof(result));
1326
1327 req.next_in = ctemplate[i].input;
1328 req.avail_in = ctemplate[i].inlen / 2;
1329 req.next_out = result;
1330 req.avail_out = ctemplate[i].outlen / 2;
1331
1332 res = crypto_compress_update(tfm, &req);
1333 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1334 pr_err("alg: pcomp: compression update failed on test "
1335 "%d for %s: error=%d\n", i + 1, algo, res);
1336 return res;
1337 }
1338 if (res > 0)
1339 produced += res;
1340
1341 /* Add remaining input data */
1342 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1343
1344 res = crypto_compress_update(tfm, &req);
1345 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1346 pr_err("alg: pcomp: compression update failed on test "
1347 "%d for %s: error=%d\n", i + 1, algo, res);
1348 return res;
1349 }
1350 if (res > 0)
1351 produced += res;
1352
1353 /* Provide remaining output space */
1354 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1355
1356 res = crypto_compress_final(tfm, &req);
1357 if (res < 0) {
1358 pr_err("alg: pcomp: compression final failed on test "
1359 "%d for %s: error=%d\n", i + 1, algo, res);
1360 return res;
1361 }
1362 produced += res;
1363
1364 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1365 pr_err("alg: comp: Compression test %d failed for %s: "
1366 "output len = %d (expected %d)\n", i + 1, algo,
1367 COMP_BUF_SIZE - req.avail_out,
1368 ctemplate[i].outlen);
1369 return -EINVAL;
1370 }
1371
1372 if (produced != ctemplate[i].outlen) {
1373 pr_err("alg: comp: Compression test %d failed for %s: "
1374 "returned len = %u (expected %d)\n", i + 1,
1375 algo, produced, ctemplate[i].outlen);
1376 return -EINVAL;
1377 }
1378
1379 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1380 pr_err("alg: pcomp: Compression test %d failed for "
1381 "%s\n", i + 1, algo);
1382 hexdump(result, ctemplate[i].outlen);
1383 return -EINVAL;
1384 }
1385 }
1386
1387 for (i = 0; i < dtcount; i++) {
1388 struct comp_request req;
1389 unsigned int produced = 0;
1390
1391 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1392 dtemplate[i].paramsize);
1393 if (res) {
1394 pr_err("alg: pcomp: decompression setup failed on "
1395 "test %d for %s: error=%d\n", i + 1, algo, res);
1396 return res;
1397 }
1398
1399 res = crypto_decompress_init(tfm);
1400 if (res) {
1401 pr_err("alg: pcomp: decompression init failed on test "
1402 "%d for %s: error=%d\n", i + 1, algo, res);
1403 return res;
1404 }
1405
1406 memset(result, 0, sizeof(result));
1407
1408 req.next_in = dtemplate[i].input;
1409 req.avail_in = dtemplate[i].inlen / 2;
1410 req.next_out = result;
1411 req.avail_out = dtemplate[i].outlen / 2;
1412
1413 res = crypto_decompress_update(tfm, &req);
1414 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1415 pr_err("alg: pcomp: decompression update failed on "
1416 "test %d for %s: error=%d\n", i + 1, algo, res);
1417 return res;
1418 }
1419 if (res > 0)
1420 produced += res;
1421
1422 /* Add remaining input data */
1423 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1424
1425 res = crypto_decompress_update(tfm, &req);
1426 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1427 pr_err("alg: pcomp: decompression update failed on "
1428 "test %d for %s: error=%d\n", i + 1, algo, res);
1429 return res;
1430 }
1431 if (res > 0)
1432 produced += res;
1433
1434 /* Provide remaining output space */
1435 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1436
1437 res = crypto_decompress_final(tfm, &req);
1438 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1439 pr_err("alg: pcomp: decompression final failed on "
1440 "test %d for %s: error=%d\n", i + 1, algo, res);
1441 return res;
1442 }
1443 if (res > 0)
1444 produced += res;
1445
1446 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1447 pr_err("alg: comp: Decompression test %d failed for "
1448 "%s: output len = %d (expected %d)\n", i + 1,
1449 algo, COMP_BUF_SIZE - req.avail_out,
1450 dtemplate[i].outlen);
1451 return -EINVAL;
1452 }
1453
1454 if (produced != dtemplate[i].outlen) {
1455 pr_err("alg: comp: Decompression test %d failed for "
1456 "%s: returned len = %u (expected %d)\n", i + 1,
1457 algo, produced, dtemplate[i].outlen);
1458 return -EINVAL;
1459 }
1460
1461 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1462 pr_err("alg: pcomp: Decompression test %d failed for "
1463 "%s\n", i + 1, algo);
1464 hexdump(result, dtemplate[i].outlen);
1465 return -EINVAL;
1466 }
1467 }
1468
1469 return 0;
1470 }
1471
1472
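/*
 * Test a deterministic CPRNG (e.g. ansi_cprng): reseed with the vector's
 * V/key/DT material, generate ->loops blocks of output and compare the
 * final block against the expected result.
 */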
1473 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1474 unsigned int tcount)
1475 {
1476 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1477 int err = 0, i, j, seedsize;
1478 u8 *seed;
1479 char result[32];
1480
1481 seedsize = crypto_rng_seedsize(tfm);
1482
1483 seed = kmalloc(seedsize, GFP_KERNEL);
1484 if (!seed) {
1485 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1486 "for %s\n", algo);
1487 return -ENOMEM;
1488 }
1489
1490 for (i = 0; i < tcount; i++) {
1491 memset(result, 0, 32);
1492
1493 memcpy(seed, template[i].v, template[i].vlen);
1494 memcpy(seed + template[i].vlen, template[i].key,
1495 template[i].klen);
1496 memcpy(seed + template[i].vlen + template[i].klen,
1497 template[i].dt, template[i].dtlen);
1498
1499 err = crypto_rng_reset(tfm, seed, seedsize);
1500 if (err) {
1501 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1502 "for %s\n", algo);
1503 goto out;
1504 }
1505
1506 for (j = 0; j < template[i].loops; j++) {
1507 err = crypto_rng_get_bytes(tfm, result,
1508 template[i].rlen);
1509 if (err < 0) {
1510 printk(KERN_ERR "alg: cprng: Failed to obtain "
1511 "the correct amount of random data for "
1512 "%s (requested %d)\n", algo,
1513 template[i].rlen);
1514 goto out;
1515 }
1516 }
1517
1518 err = memcmp(result, template[i].result,
1519 template[i].rlen);
1520 if (err) {
1521 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1522 i, algo);
1523 hexdump(result, template[i].rlen);
1524 err = -EINVAL;
1525 goto out;
1526 }
1527 }
1528
1529 out:
1530 kfree(seed);
1531 return err;
1532 }
1533
1534 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1535 u32 type, u32 mask)
1536 {
1537 struct crypto_aead *tfm;
1538 int err = 0;
1539
1540 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1541 if (IS_ERR(tfm)) {
1542 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1543 "%ld\n", driver, PTR_ERR(tfm));
1544 return PTR_ERR(tfm);
1545 }
1546
1547 if (desc->suite.aead.enc.vecs) {
1548 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1549 desc->suite.aead.enc.count);
1550 if (err)
1551 goto out;
1552 }
1553
1554 if (!err && desc->suite.aead.dec.vecs)
1555 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1556 desc->suite.aead.dec.count);
1557
1558 out:
1559 crypto_free_aead(tfm);
1560 return err;
1561 }
1562
1563 static int alg_test_cipher(const struct alg_test_desc *desc,
1564 const char *driver, u32 type, u32 mask)
1565 {
1566 struct crypto_cipher *tfm;
1567 int err = 0;
1568
1569 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1570 if (IS_ERR(tfm)) {
1571 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1572 "%s: %ld\n", driver, PTR_ERR(tfm));
1573 return PTR_ERR(tfm);
1574 }
1575
1576 if (desc->suite.cipher.enc.vecs) {
1577 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1578 desc->suite.cipher.enc.count);
1579 if (err)
1580 goto out;
1581 }
1582
1583 if (desc->suite.cipher.dec.vecs)
1584 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1585 desc->suite.cipher.dec.count);
1586
1587 out:
1588 crypto_free_cipher(tfm);
1589 return err;
1590 }
1591
1592 static int alg_test_skcipher(const struct alg_test_desc *desc,
1593 const char *driver, u32 type, u32 mask)
1594 {
1595 struct crypto_skcipher *tfm;
1596 int err = 0;
1597
1598 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1599 if (IS_ERR(tfm)) {
1600 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1601 "%s: %ld\n", driver, PTR_ERR(tfm));
1602 return PTR_ERR(tfm);
1603 }
1604
1605 if (desc->suite.cipher.enc.vecs) {
1606 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1607 desc->suite.cipher.enc.count);
1608 if (err)
1609 goto out;
1610 }
1611
1612 if (desc->suite.cipher.dec.vecs)
1613 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1614 desc->suite.cipher.dec.count);
1615
1616 out:
1617 crypto_free_skcipher(tfm);
1618 return err;
1619 }
1620
1621 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1622 u32 type, u32 mask)
1623 {
1624 struct crypto_comp *tfm;
1625 int err;
1626
1627 tfm = crypto_alloc_comp(driver, type, mask);
1628 if (IS_ERR(tfm)) {
1629 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1630 "%ld\n", driver, PTR_ERR(tfm));
1631 return PTR_ERR(tfm);
1632 }
1633
1634 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1635 desc->suite.comp.decomp.vecs,
1636 desc->suite.comp.comp.count,
1637 desc->suite.comp.decomp.count);
1638
1639 crypto_free_comp(tfm);
1640 return err;
1641 }
1642
1643 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1644 u32 type, u32 mask)
1645 {
1646 struct crypto_pcomp *tfm;
1647 int err;
1648
1649 tfm = crypto_alloc_pcomp(driver, type, mask);
1650 if (IS_ERR(tfm)) {
1651 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1652 driver, PTR_ERR(tfm));
1653 return PTR_ERR(tfm);
1654 }
1655
1656 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1657 desc->suite.pcomp.decomp.vecs,
1658 desc->suite.pcomp.comp.count,
1659 desc->suite.pcomp.decomp.count);
1660
1661 crypto_free_pcomp(tfm);
1662 return err;
1663 }
1664
1665 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1666 u32 type, u32 mask)
1667 {
1668 struct crypto_ahash *tfm;
1669 int err;
1670
1671 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1672 if (IS_ERR(tfm)) {
1673 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1674 "%ld\n", driver, PTR_ERR(tfm));
1675 return PTR_ERR(tfm);
1676 }
1677
1678 err = test_hash(tfm, desc->suite.hash.vecs,
1679 desc->suite.hash.count, true);
1680 if (!err)
1681 err = test_hash(tfm, desc->suite.hash.vecs,
1682 desc->suite.hash.count, false);
1683
1684 crypto_free_ahash(tfm);
1685 return err;
1686 }
1687
1688 static int alg_test_crc32c(const struct alg_test_desc *desc,
1689 const char *driver, u32 type, u32 mask)
1690 {
1691 struct crypto_shash *tfm;
1692 u32 val;
1693 int err;
1694
1695 err = alg_test_hash(desc, driver, type, mask);
1696 if (err)
1697 goto out;
1698
1699 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1700 if (IS_ERR(tfm)) {
1701 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1702 "%ld\n", driver, PTR_ERR(tfm));
1703 err = PTR_ERR(tfm);
1704 goto out;
1705 }
1706
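/*
 * Extra shash sanity check on top of the generic hash tests: seed the
 * crc32c context with an arbitrary value and finalize without feeding any
 * data.  crc32c's final() is expected to return the bitwise complement of
 * the internal state, so anything other than ~420553207 is a failure.
 */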
1707 do {
1708 SHASH_DESC_ON_STACK(shash, tfm);
1709 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1710
1711 shash->tfm = tfm;
1712 shash->flags = 0;
1713
1714 *ctx = le32_to_cpu(420553207);
1715 err = crypto_shash_final(shash, (u8 *)&val);
1716 if (err) {
1717 printk(KERN_ERR "alg: crc32c: Operation failed for "
1718 "%s: %d\n", driver, err);
1719 break;
1720 }
1721
1722 if (val != ~420553207) {
1723 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1724 "%d\n", driver, val);
1725 err = -EINVAL;
1726 }
1727 } while (0);
1728
1729 crypto_free_shash(tfm);
1730
1731 out:
1732 return err;
1733 }
1734
1735 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1736 u32 type, u32 mask)
1737 {
1738 struct crypto_rng *rng;
1739 int err;
1740
1741 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1742 if (IS_ERR(rng)) {
1743 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1744 "%ld\n", driver, PTR_ERR(rng));
1745 return PTR_ERR(rng);
1746 }
1747
1748 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1749
1750 crypto_free_rng(rng);
1751
1752 return err;
1753 }
1754
1755
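/*
 * Run one DRBG CAVS-style vector: instantiate the DRNG with test entropy
 * and personalization string, generate two blocks of output (using the
 * prediction-resistance entropy when @pr is set) and compare the second
 * block against the expected data.
 */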
1756 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1757 const char *driver, u32 type, u32 mask)
1758 {
1759 int ret = -EAGAIN;
1760 struct crypto_rng *drng;
1761 struct drbg_test_data test_data;
1762 struct drbg_string addtl, pers, testentropy;
1763 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1764
1765 if (!buf)
1766 return -ENOMEM;
1767
1768 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1769 if (IS_ERR(drng)) {
1770 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1771 "%s\n", driver);
1772 kzfree(buf);
1773 return -ENOMEM;
1774 }
1775
1776 test_data.testentropy = &testentropy;
1777 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1778 drbg_string_fill(&pers, test->pers, test->perslen);
1779 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1780 if (ret) {
1781 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1782 goto outbuf;
1783 }
1784
1785 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1786 if (pr) {
1787 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1788 ret = crypto_drbg_get_bytes_addtl_test(drng,
1789 buf, test->expectedlen, &addtl, &test_data);
1790 } else {
1791 ret = crypto_drbg_get_bytes_addtl(drng,
1792 buf, test->expectedlen, &addtl);
1793 }
1794 if (ret < 0) {
1795 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1796 "driver %s\n", driver);
1797 goto outbuf;
1798 }
1799
1800 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1801 if (pr) {
1802 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1803 ret = crypto_drbg_get_bytes_addtl_test(drng,
1804 buf, test->expectedlen, &addtl, &test_data);
1805 } else {
1806 ret = crypto_drbg_get_bytes_addtl(drng,
1807 buf, test->expectedlen, &addtl);
1808 }
1809 if (ret < 0) {
1810 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1811 "driver %s\n", driver);
1812 goto outbuf;
1813 }
1814
1815 ret = memcmp(test->expected, buf, test->expectedlen);
1816
1817 outbuf:
1818 crypto_free_rng(drng);
1819 kzfree(buf);
1820 return ret;
1821 }
1822
1823
1824 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1825 u32 type, u32 mask)
1826 {
1827 int err = 0;
1828 int pr = 0;
1829 int i = 0;
1830 struct drbg_testvec *template = desc->suite.drbg.vecs;
1831 unsigned int tcount = desc->suite.drbg.count;
1832
1833 if (0 == memcmp(driver, "drbg_pr_", 8))
1834 pr = 1;
1835
1836 for (i = 0; i < tcount; i++) {
1837 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1838 if (err) {
1839 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1840 i, driver);
1841 err = -EINVAL;
1842 break;
1843 }
1844 }
1845 return err;
1846
1847 }
1848
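/*
 * Run one RSA vector: set the public or private key, encrypt the message
 * (the source is split into an 8 byte entry plus the remainder, exercising
 * multi-entry scatterlists) and compare against the expected ciphertext;
 * for private-key vectors, decrypt the ciphertext and compare against the
 * original message.
 */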
1849 static int do_test_rsa(struct crypto_akcipher *tfm,
1850 struct akcipher_testvec *vecs)
1851 {
1852 char *xbuf[XBUFSIZE];
1853 struct akcipher_request *req;
1854 void *outbuf_enc = NULL;
1855 void *outbuf_dec = NULL;
1856 struct tcrypt_result result;
1857 unsigned int out_len_max, out_len = 0;
1858 int err = -ENOMEM;
1859 struct scatterlist src, dst, src_tab[2];
1860
1861 if (testmgr_alloc_buf(xbuf))
1862 return err;
1863
1864 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1865 if (!req)
1866 goto free_xbuf;
1867
1868 init_completion(&result.completion);
1869
1870 if (vecs->public_key_vec)
1871 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1872 vecs->key_len);
1873 else
1874 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
1875 vecs->key_len);
1876 if (err)
1877 goto free_req;
1878
1879 out_len_max = crypto_akcipher_maxsize(tfm);
1880 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1881 if (!outbuf_enc)
1882 goto free_req;
1883
1884 if (WARN_ON(vecs->m_size > PAGE_SIZE))
1885 goto free_all;
1886
1887 memcpy(xbuf[0], vecs->m, vecs->m_size);
1888
1889 sg_init_table(src_tab, 2);
1890 sg_set_buf(&src_tab[0], xbuf[0], 8);
1891 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
1892 sg_init_one(&dst, outbuf_enc, out_len_max);
1893 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1894 out_len_max);
1895 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1896 tcrypt_complete, &result);
1897
1898 /* Run RSA encrypt - c = m^e mod n; */
1899 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1900 if (err) {
1901 pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1902 goto free_all;
1903 }
1904 if (req->dst_len != vecs->c_size) {
1905 pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1906 err = -EINVAL;
1907 goto free_all;
1908 }
1909 /* verify that encrypted message is equal to expected */
1910 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1911 pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1912 err = -EINVAL;
1913 goto free_all;
1914 }
1915 /* Don't invoke decrypt for vectors with public key */
1916 if (vecs->public_key_vec) {
1917 err = 0;
1918 goto free_all;
1919 }
1920 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1921 if (!outbuf_dec) {
1922 err = -ENOMEM;
1923 goto free_all;
1924 }
1925
1926 if (WARN_ON(vecs->c_size > PAGE_SIZE))
1927 goto free_all;
1928
1929 memcpy(xbuf[0], vecs->c, vecs->c_size);
1930
1931 sg_init_one(&src, xbuf[0], vecs->c_size);
1932 sg_init_one(&dst, outbuf_dec, out_len_max);
1933 init_completion(&result.completion);
1934 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
1935
1936 /* Run RSA decrypt - m = c^d mod n; */
1937 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1938 if (err) {
1939 pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1940 goto free_all;
1941 }
1942 out_len = req->dst_len;
1943 if (out_len != vecs->m_size) {
1944 pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1945 err = -EINVAL;
1946 goto free_all;
1947 }
1948 /* verify that decrypted message is equal to the original msg */
1949 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1950 pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1951 err = -EINVAL;
1952 }
1953 free_all:
1954 kfree(outbuf_dec);
1955 kfree(outbuf_enc);
1956 free_req:
1957 akcipher_request_free(req);
1958 free_xbuf:
1959 testmgr_free_buf(xbuf);
1960 return err;
1961 }
1962
1963 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1964 unsigned int tcount)
1965 {
1966 int ret, i;
1967
1968 for (i = 0; i < tcount; i++) {
1969 ret = do_test_rsa(tfm, vecs++);
1970 if (ret) {
1971 pr_err("alg: rsa: test failed on vector %d, err=%d\n",
1972 i + 1, ret);
1973 return ret;
1974 }
1975 }
1976 return 0;
1977 }
1978
1979 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
1980 struct akcipher_testvec *vecs, unsigned int tcount)
1981 {
1982 if (strncmp(alg, "rsa", 3) == 0)
1983 return test_rsa(tfm, vecs, tcount);
1984
1985 return 0;
1986 }
1987
1988 static int alg_test_akcipher(const struct alg_test_desc *desc,
1989 const char *driver, u32 type, u32 mask)
1990 {
1991 struct crypto_akcipher *tfm;
1992 int err = 0;
1993
1994 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1995 if (IS_ERR(tfm)) {
1996 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1997 driver, PTR_ERR(tfm));
1998 return PTR_ERR(tfm);
1999 }
2000 if (desc->suite.akcipher.vecs)
2001 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2002 desc->suite.akcipher.count);
2003
2004 crypto_free_akcipher(tfm);
2005 return err;
2006 }
2007
2008 static int alg_test_null(const struct alg_test_desc *desc,
2009 const char *driver, u32 type, u32 mask)
2010 {
2011 return 0;
2012 }
2013
2014 /* Please keep this list sorted by algorithm name. */
2015 static const struct alg_test_desc alg_test_descs[] = {
2016 {
2017 .alg = "__cbc-cast5-avx",
2018 .test = alg_test_null,
2019 }, {
2020 .alg = "__cbc-cast6-avx",
2021 .test = alg_test_null,
2022 }, {
2023 .alg = "__cbc-serpent-avx",
2024 .test = alg_test_null,
2025 }, {
2026 .alg = "__cbc-serpent-avx2",
2027 .test = alg_test_null,
2028 }, {
2029 .alg = "__cbc-serpent-sse2",
2030 .test = alg_test_null,
2031 }, {
2032 .alg = "__cbc-twofish-avx",
2033 .test = alg_test_null,
2034 }, {
2035 .alg = "__driver-cbc-aes-aesni",
2036 .test = alg_test_null,
2037 .fips_allowed = 1,
2038 }, {
2039 .alg = "__driver-cbc-camellia-aesni",
2040 .test = alg_test_null,
2041 }, {
2042 .alg = "__driver-cbc-camellia-aesni-avx2",
2043 .test = alg_test_null,
2044 }, {
2045 .alg = "__driver-cbc-cast5-avx",
2046 .test = alg_test_null,
2047 }, {
2048 .alg = "__driver-cbc-cast6-avx",
2049 .test = alg_test_null,
2050 }, {
2051 .alg = "__driver-cbc-serpent-avx",
2052 .test = alg_test_null,
2053 }, {
2054 .alg = "__driver-cbc-serpent-avx2",
2055 .test = alg_test_null,
2056 }, {
2057 .alg = "__driver-cbc-serpent-sse2",
2058 .test = alg_test_null,
2059 }, {
2060 .alg = "__driver-cbc-twofish-avx",
2061 .test = alg_test_null,
2062 }, {
2063 .alg = "__driver-ecb-aes-aesni",
2064 .test = alg_test_null,
2065 .fips_allowed = 1,
2066 }, {
2067 .alg = "__driver-ecb-camellia-aesni",
2068 .test = alg_test_null,
2069 }, {
2070 .alg = "__driver-ecb-camellia-aesni-avx2",
2071 .test = alg_test_null,
2072 }, {
2073 .alg = "__driver-ecb-cast5-avx",
2074 .test = alg_test_null,
2075 }, {
2076 .alg = "__driver-ecb-cast6-avx",
2077 .test = alg_test_null,
2078 }, {
2079 .alg = "__driver-ecb-serpent-avx",
2080 .test = alg_test_null,
2081 }, {
2082 .alg = "__driver-ecb-serpent-avx2",
2083 .test = alg_test_null,
2084 }, {
2085 .alg = "__driver-ecb-serpent-sse2",
2086 .test = alg_test_null,
2087 }, {
2088 .alg = "__driver-ecb-twofish-avx",
2089 .test = alg_test_null,
2090 }, {
2091 .alg = "__driver-gcm-aes-aesni",
2092 .test = alg_test_null,
2093 .fips_allowed = 1,
2094 }, {
2095 .alg = "__ghash-pclmulqdqni",
2096 .test = alg_test_null,
2097 .fips_allowed = 1,
2098 }, {
2099 .alg = "ansi_cprng",
2100 .test = alg_test_cprng,
2101 .fips_allowed = 1,
2102 .suite = {
2103 .cprng = {
2104 .vecs = ansi_cprng_aes_tv_template,
2105 .count = ANSI_CPRNG_AES_TEST_VECTORS
2106 }
2107 }
2108 }, {
2109 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2110 .test = alg_test_aead,
2111 .suite = {
2112 .aead = {
2113 .enc = {
2114 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2115 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2116 },
2117 .dec = {
2118 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2119 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2120 }
2121 }
2122 }
2123 }, {
2124 .alg = "authenc(hmac(sha1),cbc(aes))",
2125 .test = alg_test_aead,
2126 .suite = {
2127 .aead = {
2128 .enc = {
2129 .vecs =
2130 hmac_sha1_aes_cbc_enc_tv_temp,
2131 .count =
2132 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2133 }
2134 }
2135 }
2136 }, {
2137 .alg = "authenc(hmac(sha1),cbc(des))",
2138 .test = alg_test_aead,
2139 .suite = {
2140 .aead = {
2141 .enc = {
2142 .vecs =
2143 hmac_sha1_des_cbc_enc_tv_temp,
2144 .count =
2145 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2146 }
2147 }
2148 }
2149 }, {
2150 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2151 .test = alg_test_aead,
2152 .suite = {
2153 .aead = {
2154 .enc = {
2155 .vecs =
2156 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2157 .count =
2158 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2159 }
2160 }
2161 }
2162 }, {
2163 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2164 .test = alg_test_aead,
2165 .suite = {
2166 .aead = {
2167 .enc = {
2168 .vecs =
2169 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2170 .count =
2171 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2172 },
2173 .dec = {
2174 .vecs =
2175 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2176 .count =
2177 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2178 }
2179 }
2180 }
2181 }, {
2182 .alg = "authenc(hmac(sha224),cbc(des))",
2183 .test = alg_test_aead,
2184 .suite = {
2185 .aead = {
2186 .enc = {
2187 .vecs =
2188 hmac_sha224_des_cbc_enc_tv_temp,
2189 .count =
2190 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2191 }
2192 }
2193 }
2194 }, {
2195 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2196 .test = alg_test_aead,
2197 .suite = {
2198 .aead = {
2199 .enc = {
2200 .vecs =
2201 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2202 .count =
2203 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2204 }
2205 }
2206 }
2207 }, {
2208 .alg = "authenc(hmac(sha256),cbc(aes))",
2209 .test = alg_test_aead,
2210 .suite = {
2211 .aead = {
2212 .enc = {
2213 .vecs =
2214 hmac_sha256_aes_cbc_enc_tv_temp,
2215 .count =
2216 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2217 }
2218 }
2219 }
2220 }, {
2221 .alg = "authenc(hmac(sha256),cbc(des))",
2222 .test = alg_test_aead,
2223 .suite = {
2224 .aead = {
2225 .enc = {
2226 .vecs =
2227 hmac_sha256_des_cbc_enc_tv_temp,
2228 .count =
2229 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2230 }
2231 }
2232 }
2233 }, {
2234 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2235 .test = alg_test_aead,
2236 .suite = {
2237 .aead = {
2238 .enc = {
2239 .vecs =
2240 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2241 .count =
2242 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2243 }
2244 }
2245 }
2246 }, {
2247 .alg = "authenc(hmac(sha384),cbc(des))",
2248 .test = alg_test_aead,
2249 .suite = {
2250 .aead = {
2251 .enc = {
2252 .vecs =
2253 hmac_sha384_des_cbc_enc_tv_temp,
2254 .count =
2255 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2256 }
2257 }
2258 }
2259 }, {
2260 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2261 .test = alg_test_aead,
2262 .suite = {
2263 .aead = {
2264 .enc = {
2265 .vecs =
2266 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2267 .count =
2268 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2269 }
2270 }
2271 }
2272 }, {
2273 .alg = "authenc(hmac(sha512),cbc(aes))",
2274 .test = alg_test_aead,
2275 .suite = {
2276 .aead = {
2277 .enc = {
2278 .vecs =
2279 hmac_sha512_aes_cbc_enc_tv_temp,
2280 .count =
2281 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2282 }
2283 }
2284 }
2285 }, {
2286 .alg = "authenc(hmac(sha512),cbc(des))",
2287 .test = alg_test_aead,
2288 .suite = {
2289 .aead = {
2290 .enc = {
2291 .vecs =
2292 hmac_sha512_des_cbc_enc_tv_temp,
2293 .count =
2294 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2295 }
2296 }
2297 }
2298 }, {
2299 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2300 .test = alg_test_aead,
2301 .suite = {
2302 .aead = {
2303 .enc = {
2304 .vecs =
2305 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2306 .count =
2307 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2308 }
2309 }
2310 }
2311 }, {
2312 .alg = "cbc(aes)",
2313 .test = alg_test_skcipher,
2314 .fips_allowed = 1,
2315 .suite = {
2316 .cipher = {
2317 .enc = {
2318 .vecs = aes_cbc_enc_tv_template,
2319 .count = AES_CBC_ENC_TEST_VECTORS
2320 },
2321 .dec = {
2322 .vecs = aes_cbc_dec_tv_template,
2323 .count = AES_CBC_DEC_TEST_VECTORS
2324 }
2325 }
2326 }
2327 }, {
2328 .alg = "cbc(anubis)",
2329 .test = alg_test_skcipher,
2330 .suite = {
2331 .cipher = {
2332 .enc = {
2333 .vecs = anubis_cbc_enc_tv_template,
2334 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2335 },
2336 .dec = {
2337 .vecs = anubis_cbc_dec_tv_template,
2338 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2339 }
2340 }
2341 }
2342 }, {
2343 .alg = "cbc(blowfish)",
2344 .test = alg_test_skcipher,
2345 .suite = {
2346 .cipher = {
2347 .enc = {
2348 .vecs = bf_cbc_enc_tv_template,
2349 .count = BF_CBC_ENC_TEST_VECTORS
2350 },
2351 .dec = {
2352 .vecs = bf_cbc_dec_tv_template,
2353 .count = BF_CBC_DEC_TEST_VECTORS
2354 }
2355 }
2356 }
2357 }, {
2358 .alg = "cbc(camellia)",
2359 .test = alg_test_skcipher,
2360 .suite = {
2361 .cipher = {
2362 .enc = {
2363 .vecs = camellia_cbc_enc_tv_template,
2364 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2365 },
2366 .dec = {
2367 .vecs = camellia_cbc_dec_tv_template,
2368 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2369 }
2370 }
2371 }
2372 }, {
2373 .alg = "cbc(cast5)",
2374 .test = alg_test_skcipher,
2375 .suite = {
2376 .cipher = {
2377 .enc = {
2378 .vecs = cast5_cbc_enc_tv_template,
2379 .count = CAST5_CBC_ENC_TEST_VECTORS
2380 },
2381 .dec = {
2382 .vecs = cast5_cbc_dec_tv_template,
2383 .count = CAST5_CBC_DEC_TEST_VECTORS
2384 }
2385 }
2386 }
2387 }, {
2388 .alg = "cbc(cast6)",
2389 .test = alg_test_skcipher,
2390 .suite = {
2391 .cipher = {
2392 .enc = {
2393 .vecs = cast6_cbc_enc_tv_template,
2394 .count = CAST6_CBC_ENC_TEST_VECTORS
2395 },
2396 .dec = {
2397 .vecs = cast6_cbc_dec_tv_template,
2398 .count = CAST6_CBC_DEC_TEST_VECTORS
2399 }
2400 }
2401 }
2402 }, {
2403 .alg = "cbc(des)",
2404 .test = alg_test_skcipher,
2405 .suite = {
2406 .cipher = {
2407 .enc = {
2408 .vecs = des_cbc_enc_tv_template,
2409 .count = DES_CBC_ENC_TEST_VECTORS
2410 },
2411 .dec = {
2412 .vecs = des_cbc_dec_tv_template,
2413 .count = DES_CBC_DEC_TEST_VECTORS
2414 }
2415 }
2416 }
2417 }, {
2418 .alg = "cbc(des3_ede)",
2419 .test = alg_test_skcipher,
2420 .fips_allowed = 1,
2421 .suite = {
2422 .cipher = {
2423 .enc = {
2424 .vecs = des3_ede_cbc_enc_tv_template,
2425 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2426 },
2427 .dec = {
2428 .vecs = des3_ede_cbc_dec_tv_template,
2429 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2430 }
2431 }
2432 }
2433 }, {
2434 .alg = "cbc(serpent)",
2435 .test = alg_test_skcipher,
2436 .suite = {
2437 .cipher = {
2438 .enc = {
2439 .vecs = serpent_cbc_enc_tv_template,
2440 .count = SERPENT_CBC_ENC_TEST_VECTORS
2441 },
2442 .dec = {
2443 .vecs = serpent_cbc_dec_tv_template,
2444 .count = SERPENT_CBC_DEC_TEST_VECTORS
2445 }
2446 }
2447 }
2448 }, {
2449 .alg = "cbc(twofish)",
2450 .test = alg_test_skcipher,
2451 .suite = {
2452 .cipher = {
2453 .enc = {
2454 .vecs = tf_cbc_enc_tv_template,
2455 .count = TF_CBC_ENC_TEST_VECTORS
2456 },
2457 .dec = {
2458 .vecs = tf_cbc_dec_tv_template,
2459 .count = TF_CBC_DEC_TEST_VECTORS
2460 }
2461 }
2462 }
2463 }, {
2464 .alg = "ccm(aes)",
2465 .test = alg_test_aead,
2466 .fips_allowed = 1,
2467 .suite = {
2468 .aead = {
2469 .enc = {
2470 .vecs = aes_ccm_enc_tv_template,
2471 .count = AES_CCM_ENC_TEST_VECTORS
2472 },
2473 .dec = {
2474 .vecs = aes_ccm_dec_tv_template,
2475 .count = AES_CCM_DEC_TEST_VECTORS
2476 }
2477 }
2478 }
2479 }, {
2480 .alg = "chacha20",
2481 .test = alg_test_skcipher,
2482 .suite = {
2483 .cipher = {
2484 .enc = {
2485 .vecs = chacha20_enc_tv_template,
2486 .count = CHACHA20_ENC_TEST_VECTORS
2487 },
2488 .dec = {
2489 .vecs = chacha20_enc_tv_template,
2490 .count = CHACHA20_ENC_TEST_VECTORS
2491 },
2492 }
2493 }
2494 }, {
2495 .alg = "cmac(aes)",
2496 .fips_allowed = 1,
2497 .test = alg_test_hash,
2498 .suite = {
2499 .hash = {
2500 .vecs = aes_cmac128_tv_template,
2501 .count = CMAC_AES_TEST_VECTORS
2502 }
2503 }
2504 }, {
2505 .alg = "cmac(des3_ede)",
2506 .fips_allowed = 1,
2507 .test = alg_test_hash,
2508 .suite = {
2509 .hash = {
2510 .vecs = des3_ede_cmac64_tv_template,
2511 .count = CMAC_DES3_EDE_TEST_VECTORS
2512 }
2513 }
2514 }, {
2515 .alg = "compress_null",
2516 .test = alg_test_null,
2517 }, {
2518 .alg = "crc32",
2519 .test = alg_test_hash,
2520 .suite = {
2521 .hash = {
2522 .vecs = crc32_tv_template,
2523 .count = CRC32_TEST_VECTORS
2524 }
2525 }
2526 }, {
2527 .alg = "crc32c",
2528 .test = alg_test_crc32c,
2529 .fips_allowed = 1,
2530 .suite = {
2531 .hash = {
2532 .vecs = crc32c_tv_template,
2533 .count = CRC32C_TEST_VECTORS
2534 }
2535 }
2536 }, {
2537 .alg = "crct10dif",
2538 .test = alg_test_hash,
2539 .fips_allowed = 1,
2540 .suite = {
2541 .hash = {
2542 .vecs = crct10dif_tv_template,
2543 .count = CRCT10DIF_TEST_VECTORS
2544 }
2545 }
2546 }, {
2547 .alg = "cryptd(__driver-cbc-aes-aesni)",
2548 .test = alg_test_null,
2549 .fips_allowed = 1,
2550 }, {
2551 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2552 .test = alg_test_null,
2553 }, {
2554 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2555 .test = alg_test_null,
2556 }, {
2557 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2558 .test = alg_test_null,
2559 }, {
2560 .alg = "cryptd(__driver-ecb-aes-aesni)",
2561 .test = alg_test_null,
2562 .fips_allowed = 1,
2563 }, {
2564 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2565 .test = alg_test_null,
2566 }, {
2567 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2568 .test = alg_test_null,
2569 }, {
2570 .alg = "cryptd(__driver-ecb-cast5-avx)",
2571 .test = alg_test_null,
2572 }, {
2573 .alg = "cryptd(__driver-ecb-cast6-avx)",
2574 .test = alg_test_null,
2575 }, {
2576 .alg = "cryptd(__driver-ecb-serpent-avx)",
2577 .test = alg_test_null,
2578 }, {
2579 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2580 .test = alg_test_null,
2581 }, {
2582 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2583 .test = alg_test_null,
2584 }, {
2585 .alg = "cryptd(__driver-ecb-twofish-avx)",
2586 .test = alg_test_null,
2587 }, {
2588 .alg = "cryptd(__driver-gcm-aes-aesni)",
2589 .test = alg_test_null,
2590 .fips_allowed = 1,
2591 }, {
2592 .alg = "cryptd(__ghash-pclmulqdqni)",
2593 .test = alg_test_null,
2594 .fips_allowed = 1,
2595 }, {
2596 .alg = "ctr(aes)",
2597 .test = alg_test_skcipher,
2598 .fips_allowed = 1,
2599 .suite = {
2600 .cipher = {
2601 .enc = {
2602 .vecs = aes_ctr_enc_tv_template,
2603 .count = AES_CTR_ENC_TEST_VECTORS
2604 },
2605 .dec = {
2606 .vecs = aes_ctr_dec_tv_template,
2607 .count = AES_CTR_DEC_TEST_VECTORS
2608 }
2609 }
2610 }
2611 }, {
2612 .alg = "ctr(blowfish)",
2613 .test = alg_test_skcipher,
2614 .suite = {
2615 .cipher = {
2616 .enc = {
2617 .vecs = bf_ctr_enc_tv_template,
2618 .count = BF_CTR_ENC_TEST_VECTORS
2619 },
2620 .dec = {
2621 .vecs = bf_ctr_dec_tv_template,
2622 .count = BF_CTR_DEC_TEST_VECTORS
2623 }
2624 }
2625 }
2626 }, {
2627 .alg = "ctr(camellia)",
2628 .test = alg_test_skcipher,
2629 .suite = {
2630 .cipher = {
2631 .enc = {
2632 .vecs = camellia_ctr_enc_tv_template,
2633 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2634 },
2635 .dec = {
2636 .vecs = camellia_ctr_dec_tv_template,
2637 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2638 }
2639 }
2640 }
2641 }, {
2642 .alg = "ctr(cast5)",
2643 .test = alg_test_skcipher,
2644 .suite = {
2645 .cipher = {
2646 .enc = {
2647 .vecs = cast5_ctr_enc_tv_template,
2648 .count = CAST5_CTR_ENC_TEST_VECTORS
2649 },
2650 .dec = {
2651 .vecs = cast5_ctr_dec_tv_template,
2652 .count = CAST5_CTR_DEC_TEST_VECTORS
2653 }
2654 }
2655 }
2656 }, {
2657 .alg = "ctr(cast6)",
2658 .test = alg_test_skcipher,
2659 .suite = {
2660 .cipher = {
2661 .enc = {
2662 .vecs = cast6_ctr_enc_tv_template,
2663 .count = CAST6_CTR_ENC_TEST_VECTORS
2664 },
2665 .dec = {
2666 .vecs = cast6_ctr_dec_tv_template,
2667 .count = CAST6_CTR_DEC_TEST_VECTORS
2668 }
2669 }
2670 }
2671 }, {
2672 .alg = "ctr(des)",
2673 .test = alg_test_skcipher,
2674 .suite = {
2675 .cipher = {
2676 .enc = {
2677 .vecs = des_ctr_enc_tv_template,
2678 .count = DES_CTR_ENC_TEST_VECTORS
2679 },
2680 .dec = {
2681 .vecs = des_ctr_dec_tv_template,
2682 .count = DES_CTR_DEC_TEST_VECTORS
2683 }
2684 }
2685 }
2686 }, {
2687 .alg = "ctr(des3_ede)",
2688 .test = alg_test_skcipher,
2689 .suite = {
2690 .cipher = {
2691 .enc = {
2692 .vecs = des3_ede_ctr_enc_tv_template,
2693 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2694 },
2695 .dec = {
2696 .vecs = des3_ede_ctr_dec_tv_template,
2697 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2698 }
2699 }
2700 }
2701 }, {
2702 .alg = "ctr(serpent)",
2703 .test = alg_test_skcipher,
2704 .suite = {
2705 .cipher = {
2706 .enc = {
2707 .vecs = serpent_ctr_enc_tv_template,
2708 .count = SERPENT_CTR_ENC_TEST_VECTORS
2709 },
2710 .dec = {
2711 .vecs = serpent_ctr_dec_tv_template,
2712 .count = SERPENT_CTR_DEC_TEST_VECTORS
2713 }
2714 }
2715 }
2716 }, {
2717 .alg = "ctr(twofish)",
2718 .test = alg_test_skcipher,
2719 .suite = {
2720 .cipher = {
2721 .enc = {
2722 .vecs = tf_ctr_enc_tv_template,
2723 .count = TF_CTR_ENC_TEST_VECTORS
2724 },
2725 .dec = {
2726 .vecs = tf_ctr_dec_tv_template,
2727 .count = TF_CTR_DEC_TEST_VECTORS
2728 }
2729 }
2730 }
2731 }, {
2732 .alg = "cts(cbc(aes))",
2733 .test = alg_test_skcipher,
2734 .suite = {
2735 .cipher = {
2736 .enc = {
2737 .vecs = cts_mode_enc_tv_template,
2738 .count = CTS_MODE_ENC_TEST_VECTORS
2739 },
2740 .dec = {
2741 .vecs = cts_mode_dec_tv_template,
2742 .count = CTS_MODE_DEC_TEST_VECTORS
2743 }
2744 }
2745 }
2746 }, {
2747 .alg = "deflate",
2748 .test = alg_test_comp,
2749 .fips_allowed = 1,
2750 .suite = {
2751 .comp = {
2752 .comp = {
2753 .vecs = deflate_comp_tv_template,
2754 .count = DEFLATE_COMP_TEST_VECTORS
2755 },
2756 .decomp = {
2757 .vecs = deflate_decomp_tv_template,
2758 .count = DEFLATE_DECOMP_TEST_VECTORS
2759 }
2760 }
2761 }
2762 }, {
2763 .alg = "digest_null",
2764 .test = alg_test_null,
2765 }, {
2766 .alg = "drbg_nopr_ctr_aes128",
2767 .test = alg_test_drbg,
2768 .fips_allowed = 1,
2769 .suite = {
2770 .drbg = {
2771 .vecs = drbg_nopr_ctr_aes128_tv_template,
2772 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2773 }
2774 }
2775 }, {
2776 .alg = "drbg_nopr_ctr_aes192",
2777 .test = alg_test_drbg,
2778 .fips_allowed = 1,
2779 .suite = {
2780 .drbg = {
2781 .vecs = drbg_nopr_ctr_aes192_tv_template,
2782 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2783 }
2784 }
2785 }, {
2786 .alg = "drbg_nopr_ctr_aes256",
2787 .test = alg_test_drbg,
2788 .fips_allowed = 1,
2789 .suite = {
2790 .drbg = {
2791 .vecs = drbg_nopr_ctr_aes256_tv_template,
2792 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2793 }
2794 }
2795 }, {
2796 /*
2797 * There is no need to specifically test the DRBG with every
2798 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2799 */
2800 .alg = "drbg_nopr_hmac_sha1",
2801 .fips_allowed = 1,
2802 .test = alg_test_null,
2803 }, {
2804 .alg = "drbg_nopr_hmac_sha256",
2805 .test = alg_test_drbg,
2806 .fips_allowed = 1,
2807 .suite = {
2808 .drbg = {
2809 .vecs = drbg_nopr_hmac_sha256_tv_template,
2810 .count =
2811 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2812 }
2813 }
2814 }, {
2815 /* covered by drbg_nopr_hmac_sha256 test */
2816 .alg = "drbg_nopr_hmac_sha384",
2817 .fips_allowed = 1,
2818 .test = alg_test_null,
2819 }, {
2820 .alg = "drbg_nopr_hmac_sha512",
2821 .test = alg_test_null,
2822 .fips_allowed = 1,
2823 }, {
2824 .alg = "drbg_nopr_sha1",
2825 .fips_allowed = 1,
2826 .test = alg_test_null,
2827 }, {
2828 .alg = "drbg_nopr_sha256",
2829 .test = alg_test_drbg,
2830 .fips_allowed = 1,
2831 .suite = {
2832 .drbg = {
2833 .vecs = drbg_nopr_sha256_tv_template,
2834 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2835 }
2836 }
2837 }, {
2838 /* covered by drbg_nopr_sha256 test */
2839 .alg = "drbg_nopr_sha384",
2840 .fips_allowed = 1,
2841 .test = alg_test_null,
2842 }, {
2843 .alg = "drbg_nopr_sha512",
2844 .fips_allowed = 1,
2845 .test = alg_test_null,
2846 }, {
2847 .alg = "drbg_pr_ctr_aes128",
2848 .test = alg_test_drbg,
2849 .fips_allowed = 1,
2850 .suite = {
2851 .drbg = {
2852 .vecs = drbg_pr_ctr_aes128_tv_template,
2853 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2854 }
2855 }
2856 }, {
2857 /* covered by drbg_pr_ctr_aes128 test */
2858 .alg = "drbg_pr_ctr_aes192",
2859 .fips_allowed = 1,
2860 .test = alg_test_null,
2861 }, {
2862 .alg = "drbg_pr_ctr_aes256",
2863 .fips_allowed = 1,
2864 .test = alg_test_null,
2865 }, {
2866 .alg = "drbg_pr_hmac_sha1",
2867 .fips_allowed = 1,
2868 .test = alg_test_null,
2869 }, {
2870 .alg = "drbg_pr_hmac_sha256",
2871 .test = alg_test_drbg,
2872 .fips_allowed = 1,
2873 .suite = {
2874 .drbg = {
2875 .vecs = drbg_pr_hmac_sha256_tv_template,
2876 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2877 }
2878 }
2879 }, {
2880 /* covered by drbg_pr_hmac_sha256 test */
2881 .alg = "drbg_pr_hmac_sha384",
2882 .fips_allowed = 1,
2883 .test = alg_test_null,
2884 }, {
2885 .alg = "drbg_pr_hmac_sha512",
2886 .test = alg_test_null,
2887 .fips_allowed = 1,
2888 }, {
2889 .alg = "drbg_pr_sha1",
2890 .fips_allowed = 1,
2891 .test = alg_test_null,
2892 }, {
2893 .alg = "drbg_pr_sha256",
2894 .test = alg_test_drbg,
2895 .fips_allowed = 1,
2896 .suite = {
2897 .drbg = {
2898 .vecs = drbg_pr_sha256_tv_template,
2899 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2900 }
2901 }
2902 }, {
2903 /* covered by drbg_pr_sha256 test */
2904 .alg = "drbg_pr_sha384",
2905 .fips_allowed = 1,
2906 .test = alg_test_null,
2907 }, {
2908 .alg = "drbg_pr_sha512",
2909 .fips_allowed = 1,
2910 .test = alg_test_null,
2911 }, {
2912 .alg = "ecb(__aes-aesni)",
2913 .test = alg_test_null,
2914 .fips_allowed = 1,
2915 }, {
2916 .alg = "ecb(aes)",
2917 .test = alg_test_skcipher,
2918 .fips_allowed = 1,
2919 .suite = {
2920 .cipher = {
2921 .enc = {
2922 .vecs = aes_enc_tv_template,
2923 .count = AES_ENC_TEST_VECTORS
2924 },
2925 .dec = {
2926 .vecs = aes_dec_tv_template,
2927 .count = AES_DEC_TEST_VECTORS
2928 }
2929 }
2930 }
2931 }, {
2932 .alg = "ecb(anubis)",
2933 .test = alg_test_skcipher,
2934 .suite = {
2935 .cipher = {
2936 .enc = {
2937 .vecs = anubis_enc_tv_template,
2938 .count = ANUBIS_ENC_TEST_VECTORS
2939 },
2940 .dec = {
2941 .vecs = anubis_dec_tv_template,
2942 .count = ANUBIS_DEC_TEST_VECTORS
2943 }
2944 }
2945 }
2946 }, {
2947 .alg = "ecb(arc4)",
2948 .test = alg_test_skcipher,
2949 .suite = {
2950 .cipher = {
2951 .enc = {
2952 .vecs = arc4_enc_tv_template,
2953 .count = ARC4_ENC_TEST_VECTORS
2954 },
2955 .dec = {
2956 .vecs = arc4_dec_tv_template,
2957 .count = ARC4_DEC_TEST_VECTORS
2958 }
2959 }
2960 }
2961 }, {
2962 .alg = "ecb(blowfish)",
2963 .test = alg_test_skcipher,
2964 .suite = {
2965 .cipher = {
2966 .enc = {
2967 .vecs = bf_enc_tv_template,
2968 .count = BF_ENC_TEST_VECTORS
2969 },
2970 .dec = {
2971 .vecs = bf_dec_tv_template,
2972 .count = BF_DEC_TEST_VECTORS
2973 }
2974 }
2975 }
2976 }, {
2977 .alg = "ecb(camellia)",
2978 .test = alg_test_skcipher,
2979 .suite = {
2980 .cipher = {
2981 .enc = {
2982 .vecs = camellia_enc_tv_template,
2983 .count = CAMELLIA_ENC_TEST_VECTORS
2984 },
2985 .dec = {
2986 .vecs = camellia_dec_tv_template,
2987 .count = CAMELLIA_DEC_TEST_VECTORS
2988 }
2989 }
2990 }
2991 }, {
2992 .alg = "ecb(cast5)",
2993 .test = alg_test_skcipher,
2994 .suite = {
2995 .cipher = {
2996 .enc = {
2997 .vecs = cast5_enc_tv_template,
2998 .count = CAST5_ENC_TEST_VECTORS
2999 },
3000 .dec = {
3001 .vecs = cast5_dec_tv_template,
3002 .count = CAST5_DEC_TEST_VECTORS
3003 }
3004 }
3005 }
3006 }, {
3007 .alg = "ecb(cast6)",
3008 .test = alg_test_skcipher,
3009 .suite = {
3010 .cipher = {
3011 .enc = {
3012 .vecs = cast6_enc_tv_template,
3013 .count = CAST6_ENC_TEST_VECTORS
3014 },
3015 .dec = {
3016 .vecs = cast6_dec_tv_template,
3017 .count = CAST6_DEC_TEST_VECTORS
3018 }
3019 }
3020 }
3021 }, {
3022 .alg = "ecb(cipher_null)",
3023 .test = alg_test_null,
3024 }, {
3025 .alg = "ecb(des)",
3026 .test = alg_test_skcipher,
3027 .suite = {
3028 .cipher = {
3029 .enc = {
3030 .vecs = des_enc_tv_template,
3031 .count = DES_ENC_TEST_VECTORS
3032 },
3033 .dec = {
3034 .vecs = des_dec_tv_template,
3035 .count = DES_DEC_TEST_VECTORS
3036 }
3037 }
3038 }
3039 }, {
3040 .alg = "ecb(des3_ede)",
3041 .test = alg_test_skcipher,
3042 .fips_allowed = 1,
3043 .suite = {
3044 .cipher = {
3045 .enc = {
3046 .vecs = des3_ede_enc_tv_template,
3047 .count = DES3_EDE_ENC_TEST_VECTORS
3048 },
3049 .dec = {
3050 .vecs = des3_ede_dec_tv_template,
3051 .count = DES3_EDE_DEC_TEST_VECTORS
3052 }
3053 }
3054 }
3055 }, {
3056 .alg = "ecb(fcrypt)",
3057 .test = alg_test_skcipher,
3058 .suite = {
3059 .cipher = {
3060 .enc = {
3061 .vecs = fcrypt_pcbc_enc_tv_template,
3062 .count = 1
3063 },
3064 .dec = {
3065 .vecs = fcrypt_pcbc_dec_tv_template,
3066 .count = 1
3067 }
3068 }
3069 }
3070 }, {
3071 .alg = "ecb(khazad)",
3072 .test = alg_test_skcipher,
3073 .suite = {
3074 .cipher = {
3075 .enc = {
3076 .vecs = khazad_enc_tv_template,
3077 .count = KHAZAD_ENC_TEST_VECTORS
3078 },
3079 .dec = {
3080 .vecs = khazad_dec_tv_template,
3081 .count = KHAZAD_DEC_TEST_VECTORS
3082 }
3083 }
3084 }
3085 }, {
3086 .alg = "ecb(seed)",
3087 .test = alg_test_skcipher,
3088 .suite = {
3089 .cipher = {
3090 .enc = {
3091 .vecs = seed_enc_tv_template,
3092 .count = SEED_ENC_TEST_VECTORS
3093 },
3094 .dec = {
3095 .vecs = seed_dec_tv_template,
3096 .count = SEED_DEC_TEST_VECTORS
3097 }
3098 }
3099 }
3100 }, {
3101 .alg = "ecb(serpent)",
3102 .test = alg_test_skcipher,
3103 .suite = {
3104 .cipher = {
3105 .enc = {
3106 .vecs = serpent_enc_tv_template,
3107 .count = SERPENT_ENC_TEST_VECTORS
3108 },
3109 .dec = {
3110 .vecs = serpent_dec_tv_template,
3111 .count = SERPENT_DEC_TEST_VECTORS
3112 }
3113 }
3114 }
3115 }, {
3116 .alg = "ecb(tea)",
3117 .test = alg_test_skcipher,
3118 .suite = {
3119 .cipher = {
3120 .enc = {
3121 .vecs = tea_enc_tv_template,
3122 .count = TEA_ENC_TEST_VECTORS
3123 },
3124 .dec = {
3125 .vecs = tea_dec_tv_template,
3126 .count = TEA_DEC_TEST_VECTORS
3127 }
3128 }
3129 }
3130 }, {
3131 .alg = "ecb(tnepres)",
3132 .test = alg_test_skcipher,
3133 .suite = {
3134 .cipher = {
3135 .enc = {
3136 .vecs = tnepres_enc_tv_template,
3137 .count = TNEPRES_ENC_TEST_VECTORS
3138 },
3139 .dec = {
3140 .vecs = tnepres_dec_tv_template,
3141 .count = TNEPRES_DEC_TEST_VECTORS
3142 }
3143 }
3144 }
3145 }, {
3146 .alg = "ecb(twofish)",
3147 .test = alg_test_skcipher,
3148 .suite = {
3149 .cipher = {
3150 .enc = {
3151 .vecs = tf_enc_tv_template,
3152 .count = TF_ENC_TEST_VECTORS
3153 },
3154 .dec = {
3155 .vecs = tf_dec_tv_template,
3156 .count = TF_DEC_TEST_VECTORS
3157 }
3158 }
3159 }
3160 }, {
3161 .alg = "ecb(xeta)",
3162 .test = alg_test_skcipher,
3163 .suite = {
3164 .cipher = {
3165 .enc = {
3166 .vecs = xeta_enc_tv_template,
3167 .count = XETA_ENC_TEST_VECTORS
3168 },
3169 .dec = {
3170 .vecs = xeta_dec_tv_template,
3171 .count = XETA_DEC_TEST_VECTORS
3172 }
3173 }
3174 }
3175 }, {
3176 .alg = "ecb(xtea)",
3177 .test = alg_test_skcipher,
3178 .suite = {
3179 .cipher = {
3180 .enc = {
3181 .vecs = xtea_enc_tv_template,
3182 .count = XTEA_ENC_TEST_VECTORS
3183 },
3184 .dec = {
3185 .vecs = xtea_dec_tv_template,
3186 .count = XTEA_DEC_TEST_VECTORS
3187 }
3188 }
3189 }
3190 }, {
3191 .alg = "gcm(aes)",
3192 .test = alg_test_aead,
3193 .fips_allowed = 1,
3194 .suite = {
3195 .aead = {
3196 .enc = {
3197 .vecs = aes_gcm_enc_tv_template,
3198 .count = AES_GCM_ENC_TEST_VECTORS
3199 },
3200 .dec = {
3201 .vecs = aes_gcm_dec_tv_template,
3202 .count = AES_GCM_DEC_TEST_VECTORS
3203 }
3204 }
3205 }
3206 }, {
3207 .alg = "ghash",
3208 .test = alg_test_hash,
3209 .fips_allowed = 1,
3210 .suite = {
3211 .hash = {
3212 .vecs = ghash_tv_template,
3213 .count = GHASH_TEST_VECTORS
3214 }
3215 }
3216 }, {
3217 .alg = "hmac(crc32)",
3218 .test = alg_test_hash,
3219 .suite = {
3220 .hash = {
3221 .vecs = bfin_crc_tv_template,
3222 .count = BFIN_CRC_TEST_VECTORS
3223 }
3224 }
3225 }, {
3226 .alg = "hmac(md5)",
3227 .test = alg_test_hash,
3228 .suite = {
3229 .hash = {
3230 .vecs = hmac_md5_tv_template,
3231 .count = HMAC_MD5_TEST_VECTORS
3232 }
3233 }
3234 }, {
3235 .alg = "hmac(rmd128)",
3236 .test = alg_test_hash,
3237 .suite = {
3238 .hash = {
3239 .vecs = hmac_rmd128_tv_template,
3240 .count = HMAC_RMD128_TEST_VECTORS
3241 }
3242 }
3243 }, {
3244 .alg = "hmac(rmd160)",
3245 .test = alg_test_hash,
3246 .suite = {
3247 .hash = {
3248 .vecs = hmac_rmd160_tv_template,
3249 .count = HMAC_RMD160_TEST_VECTORS
3250 }
3251 }
3252 }, {
3253 .alg = "hmac(sha1)",
3254 .test = alg_test_hash,
3255 .fips_allowed = 1,
3256 .suite = {
3257 .hash = {
3258 .vecs = hmac_sha1_tv_template,
3259 .count = HMAC_SHA1_TEST_VECTORS
3260 }
3261 }
3262 }, {
3263 .alg = "hmac(sha224)",
3264 .test = alg_test_hash,
3265 .fips_allowed = 1,
3266 .suite = {
3267 .hash = {
3268 .vecs = hmac_sha224_tv_template,
3269 .count = HMAC_SHA224_TEST_VECTORS
3270 }
3271 }
3272 }, {
3273 .alg = "hmac(sha256)",
3274 .test = alg_test_hash,
3275 .fips_allowed = 1,
3276 .suite = {
3277 .hash = {
3278 .vecs = hmac_sha256_tv_template,
3279 .count = HMAC_SHA256_TEST_VECTORS
3280 }
3281 }
3282 }, {
3283 .alg = "hmac(sha384)",
3284 .test = alg_test_hash,
3285 .fips_allowed = 1,
3286 .suite = {
3287 .hash = {
3288 .vecs = hmac_sha384_tv_template,
3289 .count = HMAC_SHA384_TEST_VECTORS
3290 }
3291 }
3292 }, {
3293 .alg = "hmac(sha512)",
3294 .test = alg_test_hash,
3295 .fips_allowed = 1,
3296 .suite = {
3297 .hash = {
3298 .vecs = hmac_sha512_tv_template,
3299 .count = HMAC_SHA512_TEST_VECTORS
3300 }
3301 }
3302 }, {
3303 .alg = "jitterentropy_rng",
3304 .fips_allowed = 1,
3305 .test = alg_test_null,
3306 }, {
3307 .alg = "kw(aes)",
3308 .test = alg_test_skcipher,
3309 .fips_allowed = 1,
3310 .suite = {
3311 .cipher = {
3312 .enc = {
3313 .vecs = aes_kw_enc_tv_template,
3314 .count = ARRAY_SIZE(aes_kw_enc_tv_template)
3315 },
3316 .dec = {
3317 .vecs = aes_kw_dec_tv_template,
3318 .count = ARRAY_SIZE(aes_kw_dec_tv_template)
3319 }
3320 }
3321 }
3322 }, {
3323 .alg = "lrw(aes)",
3324 .test = alg_test_skcipher,
3325 .suite = {
3326 .cipher = {
3327 .enc = {
3328 .vecs = aes_lrw_enc_tv_template,
3329 .count = AES_LRW_ENC_TEST_VECTORS
3330 },
3331 .dec = {
3332 .vecs = aes_lrw_dec_tv_template,
3333 .count = AES_LRW_DEC_TEST_VECTORS
3334 }
3335 }
3336 }
3337 }, {
3338 .alg = "lrw(camellia)",
3339 .test = alg_test_skcipher,
3340 .suite = {
3341 .cipher = {
3342 .enc = {
3343 .vecs = camellia_lrw_enc_tv_template,
3344 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3345 },
3346 .dec = {
3347 .vecs = camellia_lrw_dec_tv_template,
3348 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3349 }
3350 }
3351 }
3352 }, {
3353 .alg = "lrw(cast6)",
3354 .test = alg_test_skcipher,
3355 .suite = {
3356 .cipher = {
3357 .enc = {
3358 .vecs = cast6_lrw_enc_tv_template,
3359 .count = CAST6_LRW_ENC_TEST_VECTORS
3360 },
3361 .dec = {
3362 .vecs = cast6_lrw_dec_tv_template,
3363 .count = CAST6_LRW_DEC_TEST_VECTORS
3364 }
3365 }
3366 }
3367 }, {
3368 .alg = "lrw(serpent)",
3369 .test = alg_test_skcipher,
3370 .suite = {
3371 .cipher = {
3372 .enc = {
3373 .vecs = serpent_lrw_enc_tv_template,
3374 .count = SERPENT_LRW_ENC_TEST_VECTORS
3375 },
3376 .dec = {
3377 .vecs = serpent_lrw_dec_tv_template,
3378 .count = SERPENT_LRW_DEC_TEST_VECTORS
3379 }
3380 }
3381 }
3382 }, {
3383 .alg = "lrw(twofish)",
3384 .test = alg_test_skcipher,
3385 .suite = {
3386 .cipher = {
3387 .enc = {
3388 .vecs = tf_lrw_enc_tv_template,
3389 .count = TF_LRW_ENC_TEST_VECTORS
3390 },
3391 .dec = {
3392 .vecs = tf_lrw_dec_tv_template,
3393 .count = TF_LRW_DEC_TEST_VECTORS
3394 }
3395 }
3396 }
3397 }, {
3398 .alg = "lz4",
3399 .test = alg_test_comp,
3400 .fips_allowed = 1,
3401 .suite = {
3402 .comp = {
3403 .comp = {
3404 .vecs = lz4_comp_tv_template,
3405 .count = LZ4_COMP_TEST_VECTORS
3406 },
3407 .decomp = {
3408 .vecs = lz4_decomp_tv_template,
3409 .count = LZ4_DECOMP_TEST_VECTORS
3410 }
3411 }
3412 }
3413 }, {
3414 .alg = "lz4hc",
3415 .test = alg_test_comp,
3416 .fips_allowed = 1,
3417 .suite = {
3418 .comp = {
3419 .comp = {
3420 .vecs = lz4hc_comp_tv_template,
3421 .count = LZ4HC_COMP_TEST_VECTORS
3422 },
3423 .decomp = {
3424 .vecs = lz4hc_decomp_tv_template,
3425 .count = LZ4HC_DECOMP_TEST_VECTORS
3426 }
3427 }
3428 }
3429 }, {
3430 .alg = "lzo",
3431 .test = alg_test_comp,
3432 .fips_allowed = 1,
3433 .suite = {
3434 .comp = {
3435 .comp = {
3436 .vecs = lzo_comp_tv_template,
3437 .count = LZO_COMP_TEST_VECTORS
3438 },
3439 .decomp = {
3440 .vecs = lzo_decomp_tv_template,
3441 .count = LZO_DECOMP_TEST_VECTORS
3442 }
3443 }
3444 }
3445 }, {
3446 .alg = "md4",
3447 .test = alg_test_hash,
3448 .suite = {
3449 .hash = {
3450 .vecs = md4_tv_template,
3451 .count = MD4_TEST_VECTORS
3452 }
3453 }
3454 }, {
3455 .alg = "md5",
3456 .test = alg_test_hash,
3457 .suite = {
3458 .hash = {
3459 .vecs = md5_tv_template,
3460 .count = MD5_TEST_VECTORS
3461 }
3462 }
3463 }, {
3464 .alg = "michael_mic",
3465 .test = alg_test_hash,
3466 .suite = {
3467 .hash = {
3468 .vecs = michael_mic_tv_template,
3469 .count = MICHAEL_MIC_TEST_VECTORS
3470 }
3471 }
3472 }, {
3473 .alg = "ofb(aes)",
3474 .test = alg_test_skcipher,
3475 .fips_allowed = 1,
3476 .suite = {
3477 .cipher = {
3478 .enc = {
3479 .vecs = aes_ofb_enc_tv_template,
3480 .count = AES_OFB_ENC_TEST_VECTORS
3481 },
3482 .dec = {
3483 .vecs = aes_ofb_dec_tv_template,
3484 .count = AES_OFB_DEC_TEST_VECTORS
3485 }
3486 }
3487 }
3488 }, {
3489 .alg = "pcbc(fcrypt)",
3490 .test = alg_test_skcipher,
3491 .suite = {
3492 .cipher = {
3493 .enc = {
3494 .vecs = fcrypt_pcbc_enc_tv_template,
3495 .count = FCRYPT_ENC_TEST_VECTORS
3496 },
3497 .dec = {
3498 .vecs = fcrypt_pcbc_dec_tv_template,
3499 .count = FCRYPT_DEC_TEST_VECTORS
3500 }
3501 }
3502 }
3503 }, {
3504 .alg = "poly1305",
3505 .test = alg_test_hash,
3506 .suite = {
3507 .hash = {
3508 .vecs = poly1305_tv_template,
3509 .count = POLY1305_TEST_VECTORS
3510 }
3511 }
3512 }, {
3513 .alg = "rfc3686(ctr(aes))",
3514 .test = alg_test_skcipher,
3515 .fips_allowed = 1,
3516 .suite = {
3517 .cipher = {
3518 .enc = {
3519 .vecs = aes_ctr_rfc3686_enc_tv_template,
3520 .count = AES_CTR_3686_ENC_TEST_VECTORS
3521 },
3522 .dec = {
3523 .vecs = aes_ctr_rfc3686_dec_tv_template,
3524 .count = AES_CTR_3686_DEC_TEST_VECTORS
3525 }
3526 }
3527 }
3528 }, {
3529 .alg = "rfc4106(gcm(aes))",
3530 .test = alg_test_aead,
3531 .fips_allowed = 1,
3532 .suite = {
3533 .aead = {
3534 .enc = {
3535 .vecs = aes_gcm_rfc4106_enc_tv_template,
3536 .count = AES_GCM_4106_ENC_TEST_VECTORS
3537 },
3538 .dec = {
3539 .vecs = aes_gcm_rfc4106_dec_tv_template,
3540 .count = AES_GCM_4106_DEC_TEST_VECTORS
3541 }
3542 }
3543 }
3544 }, {
3545 .alg = "rfc4309(ccm(aes))",
3546 .test = alg_test_aead,
3547 .fips_allowed = 1,
3548 .suite = {
3549 .aead = {
3550 .enc = {
3551 .vecs = aes_ccm_rfc4309_enc_tv_template,
3552 .count = AES_CCM_4309_ENC_TEST_VECTORS
3553 },
3554 .dec = {
3555 .vecs = aes_ccm_rfc4309_dec_tv_template,
3556 .count = AES_CCM_4309_DEC_TEST_VECTORS
3557 }
3558 }
3559 }
3560 }, {
3561 .alg = "rfc4543(gcm(aes))",
3562 .test = alg_test_aead,
3563 .suite = {
3564 .aead = {
3565 .enc = {
3566 .vecs = aes_gcm_rfc4543_enc_tv_template,
3567 .count = AES_GCM_4543_ENC_TEST_VECTORS
3568 },
3569 .dec = {
3570 .vecs = aes_gcm_rfc4543_dec_tv_template,
3571 .count = AES_GCM_4543_DEC_TEST_VECTORS
3572 },
3573 }
3574 }
3575 }, {
3576 .alg = "rfc7539(chacha20,poly1305)",
3577 .test = alg_test_aead,
3578 .suite = {
3579 .aead = {
3580 .enc = {
3581 .vecs = rfc7539_enc_tv_template,
3582 .count = RFC7539_ENC_TEST_VECTORS
3583 },
3584 .dec = {
3585 .vecs = rfc7539_dec_tv_template,
3586 .count = RFC7539_DEC_TEST_VECTORS
3587 },
3588 }
3589 }
3590 }, {
3591 .alg = "rfc7539esp(chacha20,poly1305)",
3592 .test = alg_test_aead,
3593 .suite = {
3594 .aead = {
3595 .enc = {
3596 .vecs = rfc7539esp_enc_tv_template,
3597 .count = RFC7539ESP_ENC_TEST_VECTORS
3598 },
3599 .dec = {
3600 .vecs = rfc7539esp_dec_tv_template,
3601 .count = RFC7539ESP_DEC_TEST_VECTORS
3602 },
3603 }
3604 }
3605 }, {
3606 .alg = "rmd128",
3607 .test = alg_test_hash,
3608 .suite = {
3609 .hash = {
3610 .vecs = rmd128_tv_template,
3611 .count = RMD128_TEST_VECTORS
3612 }
3613 }
3614 }, {
3615 .alg = "rmd160",
3616 .test = alg_test_hash,
3617 .suite = {
3618 .hash = {
3619 .vecs = rmd160_tv_template,
3620 .count = RMD160_TEST_VECTORS
3621 }
3622 }
3623 }, {
3624 .alg = "rmd256",
3625 .test = alg_test_hash,
3626 .suite = {
3627 .hash = {
3628 .vecs = rmd256_tv_template,
3629 .count = RMD256_TEST_VECTORS
3630 }
3631 }
3632 }, {
3633 .alg = "rmd320",
3634 .test = alg_test_hash,
3635 .suite = {
3636 .hash = {
3637 .vecs = rmd320_tv_template,
3638 .count = RMD320_TEST_VECTORS
3639 }
3640 }
3641 }, {
3642 .alg = "rsa",
3643 .test = alg_test_akcipher,
3644 .fips_allowed = 1,
3645 .suite = {
3646 .akcipher = {
3647 .vecs = rsa_tv_template,
3648 .count = RSA_TEST_VECTORS
3649 }
3650 }
3651 }, {
3652 .alg = "salsa20",
3653 .test = alg_test_skcipher,
3654 .suite = {
3655 .cipher = {
3656 .enc = {
3657 .vecs = salsa20_stream_enc_tv_template,
3658 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3659 }
3660 }
3661 }
3662 }, {
3663 .alg = "sha1",
3664 .test = alg_test_hash,
3665 .fips_allowed = 1,
3666 .suite = {
3667 .hash = {
3668 .vecs = sha1_tv_template,
3669 .count = SHA1_TEST_VECTORS
3670 }
3671 }
3672 }, {
3673 .alg = "sha224",
3674 .test = alg_test_hash,
3675 .fips_allowed = 1,
3676 .suite = {
3677 .hash = {
3678 .vecs = sha224_tv_template,
3679 .count = SHA224_TEST_VECTORS
3680 }
3681 }
3682 }, {
3683 .alg = "sha256",
3684 .test = alg_test_hash,
3685 .fips_allowed = 1,
3686 .suite = {
3687 .hash = {
3688 .vecs = sha256_tv_template,
3689 .count = SHA256_TEST_VECTORS
3690 }
3691 }
3692 }, {
3693 .alg = "sha384",
3694 .test = alg_test_hash,
3695 .fips_allowed = 1,
3696 .suite = {
3697 .hash = {
3698 .vecs = sha384_tv_template,
3699 .count = SHA384_TEST_VECTORS
3700 }
3701 }
3702 }, {
3703 .alg = "sha512",
3704 .test = alg_test_hash,
3705 .fips_allowed = 1,
3706 .suite = {
3707 .hash = {
3708 .vecs = sha512_tv_template,
3709 .count = SHA512_TEST_VECTORS
3710 }
3711 }
3712 }, {
3713 .alg = "tgr128",
3714 .test = alg_test_hash,
3715 .suite = {
3716 .hash = {
3717 .vecs = tgr128_tv_template,
3718 .count = TGR128_TEST_VECTORS
3719 }
3720 }
3721 }, {
3722 .alg = "tgr160",
3723 .test = alg_test_hash,
3724 .suite = {
3725 .hash = {
3726 .vecs = tgr160_tv_template,
3727 .count = TGR160_TEST_VECTORS
3728 }
3729 }
3730 }, {
3731 .alg = "tgr192",
3732 .test = alg_test_hash,
3733 .suite = {
3734 .hash = {
3735 .vecs = tgr192_tv_template,
3736 .count = TGR192_TEST_VECTORS
3737 }
3738 }
3739 }, {
3740 .alg = "vmac(aes)",
3741 .test = alg_test_hash,
3742 .suite = {
3743 .hash = {
3744 .vecs = aes_vmac128_tv_template,
3745 .count = VMAC_AES_TEST_VECTORS
3746 }
3747 }
3748 }, {
3749 .alg = "wp256",
3750 .test = alg_test_hash,
3751 .suite = {
3752 .hash = {
3753 .vecs = wp256_tv_template,
3754 .count = WP256_TEST_VECTORS
3755 }
3756 }
3757 }, {
3758 .alg = "wp384",
3759 .test = alg_test_hash,
3760 .suite = {
3761 .hash = {
3762 .vecs = wp384_tv_template,
3763 .count = WP384_TEST_VECTORS
3764 }
3765 }
3766 }, {
3767 .alg = "wp512",
3768 .test = alg_test_hash,
3769 .suite = {
3770 .hash = {
3771 .vecs = wp512_tv_template,
3772 .count = WP512_TEST_VECTORS
3773 }
3774 }
3775 }, {
3776 .alg = "xcbc(aes)",
3777 .test = alg_test_hash,
3778 .suite = {
3779 .hash = {
3780 .vecs = aes_xcbc128_tv_template,
3781 .count = XCBC_AES_TEST_VECTORS
3782 }
3783 }
3784 }, {
3785 .alg = "xts(aes)",
3786 .test = alg_test_skcipher,
3787 .fips_allowed = 1,
3788 .suite = {
3789 .cipher = {
3790 .enc = {
3791 .vecs = aes_xts_enc_tv_template,
3792 .count = AES_XTS_ENC_TEST_VECTORS
3793 },
3794 .dec = {
3795 .vecs = aes_xts_dec_tv_template,
3796 .count = AES_XTS_DEC_TEST_VECTORS
3797 }
3798 }
3799 }
3800 }, {
3801 .alg = "xts(camellia)",
3802 .test = alg_test_skcipher,
3803 .suite = {
3804 .cipher = {
3805 .enc = {
3806 .vecs = camellia_xts_enc_tv_template,
3807 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3808 },
3809 .dec = {
3810 .vecs = camellia_xts_dec_tv_template,
3811 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3812 }
3813 }
3814 }
3815 }, {
3816 .alg = "xts(cast6)",
3817 .test = alg_test_skcipher,
3818 .suite = {
3819 .cipher = {
3820 .enc = {
3821 .vecs = cast6_xts_enc_tv_template,
3822 .count = CAST6_XTS_ENC_TEST_VECTORS
3823 },
3824 .dec = {
3825 .vecs = cast6_xts_dec_tv_template,
3826 .count = CAST6_XTS_DEC_TEST_VECTORS
3827 }
3828 }
3829 }
3830 }, {
3831 .alg = "xts(serpent)",
3832 .test = alg_test_skcipher,
3833 .suite = {
3834 .cipher = {
3835 .enc = {
3836 .vecs = serpent_xts_enc_tv_template,
3837 .count = SERPENT_XTS_ENC_TEST_VECTORS
3838 },
3839 .dec = {
3840 .vecs = serpent_xts_dec_tv_template,
3841 .count = SERPENT_XTS_DEC_TEST_VECTORS
3842 }
3843 }
3844 }
3845 }, {
3846 .alg = "xts(twofish)",
3847 .test = alg_test_skcipher,
3848 .suite = {
3849 .cipher = {
3850 .enc = {
3851 .vecs = tf_xts_enc_tv_template,
3852 .count = TF_XTS_ENC_TEST_VECTORS
3853 },
3854 .dec = {
3855 .vecs = tf_xts_dec_tv_template,
3856 .count = TF_XTS_DEC_TEST_VECTORS
3857 }
3858 }
3859 }
3860 }, {
3861 .alg = "zlib",
3862 .test = alg_test_pcomp,
3863 .fips_allowed = 1,
3864 .suite = {
3865 .pcomp = {
3866 .comp = {
3867 .vecs = zlib_comp_tv_template,
3868 .count = ZLIB_COMP_TEST_VECTORS
3869 },
3870 .decomp = {
3871 .vecs = zlib_decomp_tv_template,
3872 .count = ZLIB_DECOMP_TEST_VECTORS
3873 }
3874 }
3875 }
3876 }
3877 };
3878
3879 static bool alg_test_descs_checked;
3880
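/*
 * Sanity-check (once) that alg_test_descs[] really is sorted by algorithm
 * name and free of duplicates, since alg_find_test() binary-searches it.
 */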
3881 static void alg_test_descs_check_order(void)
3882 {
3883 int i;
3884
3885 /* only check once */
3886 if (alg_test_descs_checked)
3887 return;
3888
3889 alg_test_descs_checked = true;
3890
3891 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3892 int diff = strcmp(alg_test_descs[i - 1].alg,
3893 alg_test_descs[i].alg);
3894
3895 if (WARN_ON(diff > 0)) {
3896 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3897 alg_test_descs[i - 1].alg,
3898 alg_test_descs[i].alg);
3899 }
3900
3901 if (WARN_ON(diff == 0)) {
3902 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3903 alg_test_descs[i].alg);
3904 }
3905 }
3906 }
3907
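/*
 * Binary-search alg_test_descs[] for @alg; returns the index of the
 * matching entry, or -1 if the algorithm has no test description.
 */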
3908 static int alg_find_test(const char *alg)
3909 {
3910 int start = 0;
3911 int end = ARRAY_SIZE(alg_test_descs);
3912
3913 while (start < end) {
3914 int i = (start + end) / 2;
3915 int diff = strcmp(alg_test_descs[i].alg, alg);
3916
3917 if (diff > 0) {
3918 end = i;
3919 continue;
3920 }
3921
3922 if (diff < 0) {
3923 start = i + 1;
3924 continue;
3925 }
3926
3927 return i;
3928 }
3929
3930 return -1;
3931 }
3932
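/*
 * alg_test() - run any registered self-tests for an algorithm instance.
 * @driver: name of the specific implementation under test
 * @alg: algorithm name
 * @type, @mask: crypto type flags of the instance being tested
 *
 * The descriptor table is consulted for both @alg and @driver and every
 * matching test is run.  In FIPS mode a failing self-test panics the
 * kernel, and algorithms without fips_allowed set are rejected with
 * -EINVAL; an algorithm with no test entry is logged and treated as a
 * pass.
 */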
3933 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3934 {
3935 int i;
3936 int j;
3937 int rc;
3938
3939 alg_test_descs_check_order();
3940
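	/*
	 * A bare block cipher has no test entry of its own, so test it
	 * through the corresponding "ecb(<alg>)" template entry instead.
	 */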
3941 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3942 char nalg[CRYPTO_MAX_ALG_NAME];
3943
3944 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3945 sizeof(nalg))
3946 return -ENAMETOOLONG;
3947
3948 i = alg_find_test(nalg);
3949 if (i < 0)
3950 goto notest;
3951
3952 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3953 goto non_fips_alg;
3954
3955 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3956 goto test_done;
3957 }
3958
3959 i = alg_find_test(alg);
3960 j = alg_find_test(driver);
3961 if (i < 0 && j < 0)
3962 goto notest;
3963
3964 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3965 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3966 goto non_fips_alg;
3967
3968 rc = 0;
3969 if (i >= 0)
3970 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3971 type, mask);
3972 if (j >= 0 && j != i)
3973 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3974 type, mask);
3975
3976 test_done:
3977 if (fips_enabled && rc)
3978 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3979
3980 if (fips_enabled && !rc)
3981 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3982
3983 return rc;
3984
3985 notest:
3986 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3987 return 0;
3988 non_fips_alg:
3989 return -EINVAL;
3990 }
3991
3992 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3993
3994 EXPORT_SYMBOL_GPL(alg_test);
3995
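/*
 * Illustrative call site (hypothetical driver and names, not taken from
 * this file): a registration path that wants to self-test an
 * implementation before exposing it could do something like
 *
 *	err = alg_test("cbc-aes-mydriver", "cbc(aes)", 0, 0);
 *	if (err)
 *		return err;
 *
 * In-kernel users normally reach alg_test() through the crypto manager
 * rather than calling it directly.
 */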