2 * Algorithm testing framework and tests.
6 * Copyright (c) 2007 Nokia Siemens Networks
9 * Updated RFC4106 AES-GCM testing.
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35 #include <crypto/kpp.h>
40 module_param(notests, bool, 0644);
41 MODULE_PARM_DESC(notests, "disable crypto self-tests");
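/*
 * Minimal usage sketch (assuming this file is linked into the cryptomgr
 * module, as in mainline): booting with "cryptomgr.notests=1" on the kernel
 * command line skips all of the self-tests below.
 */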
43 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
46 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
56 * Need slab memory for testing (size in number of pages).
61 * Indexes into the xbuf to simulate cross-page access.
73 * Used by test_cipher()
78 struct tcrypt_result {
79 struct completion completion;
83 struct aead_test_suite {
85 struct aead_testvec *vecs;
90 struct cipher_test_suite {
92 struct cipher_testvec *vecs;
97 struct comp_test_suite {
99 struct comp_testvec *vecs;
104 struct hash_test_suite {
105 struct hash_testvec *vecs;
109 struct cprng_test_suite {
110 struct cprng_testvec *vecs;
114 struct drbg_test_suite {
115 struct drbg_testvec *vecs;
119 struct akcipher_test_suite {
120 struct akcipher_testvec *vecs;
124 struct kpp_test_suite {
125 struct kpp_testvec *vecs;
129 struct alg_test_desc {
131 int (*test)(const struct alg_test_desc *desc, const char *driver,
133 int fips_allowed; /* set if alg is allowed in fips mode */
136 struct aead_test_suite aead;
137 struct cipher_test_suite cipher;
138 struct comp_test_suite comp;
139 struct hash_test_suite hash;
140 struct cprng_test_suite cprng;
141 struct drbg_test_suite drbg;
142 struct akcipher_test_suite akcipher;
143 struct kpp_test_suite kpp;
147 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
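/*
 * Each IDX[] value encodes a page and an offset: IDX[k] >> PAGE_SHIFT picks
 * the xbuf/axbuf page and offset_in_page(IDX[k]) the position within it, so
 * the chunked tests below can scatter a template across page boundaries.
 */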
149 static void hexdump(unsigned char *buf, unsigned int len)
151 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
156 static void tcrypt_complete(struct crypto_async_request *req, int err)
158 struct tcrypt_result *res = req->data;
160 if (err == -EINPROGRESS)
164 complete(&res->completion);
167 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
171 for (i = 0; i < XBUFSIZE; i++) {
172 buf[i] = (void *)__get_free_page(GFP_KERNEL);
181 free_page((unsigned long)buf[i]);
186 static void testmgr_free_buf(char *buf[XBUFSIZE])
190 for (i = 0; i < XBUFSIZE; i++)
191 free_page((unsigned long)buf[i]);
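/*
 * Helper for the asynchronous calls below: -EINPROGRESS (or -EBUSY when
 * CRYPTO_TFM_REQ_MAY_BACKLOG is set) means the request was queued, so sleep
 * on the completion that tcrypt_complete() signals before the final status
 * is examined.
 */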
194 static int wait_async_op(struct tcrypt_result *tr, int ret)
196 if (ret == -EINPROGRESS || ret == -EBUSY) {
197 wait_for_completion(&tr->completion);
198 reinit_completion(&tr->completion);
204 static int ahash_partial_update(struct ahash_request **preq,
205 struct crypto_ahash *tfm, struct hash_testvec *template,
206 void *hash_buff, int k, int temp, struct scatterlist *sg,
207 const char *algo, char *result, struct tcrypt_result *tresult)
210 struct ahash_request *req;
211 int statesize, ret = -EINVAL;
214 statesize = crypto_ahash_statesize(
215 crypto_ahash_reqtfm(req));
216 state = kmalloc(statesize, GFP_KERNEL);
218 pr_err("alt: hash: Failed to alloc state for %s\n", algo);
221 ret = crypto_ahash_export(req, state);
223 pr_err("alt: hash: Failed to export() for %s\n", algo);
226 ahash_request_free(req);
227 req = ahash_request_alloc(tfm, GFP_KERNEL);
229 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
232 ahash_request_set_callback(req,
233 CRYPTO_TFM_REQ_MAY_BACKLOG,
234 tcrypt_complete, tresult);
236 memcpy(hash_buff, template->plaintext + temp,
238 sg_init_one(&sg[0], hash_buff, template->tap[k]);
239 ahash_request_set_crypt(req, sg, result, template->tap[k]);
240 ret = crypto_ahash_import(req, state);
242 pr_err("alg: hash: Failed to import() for %s\n", algo);
245 ret = wait_async_op(tresult, crypto_ahash_update(req));
252 ahash_request_free(req);
259 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
260 unsigned int tcount, bool use_digest,
261 const int align_offset)
263 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
264 unsigned int i, j, k, temp;
265 struct scatterlist sg[8];
268 struct ahash_request *req;
269 struct tcrypt_result tresult;
271 char *xbuf[XBUFSIZE];
274 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
277 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
280 if (testmgr_alloc_buf(xbuf))
283 init_completion(&tresult.completion);
285 req = ahash_request_alloc(tfm, GFP_KERNEL);
287 printk(KERN_ERR "alg: hash: Failed to allocate request for "
291 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
292 tcrypt_complete, &tresult);
295 for (i = 0; i < tcount; i++) {
300 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
304 memset(result, 0, MAX_DIGEST_SIZE);
307 hash_buff += align_offset;
309 memcpy(hash_buff, template[i].plaintext, template[i].psize);
310 sg_init_one(&sg[0], hash_buff, template[i].psize);
312 if (template[i].ksize) {
313 crypto_ahash_clear_flags(tfm, ~0);
314 if (template[i].ksize > MAX_KEYLEN) {
315 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
316 j, algo, template[i].ksize, MAX_KEYLEN);
320 memcpy(key, template[i].key, template[i].ksize);
321 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
323 printk(KERN_ERR "alg: hash: setkey failed on "
324 "test %d for %s: ret=%d\n", j, algo,
330 ahash_request_set_crypt(req, sg, result, template[i].psize);
332 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
334 pr_err("alg: hash: digest failed on test %d "
335 "for %s: ret=%d\n", j, algo, -ret);
339 ret = wait_async_op(&tresult, crypto_ahash_init(req));
341 pr_err("alt: hash: init failed on test %d "
342 "for %s: ret=%d\n", j, algo, -ret);
345 ret = wait_async_op(&tresult, crypto_ahash_update(req));
347 pr_err("alt: hash: update failed on test %d "
348 "for %s: ret=%d\n", j, algo, -ret);
351 ret = wait_async_op(&tresult, crypto_ahash_final(req));
353 pr_err("alt: hash: final failed on test %d "
354 "for %s: ret=%d\n", j, algo, -ret);
359 if (memcmp(result, template[i].digest,
360 crypto_ahash_digestsize(tfm))) {
361 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
363 hexdump(result, crypto_ahash_digestsize(tfm));
370 for (i = 0; i < tcount; i++) {
371 /* alignment tests are only done with contiguous buffers */
372 if (align_offset != 0)
379 memset(result, 0, MAX_DIGEST_SIZE);
382 sg_init_table(sg, template[i].np);
384 for (k = 0; k < template[i].np; k++) {
385 if (WARN_ON(offset_in_page(IDX[k]) +
386 template[i].tap[k] > PAGE_SIZE))
389 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
390 offset_in_page(IDX[k]),
391 template[i].plaintext + temp,
394 temp += template[i].tap[k];
397 if (template[i].ksize) {
398 if (template[i].ksize > MAX_KEYLEN) {
399 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
400 j, algo, template[i].ksize, MAX_KEYLEN);
404 crypto_ahash_clear_flags(tfm, ~0);
405 memcpy(key, template[i].key, template[i].ksize);
406 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
409 printk(KERN_ERR "alg: hash: setkey "
410 "failed on chunking test %d "
411 "for %s: ret=%d\n", j, algo, -ret);
416 ahash_request_set_crypt(req, sg, result, template[i].psize);
417 ret = crypto_ahash_digest(req);
423 wait_for_completion(&tresult.completion);
424 reinit_completion(&tresult.completion);
430 printk(KERN_ERR "alg: hash: digest failed "
431 "on chunking test %d for %s: "
432 "ret=%d\n", j, algo, -ret);
436 if (memcmp(result, template[i].digest,
437 crypto_ahash_digestsize(tfm))) {
438 printk(KERN_ERR "alg: hash: Chunking test %d "
439 "failed for %s\n", j, algo);
440 hexdump(result, crypto_ahash_digestsize(tfm));
446 /* partial update exercise */
448 for (i = 0; i < tcount; i++) {
449 /* alignment tests are only done with contiguous buffers */
450 if (align_offset != 0)
453 if (template[i].np < 2)
457 memset(result, 0, MAX_DIGEST_SIZE);
461 memcpy(hash_buff, template[i].plaintext,
463 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
465 if (template[i].ksize) {
466 crypto_ahash_clear_flags(tfm, ~0);
467 if (template[i].ksize > MAX_KEYLEN) {
468 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
469 j, algo, template[i].ksize, MAX_KEYLEN);
473 memcpy(key, template[i].key, template[i].ksize);
474 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
476 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
482 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
483 ret = wait_async_op(&tresult, crypto_ahash_init(req));
485 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n",
489 ret = wait_async_op(&tresult, crypto_ahash_update(req));
491 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n",
496 temp = template[i].tap[0];
497 for (k = 1; k < template[i].np; k++) {
498 ret = ahash_partial_update(&req, tfm, &template[i],
499 hash_buff, k, temp, &sg[0], algo, result,
502 pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
506 temp += template[i].tap[k];
508 ret = wait_async_op(&tresult, crypto_ahash_final(req));
510 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n",
514 if (memcmp(result, template[i].digest,
515 crypto_ahash_digestsize(tfm))) {
516 pr_err("alg: hash: Partial Test %d failed for %s\n",
518 hexdump(result, crypto_ahash_digestsize(tfm));
527 ahash_request_free(req);
529 testmgr_free_buf(xbuf);
536 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
537 unsigned int tcount, bool use_digest)
539 unsigned int alignmask;
542 ret = __test_hash(tfm, template, tcount, use_digest, 0);
546 /* test unaligned buffers, check with one byte offset */
547 ret = __test_hash(tfm, template, tcount, use_digest, 1);
551 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
553 /* Check if alignment mask for tfm is correctly set. */
554 ret = __test_hash(tfm, template, tcount, use_digest,
563 static int __test_aead(struct crypto_aead *tfm, int enc,
564 struct aead_testvec *template, unsigned int tcount,
565 const bool diff_dst, const int align_offset)
567 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
568 unsigned int i, j, k, n, temp;
572 struct aead_request *req;
573 struct scatterlist *sg;
574 struct scatterlist *sgout;
576 struct tcrypt_result result;
577 unsigned int authsize, iv_len;
582 char *xbuf[XBUFSIZE];
583 char *xoutbuf[XBUFSIZE];
584 char *axbuf[XBUFSIZE];
586 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
589 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
592 if (testmgr_alloc_buf(xbuf))
594 if (testmgr_alloc_buf(axbuf))
596 if (diff_dst && testmgr_alloc_buf(xoutbuf))
599 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
600 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
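/*
 * One allocation backs both scatterlists: 16 entries are enough for the
 * associated-data and payload chunks of the chunked tests (at most 8 of
 * each), and twice that is reserved when a separate destination list
 * (sgout) is needed.
 */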
615 init_completion(&result.completion);
617 req = aead_request_alloc(tfm, GFP_KERNEL);
619 pr_err("alg: aead%s: Failed to allocate request for %s\n",
624 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
625 tcrypt_complete, &result);
627 iv_len = crypto_aead_ivsize(tfm);
629 for (i = 0, j = 0; i < tcount; i++) {
635 /* some templates have no input data but they will
639 input += align_offset;
643 if (WARN_ON(align_offset + template[i].ilen >
644 PAGE_SIZE || template[i].alen > PAGE_SIZE))
647 memcpy(input, template[i].input, template[i].ilen);
648 memcpy(assoc, template[i].assoc, template[i].alen);
650 memcpy(iv, template[i].iv, iv_len);
652 memset(iv, 0, iv_len);
654 crypto_aead_clear_flags(tfm, ~0);
656 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
658 if (template[i].klen > MAX_KEYLEN) {
659 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
660 d, j, algo, template[i].klen,
665 memcpy(key, template[i].key, template[i].klen);
667 ret = crypto_aead_setkey(tfm, key, template[i].klen);
668 if (!ret == template[i].fail) {
669 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
670 d, j, algo, crypto_aead_get_flags(tfm));
675 authsize = abs(template[i].rlen - template[i].ilen);
676 ret = crypto_aead_setauthsize(tfm, authsize);
678 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
679 d, authsize, j, algo);
683 k = !!template[i].alen;
684 sg_init_table(sg, k + 1);
685 sg_set_buf(&sg[0], assoc, template[i].alen);
686 sg_set_buf(&sg[k], input,
687 template[i].ilen + (enc ? authsize : 0));
691 sg_init_table(sgout, k + 1);
692 sg_set_buf(&sgout[0], assoc, template[i].alen);
695 output += align_offset;
696 sg_set_buf(&sgout[k], output,
697 template[i].rlen + (enc ? 0 : authsize));
700 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
701 template[i].ilen, iv);
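/*
 * With the single-scatterlist AEAD API the associated data occupies the
 * first alen bytes of both src and dst; aead_request_set_ad() below only
 * records that length.
 */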
703 aead_request_set_ad(req, template[i].alen);
705 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
709 if (template[i].novrfy) {
710 /* verification was supposed to fail */
711 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
713 /* so really, we got a bad message */
720 wait_for_completion(&result.completion);
721 reinit_completion(&result.completion);
726 if (template[i].novrfy)
727 /* verification failure was expected */
731 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
732 d, e, j, algo, -ret);
737 if (memcmp(q, template[i].result, template[i].rlen)) {
738 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
740 hexdump(q, template[i].rlen);
746 for (i = 0, j = 0; i < tcount; i++) {
747 /* alignment tests are only done with contiguous buffers */
748 if (align_offset != 0)
757 memcpy(iv, template[i].iv, iv_len);
759 memset(iv, 0, MAX_IVLEN);
761 crypto_aead_clear_flags(tfm, ~0);
763 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
764 if (template[i].klen > MAX_KEYLEN) {
765 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
766 d, j, algo, template[i].klen, MAX_KEYLEN);
770 memcpy(key, template[i].key, template[i].klen);
772 ret = crypto_aead_setkey(tfm, key, template[i].klen);
773 if (!ret == template[i].fail) {
774 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
775 d, j, algo, crypto_aead_get_flags(tfm));
780 authsize = abs(template[i].rlen - template[i].ilen);
783 sg_init_table(sg, template[i].anp + template[i].np);
785 sg_init_table(sgout, template[i].anp + template[i].np);
788 for (k = 0, temp = 0; k < template[i].anp; k++) {
789 if (WARN_ON(offset_in_page(IDX[k]) +
790 template[i].atap[k] > PAGE_SIZE))
793 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
794 offset_in_page(IDX[k]),
795 template[i].assoc + temp,
796 template[i].atap[k]),
797 template[i].atap[k]);
799 sg_set_buf(&sgout[k],
800 axbuf[IDX[k] >> PAGE_SHIFT] +
801 offset_in_page(IDX[k]),
802 template[i].atap[k]);
803 temp += template[i].atap[k];
806 for (k = 0, temp = 0; k < template[i].np; k++) {
807 if (WARN_ON(offset_in_page(IDX[k]) +
808 template[i].tap[k] > PAGE_SIZE))
811 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
812 memcpy(q, template[i].input + temp, template[i].tap[k]);
813 sg_set_buf(&sg[template[i].anp + k],
814 q, template[i].tap[k]);
817 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
818 offset_in_page(IDX[k]);
820 memset(q, 0, template[i].tap[k]);
822 sg_set_buf(&sgout[template[i].anp + k],
823 q, template[i].tap[k]);
826 n = template[i].tap[k];
827 if (k == template[i].np - 1 && enc)
829 if (offset_in_page(q) + n < PAGE_SIZE)
832 temp += template[i].tap[k];
835 ret = crypto_aead_setauthsize(tfm, authsize);
837 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
838 d, authsize, j, algo);
843 if (WARN_ON(sg[template[i].anp + k - 1].offset +
844 sg[template[i].anp + k - 1].length +
845 authsize > PAGE_SIZE)) {
851 sgout[template[i].anp + k - 1].length +=
853 sg[template[i].anp + k - 1].length += authsize;
856 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
860 aead_request_set_ad(req, template[i].alen);
862 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
866 if (template[i].novrfy) {
867 /* verification was supposed to fail */
868 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
870 /* so really, we got a bad message */
877 wait_for_completion(&result.completion);
878 reinit_completion(&result.completion);
883 if (template[i].novrfy)
884 /* verification failure was expected */
888 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
889 d, e, j, algo, -ret);
894 for (k = 0, temp = 0; k < template[i].np; k++) {
896 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
897 offset_in_page(IDX[k]);
899 q = xbuf[IDX[k] >> PAGE_SHIFT] +
900 offset_in_page(IDX[k]);
902 n = template[i].tap[k];
903 if (k == template[i].np - 1)
904 n += enc ? authsize : -authsize;
906 if (memcmp(q, template[i].result + temp, n)) {
907 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
914 if (k == template[i].np - 1 && !enc) {
916 memcmp(q, template[i].input +
922 for (n = 0; offset_in_page(q + n) && q[n]; n++)
926 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
927 d, j, e, k, algo, n);
932 temp += template[i].tap[k];
939 aead_request_free(req);
943 testmgr_free_buf(xoutbuf);
945 testmgr_free_buf(axbuf);
947 testmgr_free_buf(xbuf);
954 static int test_aead(struct crypto_aead *tfm, int enc,
955 struct aead_testvec *template, unsigned int tcount)
957 unsigned int alignmask;
960 /* test 'dst == src' case */
961 ret = __test_aead(tfm, enc, template, tcount, false, 0);
965 /* test 'dst != src' case */
966 ret = __test_aead(tfm, enc, template, tcount, true, 0);
970 /* test unaligned buffers, check with one byte offset */
971 ret = __test_aead(tfm, enc, template, tcount, true, 1);
975 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
977 /* Check if alignment mask for tfm is correctly set. */
978 ret = __test_aead(tfm, enc, template, tcount, true,
987 static int test_cipher(struct crypto_cipher *tfm, int enc,
988 struct cipher_testvec *template, unsigned int tcount)
990 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
991 unsigned int i, j, k;
995 char *xbuf[XBUFSIZE];
998 if (testmgr_alloc_buf(xbuf))
1007 for (i = 0; i < tcount; i++) {
1014 if (WARN_ON(template[i].ilen > PAGE_SIZE))
1018 memcpy(data, template[i].input, template[i].ilen);
1020 crypto_cipher_clear_flags(tfm, ~0);
1022 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1024 ret = crypto_cipher_setkey(tfm, template[i].key,
1026 if (!ret == template[i].fail) {
1027 printk(KERN_ERR "alg: cipher: setkey failed "
1028 "on test %d for %s: flags=%x\n", j,
1029 algo, crypto_cipher_get_flags(tfm));
1034 for (k = 0; k < template[i].ilen;
1035 k += crypto_cipher_blocksize(tfm)) {
1037 crypto_cipher_encrypt_one(tfm, data + k,
1040 crypto_cipher_decrypt_one(tfm, data + k,
1045 if (memcmp(q, template[i].result, template[i].rlen)) {
1046 printk(KERN_ERR "alg: cipher: Test %d failed "
1047 "on %s for %s\n", j, e, algo);
1048 hexdump(q, template[i].rlen);
1057 testmgr_free_buf(xbuf);
1062 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1063 struct cipher_testvec *template, unsigned int tcount,
1064 const bool diff_dst, const int align_offset)
1067 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1068 unsigned int i, j, k, n, temp;
1070 struct skcipher_request *req;
1071 struct scatterlist sg[8];
1072 struct scatterlist sgout[8];
1074 struct tcrypt_result result;
1077 char *xbuf[XBUFSIZE];
1078 char *xoutbuf[XBUFSIZE];
1080 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1082 if (testmgr_alloc_buf(xbuf))
1085 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1098 init_completion(&result.completion);
1100 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1102 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1107 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1108 tcrypt_complete, &result);
1111 for (i = 0; i < tcount; i++) {
1112 if (template[i].np && !template[i].also_non_np)
1116 memcpy(iv, template[i].iv, ivsize);
1118 memset(iv, 0, MAX_IVLEN);
1122 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1126 data += align_offset;
1127 memcpy(data, template[i].input, template[i].ilen);
1129 crypto_skcipher_clear_flags(tfm, ~0);
1131 crypto_skcipher_set_flags(tfm,
1132 CRYPTO_TFM_REQ_WEAK_KEY);
1134 ret = crypto_skcipher_setkey(tfm, template[i].key,
1136 if (!ret == template[i].fail) {
1137 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1138 d, j, algo, crypto_skcipher_get_flags(tfm));
1143 sg_init_one(&sg[0], data, template[i].ilen);
1146 data += align_offset;
1147 sg_init_one(&sgout[0], data, template[i].ilen);
1150 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1151 template[i].ilen, iv);
1152 ret = enc ? crypto_skcipher_encrypt(req) :
1153 crypto_skcipher_decrypt(req);
1160 wait_for_completion(&result.completion);
1161 reinit_completion(&result.completion);
1167 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1168 d, e, j, algo, -ret);
1173 if (memcmp(q, template[i].result, template[i].rlen)) {
1174 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1176 hexdump(q, template[i].rlen);
1181 if (template[i].iv_out &&
1182 memcmp(iv, template[i].iv_out,
1183 crypto_skcipher_ivsize(tfm))) {
1184 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1186 hexdump(iv, crypto_skcipher_ivsize(tfm));
1193 for (i = 0; i < tcount; i++) {
1194 /* alignment tests are only done with contiguous buffers */
1195 if (align_offset != 0)
1198 if (!template[i].np)
1202 memcpy(iv, template[i].iv, ivsize);
1204 memset(iv, 0, MAX_IVLEN);
1207 crypto_skcipher_clear_flags(tfm, ~0);
1209 crypto_skcipher_set_flags(tfm,
1210 CRYPTO_TFM_REQ_WEAK_KEY);
1212 ret = crypto_skcipher_setkey(tfm, template[i].key,
1214 if (!ret == template[i].fail) {
1215 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1216 d, j, algo, crypto_skcipher_get_flags(tfm));
1223 sg_init_table(sg, template[i].np);
1225 sg_init_table(sgout, template[i].np);
1226 for (k = 0; k < template[i].np; k++) {
1227 if (WARN_ON(offset_in_page(IDX[k]) +
1228 template[i].tap[k] > PAGE_SIZE))
1231 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1233 memcpy(q, template[i].input + temp, template[i].tap[k]);
1235 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1236 q[template[i].tap[k]] = 0;
1238 sg_set_buf(&sg[k], q, template[i].tap[k]);
1240 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1241 offset_in_page(IDX[k]);
1243 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1245 memset(q, 0, template[i].tap[k]);
1246 if (offset_in_page(q) +
1247 template[i].tap[k] < PAGE_SIZE)
1248 q[template[i].tap[k]] = 0;
1251 temp += template[i].tap[k];
1254 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1255 template[i].ilen, iv);
1257 ret = enc ? crypto_skcipher_encrypt(req) :
1258 crypto_skcipher_decrypt(req);
1265 wait_for_completion(&result.completion);
1266 reinit_completion(&result.completion);
1272 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1273 d, e, j, algo, -ret);
1279 for (k = 0; k < template[i].np; k++) {
1281 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1282 offset_in_page(IDX[k]);
1284 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1285 offset_in_page(IDX[k]);
1287 if (memcmp(q, template[i].result + temp,
1288 template[i].tap[k])) {
1289 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1291 hexdump(q, template[i].tap[k]);
1295 q += template[i].tap[k];
1296 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1299 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1300 d, j, e, k, algo, n);
1304 temp += template[i].tap[k];
1311 skcipher_request_free(req);
1313 testmgr_free_buf(xoutbuf);
1315 testmgr_free_buf(xbuf);
1320 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1321 struct cipher_testvec *template, unsigned int tcount)
1323 unsigned int alignmask;
1326 /* test 'dst == src' case */
1327 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1331 /* test 'dst != src' case */
1332 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1336 /* test unaligned buffers, check with one byte offset */
1337 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1341 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1343 /* Check if alignment mask for tfm is correctly set. */
1344 ret = __test_skcipher(tfm, enc, template, tcount, true,
1353 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1354 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1356 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1358 char result[COMP_BUF_SIZE];
1361 for (i = 0; i < ctcount; i++) {
1363 unsigned int dlen = COMP_BUF_SIZE;
1365 memset(result, 0, sizeof (result));
1367 ilen = ctemplate[i].inlen;
1368 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1369 ilen, result, &dlen);
1371 printk(KERN_ERR "alg: comp: compression failed "
1372 "on test %d for %s: ret=%d\n", i + 1, algo,
1377 if (dlen != ctemplate[i].outlen) {
1378 printk(KERN_ERR "alg: comp: Compression test %d "
1379 "failed for %s: output len = %d\n", i + 1, algo,
1385 if (memcmp(result, ctemplate[i].output, dlen)) {
1386 printk(KERN_ERR "alg: comp: Compression test %d "
1387 "failed for %s\n", i + 1, algo);
1388 hexdump(result, dlen);
1394 for (i = 0; i < dtcount; i++) {
1396 unsigned int dlen = COMP_BUF_SIZE;
1398 memset(result, 0, sizeof (result));
1400 ilen = dtemplate[i].inlen;
1401 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1402 ilen, result, &dlen);
1404 printk(KERN_ERR "alg: comp: decompression failed "
1405 "on test %d for %s: ret=%d\n", i + 1, algo,
1410 if (dlen != dtemplate[i].outlen) {
1411 printk(KERN_ERR "alg: comp: Decompression test %d "
1412 "failed for %s: output len = %d\n", i + 1, algo,
1418 if (memcmp(result, dtemplate[i].output, dlen)) {
1419 printk(KERN_ERR "alg: comp: Decompression test %d "
1420 "failed for %s\n", i + 1, algo);
1421 hexdump(result, dlen);
1433 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1434 unsigned int tcount)
1436 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1437 int err = 0, i, j, seedsize;
1441 seedsize = crypto_rng_seedsize(tfm);
1443 seed = kmalloc(seedsize, GFP_KERNEL);
1445 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1450 for (i = 0; i < tcount; i++) {
1451 memset(result, 0, 32);
1453 memcpy(seed, template[i].v, template[i].vlen);
1454 memcpy(seed + template[i].vlen, template[i].key,
1456 memcpy(seed + template[i].vlen + template[i].klen,
1457 template[i].dt, template[i].dtlen);
1459 err = crypto_rng_reset(tfm, seed, seedsize);
1461 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1466 for (j = 0; j < template[i].loops; j++) {
1467 err = crypto_rng_get_bytes(tfm, result,
1470 printk(KERN_ERR "alg: cprng: Failed to obtain "
1471 "the correct amount of random data for "
1472 "%s (requested %d)\n", algo,
1478 err = memcmp(result, template[i].result,
1481 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1483 hexdump(result, template[i].rlen);
1494 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1497 struct crypto_aead *tfm;
1500 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1502 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1503 "%ld\n", driver, PTR_ERR(tfm));
1504 return PTR_ERR(tfm);
1507 if (desc->suite.aead.enc.vecs) {
1508 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1509 desc->suite.aead.enc.count);
1514 if (!err && desc->suite.aead.dec.vecs)
1515 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1516 desc->suite.aead.dec.count);
1519 crypto_free_aead(tfm);
1523 static int alg_test_cipher(const struct alg_test_desc *desc,
1524 const char *driver, u32 type, u32 mask)
1526 struct crypto_cipher *tfm;
1529 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1531 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1532 "%s: %ld\n", driver, PTR_ERR(tfm));
1533 return PTR_ERR(tfm);
1536 if (desc->suite.cipher.enc.vecs) {
1537 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1538 desc->suite.cipher.enc.count);
1543 if (desc->suite.cipher.dec.vecs)
1544 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1545 desc->suite.cipher.dec.count);
1548 crypto_free_cipher(tfm);
1552 static int alg_test_skcipher(const struct alg_test_desc *desc,
1553 const char *driver, u32 type, u32 mask)
1555 struct crypto_skcipher *tfm;
1558 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1560 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1561 "%s: %ld\n", driver, PTR_ERR(tfm));
1562 return PTR_ERR(tfm);
1565 if (desc->suite.cipher.enc.vecs) {
1566 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1567 desc->suite.cipher.enc.count);
1572 if (desc->suite.cipher.dec.vecs)
1573 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1574 desc->suite.cipher.dec.count);
1577 crypto_free_skcipher(tfm);
1581 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1584 struct crypto_comp *tfm;
1587 tfm = crypto_alloc_comp(driver, type, mask);
1589 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1590 "%ld\n", driver, PTR_ERR(tfm));
1591 return PTR_ERR(tfm);
1594 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1595 desc->suite.comp.decomp.vecs,
1596 desc->suite.comp.comp.count,
1597 desc->suite.comp.decomp.count);
1599 crypto_free_comp(tfm);
1603 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1606 struct crypto_ahash *tfm;
1609 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1611 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1612 "%ld\n", driver, PTR_ERR(tfm));
1613 return PTR_ERR(tfm);
1616 err = test_hash(tfm, desc->suite.hash.vecs,
1617 desc->suite.hash.count, true);
1619 err = test_hash(tfm, desc->suite.hash.vecs,
1620 desc->suite.hash.count, false);
1622 crypto_free_ahash(tfm);
1626 static int alg_test_crc32c(const struct alg_test_desc *desc,
1627 const char *driver, u32 type, u32 mask)
1629 struct crypto_shash *tfm;
1633 err = alg_test_hash(desc, driver, type, mask);
1637 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1639 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1640 "%ld\n", driver, PTR_ERR(tfm));
1646 SHASH_DESC_ON_STACK(shash, tfm);
1647 u32 *ctx = (u32 *)shash_desc_ctx(shash);
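/*
 * Extra sanity check beyond the standard vectors: crc32c's final() just
 * returns the bitwise complement of the accumulated CRC state, so seeding
 * the descriptor context with an arbitrary value and finalizing must yield
 * its complement (~420553207 below).
 */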
1652 *ctx = le32_to_cpu(420553207);
1653 err = crypto_shash_final(shash, (u8 *)&val);
1655 printk(KERN_ERR "alg: crc32c: Operation failed for "
1656 "%s: %d\n", driver, err);
1660 if (val != ~420553207) {
1661 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1662 "%d\n", driver, val);
1667 crypto_free_shash(tfm);
1673 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1676 struct crypto_rng *rng;
1679 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1681 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1682 "%ld\n", driver, PTR_ERR(rng));
1683 return PTR_ERR(rng);
1686 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1688 crypto_free_rng(rng);
1694 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1695 const char *driver, u32 type, u32 mask)
1698 struct crypto_rng *drng;
1699 struct drbg_test_data test_data;
1700 struct drbg_string addtl, pers, testentropy;
1701 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1706 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1708 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1714 test_data.testentropy = &testentropy;
1715 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1716 drbg_string_fill(&pers, test->pers, test->perslen);
1717 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1719 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1723 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1725 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1726 ret = crypto_drbg_get_bytes_addtl_test(drng,
1727 buf, test->expectedlen, &addtl, &test_data);
1729 ret = crypto_drbg_get_bytes_addtl(drng,
1730 buf, test->expectedlen, &addtl);
1733 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1734 "driver %s\n", driver);
1738 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1740 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1741 ret = crypto_drbg_get_bytes_addtl_test(drng,
1742 buf, test->expectedlen, &addtl, &test_data);
1744 ret = crypto_drbg_get_bytes_addtl(drng,
1745 buf, test->expectedlen, &addtl);
1748 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1749 "driver %s\n", driver);
1753 ret = memcmp(test->expected, buf, test->expectedlen);
1756 crypto_free_rng(drng);
1762 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1768 struct drbg_testvec *template = desc->suite.drbg.vecs;
1769 unsigned int tcount = desc->suite.drbg.count;
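/* Driver names starting with "drbg_pr_" are DRBG instances with prediction
 * resistance, so fresh test entropy is fed to each generate call during the
 * CAVS run below. */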
1771 if (0 == memcmp(driver, "drbg_pr_", 8))
1774 for (i = 0; i < tcount; i++) {
1775 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1777 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1787 static int do_test_kpp(struct crypto_kpp *tfm, struct kpp_testvec *vec,
1790 struct kpp_request *req;
1791 void *input_buf = NULL;
1792 void *output_buf = NULL;
1793 struct tcrypt_result result;
1794 unsigned int out_len_max;
1796 struct scatterlist src, dst;
1798 req = kpp_request_alloc(tfm, GFP_KERNEL);
1802 init_completion(&result.completion);
1804 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1808 out_len_max = crypto_kpp_maxsize(tfm);
1809 output_buf = kzalloc(out_len_max, GFP_KERNEL);
1815 /* Use appropriate parameter as base */
1816 kpp_request_set_input(req, NULL, 0);
1817 sg_init_one(&dst, output_buf, out_len_max);
1818 kpp_request_set_output(req, &dst, out_len_max);
1819 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1820 tcrypt_complete, &result);
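/*
 * Illustrative round trip with toy numbers (not taken from the vectors):
 * for DH with p = 23, g = 5 and private keys a = 6, b = 15, the public keys
 * are A = 5^6 mod 23 = 8 and B = 5^15 mod 23 = 19, and both sides derive
 * the same shared secret A^b mod 23 = B^a mod 23 = 2.  The two requests
 * below perform exactly these steps with the vector's real parameters.
 */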
1822 /* Compute public key */
1823 err = wait_async_op(&result, crypto_kpp_generate_public_key(req));
1825 pr_err("alg: %s: generate public key test failed. err %d\n",
1829 /* Verify calculated public key */
1830 if (memcmp(vec->expected_a_public, sg_virt(req->dst),
1831 vec->expected_a_public_size)) {
1832 pr_err("alg: %s: generate public key test failed. Invalid output\n",
1838 /* Calculate shared secret key by using the counterpart (b) public key. */
1839 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
1845 memcpy(input_buf, vec->b_public, vec->b_public_size);
1846 sg_init_one(&src, input_buf, vec->b_public_size);
1847 sg_init_one(&dst, output_buf, out_len_max);
1848 kpp_request_set_input(req, &src, vec->b_public_size);
1849 kpp_request_set_output(req, &dst, out_len_max);
1850 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1851 tcrypt_complete, &result);
1852 err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req));
1854 pr_err("alg: %s: compute shard secret test failed. err %d\n",
1859 * verify the shared secret, from which the user will derive
1860 * a secret key with whatever hash it has chosen
1862 if (memcmp(vec->expected_ss, sg_virt(req->dst),
1863 vec->expected_ss_size)) {
1864 pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
1874 kpp_request_free(req);
1878 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
1879 struct kpp_testvec *vecs, unsigned int tcount)
1883 for (i = 0; i < tcount; i++) {
1884 ret = do_test_kpp(tfm, vecs++, alg);
1886 pr_err("alg: %s: test failed on vector %d, err=%d\n",
1894 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
1897 struct crypto_kpp *tfm;
1900 tfm = crypto_alloc_kpp(driver, type | CRYPTO_ALG_INTERNAL, mask);
1902 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
1903 driver, PTR_ERR(tfm));
1904 return PTR_ERR(tfm);
1906 if (desc->suite.kpp.vecs)
1907 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
1908 desc->suite.kpp.count);
1910 crypto_free_kpp(tfm);
1914 static int test_akcipher_one(struct crypto_akcipher *tfm,
1915 struct akcipher_testvec *vecs)
1917 char *xbuf[XBUFSIZE];
1918 struct akcipher_request *req;
1919 void *outbuf_enc = NULL;
1920 void *outbuf_dec = NULL;
1921 struct tcrypt_result result;
1922 unsigned int out_len_max, out_len = 0;
1924 struct scatterlist src, dst, src_tab[2];
1926 if (testmgr_alloc_buf(xbuf))
1929 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1933 init_completion(&result.completion);
1935 if (vecs->public_key_vec)
1936 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1939 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
1944 out_len_max = crypto_akcipher_maxsize(tfm);
1945 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1949 if (WARN_ON(vecs->m_size > PAGE_SIZE))
1952 memcpy(xbuf[0], vecs->m, vecs->m_size);
1954 sg_init_table(src_tab, 2);
1955 sg_set_buf(&src_tab[0], xbuf[0], 8);
1956 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
1957 sg_init_one(&dst, outbuf_enc, out_len_max);
1958 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1960 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1961 tcrypt_complete, &result);
1963 /* Run RSA encrypt - c = m^e mod n;*/
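/*
 * Toy illustration, not from the vectors: with p = 3, q = 11 (n = 33),
 * e = 3 and d = 7, the message m = 4 encrypts to c = 4^3 mod 33 = 31 and
 * decrypts back via 31^7 mod 33 = 4.  The vectors below do the same with
 * full-size keys.
 */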
1964 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1966 pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
1969 if (req->dst_len != vecs->c_size) {
1970 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
1974 /* verify that encrypted message is equal to expected */
1975 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1976 pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
1977 hexdump(outbuf_enc, vecs->c_size);
1981 /* Don't invoke decrypt for vectors with public key */
1982 if (vecs->public_key_vec) {
1986 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1992 if (WARN_ON(vecs->c_size > PAGE_SIZE))
1995 memcpy(xbuf[0], vecs->c, vecs->c_size);
1997 sg_init_one(&src, xbuf[0], vecs->c_size);
1998 sg_init_one(&dst, outbuf_dec, out_len_max);
1999 init_completion(&result.completion);
2000 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
2002 /* Run RSA decrypt - m = c^d mod n;*/
2003 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
2005 pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
2008 out_len = req->dst_len;
2009 if (out_len < vecs->m_size) {
2010 pr_err("alg: akcipher: decrypt test failed. "
2011 "Invalid output len %u\n", out_len);
2015 /* verify that decrypted message is equal to the original msg */
2016 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2017 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2019 pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2020 hexdump(outbuf_dec, out_len);
2027 akcipher_request_free(req);
2029 testmgr_free_buf(xbuf);
2033 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2034 struct akcipher_testvec *vecs, unsigned int tcount)
2038 for (i = 0; i < tcount; i++) {
2039 ret = test_akcipher_one(tfm, vecs++);
2043 pr_err("alg: akcipher: test failed on vector %d, err=%d\n",
2050 static int alg_test_akcipher(const struct alg_test_desc *desc,
2051 const char *driver, u32 type, u32 mask)
2053 struct crypto_akcipher *tfm;
2056 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
2058 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2059 driver, PTR_ERR(tfm));
2060 return PTR_ERR(tfm);
2062 if (desc->suite.akcipher.vecs)
2063 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2064 desc->suite.akcipher.count);
2066 crypto_free_akcipher(tfm);
2070 static int alg_test_null(const struct alg_test_desc *desc,
2071 const char *driver, u32 type, u32 mask)
2076 /* Please keep this list sorted by algorithm name. */
2077 static const struct alg_test_desc alg_test_descs[] = {
2079 .alg = "__cbc-cast5-avx",
2080 .test = alg_test_null,
2082 .alg = "__cbc-cast6-avx",
2083 .test = alg_test_null,
2085 .alg = "__cbc-serpent-avx",
2086 .test = alg_test_null,
2088 .alg = "__cbc-serpent-avx2",
2089 .test = alg_test_null,
2091 .alg = "__cbc-serpent-sse2",
2092 .test = alg_test_null,
2094 .alg = "__cbc-twofish-avx",
2095 .test = alg_test_null,
2097 .alg = "__driver-cbc-aes-aesni",
2098 .test = alg_test_null,
2101 .alg = "__driver-cbc-camellia-aesni",
2102 .test = alg_test_null,
2104 .alg = "__driver-cbc-camellia-aesni-avx2",
2105 .test = alg_test_null,
2107 .alg = "__driver-cbc-cast5-avx",
2108 .test = alg_test_null,
2110 .alg = "__driver-cbc-cast6-avx",
2111 .test = alg_test_null,
2113 .alg = "__driver-cbc-serpent-avx",
2114 .test = alg_test_null,
2116 .alg = "__driver-cbc-serpent-avx2",
2117 .test = alg_test_null,
2119 .alg = "__driver-cbc-serpent-sse2",
2120 .test = alg_test_null,
2122 .alg = "__driver-cbc-twofish-avx",
2123 .test = alg_test_null,
2125 .alg = "__driver-ecb-aes-aesni",
2126 .test = alg_test_null,
2129 .alg = "__driver-ecb-camellia-aesni",
2130 .test = alg_test_null,
2132 .alg = "__driver-ecb-camellia-aesni-avx2",
2133 .test = alg_test_null,
2135 .alg = "__driver-ecb-cast5-avx",
2136 .test = alg_test_null,
2138 .alg = "__driver-ecb-cast6-avx",
2139 .test = alg_test_null,
2141 .alg = "__driver-ecb-serpent-avx",
2142 .test = alg_test_null,
2144 .alg = "__driver-ecb-serpent-avx2",
2145 .test = alg_test_null,
2147 .alg = "__driver-ecb-serpent-sse2",
2148 .test = alg_test_null,
2150 .alg = "__driver-ecb-twofish-avx",
2151 .test = alg_test_null,
2153 .alg = "__driver-gcm-aes-aesni",
2154 .test = alg_test_null,
2157 .alg = "__ghash-pclmulqdqni",
2158 .test = alg_test_null,
2161 .alg = "ansi_cprng",
2162 .test = alg_test_cprng,
2165 .vecs = ansi_cprng_aes_tv_template,
2166 .count = ANSI_CPRNG_AES_TEST_VECTORS
2170 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2171 .test = alg_test_aead,
2175 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2176 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2179 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2180 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2185 .alg = "authenc(hmac(sha1),cbc(aes))",
2186 .test = alg_test_aead,
2191 hmac_sha1_aes_cbc_enc_tv_temp,
2193 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2198 .alg = "authenc(hmac(sha1),cbc(des))",
2199 .test = alg_test_aead,
2204 hmac_sha1_des_cbc_enc_tv_temp,
2206 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2211 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2212 .test = alg_test_aead,
2218 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2220 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2225 .alg = "authenc(hmac(sha1),ctr(aes))",
2226 .test = alg_test_null,
2229 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2230 .test = alg_test_aead,
2235 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2237 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2241 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2243 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2248 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2249 .test = alg_test_null,
2252 .alg = "authenc(hmac(sha224),cbc(des))",
2253 .test = alg_test_aead,
2258 hmac_sha224_des_cbc_enc_tv_temp,
2260 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2265 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2266 .test = alg_test_aead,
2272 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2274 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2279 .alg = "authenc(hmac(sha256),cbc(aes))",
2280 .test = alg_test_aead,
2286 hmac_sha256_aes_cbc_enc_tv_temp,
2288 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2293 .alg = "authenc(hmac(sha256),cbc(des))",
2294 .test = alg_test_aead,
2299 hmac_sha256_des_cbc_enc_tv_temp,
2301 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2306 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2307 .test = alg_test_aead,
2313 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2315 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2320 .alg = "authenc(hmac(sha256),ctr(aes))",
2321 .test = alg_test_null,
2324 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2325 .test = alg_test_null,
2328 .alg = "authenc(hmac(sha384),cbc(des))",
2329 .test = alg_test_aead,
2334 hmac_sha384_des_cbc_enc_tv_temp,
2336 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2341 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2342 .test = alg_test_aead,
2348 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2350 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2355 .alg = "authenc(hmac(sha384),ctr(aes))",
2356 .test = alg_test_null,
2359 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2360 .test = alg_test_null,
2363 .alg = "authenc(hmac(sha512),cbc(aes))",
2365 .test = alg_test_aead,
2370 hmac_sha512_aes_cbc_enc_tv_temp,
2372 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2377 .alg = "authenc(hmac(sha512),cbc(des))",
2378 .test = alg_test_aead,
2383 hmac_sha512_des_cbc_enc_tv_temp,
2385 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2390 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2391 .test = alg_test_aead,
2397 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2399 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2404 .alg = "authenc(hmac(sha512),ctr(aes))",
2405 .test = alg_test_null,
2408 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2409 .test = alg_test_null,
2413 .test = alg_test_skcipher,
2418 .vecs = aes_cbc_enc_tv_template,
2419 .count = AES_CBC_ENC_TEST_VECTORS
2422 .vecs = aes_cbc_dec_tv_template,
2423 .count = AES_CBC_DEC_TEST_VECTORS
2428 .alg = "cbc(anubis)",
2429 .test = alg_test_skcipher,
2433 .vecs = anubis_cbc_enc_tv_template,
2434 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2437 .vecs = anubis_cbc_dec_tv_template,
2438 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2443 .alg = "cbc(blowfish)",
2444 .test = alg_test_skcipher,
2448 .vecs = bf_cbc_enc_tv_template,
2449 .count = BF_CBC_ENC_TEST_VECTORS
2452 .vecs = bf_cbc_dec_tv_template,
2453 .count = BF_CBC_DEC_TEST_VECTORS
2458 .alg = "cbc(camellia)",
2459 .test = alg_test_skcipher,
2463 .vecs = camellia_cbc_enc_tv_template,
2464 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2467 .vecs = camellia_cbc_dec_tv_template,
2468 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2473 .alg = "cbc(cast5)",
2474 .test = alg_test_skcipher,
2478 .vecs = cast5_cbc_enc_tv_template,
2479 .count = CAST5_CBC_ENC_TEST_VECTORS
2482 .vecs = cast5_cbc_dec_tv_template,
2483 .count = CAST5_CBC_DEC_TEST_VECTORS
2488 .alg = "cbc(cast6)",
2489 .test = alg_test_skcipher,
2493 .vecs = cast6_cbc_enc_tv_template,
2494 .count = CAST6_CBC_ENC_TEST_VECTORS
2497 .vecs = cast6_cbc_dec_tv_template,
2498 .count = CAST6_CBC_DEC_TEST_VECTORS
2504 .test = alg_test_skcipher,
2508 .vecs = des_cbc_enc_tv_template,
2509 .count = DES_CBC_ENC_TEST_VECTORS
2512 .vecs = des_cbc_dec_tv_template,
2513 .count = DES_CBC_DEC_TEST_VECTORS
2518 .alg = "cbc(des3_ede)",
2519 .test = alg_test_skcipher,
2524 .vecs = des3_ede_cbc_enc_tv_template,
2525 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2528 .vecs = des3_ede_cbc_dec_tv_template,
2529 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2534 .alg = "cbc(serpent)",
2535 .test = alg_test_skcipher,
2539 .vecs = serpent_cbc_enc_tv_template,
2540 .count = SERPENT_CBC_ENC_TEST_VECTORS
2543 .vecs = serpent_cbc_dec_tv_template,
2544 .count = SERPENT_CBC_DEC_TEST_VECTORS
2549 .alg = "cbc(twofish)",
2550 .test = alg_test_skcipher,
2554 .vecs = tf_cbc_enc_tv_template,
2555 .count = TF_CBC_ENC_TEST_VECTORS
2558 .vecs = tf_cbc_dec_tv_template,
2559 .count = TF_CBC_DEC_TEST_VECTORS
2565 .test = alg_test_aead,
2570 .vecs = aes_ccm_enc_tv_template,
2571 .count = AES_CCM_ENC_TEST_VECTORS
2574 .vecs = aes_ccm_dec_tv_template,
2575 .count = AES_CCM_DEC_TEST_VECTORS
2581 .test = alg_test_skcipher,
2585 .vecs = chacha20_enc_tv_template,
2586 .count = CHACHA20_ENC_TEST_VECTORS
2589 .vecs = chacha20_enc_tv_template,
2590 .count = CHACHA20_ENC_TEST_VECTORS
2597 .test = alg_test_hash,
2600 .vecs = aes_cmac128_tv_template,
2601 .count = CMAC_AES_TEST_VECTORS
2605 .alg = "cmac(des3_ede)",
2607 .test = alg_test_hash,
2610 .vecs = des3_ede_cmac64_tv_template,
2611 .count = CMAC_DES3_EDE_TEST_VECTORS
2615 .alg = "compress_null",
2616 .test = alg_test_null,
2619 .test = alg_test_hash,
2622 .vecs = crc32_tv_template,
2623 .count = CRC32_TEST_VECTORS
2628 .test = alg_test_crc32c,
2632 .vecs = crc32c_tv_template,
2633 .count = CRC32C_TEST_VECTORS
2638 .test = alg_test_hash,
2642 .vecs = crct10dif_tv_template,
2643 .count = CRCT10DIF_TEST_VECTORS
2647 .alg = "cryptd(__driver-cbc-aes-aesni)",
2648 .test = alg_test_null,
2651 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2652 .test = alg_test_null,
2654 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2655 .test = alg_test_null,
2657 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2658 .test = alg_test_null,
2660 .alg = "cryptd(__driver-ecb-aes-aesni)",
2661 .test = alg_test_null,
2664 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2665 .test = alg_test_null,
2667 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2668 .test = alg_test_null,
2670 .alg = "cryptd(__driver-ecb-cast5-avx)",
2671 .test = alg_test_null,
2673 .alg = "cryptd(__driver-ecb-cast6-avx)",
2674 .test = alg_test_null,
2676 .alg = "cryptd(__driver-ecb-serpent-avx)",
2677 .test = alg_test_null,
2679 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2680 .test = alg_test_null,
2682 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2683 .test = alg_test_null,
2685 .alg = "cryptd(__driver-ecb-twofish-avx)",
2686 .test = alg_test_null,
2688 .alg = "cryptd(__driver-gcm-aes-aesni)",
2689 .test = alg_test_null,
2692 .alg = "cryptd(__ghash-pclmulqdqni)",
2693 .test = alg_test_null,
2697 .test = alg_test_skcipher,
2702 .vecs = aes_ctr_enc_tv_template,
2703 .count = AES_CTR_ENC_TEST_VECTORS
2706 .vecs = aes_ctr_dec_tv_template,
2707 .count = AES_CTR_DEC_TEST_VECTORS
2712 .alg = "ctr(blowfish)",
2713 .test = alg_test_skcipher,
2717 .vecs = bf_ctr_enc_tv_template,
2718 .count = BF_CTR_ENC_TEST_VECTORS
2721 .vecs = bf_ctr_dec_tv_template,
2722 .count = BF_CTR_DEC_TEST_VECTORS
2727 .alg = "ctr(camellia)",
2728 .test = alg_test_skcipher,
2732 .vecs = camellia_ctr_enc_tv_template,
2733 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2736 .vecs = camellia_ctr_dec_tv_template,
2737 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2742 .alg = "ctr(cast5)",
2743 .test = alg_test_skcipher,
2747 .vecs = cast5_ctr_enc_tv_template,
2748 .count = CAST5_CTR_ENC_TEST_VECTORS
2751 .vecs = cast5_ctr_dec_tv_template,
2752 .count = CAST5_CTR_DEC_TEST_VECTORS
2757 .alg = "ctr(cast6)",
2758 .test = alg_test_skcipher,
2762 .vecs = cast6_ctr_enc_tv_template,
2763 .count = CAST6_CTR_ENC_TEST_VECTORS
2766 .vecs = cast6_ctr_dec_tv_template,
2767 .count = CAST6_CTR_DEC_TEST_VECTORS
2773 .test = alg_test_skcipher,
2777 .vecs = des_ctr_enc_tv_template,
2778 .count = DES_CTR_ENC_TEST_VECTORS
2781 .vecs = des_ctr_dec_tv_template,
2782 .count = DES_CTR_DEC_TEST_VECTORS
2787 .alg = "ctr(des3_ede)",
2788 .test = alg_test_skcipher,
2792 .vecs = des3_ede_ctr_enc_tv_template,
2793 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2796 .vecs = des3_ede_ctr_dec_tv_template,
2797 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2802 .alg = "ctr(serpent)",
2803 .test = alg_test_skcipher,
2807 .vecs = serpent_ctr_enc_tv_template,
2808 .count = SERPENT_CTR_ENC_TEST_VECTORS
2811 .vecs = serpent_ctr_dec_tv_template,
2812 .count = SERPENT_CTR_DEC_TEST_VECTORS
2817 .alg = "ctr(twofish)",
2818 .test = alg_test_skcipher,
2822 .vecs = tf_ctr_enc_tv_template,
2823 .count = TF_CTR_ENC_TEST_VECTORS
2826 .vecs = tf_ctr_dec_tv_template,
2827 .count = TF_CTR_DEC_TEST_VECTORS
2832 .alg = "cts(cbc(aes))",
2833 .test = alg_test_skcipher,
2837 .vecs = cts_mode_enc_tv_template,
2838 .count = CTS_MODE_ENC_TEST_VECTORS
2841 .vecs = cts_mode_dec_tv_template,
2842 .count = CTS_MODE_DEC_TEST_VECTORS
2848 .test = alg_test_comp,
2853 .vecs = deflate_comp_tv_template,
2854 .count = DEFLATE_COMP_TEST_VECTORS
2857 .vecs = deflate_decomp_tv_template,
2858 .count = DEFLATE_DECOMP_TEST_VECTORS
2864 .test = alg_test_kpp,
2868 .vecs = dh_tv_template,
2869 .count = DH_TEST_VECTORS
2873 .alg = "digest_null",
2874 .test = alg_test_null,
2876 .alg = "drbg_nopr_ctr_aes128",
2877 .test = alg_test_drbg,
2881 .vecs = drbg_nopr_ctr_aes128_tv_template,
2882 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2886 .alg = "drbg_nopr_ctr_aes192",
2887 .test = alg_test_drbg,
2891 .vecs = drbg_nopr_ctr_aes192_tv_template,
2892 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2896 .alg = "drbg_nopr_ctr_aes256",
2897 .test = alg_test_drbg,
2901 .vecs = drbg_nopr_ctr_aes256_tv_template,
2902 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2907 * There is no need to specifically test the DRBG with every
2908 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2910 .alg = "drbg_nopr_hmac_sha1",
2912 .test = alg_test_null,
2914 .alg = "drbg_nopr_hmac_sha256",
2915 .test = alg_test_drbg,
2919 .vecs = drbg_nopr_hmac_sha256_tv_template,
2921 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2925 /* covered by drbg_nopr_hmac_sha256 test */
2926 .alg = "drbg_nopr_hmac_sha384",
2928 .test = alg_test_null,
2930 .alg = "drbg_nopr_hmac_sha512",
2931 .test = alg_test_null,
2934 .alg = "drbg_nopr_sha1",
2936 .test = alg_test_null,
2938 .alg = "drbg_nopr_sha256",
2939 .test = alg_test_drbg,
2943 .vecs = drbg_nopr_sha256_tv_template,
2944 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2948 /* covered by drbg_nopr_sha256 test */
2949 .alg = "drbg_nopr_sha384",
2951 .test = alg_test_null,
2953 .alg = "drbg_nopr_sha512",
2955 .test = alg_test_null,
2957 .alg = "drbg_pr_ctr_aes128",
2958 .test = alg_test_drbg,
2962 .vecs = drbg_pr_ctr_aes128_tv_template,
2963 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2967 /* covered by drbg_pr_ctr_aes128 test */
2968 .alg = "drbg_pr_ctr_aes192",
2970 .test = alg_test_null,
2972 .alg = "drbg_pr_ctr_aes256",
2974 .test = alg_test_null,
2976 .alg = "drbg_pr_hmac_sha1",
2978 .test = alg_test_null,
2980 .alg = "drbg_pr_hmac_sha256",
2981 .test = alg_test_drbg,
2985 .vecs = drbg_pr_hmac_sha256_tv_template,
2986 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2990 /* covered by drbg_pr_hmac_sha256 test */
2991 .alg = "drbg_pr_hmac_sha384",
2993 .test = alg_test_null,
2995 .alg = "drbg_pr_hmac_sha512",
2996 .test = alg_test_null,
2999 .alg = "drbg_pr_sha1",
3001 .test = alg_test_null,
3003 .alg = "drbg_pr_sha256",
3004 .test = alg_test_drbg,
3008 .vecs = drbg_pr_sha256_tv_template,
3009 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
3013 /* covered by drbg_pr_sha256 test */
3014 .alg = "drbg_pr_sha384",
3016 .test = alg_test_null,
3018 .alg = "drbg_pr_sha512",
3020 .test = alg_test_null,
3022 .alg = "ecb(__aes-aesni)",
3023 .test = alg_test_null,
3027 .test = alg_test_skcipher,
3032 .vecs = aes_enc_tv_template,
3033 .count = AES_ENC_TEST_VECTORS
3036 .vecs = aes_dec_tv_template,
3037 .count = AES_DEC_TEST_VECTORS
3042 .alg = "ecb(anubis)",
3043 .test = alg_test_skcipher,
3047 .vecs = anubis_enc_tv_template,
3048 .count = ANUBIS_ENC_TEST_VECTORS
3051 .vecs = anubis_dec_tv_template,
3052 .count = ANUBIS_DEC_TEST_VECTORS
3058 .test = alg_test_skcipher,
3062 .vecs = arc4_enc_tv_template,
3063 .count = ARC4_ENC_TEST_VECTORS
3066 .vecs = arc4_dec_tv_template,
3067 .count = ARC4_DEC_TEST_VECTORS
3072 .alg = "ecb(blowfish)",
3073 .test = alg_test_skcipher,
3077 .vecs = bf_enc_tv_template,
3078 .count = BF_ENC_TEST_VECTORS
3081 .vecs = bf_dec_tv_template,
3082 .count = BF_DEC_TEST_VECTORS
3087 .alg = "ecb(camellia)",
3088 .test = alg_test_skcipher,
3092 .vecs = camellia_enc_tv_template,
3093 .count = CAMELLIA_ENC_TEST_VECTORS
3096 .vecs = camellia_dec_tv_template,
3097 .count = CAMELLIA_DEC_TEST_VECTORS
3102 .alg = "ecb(cast5)",
3103 .test = alg_test_skcipher,
3107 .vecs = cast5_enc_tv_template,
3108 .count = CAST5_ENC_TEST_VECTORS
3111 .vecs = cast5_dec_tv_template,
3112 .count = CAST5_DEC_TEST_VECTORS
3117 .alg = "ecb(cast6)",
3118 .test = alg_test_skcipher,
3122 .vecs = cast6_enc_tv_template,
3123 .count = CAST6_ENC_TEST_VECTORS
3126 .vecs = cast6_dec_tv_template,
3127 .count = CAST6_DEC_TEST_VECTORS
3132 .alg = "ecb(cipher_null)",
3133 .test = alg_test_null,
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = des_enc_tv_template,
				 .count = DES_ENC_TEST_VECTORS },
			.dec = { .vecs = des_dec_tv_template,
				 .count = DES_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = des3_ede_enc_tv_template,
				 .count = DES3_EDE_ENC_TEST_VECTORS },
			.dec = { .vecs = des3_ede_dec_tv_template,
				 .count = DES3_EDE_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(fcrypt)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = fcrypt_pcbc_enc_tv_template },
			.dec = { .vecs = fcrypt_pcbc_dec_tv_template } } }
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = khazad_enc_tv_template,
				 .count = KHAZAD_ENC_TEST_VECTORS },
			.dec = { .vecs = khazad_dec_tv_template,
				 .count = KHAZAD_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = seed_enc_tv_template,
				 .count = SEED_ENC_TEST_VECTORS },
			.dec = { .vecs = seed_dec_tv_template,
				 .count = SEED_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = serpent_enc_tv_template,
				 .count = SERPENT_ENC_TEST_VECTORS },
			.dec = { .vecs = serpent_dec_tv_template,
				 .count = SERPENT_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = tea_enc_tv_template,
				 .count = TEA_ENC_TEST_VECTORS },
			.dec = { .vecs = tea_dec_tv_template,
				 .count = TEA_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = tnepres_enc_tv_template,
				 .count = TNEPRES_ENC_TEST_VECTORS },
			.dec = { .vecs = tnepres_dec_tv_template,
				 .count = TNEPRES_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = tf_enc_tv_template,
				 .count = TF_ENC_TEST_VECTORS },
			.dec = { .vecs = tf_dec_tv_template,
				 .count = TF_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = xeta_enc_tv_template,
				 .count = XETA_ENC_TEST_VECTORS },
			.dec = { .vecs = xeta_dec_tv_template,
				 .count = XETA_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = xtea_enc_tv_template,
				 .count = XTEA_ENC_TEST_VECTORS },
			.dec = { .vecs = xtea_dec_tv_template,
				 .count = XTEA_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ecdh",
		.test = alg_test_kpp,
		.suite = { .kpp = {
			.vecs = ecdh_tv_template,
			.count = ECDH_TEST_VECTORS } }
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.suite = { .aead = {
			.enc = { .vecs = aes_gcm_enc_tv_template,
				 .count = AES_GCM_ENC_TEST_VECTORS },
			.dec = { .vecs = aes_gcm_dec_tv_template,
				 .count = AES_GCM_DEC_TEST_VECTORS } } }
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = ghash_tv_template,
			.count = GHASH_TEST_VECTORS } }
	}, {
		.alg = "hmac(crc32)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = bfin_crc_tv_template,
			.count = BFIN_CRC_TEST_VECTORS } }
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = hmac_md5_tv_template,
			.count = HMAC_MD5_TEST_VECTORS } }
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = hmac_rmd128_tv_template,
			.count = HMAC_RMD128_TEST_VECTORS } }
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = hmac_rmd160_tv_template,
			.count = HMAC_RMD160_TEST_VECTORS } }
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = hmac_sha1_tv_template,
			.count = HMAC_SHA1_TEST_VECTORS } }
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = hmac_sha224_tv_template,
			.count = HMAC_SHA224_TEST_VECTORS } }
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = hmac_sha256_tv_template,
			.count = HMAC_SHA256_TEST_VECTORS } }
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = hmac_sha384_tv_template,
			.count = HMAC_SHA384_TEST_VECTORS } }
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = hmac_sha512_tv_template,
			.count = HMAC_SHA512_TEST_VECTORS } }
	}, {
		.alg = "jitterentropy_rng",
		.test = alg_test_null,
	}, {
		.alg = "kw(aes)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = aes_kw_enc_tv_template,
				 .count = ARRAY_SIZE(aes_kw_enc_tv_template) },
			.dec = { .vecs = aes_kw_dec_tv_template,
				 .count = ARRAY_SIZE(aes_kw_dec_tv_template) } } }
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = aes_lrw_enc_tv_template,
				 .count = AES_LRW_ENC_TEST_VECTORS },
			.dec = { .vecs = aes_lrw_dec_tv_template,
				 .count = AES_LRW_DEC_TEST_VECTORS } } }
	}, {
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = camellia_lrw_enc_tv_template,
				 .count = CAMELLIA_LRW_ENC_TEST_VECTORS },
			.dec = { .vecs = camellia_lrw_dec_tv_template,
				 .count = CAMELLIA_LRW_DEC_TEST_VECTORS } } }
	}, {
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = cast6_lrw_enc_tv_template,
				 .count = CAST6_LRW_ENC_TEST_VECTORS },
			.dec = { .vecs = cast6_lrw_dec_tv_template,
				 .count = CAST6_LRW_DEC_TEST_VECTORS } } }
	}, {
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = serpent_lrw_enc_tv_template,
				 .count = SERPENT_LRW_ENC_TEST_VECTORS },
			.dec = { .vecs = serpent_lrw_dec_tv_template,
				 .count = SERPENT_LRW_DEC_TEST_VECTORS } } }
	}, {
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = tf_lrw_enc_tv_template,
				 .count = TF_LRW_ENC_TEST_VECTORS },
			.dec = { .vecs = tf_lrw_dec_tv_template,
				 .count = TF_LRW_DEC_TEST_VECTORS } } }
	}, {
		.alg = "lz4",
		.test = alg_test_comp,
		.suite = { .comp = {
			.comp = { .vecs = lz4_comp_tv_template,
				  .count = LZ4_COMP_TEST_VECTORS },
			.decomp = { .vecs = lz4_decomp_tv_template,
				    .count = LZ4_DECOMP_TEST_VECTORS } } }
	}, {
		.alg = "lz4hc",
		.test = alg_test_comp,
		.suite = { .comp = {
			.comp = { .vecs = lz4hc_comp_tv_template,
				  .count = LZ4HC_COMP_TEST_VECTORS },
			.decomp = { .vecs = lz4hc_decomp_tv_template,
				    .count = LZ4HC_DECOMP_TEST_VECTORS } } }
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.suite = { .comp = {
			.comp = { .vecs = lzo_comp_tv_template,
				  .count = LZO_COMP_TEST_VECTORS },
			.decomp = { .vecs = lzo_decomp_tv_template,
				    .count = LZO_DECOMP_TEST_VECTORS } } }
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = md4_tv_template,
			.count = MD4_TEST_VECTORS } }
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = md5_tv_template,
			.count = MD5_TEST_VECTORS } }
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = michael_mic_tv_template,
			.count = MICHAEL_MIC_TEST_VECTORS } }
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = aes_ofb_enc_tv_template,
				 .count = AES_OFB_ENC_TEST_VECTORS },
			.dec = { .vecs = aes_ofb_dec_tv_template,
				 .count = AES_OFB_DEC_TEST_VECTORS } } }
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = fcrypt_pcbc_enc_tv_template,
				 .count = FCRYPT_ENC_TEST_VECTORS },
			.dec = { .vecs = fcrypt_pcbc_dec_tv_template,
				 .count = FCRYPT_DEC_TEST_VECTORS } } }
	}, {
		.alg = "poly1305",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = poly1305_tv_template,
			.count = POLY1305_TEST_VECTORS } }
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = aes_ctr_rfc3686_enc_tv_template,
				 .count = AES_CTR_3686_ENC_TEST_VECTORS },
			.dec = { .vecs = aes_ctr_rfc3686_dec_tv_template,
				 .count = AES_CTR_3686_DEC_TEST_VECTORS } } }
	}, {
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.suite = { .aead = {
			.enc = { .vecs = aes_gcm_rfc4106_enc_tv_template,
				 .count = AES_GCM_4106_ENC_TEST_VECTORS },
			.dec = { .vecs = aes_gcm_rfc4106_dec_tv_template,
				 .count = AES_GCM_4106_DEC_TEST_VECTORS } } }
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.suite = { .aead = {
			.enc = { .vecs = aes_ccm_rfc4309_enc_tv_template,
				 .count = AES_CCM_4309_ENC_TEST_VECTORS },
			.dec = { .vecs = aes_ccm_rfc4309_dec_tv_template,
				 .count = AES_CCM_4309_DEC_TEST_VECTORS } } }
	}, {
		.alg = "rfc4543(gcm(aes))",
		.test = alg_test_aead,
		.suite = { .aead = {
			.enc = { .vecs = aes_gcm_rfc4543_enc_tv_template,
				 .count = AES_GCM_4543_ENC_TEST_VECTORS },
			.dec = { .vecs = aes_gcm_rfc4543_dec_tv_template,
				 .count = AES_GCM_4543_DEC_TEST_VECTORS } } }
	}, {
		.alg = "rfc7539(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = { .aead = {
			.enc = { .vecs = rfc7539_enc_tv_template,
				 .count = RFC7539_ENC_TEST_VECTORS },
			.dec = { .vecs = rfc7539_dec_tv_template,
				 .count = RFC7539_DEC_TEST_VECTORS } } }
	}, {
		.alg = "rfc7539esp(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = { .aead = {
			.enc = { .vecs = rfc7539esp_enc_tv_template,
				 .count = RFC7539ESP_ENC_TEST_VECTORS },
			.dec = { .vecs = rfc7539esp_dec_tv_template,
				 .count = RFC7539ESP_DEC_TEST_VECTORS } } }
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = rmd128_tv_template,
			.count = RMD128_TEST_VECTORS } }
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = rmd160_tv_template,
			.count = RMD160_TEST_VECTORS } }
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = rmd256_tv_template,
			.count = RMD256_TEST_VECTORS } }
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = rmd320_tv_template,
			.count = RMD320_TEST_VECTORS } }
	}, {
		.alg = "rsa",
		.test = alg_test_akcipher,
		.suite = { .akcipher = {
			.vecs = rsa_tv_template,
			.count = RSA_TEST_VECTORS } }
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = salsa20_stream_enc_tv_template,
				 .count = SALSA20_STREAM_ENC_TEST_VECTORS } } }
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha1_tv_template,
			.count = SHA1_TEST_VECTORS } }
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha224_tv_template,
			.count = SHA224_TEST_VECTORS } }
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha256_tv_template,
			.count = SHA256_TEST_VECTORS } }
	}, {
		.alg = "sha3-224",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha3_224_tv_template,
			.count = SHA3_224_TEST_VECTORS } }
	}, {
		.alg = "sha3-256",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha3_256_tv_template,
			.count = SHA3_256_TEST_VECTORS } }
	}, {
		.alg = "sha3-384",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha3_384_tv_template,
			.count = SHA3_384_TEST_VECTORS } }
	}, {
		.alg = "sha3-512",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha3_512_tv_template,
			.count = SHA3_512_TEST_VECTORS } }
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha384_tv_template,
			.count = SHA384_TEST_VECTORS } }
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = sha512_tv_template,
			.count = SHA512_TEST_VECTORS } }
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = tgr128_tv_template,
			.count = TGR128_TEST_VECTORS } }
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = tgr160_tv_template,
			.count = TGR160_TEST_VECTORS } }
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = tgr192_tv_template,
			.count = TGR192_TEST_VECTORS } }
	}, {
		.alg = "vmac(aes)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = aes_vmac128_tv_template,
			.count = VMAC_AES_TEST_VECTORS } }
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = wp256_tv_template,
			.count = WP256_TEST_VECTORS } }
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = wp384_tv_template,
			.count = WP384_TEST_VECTORS } }
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = wp512_tv_template,
			.count = WP512_TEST_VECTORS } }
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = { .hash = {
			.vecs = aes_xcbc128_tv_template,
			.count = XCBC_AES_TEST_VECTORS } }
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = aes_xts_enc_tv_template,
				 .count = AES_XTS_ENC_TEST_VECTORS },
			.dec = { .vecs = aes_xts_dec_tv_template,
				 .count = AES_XTS_DEC_TEST_VECTORS } } }
	}, {
		.alg = "xts(camellia)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = camellia_xts_enc_tv_template,
				 .count = CAMELLIA_XTS_ENC_TEST_VECTORS },
			.dec = { .vecs = camellia_xts_dec_tv_template,
				 .count = CAMELLIA_XTS_DEC_TEST_VECTORS } } }
	}, {
		.alg = "xts(cast6)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = cast6_xts_enc_tv_template,
				 .count = CAST6_XTS_ENC_TEST_VECTORS },
			.dec = { .vecs = cast6_xts_dec_tv_template,
				 .count = CAST6_XTS_DEC_TEST_VECTORS } } }
	}, {
		.alg = "xts(serpent)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = serpent_xts_enc_tv_template,
				 .count = SERPENT_XTS_ENC_TEST_VECTORS },
			.dec = { .vecs = serpent_xts_dec_tv_template,
				 .count = SERPENT_XTS_DEC_TEST_VECTORS } } }
	}, {
		.alg = "xts(twofish)",
		.test = alg_test_skcipher,
		.suite = { .cipher = {
			.enc = { .vecs = tf_xts_enc_tv_template,
				 .count = TF_XTS_ENC_TEST_VECTORS },
			.dec = { .vecs = tf_xts_dec_tv_template,
				 .count = TF_XTS_DEC_TEST_VECTORS } } }
	}
};
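
/*
 * The alg_test_descs[] table above maps each algorithm name to the
 * routine used to test it (alg_test_hash, alg_test_skcipher,
 * alg_test_aead, alg_test_comp, alg_test_drbg, alg_test_akcipher,
 * alg_test_kpp, or the no-op alg_test_null) and to the test vectors it
 * is exercised with.  Entries must stay sorted by .alg:
 * alg_find_test() below does a binary search over the table, and
 * alg_test_descs_check_order() warns at boot if the table is out of
 * order or contains duplicates.
 */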
static bool alg_test_descs_checked;

static void alg_test_descs_check_order(void)
{
	int i;

	/* only check once */
	if (alg_test_descs_checked)
		return;

	alg_test_descs_checked = true;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}
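
/*
 * Binary search for an algorithm name in the sorted alg_test_descs[]
 * table.  Returns the index of the matching entry, or -1 if no
 * self-test is registered for this algorithm.
 */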
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0)
			end = i;
		else if (diff < 0)
			start = i + 1;
		else
			return i;
	}

	return -1;
}
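
/*
 * Look up and run the self-tests for an algorithm.  Plain block ciphers
 * are looked up through their "ecb(...)" wrapping; otherwise both the
 * algorithm name and the driver name are checked against the table and
 * any matching tests are run.  In fips mode a failed self-test panics
 * the kernel, and algorithms without a fips_allowed entry are rejected.
 */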
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -ELIBBAD;
}
#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);