2 * Algorithm testing framework and tests.
6 * Copyright (c) 2007 Nokia Siemens Networks
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
16 #include <crypto/hash.h>
17 #include <linux/err.h>
18 #include <linux/module.h>
19 #include <linux/scatterlist.h>
20 #include <linux/slab.h>
21 #include <linux/string.h>
22 #include <crypto/rng.h>
28 * Need slab memory for testing (size in number of pages).
33 * Indexes into the xbuf to simulate cross-page access.
45 * Used by test_cipher()
50 struct tcrypt_result {
51 struct completion completion;
55 struct aead_test_suite {
57 struct aead_testvec *vecs;
62 struct cipher_test_suite {
64 struct cipher_testvec *vecs;
69 struct comp_test_suite {
71 struct comp_testvec *vecs;
76 struct pcomp_test_suite {
78 struct pcomp_testvec *vecs;
83 struct hash_test_suite {
84 struct hash_testvec *vecs;
88 struct cprng_test_suite {
89 struct cprng_testvec *vecs;
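/*
 * Ties an algorithm name to the routine that tests it and to the test
 * vector suite(s) that routine consumes.
 */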
93 struct alg_test_desc {
95 int (*test)(const struct alg_test_desc *desc, const char *driver,
99 struct aead_test_suite aead;
100 struct cipher_test_suite cipher;
101 struct comp_test_suite comp;
102 struct pcomp_test_suite pcomp;
103 struct hash_test_suite hash;
104 struct cprng_test_suite cprng;
108 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
110 static char *xbuf[XBUFSIZE];
111 static char *axbuf[XBUFSIZE];
113 static void hexdump(unsigned char *buf, unsigned int len)
115 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
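/*
 * Completion callback for asynchronous requests: ignore the -EINPROGRESS
 * notification issued when a backlogged request starts; otherwise record
 * the final status and wake up the waiter.
 */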
120 static void tcrypt_complete(struct crypto_async_request *req, int err)
122 struct tcrypt_result *res = req->data;
124 if (err == -EINPROGRESS)
128 complete(&res->completion);
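/*
 * Run every hash test vector through the ahash API: first with the whole
 * plaintext in a single scatterlist entry, then, for vectors with a
 * non-zero np, split across page boundaries according to tap[] to
 * exercise scatter-gather walking.
 */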
131 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
134 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
135 unsigned int i, j, k, temp;
136 struct scatterlist sg[8];
138 struct ahash_request *req;
139 struct tcrypt_result tresult;
143 init_completion(&tresult.completion);
145 req = ahash_request_alloc(tfm, GFP_KERNEL);
147 printk(KERN_ERR "alg: hash: Failed to allocate request for "
152 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
153 tcrypt_complete, &tresult);
155 for (i = 0; i < tcount; i++) {
156 memset(result, 0, 64);
160 memcpy(hash_buff, template[i].plaintext, template[i].psize);
161 sg_init_one(&sg[0], hash_buff, template[i].psize);
163 if (template[i].ksize) {
164 crypto_ahash_clear_flags(tfm, ~0);
165 ret = crypto_ahash_setkey(tfm, template[i].key,
168 printk(KERN_ERR "alg: hash: setkey failed on "
169 "test %d for %s: ret=%d\n", i + 1, algo,
175 ahash_request_set_crypt(req, sg, result, template[i].psize);
176 ret = crypto_ahash_digest(req);
182 ret = wait_for_completion_interruptible(
183 &tresult.completion);
184 if (!ret && !(ret = tresult.err)) {
185 INIT_COMPLETION(tresult.completion);
190 printk(KERN_ERR "alg: hash: digest failed on test %d "
191 "for %s: ret=%d\n", i + 1, algo, -ret);
195 if (memcmp(result, template[i].digest,
196 crypto_ahash_digestsize(tfm))) {
197 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
199 hexdump(result, crypto_ahash_digestsize(tfm));
206 for (i = 0; i < tcount; i++) {
207 if (template[i].np) {
209 memset(result, 0, 64);
212 sg_init_table(sg, template[i].np);
213 for (k = 0; k < template[i].np; k++) {
215 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
216 offset_in_page(IDX[k]),
217 template[i].plaintext + temp,
220 temp += template[i].tap[k];
223 if (template[i].ksize) {
224 crypto_ahash_clear_flags(tfm, ~0);
225 ret = crypto_ahash_setkey(tfm, template[i].key,
229 printk(KERN_ERR "alg: hash: setkey "
230 "failed on chunking test %d "
231 "for %s: ret=%d\n", j, algo,
237 ahash_request_set_crypt(req, sg, result,
239 ret = crypto_ahash_digest(req);
245 ret = wait_for_completion_interruptible(
246 &tresult.completion);
247 if (!ret && !(ret = tresult.err)) {
248 INIT_COMPLETION(tresult.completion);
253 printk(KERN_ERR "alg: hash: digest failed "
254 "on chunking test %d for %s: "
255 "ret=%d\n", j, algo, -ret);
259 if (memcmp(result, template[i].digest,
260 crypto_ahash_digestsize(tfm))) {
261 printk(KERN_ERR "alg: hash: Chunking test %d "
262 "failed for %s\n", j, algo);
263 hexdump(result, crypto_ahash_digestsize(tfm));
273 ahash_request_free(req);
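/*
 * AEAD tests follow the same two-pass pattern as test_hash(): linear
 * buffers first, then chunked scatterlists assembled from tap[] and
 * atap[].  The authentication tag size is taken from the difference
 * between the input and result lengths, and vectors marked novrfy are
 * expected to fail decryption with -EBADMSG.
 */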
278 static int test_aead(struct crypto_aead *tfm, int enc,
279 struct aead_testvec *template, unsigned int tcount)
281 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
282 unsigned int i, j, k, n, temp;
286 struct aead_request *req;
287 struct scatterlist sg[8];
288 struct scatterlist asg[8];
290 struct tcrypt_result result;
291 unsigned int authsize;
301 init_completion(&result.completion);
303 req = aead_request_alloc(tfm, GFP_KERNEL);
305 printk(KERN_ERR "alg: aead: Failed to allocate request for "
311 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
312 tcrypt_complete, &result);
314 for (i = 0, j = 0; i < tcount; i++) {
315 if (!template[i].np) {
318 /* some templates have no input data but they will
324 memcpy(input, template[i].input, template[i].ilen);
325 memcpy(assoc, template[i].assoc, template[i].alen);
327 memcpy(iv, template[i].iv, MAX_IVLEN);
329 memset(iv, 0, MAX_IVLEN);
331 crypto_aead_clear_flags(tfm, ~0);
333 crypto_aead_set_flags(
334 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
336 key = template[i].key;
338 ret = crypto_aead_setkey(tfm, key,
340 if (!ret == template[i].fail) {
341 printk(KERN_ERR "alg: aead: setkey failed on "
342 "test %d for %s: flags=%x\n", j, algo,
343 crypto_aead_get_flags(tfm));
348 authsize = abs(template[i].rlen - template[i].ilen);
349 ret = crypto_aead_setauthsize(tfm, authsize);
351 printk(KERN_ERR "alg: aead: Failed to set "
352 "authsize to %u on test %d for %s\n",
357 sg_init_one(&sg[0], input,
358 template[i].ilen + (enc ? authsize : 0));
360 sg_init_one(&asg[0], assoc, template[i].alen);
362 aead_request_set_crypt(req, sg, sg,
363 template[i].ilen, iv);
365 aead_request_set_assoc(req, asg, template[i].alen);
368 crypto_aead_encrypt(req) :
369 crypto_aead_decrypt(req);
373 if (template[i].novrfy) {
374 /* verification was supposed to fail */
375 printk(KERN_ERR "alg: aead: %s failed "
376 "on test %d for %s: ret was 0, "
377 "expected -EBADMSG\n",
379 /* so really, we got a bad message */
386 ret = wait_for_completion_interruptible(
388 if (!ret && !(ret = result.err)) {
389 INIT_COMPLETION(result.completion);
393 if (template[i].novrfy)
394 /* verification failure was expected */
398 printk(KERN_ERR "alg: aead: %s failed on test "
399 "%d for %s: ret=%d\n", e, j, algo, -ret);
404 if (memcmp(q, template[i].result, template[i].rlen)) {
405 printk(KERN_ERR "alg: aead: Test %d failed on "
406 "%s for %s\n", j, e, algo);
407 hexdump(q, template[i].rlen);
414 for (i = 0, j = 0; i < tcount; i++) {
415 if (template[i].np) {
419 memcpy(iv, template[i].iv, MAX_IVLEN);
421 memset(iv, 0, MAX_IVLEN);
423 crypto_aead_clear_flags(tfm, ~0);
425 crypto_aead_set_flags(
426 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
427 key = template[i].key;
429 ret = crypto_aead_setkey(tfm, key, template[i].klen);
430 if (!ret == template[i].fail) {
431 printk(KERN_ERR "alg: aead: setkey failed on "
432 "chunk test %d for %s: flags=%x\n", j,
433 algo, crypto_aead_get_flags(tfm));
438 authsize = abs(template[i].rlen - template[i].ilen);
441 sg_init_table(sg, template[i].np);
442 for (k = 0, temp = 0; k < template[i].np; k++) {
443 if (WARN_ON(offset_in_page(IDX[k]) +
444 template[i].tap[k] > PAGE_SIZE))
447 q = xbuf[IDX[k] >> PAGE_SHIFT] +
448 offset_in_page(IDX[k]);
450 memcpy(q, template[i].input + temp,
453 n = template[i].tap[k];
454 if (k == template[i].np - 1 && enc)
456 if (offset_in_page(q) + n < PAGE_SIZE)
459 sg_set_buf(&sg[k], q, template[i].tap[k]);
460 temp += template[i].tap[k];
463 ret = crypto_aead_setauthsize(tfm, authsize);
465 printk(KERN_ERR "alg: aead: Failed to set "
466 "authsize to %u on chunk test %d for "
467 "%s\n", authsize, j, algo);
472 if (WARN_ON(sg[k - 1].offset +
473 sg[k - 1].length + authsize >
479 sg[k - 1].length += authsize;
482 sg_init_table(asg, template[i].anp);
483 for (k = 0, temp = 0; k < template[i].anp; k++) {
484 sg_set_buf(&asg[k],
485 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
486 offset_in_page(IDX[k]),
487 template[i].assoc + temp,
488 template[i].atap[k]),
489 template[i].atap[k]);
490 temp += template[i].atap[k];
493 aead_request_set_crypt(req, sg, sg,
497 aead_request_set_assoc(req, asg, template[i].alen);
500 crypto_aead_encrypt(req) :
501 crypto_aead_decrypt(req);
505 if (template[i].novrfy) {
506 /* verification was supposed to fail */
507 printk(KERN_ERR "alg: aead: %s failed "
508 "on chunk test %d for %s: ret "
509 "was 0, expected -EBADMSG\n",
511 /* so really, we got a bad message */
518 ret = wait_for_completion_interruptible(
520 if (!ret && !(ret = result.err)) {
521 INIT_COMPLETION(result.completion);
525 if (template[i].novrfy)
526 /* verification failure was expected */
530 printk(KERN_ERR "alg: aead: %s failed on "
531 "chunk test %d for %s: ret=%d\n", e, j,
537 for (k = 0, temp = 0; k < template[i].np; k++) {
538 q = xbuf[IDX[k] >> PAGE_SHIFT] +
539 offset_in_page(IDX[k]);
541 n = template[i].tap[k];
542 if (k == template[i].np - 1)
543 n += enc ? authsize : -authsize;
545 if (memcmp(q, template[i].result + temp, n)) {
546 printk(KERN_ERR "alg: aead: Chunk "
547 "test %d failed on %s at page "
548 "%u for %s\n", j, e, k, algo);
554 if (k == template[i].np - 1 && !enc) {
555 if (memcmp(q, template[i].input +
561 for (n = 0; offset_in_page(q + n) &&
566 printk(KERN_ERR "alg: aead: Result "
567 "buffer corruption in chunk "
568 "test %d on %s at page %u for "
569 "%s: %u bytes:\n", j, e, k,
575 temp += template[i].tap[k];
583 aead_request_free(req);
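/*
 * Exercise the synchronous single-block cipher interface: set the key
 * once per vector, then encrypt or decrypt the input in place one
 * cipher block at a time.
 */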
587 static int test_cipher(struct crypto_cipher *tfm, int enc,
588 struct cipher_testvec *template, unsigned int tcount)
590 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
591 unsigned int i, j, k;
603 for (i = 0; i < tcount; i++) {
610 memcpy(data, template[i].input, template[i].ilen);
612 crypto_cipher_clear_flags(tfm, ~0);
614 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
616 ret = crypto_cipher_setkey(tfm, template[i].key,
618 if (!ret == template[i].fail) {
619 printk(KERN_ERR "alg: cipher: setkey failed "
620 "on test %d for %s: flags=%x\n", j,
621 algo, crypto_cipher_get_flags(tfm));
626 for (k = 0; k < template[i].ilen;
627 k += crypto_cipher_blocksize(tfm)) {
629 crypto_cipher_encrypt_one(tfm, data + k,
632 crypto_cipher_decrypt_one(tfm, data + k,
637 if (memcmp(q, template[i].result, template[i].rlen)) {
638 printk(KERN_ERR "alg: cipher: Test %d failed "
639 "on %s for %s\n", j, e, algo);
640 hexdump(q, template[i].rlen);
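/*
 * Asynchronous block cipher (ablkcipher) tests: as above, each vector is
 * run once through a single scatterlist entry and, when np is set, once
 * more through a chunked scatterlist built from tap[].
 */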
652 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
653 struct cipher_testvec *template, unsigned int tcount)
656 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
657 unsigned int i, j, k, n, temp;
660 struct ablkcipher_request *req;
661 struct scatterlist sg[8];
663 struct tcrypt_result result;
672 init_completion(&result.completion);
674 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
676 printk(KERN_ERR "alg: skcipher: Failed to allocate request "
682 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
683 tcrypt_complete, &result);
686 for (i = 0; i < tcount; i++) {
688 memcpy(iv, template[i].iv, MAX_IVLEN);
690 memset(iv, 0, MAX_IVLEN);
692 if (!(template[i].np)) {
696 memcpy(data, template[i].input, template[i].ilen);
698 crypto_ablkcipher_clear_flags(tfm, ~0);
700 crypto_ablkcipher_set_flags(
701 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
703 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
705 if (!ret == template[i].fail) {
706 printk(KERN_ERR "alg: skcipher: setkey failed "
707 "on test %d for %s: flags=%x\n", j,
708 algo, crypto_ablkcipher_get_flags(tfm));
713 sg_init_one(&sg[0], data, template[i].ilen);
715 ablkcipher_request_set_crypt(req, sg, sg,
716 template[i].ilen, iv);
718 crypto_ablkcipher_encrypt(req) :
719 crypto_ablkcipher_decrypt(req);
726 ret = wait_for_completion_interruptible(
728 if (!ret && !((ret = result.err))) {
729 INIT_COMPLETION(result.completion);
734 printk(KERN_ERR "alg: skcipher: %s failed on "
735 "test %d for %s: ret=%d\n", e, j, algo,
741 if (memcmp(q, template[i].result, template[i].rlen)) {
742 printk(KERN_ERR "alg: skcipher: Test %d "
743 "failed on %s for %s\n", j, e, algo);
744 hexdump(q, template[i].rlen);
752 for (i = 0; i < tcount; i++) {
755 memcpy(iv, template[i].iv, MAX_IVLEN);
757 memset(iv, 0, MAX_IVLEN);
759 if (template[i].np) {
762 crypto_ablkcipher_clear_flags(tfm, ~0);
764 crypto_ablkcipher_set_flags(
765 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
767 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
769 if (!ret == template[i].fail) {
770 printk(KERN_ERR "alg: skcipher: setkey failed "
771 "on chunk test %d for %s: flags=%x\n",
773 crypto_ablkcipher_get_flags(tfm));
780 sg_init_table(sg, template[i].np);
781 for (k = 0; k < template[i].np; k++) {
782 if (WARN_ON(offset_in_page(IDX[k]) +
783 template[i].tap[k] > PAGE_SIZE))
786 q = xbuf[IDX[k] >> PAGE_SHIFT] +
787 offset_in_page(IDX[k]);
789 memcpy(q, template[i].input + temp,
792 if (offset_in_page(q) + template[i].tap[k] <
794 q[template[i].tap[k]] = 0;
796 sg_set_buf(&sg[k], q, template[i].tap[k]);
798 temp += template[i].tap[k];
801 ablkcipher_request_set_crypt(req, sg, sg,
802 template[i].ilen, iv);
805 crypto_ablkcipher_encrypt(req) :
806 crypto_ablkcipher_decrypt(req);
813 ret = wait_for_completion_interruptible(
815 if (!ret && !((ret = result.err))) {
816 INIT_COMPLETION(result.completion);
821 printk(KERN_ERR "alg: skcipher: %s failed on "
822 "chunk test %d for %s: ret=%d\n", e, j,
829 for (k = 0; k < template[i].np; k++) {
830 q = xbuf[IDX[k] >> PAGE_SHIFT] +
831 offset_in_page(IDX[k]);
833 if (memcmp(q, template[i].result + temp,
834 template[i].tap[k])) {
835 printk(KERN_ERR "alg: skcipher: Chunk "
836 "test %d failed on %s at page "
837 "%u for %s\n", j, e, k, algo);
838 hexdump(q, template[i].tap[k]);
842 q += template[i].tap[k];
843 for (n = 0; offset_in_page(q + n) && q[n]; n++)
846 printk(KERN_ERR "alg: skcipher: "
847 "Result buffer corruption in "
848 "chunk test %d on %s at page "
849 "%u for %s: %u bytes:\n", j, e,
854 temp += template[i].tap[k];
862 ablkcipher_request_free(req);
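/*
 * Synchronous compression tests: each vector is compressed or
 * decompressed in a single call, and both the reported output length and
 * the output bytes must match the template exactly.
 */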
866 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
867 struct comp_testvec *dtemplate, int ctcount, int dtcount)
869 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
871 char result[COMP_BUF_SIZE];
874 for (i = 0; i < ctcount; i++) {
876 unsigned int dlen = COMP_BUF_SIZE;
878 memset(result, 0, sizeof (result));
880 ilen = ctemplate[i].inlen;
881 ret = crypto_comp_compress(tfm, ctemplate[i].input,
882 ilen, result, &dlen);
884 printk(KERN_ERR "alg: comp: compression failed "
885 "on test %d for %s: ret=%d\n", i + 1, algo,
890 if (dlen != ctemplate[i].outlen) {
891 printk(KERN_ERR "alg: comp: Compression test %d "
892 "failed for %s: output len = %d\n", i + 1, algo,
898 if (memcmp(result, ctemplate[i].output, dlen)) {
899 printk(KERN_ERR "alg: comp: Compression test %d "
900 "failed for %s\n", i + 1, algo);
901 hexdump(result, dlen);
907 for (i = 0; i < dtcount; i++) {
909 unsigned int dlen = COMP_BUF_SIZE;
911 memset(result, 0, sizeof (result));
913 ilen = dtemplate[i].inlen;
914 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
915 ilen, result, &dlen);
917 printk(KERN_ERR "alg: comp: decompression failed "
918 "on test %d for %s: ret=%d\n", i + 1, algo,
923 if (dlen != dtemplate[i].outlen) {
924 printk(KERN_ERR "alg: comp: Decompression test %d "
925 "failed for %s: output len = %d\n", i + 1, algo,
931 if (memcmp(result, dtemplate[i].output, dlen)) {
932 printk(KERN_ERR "alg: comp: Decompression test %d "
933 "failed for %s\n", i + 1, algo);
934 hexdump(result, dlen);
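/*
 * Partial (streaming) compression tests: after setup and init, the input
 * is fed to the transform in two update calls with deliberately
 * undersized output space, then drained with a final call, so the
 * -EAGAIN continuation path gets exercised as well.
 */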
946 static int test_pcomp(struct crypto_pcomp *tfm,
947 struct pcomp_testvec *ctemplate,
948 struct pcomp_testvec *dtemplate, int ctcount,
951 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
953 char result[COMP_BUF_SIZE];
956 for (i = 0; i < ctcount; i++) {
957 struct comp_request req;
959 error = crypto_compress_setup(tfm, ctemplate[i].params,
960 ctemplate[i].paramsize);
962 pr_err("alg: pcomp: compression setup failed on test "
963 "%d for %s: error=%d\n", i + 1, algo, error);
967 error = crypto_compress_init(tfm);
969 pr_err("alg: pcomp: compression init failed on test "
970 "%d for %s: error=%d\n", i + 1, algo, error);
974 memset(result, 0, sizeof(result));
976 req.next_in = ctemplate[i].input;
977 req.avail_in = ctemplate[i].inlen / 2;
978 req.next_out = result;
979 req.avail_out = ctemplate[i].outlen / 2;
981 error = crypto_compress_update(tfm, &req);
982 if (error && (error != -EAGAIN || req.avail_in)) {
983 pr_err("alg: pcomp: compression update failed on test "
984 "%d for %s: error=%d\n", i + 1, algo, error);
988 /* Add remaining input data */
989 req.avail_in += (ctemplate[i].inlen + 1) / 2;
991 error = crypto_compress_update(tfm, &req);
992 if (error && (error != -EAGAIN || req.avail_in)) {
993 pr_err("alg: pcomp: compression update failed on test "
994 "%d for %s: error=%d\n", i + 1, algo, error);
998 /* Provide remaining output space */
999 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1001 error = crypto_compress_final(tfm, &req);
1003 pr_err("alg: pcomp: compression final failed on test "
1004 "%d for %s: error=%d\n", i + 1, algo, error);
1008 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1009 pr_err("alg: comp: Compression test %d failed for %s: "
1010 "output len = %d (expected %d)\n", i + 1, algo,
1011 COMP_BUF_SIZE - req.avail_out,
1012 ctemplate[i].outlen);
1016 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1017 pr_err("alg: pcomp: Compression test %d failed for "
1018 "%s\n", i + 1, algo);
1019 hexdump(result, ctemplate[i].outlen);
1024 for (i = 0; i < dtcount; i++) {
1025 struct comp_request req;
1027 error = crypto_decompress_setup(tfm, dtemplate[i].params,
1028 dtemplate[i].paramsize);
1030 pr_err("alg: pcomp: decompression setup failed on "
1031 "test %d for %s: error=%d\n", i + 1, algo,
1036 error = crypto_decompress_init(tfm);
1038 pr_err("alg: pcomp: decompression init failed on test "
1039 "%d for %s: error=%d\n", i + 1, algo, error);
1043 memset(result, 0, sizeof(result));
1045 req.next_in = dtemplate[i].input;
1046 req.avail_in = dtemplate[i].inlen / 2;
1047 req.next_out = result;
1048 req.avail_out = dtemplate[i].outlen / 2;
1050 error = crypto_decompress_update(tfm, &req);
1051 if (error && (error != -EAGAIN || req.avail_in)) {
1052 pr_err("alg: pcomp: decompression update failed on "
1053 "test %d for %s: error=%d\n", i + 1, algo,
1058 /* Add remaining input data */
1059 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1061 error = crypto_decompress_update(tfm, &req);
1062 if (error && (error != -EAGAIN || req.avail_in)) {
1063 pr_err("alg: pcomp: decompression update failed on "
1064 "test %d for %s: error=%d\n", i + 1, algo,
1069 /* Provide remaining output space */
1070 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1072 error = crypto_decompress_final(tfm, &req);
1073 if (error && (error != -EAGAIN || req.avail_in)) {
1074 pr_err("alg: pcomp: decompression final failed on "
1075 "test %d for %s: error=%d\n", i + 1, algo,
1080 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1081 pr_err("alg: comp: Decompression test %d failed for "
1082 "%s: output len = %d (expected %d)\n", i + 1,
1083 algo, COMP_BUF_SIZE - req.avail_out,
1084 dtemplate[i].outlen);
1088 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1089 pr_err("alg: pcomp: Decompression test %d failed for "
1090 "%s\n", i + 1, algo);
1091 hexdump(result, dtemplate[i].outlen);
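/*
 * Deterministic RNG tests: the seed passed to crypto_rng_reset() is the
 * concatenation of the vector's V, key and DT fields; the generator is
 * then asked for rlen bytes template->loops times, and the output of the
 * last iteration is compared against the expected result.
 */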
1100 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1101 unsigned int tcount)
1103 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1104 int err, i, j, seedsize;
1108 seedsize = crypto_rng_seedsize(tfm);
1110 seed = kmalloc(seedsize, GFP_KERNEL);
1112 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1117 for (i = 0; i < tcount; i++) {
1118 memset(result, 0, 32);
1120 memcpy(seed, template[i].v, template[i].vlen);
1121 memcpy(seed + template[i].vlen, template[i].key,
1123 memcpy(seed + template[i].vlen + template[i].klen,
1124 template[i].dt, template[i].dtlen);
1126 err = crypto_rng_reset(tfm, seed, seedsize);
1128 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1133 for (j = 0; j < template[i].loops; j++) {
1134 err = crypto_rng_get_bytes(tfm, result,
1136 if (err != template[i].rlen) {
1137 printk(KERN_ERR "alg: cprng: Failed to obtain "
1138 "the correct amount of random data for "
1139 "%s (requested %d, got %d)\n", algo,
1140 template[i].rlen, err);
1145 err = memcmp(result, template[i].result,
1148 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1150 hexdump(result, template[i].rlen);
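/*
 * The alg_test_*() wrappers below all follow the same shape: allocate a
 * transform for the driver under test, run the vectors recorded in the
 * descriptor's suite (encryption before decryption where both exist),
 * and free the transform again, returning the first error encountered.
 */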
1161 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1164 struct crypto_aead *tfm;
1167 tfm = crypto_alloc_aead(driver, type, mask);
1169 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1170 "%ld\n", driver, PTR_ERR(tfm));
1171 return PTR_ERR(tfm);
1174 if (desc->suite.aead.enc.vecs) {
1175 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1176 desc->suite.aead.enc.count);
1181 if (!err && desc->suite.aead.dec.vecs)
1182 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1183 desc->suite.aead.dec.count);
1186 crypto_free_aead(tfm);
1190 static int alg_test_cipher(const struct alg_test_desc *desc,
1191 const char *driver, u32 type, u32 mask)
1193 struct crypto_cipher *tfm;
1196 tfm = crypto_alloc_cipher(driver, type, mask);
1198 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1199 "%s: %ld\n", driver, PTR_ERR(tfm));
1200 return PTR_ERR(tfm);
1203 if (desc->suite.cipher.enc.vecs) {
1204 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1205 desc->suite.cipher.enc.count);
1210 if (desc->suite.cipher.dec.vecs)
1211 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1212 desc->suite.cipher.dec.count);
1215 crypto_free_cipher(tfm);
1219 static int alg_test_skcipher(const struct alg_test_desc *desc,
1220 const char *driver, u32 type, u32 mask)
1222 struct crypto_ablkcipher *tfm;
1225 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1227 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1228 "%s: %ld\n", driver, PTR_ERR(tfm));
1229 return PTR_ERR(tfm);
1232 if (desc->suite.cipher.enc.vecs) {
1233 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1234 desc->suite.cipher.enc.count);
1239 if (desc->suite.cipher.dec.vecs)
1240 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1241 desc->suite.cipher.dec.count);
1244 crypto_free_ablkcipher(tfm);
1248 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1251 struct crypto_comp *tfm;
1254 tfm = crypto_alloc_comp(driver, type, mask);
1256 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1257 "%ld\n", driver, PTR_ERR(tfm));
1258 return PTR_ERR(tfm);
1261 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1262 desc->suite.comp.decomp.vecs,
1263 desc->suite.comp.comp.count,
1264 desc->suite.comp.decomp.count);
1266 crypto_free_comp(tfm);
1270 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1273 struct crypto_pcomp *tfm;
1276 tfm = crypto_alloc_pcomp(driver, type, mask);
1278 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1279 driver, PTR_ERR(tfm));
1280 return PTR_ERR(tfm);
1283 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1284 desc->suite.pcomp.decomp.vecs,
1285 desc->suite.pcomp.comp.count,
1286 desc->suite.pcomp.decomp.count);
1288 crypto_free_pcomp(tfm);
1292 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1295 struct crypto_ahash *tfm;
1298 tfm = crypto_alloc_ahash(driver, type, mask);
1300 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1301 "%ld\n", driver, PTR_ERR(tfm));
1302 return PTR_ERR(tfm);
1305 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
1307 crypto_free_ahash(tfm);
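/*
 * crc32c gets an extra check on top of the generic hash tests: the shash
 * state is seeded directly with a fixed value and crypto_shash_final()
 * is expected to return its bitwise complement, which for crc32c amounts
 * to simply inverting the accumulated state.
 */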
1311 static int alg_test_crc32c(const struct alg_test_desc *desc,
1312 const char *driver, u32 type, u32 mask)
1314 struct crypto_shash *tfm;
1318 err = alg_test_hash(desc, driver, type, mask);
1322 tfm = crypto_alloc_shash(driver, type, mask);
1324 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1325 "%ld\n", driver, PTR_ERR(tfm));
1332 struct shash_desc shash;
1333 char ctx[crypto_shash_descsize(tfm)];
1336 sdesc.shash.tfm = tfm;
1337 sdesc.shash.flags = 0;
1339 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1340 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1342 printk(KERN_ERR "alg: crc32c: Operation failed for "
1343 "%s: %d\n", driver, err);
1347 if (val != ~420553207) {
1348 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1349 "%d\n", driver, val);
1354 crypto_free_shash(tfm);
1360 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1363 struct crypto_rng *rng;
1366 rng = crypto_alloc_rng(driver, type, mask);
1368 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1369 "%ld\n", driver, PTR_ERR(rng));
1370 return PTR_ERR(rng);
1373 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1375 crypto_free_rng(rng);
1380 /* Please keep this list sorted by algorithm name. */
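/*
 * Each descriptor pairs an algorithm name with the routine that tests it
 * and the vectors that routine consumes.  Written out in full, a typical
 * skcipher entry reads roughly as follows (the .suite nesting mirrors
 * the *_test_suite structures defined near the top of this file):
 *
 *	{
 *		.alg = "cbc(aes)",
 *		.test = alg_test_skcipher,
 *		.suite = {
 *			.cipher = {
 *				.enc = {
 *					.vecs = aes_cbc_enc_tv_template,
 *					.count = AES_CBC_ENC_TEST_VECTORS
 *				},
 *				.dec = {
 *					.vecs = aes_cbc_dec_tv_template,
 *					.count = AES_CBC_DEC_TEST_VECTORS
 *				}
 *			}
 *		}
 *	},
 */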
1381 static const struct alg_test_desc alg_test_descs[] = {
1384 .test = alg_test_skcipher,
1388 .vecs = aes_cbc_enc_tv_template,
1389 .count = AES_CBC_ENC_TEST_VECTORS
1392 .vecs = aes_cbc_dec_tv_template,
1393 .count = AES_CBC_DEC_TEST_VECTORS
1398 .alg = "cbc(anubis)",
1399 .test = alg_test_skcipher,
1403 .vecs = anubis_cbc_enc_tv_template,
1404 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1407 .vecs = anubis_cbc_dec_tv_template,
1408 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1413 .alg = "cbc(blowfish)",
1414 .test = alg_test_skcipher,
1418 .vecs = bf_cbc_enc_tv_template,
1419 .count = BF_CBC_ENC_TEST_VECTORS
1422 .vecs = bf_cbc_dec_tv_template,
1423 .count = BF_CBC_DEC_TEST_VECTORS
1428 .alg = "cbc(camellia)",
1429 .test = alg_test_skcipher,
1433 .vecs = camellia_cbc_enc_tv_template,
1434 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1437 .vecs = camellia_cbc_dec_tv_template,
1438 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1444 .test = alg_test_skcipher,
1448 .vecs = des_cbc_enc_tv_template,
1449 .count = DES_CBC_ENC_TEST_VECTORS
1452 .vecs = des_cbc_dec_tv_template,
1453 .count = DES_CBC_DEC_TEST_VECTORS
1458 .alg = "cbc(des3_ede)",
1459 .test = alg_test_skcipher,
1463 .vecs = des3_ede_cbc_enc_tv_template,
1464 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1467 .vecs = des3_ede_cbc_dec_tv_template,
1468 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1473 .alg = "cbc(twofish)",
1474 .test = alg_test_skcipher,
1478 .vecs = tf_cbc_enc_tv_template,
1479 .count = TF_CBC_ENC_TEST_VECTORS
1482 .vecs = tf_cbc_dec_tv_template,
1483 .count = TF_CBC_DEC_TEST_VECTORS
1489 .test = alg_test_aead,
1493 .vecs = aes_ccm_enc_tv_template,
1494 .count = AES_CCM_ENC_TEST_VECTORS
1497 .vecs = aes_ccm_dec_tv_template,
1498 .count = AES_CCM_DEC_TEST_VECTORS
1504 .test = alg_test_crc32c,
1507 .vecs = crc32c_tv_template,
1508 .count = CRC32C_TEST_VECTORS
1512 .alg = "cts(cbc(aes))",
1513 .test = alg_test_skcipher,
1517 .vecs = cts_mode_enc_tv_template,
1518 .count = CTS_MODE_ENC_TEST_VECTORS
1521 .vecs = cts_mode_dec_tv_template,
1522 .count = CTS_MODE_DEC_TEST_VECTORS
1528 .test = alg_test_comp,
1532 .vecs = deflate_comp_tv_template,
1533 .count = DEFLATE_COMP_TEST_VECTORS
1536 .vecs = deflate_decomp_tv_template,
1537 .count = DEFLATE_DECOMP_TEST_VECTORS
1543 .test = alg_test_skcipher,
1547 .vecs = aes_enc_tv_template,
1548 .count = AES_ENC_TEST_VECTORS
1551 .vecs = aes_dec_tv_template,
1552 .count = AES_DEC_TEST_VECTORS
1557 .alg = "ecb(anubis)",
1558 .test = alg_test_skcipher,
1562 .vecs = anubis_enc_tv_template,
1563 .count = ANUBIS_ENC_TEST_VECTORS
1566 .vecs = anubis_dec_tv_template,
1567 .count = ANUBIS_DEC_TEST_VECTORS
1573 .test = alg_test_skcipher,
1577 .vecs = arc4_enc_tv_template,
1578 .count = ARC4_ENC_TEST_VECTORS
1581 .vecs = arc4_dec_tv_template,
1582 .count = ARC4_DEC_TEST_VECTORS
1587 .alg = "ecb(blowfish)",
1588 .test = alg_test_skcipher,
1592 .vecs = bf_enc_tv_template,
1593 .count = BF_ENC_TEST_VECTORS
1596 .vecs = bf_dec_tv_template,
1597 .count = BF_DEC_TEST_VECTORS
1602 .alg = "ecb(camellia)",
1603 .test = alg_test_skcipher,
1607 .vecs = camellia_enc_tv_template,
1608 .count = CAMELLIA_ENC_TEST_VECTORS
1611 .vecs = camellia_dec_tv_template,
1612 .count = CAMELLIA_DEC_TEST_VECTORS
1617 .alg = "ecb(cast5)",
1618 .test = alg_test_skcipher,
1622 .vecs = cast5_enc_tv_template,
1623 .count = CAST5_ENC_TEST_VECTORS
1626 .vecs = cast5_dec_tv_template,
1627 .count = CAST5_DEC_TEST_VECTORS
1632 .alg = "ecb(cast6)",
1633 .test = alg_test_skcipher,
1637 .vecs = cast6_enc_tv_template,
1638 .count = CAST6_ENC_TEST_VECTORS
1641 .vecs = cast6_dec_tv_template,
1642 .count = CAST6_DEC_TEST_VECTORS
1648 .test = alg_test_skcipher,
1652 .vecs = des_enc_tv_template,
1653 .count = DES_ENC_TEST_VECTORS
1656 .vecs = des_dec_tv_template,
1657 .count = DES_DEC_TEST_VECTORS
1662 .alg = "ecb(des3_ede)",
1663 .test = alg_test_skcipher,
1667 .vecs = des3_ede_enc_tv_template,
1668 .count = DES3_EDE_ENC_TEST_VECTORS
1671 .vecs = des3_ede_dec_tv_template,
1672 .count = DES3_EDE_DEC_TEST_VECTORS
1677 .alg = "ecb(khazad)",
1678 .test = alg_test_skcipher,
1682 .vecs = khazad_enc_tv_template,
1683 .count = KHAZAD_ENC_TEST_VECTORS
1686 .vecs = khazad_dec_tv_template,
1687 .count = KHAZAD_DEC_TEST_VECTORS
1693 .test = alg_test_skcipher,
1697 .vecs = seed_enc_tv_template,
1698 .count = SEED_ENC_TEST_VECTORS
1701 .vecs = seed_dec_tv_template,
1702 .count = SEED_DEC_TEST_VECTORS
1707 .alg = "ecb(serpent)",
1708 .test = alg_test_skcipher,
1712 .vecs = serpent_enc_tv_template,
1713 .count = SERPENT_ENC_TEST_VECTORS
1716 .vecs = serpent_dec_tv_template,
1717 .count = SERPENT_DEC_TEST_VECTORS
1723 .test = alg_test_skcipher,
1727 .vecs = tea_enc_tv_template,
1728 .count = TEA_ENC_TEST_VECTORS
1731 .vecs = tea_dec_tv_template,
1732 .count = TEA_DEC_TEST_VECTORS
1737 .alg = "ecb(tnepres)",
1738 .test = alg_test_skcipher,
1742 .vecs = tnepres_enc_tv_template,
1743 .count = TNEPRES_ENC_TEST_VECTORS
1746 .vecs = tnepres_dec_tv_template,
1747 .count = TNEPRES_DEC_TEST_VECTORS
1752 .alg = "ecb(twofish)",
1753 .test = alg_test_skcipher,
1757 .vecs = tf_enc_tv_template,
1758 .count = TF_ENC_TEST_VECTORS
1761 .vecs = tf_dec_tv_template,
1762 .count = TF_DEC_TEST_VECTORS
1768 .test = alg_test_skcipher,
1772 .vecs = xeta_enc_tv_template,
1773 .count = XETA_ENC_TEST_VECTORS
1776 .vecs = xeta_dec_tv_template,
1777 .count = XETA_DEC_TEST_VECTORS
1783 .test = alg_test_skcipher,
1787 .vecs = xtea_enc_tv_template,
1788 .count = XTEA_ENC_TEST_VECTORS
1791 .vecs = xtea_dec_tv_template,
1792 .count = XTEA_DEC_TEST_VECTORS
1798 .test = alg_test_aead,
1802 .vecs = aes_gcm_enc_tv_template,
1803 .count = AES_GCM_ENC_TEST_VECTORS
1806 .vecs = aes_gcm_dec_tv_template,
1807 .count = AES_GCM_DEC_TEST_VECTORS
1813 .test = alg_test_hash,
1816 .vecs = hmac_md5_tv_template,
1817 .count = HMAC_MD5_TEST_VECTORS
1821 .alg = "hmac(rmd128)",
1822 .test = alg_test_hash,
1825 .vecs = hmac_rmd128_tv_template,
1826 .count = HMAC_RMD128_TEST_VECTORS
1830 .alg = "hmac(rmd160)",
1831 .test = alg_test_hash,
1834 .vecs = hmac_rmd160_tv_template,
1835 .count = HMAC_RMD160_TEST_VECTORS
1839 .alg = "hmac(sha1)",
1840 .test = alg_test_hash,
1843 .vecs = hmac_sha1_tv_template,
1844 .count = HMAC_SHA1_TEST_VECTORS
1848 .alg = "hmac(sha224)",
1849 .test = alg_test_hash,
1852 .vecs = hmac_sha224_tv_template,
1853 .count = HMAC_SHA224_TEST_VECTORS
1857 .alg = "hmac(sha256)",
1858 .test = alg_test_hash,
1861 .vecs = hmac_sha256_tv_template,
1862 .count = HMAC_SHA256_TEST_VECTORS
1866 .alg = "hmac(sha384)",
1867 .test = alg_test_hash,
1870 .vecs = hmac_sha384_tv_template,
1871 .count = HMAC_SHA384_TEST_VECTORS
1875 .alg = "hmac(sha512)",
1876 .test = alg_test_hash,
1879 .vecs = hmac_sha512_tv_template,
1880 .count = HMAC_SHA512_TEST_VECTORS
1885 .test = alg_test_skcipher,
1889 .vecs = aes_lrw_enc_tv_template,
1890 .count = AES_LRW_ENC_TEST_VECTORS
1893 .vecs = aes_lrw_dec_tv_template,
1894 .count = AES_LRW_DEC_TEST_VECTORS
1900 .test = alg_test_comp,
1904 .vecs = lzo_comp_tv_template,
1905 .count = LZO_COMP_TEST_VECTORS
1908 .vecs = lzo_decomp_tv_template,
1909 .count = LZO_DECOMP_TEST_VECTORS
1915 .test = alg_test_hash,
1918 .vecs = md4_tv_template,
1919 .count = MD4_TEST_VECTORS
1924 .test = alg_test_hash,
1927 .vecs = md5_tv_template,
1928 .count = MD5_TEST_VECTORS
1932 .alg = "michael_mic",
1933 .test = alg_test_hash,
1936 .vecs = michael_mic_tv_template,
1937 .count = MICHAEL_MIC_TEST_VECTORS
1941 .alg = "pcbc(fcrypt)",
1942 .test = alg_test_skcipher,
1946 .vecs = fcrypt_pcbc_enc_tv_template,
1947 .count = FCRYPT_ENC_TEST_VECTORS
1950 .vecs = fcrypt_pcbc_dec_tv_template,
1951 .count = FCRYPT_DEC_TEST_VECTORS
1956 .alg = "rfc3686(ctr(aes))",
1957 .test = alg_test_skcipher,
1961 .vecs = aes_ctr_enc_tv_template,
1962 .count = AES_CTR_ENC_TEST_VECTORS
1965 .vecs = aes_ctr_dec_tv_template,
1966 .count = AES_CTR_DEC_TEST_VECTORS
1971 .alg = "rfc4309(ccm(aes))",
1972 .test = alg_test_aead,
1976 .vecs = aes_ccm_rfc4309_enc_tv_template,
1977 .count = AES_CCM_4309_ENC_TEST_VECTORS
1980 .vecs = aes_ccm_rfc4309_dec_tv_template,
1981 .count = AES_CCM_4309_DEC_TEST_VECTORS
1987 .test = alg_test_hash,
1990 .vecs = rmd128_tv_template,
1991 .count = RMD128_TEST_VECTORS
1996 .test = alg_test_hash,
1999 .vecs = rmd160_tv_template,
2000 .count = RMD160_TEST_VECTORS
2005 .test = alg_test_hash,
2008 .vecs = rmd256_tv_template,
2009 .count = RMD256_TEST_VECTORS
2014 .test = alg_test_hash,
2017 .vecs = rmd320_tv_template,
2018 .count = RMD320_TEST_VECTORS
2023 .test = alg_test_skcipher,
2027 .vecs = salsa20_stream_enc_tv_template,
2028 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2034 .test = alg_test_hash,
2037 .vecs = sha1_tv_template,
2038 .count = SHA1_TEST_VECTORS
2043 .test = alg_test_hash,
2046 .vecs = sha224_tv_template,
2047 .count = SHA224_TEST_VECTORS
2052 .test = alg_test_hash,
2055 .vecs = sha256_tv_template,
2056 .count = SHA256_TEST_VECTORS
2061 .test = alg_test_hash,
2064 .vecs = sha384_tv_template,
2065 .count = SHA384_TEST_VECTORS
2070 .test = alg_test_hash,
2073 .vecs = sha512_tv_template,
2074 .count = SHA512_TEST_VECTORS
2079 .test = alg_test_hash,
2082 .vecs = tgr128_tv_template,
2083 .count = TGR128_TEST_VECTORS
2088 .test = alg_test_hash,
2091 .vecs = tgr160_tv_template,
2092 .count = TGR160_TEST_VECTORS
2097 .test = alg_test_hash,
2100 .vecs = tgr192_tv_template,
2101 .count = TGR192_TEST_VECTORS
2106 .test = alg_test_hash,
2109 .vecs = wp256_tv_template,
2110 .count = WP256_TEST_VECTORS
2115 .test = alg_test_hash,
2118 .vecs = wp384_tv_template,
2119 .count = WP384_TEST_VECTORS
2124 .test = alg_test_hash,
2127 .vecs = wp512_tv_template,
2128 .count = WP512_TEST_VECTORS
2133 .test = alg_test_hash,
2136 .vecs = aes_xcbc128_tv_template,
2137 .count = XCBC_AES_TEST_VECTORS
2142 .test = alg_test_skcipher,
2146 .vecs = aes_xts_enc_tv_template,
2147 .count = AES_XTS_ENC_TEST_VECTORS
2150 .vecs = aes_xts_dec_tv_template,
2151 .count = AES_XTS_DEC_TEST_VECTORS
2157 .test = alg_test_pcomp,
2161 .vecs = zlib_comp_tv_template,
2162 .count = ZLIB_COMP_TEST_VECTORS
2165 .vecs = zlib_decomp_tv_template,
2166 .count = ZLIB_DECOMP_TEST_VECTORS
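/*
 * alg_find_test() binary-searches the table above, which is why the list
 * must stay sorted; it returns the index of the matching descriptor, or
 * a negative value when no test is registered for the algorithm.
 */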
2173 static int alg_find_test(const char *alg)
2176 int end = ARRAY_SIZE(alg_test_descs);
2178 while (start < end) {
2179 int i = (start + end) / 2;
2180 int diff = strcmp(alg_test_descs[i].alg, alg);
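/*
 * alg_test() is the entry point used when an algorithm instance is
 * registered: plain single-block ciphers are looked up under their
 * ecb() wrapper, the matching descriptor's test routine is invoked, and
 * in FIPS mode any self-test failure panics the kernel.
 */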
2198 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
2203 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
2204 char nalg[CRYPTO_MAX_ALG_NAME];
2206 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
2208 return -ENAMETOOLONG;
2210 i = alg_find_test(nalg);
2214 return alg_test_cipher(alg_test_descs + i, driver, type, mask);
2217 i = alg_find_test(alg);
2221 rc = alg_test_descs[i].test(alg_test_descs + i, driver,
2223 if (fips_enabled && rc)
2224 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
2229 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
2232 EXPORT_SYMBOL_GPL(alg_test);
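/*
 * Module init/exit: allocate (and on exit free) the XBUFSIZE page-sized
 * scratch buffers that the chunked tests scatter their data across.
 */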
2234 int __init testmgr_init(void)
2238 for (i = 0; i < XBUFSIZE; i++) {
2239 xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
2244 for (i = 0; i < XBUFSIZE; i++) {
2245 axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
2247 goto err_free_axbuf;
2253 for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
2254 free_page((unsigned long)axbuf[i]);
2256 for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
2257 free_page((unsigned long)xbuf[i]);
2262 void testmgr_exit(void)
2266 for (i = 0; i < XBUFSIZE; i++)
2267 free_page((unsigned long)axbuf[i]);
2268 for (i = 0; i < XBUFSIZE; i++)
2269 free_page((unsigned long)xbuf[i]);