/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/once.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/acompress.h>
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
static bool noextratests;
module_param(noextratests, bool, 0644);
MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");

static unsigned int fuzz_iterations = 100;
module_param(fuzz_iterations, uint, 0644);
MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");
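/*
 * Usage note (assumption: testmgr is built into the cryptomgr module, as in
 * mainline kernels): these knobs are typically set on the kernel command
 * line, e.g. "cryptomgr.notests=1" to skip the self-tests entirely.
 */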
#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)

/*
 * Need slab memory for testing (size in number of pages).
 */

/*
 * Indexes into the xbuf to simulate cross-page access.
 */

/*
 * Used by test_cipher()
 */
struct aead_test_suite {
        const struct aead_testvec *vecs;

struct cipher_test_suite {
        const struct cipher_testvec *vecs;

struct comp_test_suite {
                const struct comp_testvec *vecs;

struct hash_test_suite {
        const struct hash_testvec *vecs;

struct cprng_test_suite {
        const struct cprng_testvec *vecs;

struct drbg_test_suite {
        const struct drbg_testvec *vecs;

struct akcipher_test_suite {
        const struct akcipher_testvec *vecs;

struct kpp_test_suite {
        const struct kpp_testvec *vecs;

struct alg_test_desc {
        int (*test)(const struct alg_test_desc *desc, const char *driver,
        int fips_allowed;       /* set if alg is allowed in fips mode */

                struct aead_test_suite aead;
                struct cipher_test_suite cipher;
                struct comp_test_suite comp;
                struct hash_test_suite hash;
                struct cprng_test_suite cprng;
                struct drbg_test_suite drbg;
                struct akcipher_test_suite akcipher;
                struct kpp_test_suite kpp;

static const unsigned int IDX[8] = {
        IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
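/*
 * Each IDX value is a byte offset into the multi-page xbuf: IDX[k] >> PAGE_SHIFT
 * selects the page and offset_in_page(IDX[k]) the offset within it, so the
 * chunked ("np"/"tap") tests below deliberately scatter data across pages.
 */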
static void hexdump(unsigned char *buf, unsigned int len)
        print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,

static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
        for (i = 0; i < XBUFSIZE; i++) {
                buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);

                free_pages((unsigned long)buf[i], order);

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
        return __testmgr_alloc_buf(buf, 0);

static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
        for (i = 0; i < XBUFSIZE; i++)
                free_pages((unsigned long)buf[i], order);

static void testmgr_free_buf(char *buf[XBUFSIZE])
        __testmgr_free_buf(buf, 0);

#define TESTMGR_POISON_BYTE     0xfe
#define TESTMGR_POISON_LEN      16

static inline void testmgr_poison(void *addr, size_t len)
        memset(addr, TESTMGR_POISON_BYTE, len);

/* Is the memory region still fully poisoned? */
static inline bool testmgr_is_poison(const void *addr, size_t len)
        return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
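/*
 * Typical use of the poison helpers: testmgr_poison() a buffer (or the bytes
 * just past its end) before an operation, then testmgr_is_poison() afterwards
 * to detect overruns or writes that should not have happened.
 */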
/* flush type for hash algorithms */
        /* merge with update of previous buffer(s) */
        /* update with previous buffer(s) before doing this one */
        /* likewise, but also export and re-import the intermediate state */

/* finalization function for hash algorithms */
enum finalization_type {
        FINALIZATION_TYPE_FINAL,        /* use final() */
        FINALIZATION_TYPE_FINUP,        /* use finup() */
        FINALIZATION_TYPE_DIGEST,       /* use digest() */

#define TEST_SG_TOTAL   10000
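/*
 * Worked example (illustrative): two divisions with proportion_of_total
 * 3000 and 7000 split a 100-byte message into 30- and 70-byte scatterlist
 * entries; for a 16-byte message the rounded lengths are 5 and 11 bytes,
 * with any rounding remainder folded into the last entry.
 */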
/**
 * struct test_sg_division - description of a scatterlist entry
 *
 * This struct describes one entry of a scatterlist being constructed to check a
 * crypto test vector.
 *
 * @proportion_of_total: length of this chunk relative to the total length,
 *                       given as a proportion out of TEST_SG_TOTAL so that it
 *                       scales to fit any test vector
 * @offset: byte offset into a 2-page buffer at which this chunk will start
 * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
 *                                @offset
 * @flush_type: for hashes, whether an update() should be done now vs.
 *              continuing to accumulate data
 */
struct test_sg_division {
        unsigned int proportion_of_total;
        bool offset_relative_to_alignmask;
        enum flush_type flush_type;

/**
 * struct testvec_config - configuration for testing a crypto test vector
 *
 * This struct describes the data layout and other parameters with which each
 * crypto test vector can be tested.
 *
 * @name: name of this config, logged for debugging purposes if a test fails
 * @inplace: operate on the data in-place, if applicable for the algorithm type?
 * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
 * @src_divs: description of how to arrange the source scatterlist
 * @dst_divs: description of how to arrange the dst scatterlist, if applicable
 *            for the algorithm type.  Defaults to @src_divs if unset.
 * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
 *             where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
 * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *                                   the @iv_offset
 * @finalization_type: what finalization function to use for hashes
 */
struct testvec_config {
        struct test_sg_division src_divs[XBUFSIZE];
        struct test_sg_division dst_divs[XBUFSIZE];
        unsigned int iv_offset;
        bool iv_offset_relative_to_alignmask;
        enum finalization_type finalization_type;

#define TESTVEC_CONFIG_NAMELEN  192
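/*
 * A minimal example config (hypothetical, for illustration only): split the
 * source data evenly across two scatterlist entries and misalign the IV by
 * one byte:
 *
 *      static const struct testvec_config example_cfg = {
 *              .name = "two even splits, misaligned IV",
 *              .src_divs = {
 *                      { .proportion_of_total = 5000 },
 *                      { .proportion_of_total = 5000 },
 *              },
 *              .iv_offset = 1,
 *      };
 */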
static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
        unsigned int remaining = TEST_SG_TOTAL;
        unsigned int ndivs = 0;

                remaining -= divs[ndivs++].proportion_of_total;

static bool valid_sg_divisions(const struct test_sg_division *divs,
                               unsigned int count, bool *any_flushes_ret)
        unsigned int total = 0;

        for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
                if (divs[i].proportion_of_total <= 0 ||
                    divs[i].proportion_of_total > TEST_SG_TOTAL - total)
                total += divs[i].proportion_of_total;
                if (divs[i].flush_type != FLUSH_TYPE_NONE)
                        *any_flushes_ret = true;

        return total == TEST_SG_TOTAL &&
               memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
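/*
 * (The memchr_inv() in the return above additionally requires that every
 * division after the ones summing to TEST_SG_TOTAL is all-zeroes, i.e. the
 * array has no stray trailing entries.)
 */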
/*
 * Check whether the given testvec_config is valid.  This isn't strictly needed
 * since every testvec_config should be valid, but check anyway so that people
 * don't unknowingly add broken configs that don't do what they wanted.
 */
static bool valid_testvec_config(const struct testvec_config *cfg)
        bool any_flushes = false;

        if (cfg->name == NULL)

        if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),

        if (cfg->dst_divs[0].proportion_of_total) {
                if (!valid_sg_divisions(cfg->dst_divs,
                                        ARRAY_SIZE(cfg->dst_divs),
                if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
                /* defaults to dst_divs=src_divs */

            (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
            MAX_ALGAPI_ALIGNMASK + 1)

        if (any_flushes && cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
        char *bufs[XBUFSIZE];
        struct scatterlist sgl[XBUFSIZE];
        struct scatterlist sgl_saved[XBUFSIZE];
        struct scatterlist *sgl_ptr;

static int init_test_sglist(struct test_sglist *tsgl)
        return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);

static void destroy_test_sglist(struct test_sglist *tsgl)
        return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);

/**
 * build_test_sglist() - build a scatterlist for a crypto test
 *
 * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
 *        buffers which the scatterlist @tsgl->sgl[] will be made to point into.
 * @divs: the layout specification on which the scatterlist will be based
 * @alignmask: the algorithm's alignmask
 * @total_len: the total length of the scatterlist to build in bytes
 * @data: if non-NULL, the buffers will be filled with this data until it ends.
 *        Otherwise the buffers will be poisoned.  In both cases, some bytes
 *        past the end of each buffer will be poisoned to help detect overruns.
 * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
 *            corresponds will be returned here.  This will match @divs except
 *            that divisions resolving to a length of 0 are omitted as they are
 *            not included in the scatterlist.
 *
 * Return: 0 or a -errno value
 */
static int build_test_sglist(struct test_sglist *tsgl,
                             const struct test_sg_division *divs,
                             const unsigned int alignmask,
                             const unsigned int total_len,
                             struct iov_iter *data,
                             const struct test_sg_division *out_divs[XBUFSIZE])
                const struct test_sg_division *div;
        } partitions[XBUFSIZE];
        const unsigned int ndivs = count_test_sg_divisions(divs);
        unsigned int len_remaining = total_len;

        BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
        if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))

        /* Calculate the (div, length) pairs */
        for (i = 0; i < ndivs; i++) {
                unsigned int len_this_sg =
                        min(len_remaining,
                            (total_len * divs[i].proportion_of_total +
                             TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);
                if (len_this_sg != 0) {
                        partitions[tsgl->nents].div = &divs[i];
                        partitions[tsgl->nents].length = len_this_sg;
                        len_remaining -= len_this_sg;

        if (tsgl->nents == 0) {
                partitions[tsgl->nents].div = &divs[0];
                partitions[tsgl->nents].length = 0;
        partitions[tsgl->nents - 1].length += len_remaining;
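        /* Rounding can leave a few bytes unassigned; folding len_remaining
         * into the last entry makes the lengths sum to exactly total_len. */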
        /* Set up the sgl entries and fill the data or poison */
        sg_init_table(tsgl->sgl, tsgl->nents);
        for (i = 0; i < tsgl->nents; i++) {
                unsigned int offset = partitions[i].div->offset;

                if (partitions[i].div->offset_relative_to_alignmask)

                while (offset + partitions[i].length + TESTMGR_POISON_LEN >
                        if (WARN_ON(offset <= 0))

                addr = &tsgl->bufs[i][offset];
                sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);

                        out_divs[i] = partitions[i].div;

                        size_t copy_len, copied;

                        copy_len = min(partitions[i].length, data->count);
                        copied = copy_from_iter(addr, copy_len, data);
                        if (WARN_ON(copied != copy_len))
                        testmgr_poison(addr + copy_len, partitions[i].length +
                                       TESTMGR_POISON_LEN - copy_len);
                        testmgr_poison(addr, partitions[i].length +

        sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
        tsgl->sgl_ptr = tsgl->sgl;
        memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
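        /* sgl_saved snapshots the scatterlist entries so that, after the
         * crypto operation, is_test_sglist_corrupted() can check that the
         * driver did not modify the caller's scatterlist. */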
/*
 * Verify that a scatterlist crypto operation produced the correct output.
 *
 * @tsgl: scatterlist containing the actual output
 * @expected_output: buffer containing the expected output
 * @len_to_check: length of @expected_output in bytes
 * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
 * @check_poison: verify that the poison bytes after each chunk are intact?
 *
 * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
 */
static int verify_correct_output(const struct test_sglist *tsgl,
                                 const char *expected_output,
                                 unsigned int len_to_check,
                                 unsigned int unchecked_prefix_len,

        for (i = 0; i < tsgl->nents; i++) {
                struct scatterlist *sg = &tsgl->sgl_ptr[i];
                unsigned int len = sg->length;
                unsigned int offset = sg->offset;
                const char *actual_output;

                if (unchecked_prefix_len) {
                        if (unchecked_prefix_len >= len) {
                                unchecked_prefix_len -= len;
                        offset += unchecked_prefix_len;
                        len -= unchecked_prefix_len;
                        unchecked_prefix_len = 0;
                len = min(len, len_to_check);
                actual_output = page_address(sg_page(sg)) + offset;
                if (memcmp(expected_output, actual_output, len) != 0)
                    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
                expected_output += len;

        if (WARN_ON(len_to_check != 0))
static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)

        for (i = 0; i < tsgl->nents; i++) {
                if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
                if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
                if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)

struct cipher_test_sglists {
        struct test_sglist src;
        struct test_sglist dst;

static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
        struct cipher_test_sglists *tsgls;

        tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);

        if (init_test_sglist(&tsgls->src) != 0)
        if (init_test_sglist(&tsgls->dst) != 0)
                goto fail_destroy_src;

        destroy_test_sglist(&tsgls->src);

static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
        destroy_test_sglist(&tsgls->src);
        destroy_test_sglist(&tsgls->dst);

/* Build the src and dst scatterlists for an skcipher or AEAD test */
static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
                                     const struct testvec_config *cfg,
                                     unsigned int alignmask,
                                     unsigned int src_total_len,
                                     unsigned int dst_total_len,
                                     const struct kvec *inputs,
                                     unsigned int nr_inputs)
        struct iov_iter input;

        iov_iter_kvec(&input, WRITE, inputs, nr_inputs, src_total_len);
        err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
                                        max(dst_total_len, src_total_len) :
                tsgls->dst.sgl_ptr = tsgls->src.sgl;
                tsgls->dst.nents = tsgls->src.nents;
        return build_test_sglist(&tsgls->dst,
                                 cfg->dst_divs[0].proportion_of_total ?
                                        cfg->dst_divs : cfg->src_divs,
                                 alignmask, dst_total_len, NULL, NULL);
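/*
 * (In the in-place case above, the "dst" test_sglist just aliases the source
 * scatterlist, so output verification reads the results back through the
 * same buffers the input was written to.)
 */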
static int ahash_guard_result(char *result, char c, int size)

        for (i = 0; i < size; i++) {

static int ahash_partial_update(struct ahash_request **preq,
        struct crypto_ahash *tfm, const struct hash_testvec *template,
        void *hash_buff, int k, int temp, struct scatterlist *sg,
        const char *algo, char *result, struct crypto_wait *wait)
        struct ahash_request *req;
        int statesize, ret = -EINVAL;
        static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 };
        int digestsize = crypto_ahash_digestsize(tfm);

        statesize = crypto_ahash_statesize(
                        crypto_ahash_reqtfm(req));
        state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
                pr_err("alg: hash: Failed to alloc state for %s\n", algo);
        memcpy(state + statesize, guard, sizeof(guard));
        memset(result, 1, digestsize);
        ret = crypto_ahash_export(req, state);
        WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
                pr_err("alg: hash: Failed to export() for %s\n", algo);
        ret = ahash_guard_result(result, 1, digestsize);
                pr_err("alg: hash: Failed, export used req->result for %s\n",
        ahash_request_free(req);
        req = ahash_request_alloc(tfm, GFP_KERNEL);
                pr_err("alg: hash: Failed to alloc request for %s\n", algo);
        ahash_request_set_callback(req,
                                   CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   crypto_req_done, wait);

        memcpy(hash_buff, template->plaintext + temp,
        sg_init_one(&sg[0], hash_buff, template->tap[k]);
        ahash_request_set_crypt(req, sg, result, template->tap[k]);
        ret = crypto_ahash_import(req, state);
                pr_err("alg: hash: Failed to import() for %s\n", algo);
        ret = ahash_guard_result(result, 1, digestsize);
                pr_err("alg: hash: Failed, import used req->result for %s\n",
        ret = crypto_wait_req(crypto_ahash_update(req), wait);

        ahash_request_free(req);
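/*
 * The export/import sequence above checks that an intermediate hash state
 * can be serialized into a statesize-byte blob and restored into a freshly
 * allocated request, and that neither export() nor import() scribbles on
 * req->result or past the end of the state buffer (the guard bytes).
 */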
static int __test_hash(struct crypto_ahash *tfm,
                       const struct hash_testvec *template, unsigned int tcount,
                       enum hash_test test_type, const int align_offset)
        const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
        size_t digest_size = crypto_ahash_digestsize(tfm);
        unsigned int i, j, k, temp;
        struct scatterlist sg[8];
        struct ahash_request *req;
        struct crypto_wait wait;
        char *xbuf[XBUFSIZE];

        result = kmalloc(digest_size, GFP_KERNEL);
        key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
        if (testmgr_alloc_buf(xbuf))

        crypto_init_wait(&wait);

        req = ahash_request_alloc(tfm, GFP_KERNEL);
                printk(KERN_ERR "alg: hash: Failed to allocate request for "
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   crypto_req_done, &wait);

        for (i = 0; i < tcount; i++) {
                if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))

                memset(result, 0, digest_size);
                hash_buff += align_offset;
                memcpy(hash_buff, template[i].plaintext, template[i].psize);
                sg_init_one(&sg[0], hash_buff, template[i].psize);

                if (template[i].ksize) {
                        crypto_ahash_clear_flags(tfm, ~0);
                        if (template[i].ksize > MAX_KEYLEN) {
                                pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
                                       j, algo, template[i].ksize, MAX_KEYLEN);
                        memcpy(key, template[i].key, template[i].ksize);
                        ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
                                printk(KERN_ERR "alg: hash: setkey failed on "
                                       "test %d for %s: ret=%d\n", j, algo,

                ahash_request_set_crypt(req, sg, result, template[i].psize);
                case HASH_TEST_DIGEST:
                        ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
                                pr_err("alg: hash: digest failed on test %d "
                                       "for %s: ret=%d\n", j, algo, -ret);

                case HASH_TEST_FINAL:
                        memset(result, 1, digest_size);
                        ret = crypto_wait_req(crypto_ahash_init(req), &wait);
                                pr_err("alg: hash: init failed on test %d "
                                       "for %s: ret=%d\n", j, algo, -ret);
                        ret = ahash_guard_result(result, 1, digest_size);
                                pr_err("alg: hash: init failed on test %d "
                                       "for %s: used req->result\n", j, algo);
                        ret = crypto_wait_req(crypto_ahash_update(req), &wait);
                                pr_err("alg: hash: update failed on test %d "
                                       "for %s: ret=%d\n", j, algo, -ret);
                        ret = ahash_guard_result(result, 1, digest_size);
                                pr_err("alg: hash: update failed on test %d "
                                       "for %s: used req->result\n", j, algo);
                        ret = crypto_wait_req(crypto_ahash_final(req), &wait);
                                pr_err("alg: hash: final failed on test %d "
                                       "for %s: ret=%d\n", j, algo, -ret);

                case HASH_TEST_FINUP:
                        memset(result, 1, digest_size);
                        ret = crypto_wait_req(crypto_ahash_init(req), &wait);
                                pr_err("alg: hash: init failed on test %d "
                                       "for %s: ret=%d\n", j, algo, -ret);
                        ret = ahash_guard_result(result, 1, digest_size);
                                pr_err("alg: hash: init failed on test %d "
                                       "for %s: used req->result\n", j, algo);
                        ret = crypto_wait_req(crypto_ahash_finup(req), &wait);
                                pr_err("alg: hash: finup failed on test %d "
                                       "for %s: ret=%d\n", j, algo, -ret);
                if (memcmp(result, template[i].digest,
                           crypto_ahash_digestsize(tfm))) {
                        printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
                        hexdump(result, crypto_ahash_digestsize(tfm));

        for (i = 0; i < tcount; i++) {
                /* alignment tests are only done with contiguous buffers */
                if (align_offset != 0)

                memset(result, 0, digest_size);

                sg_init_table(sg, template[i].np);
                for (k = 0; k < template[i].np; k++) {
                        if (WARN_ON(offset_in_page(IDX[k]) +
                                    template[i].tap[k] > PAGE_SIZE))
                        sg_set_buf(&sg[k],
                                   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
                                          offset_in_page(IDX[k]),
                                          template[i].plaintext + temp,
                                          template[i].tap[k]),
                                   template[i].tap[k]);
                        temp += template[i].tap[k];
                if (template[i].ksize) {
                        if (template[i].ksize > MAX_KEYLEN) {
                                pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
                                       j, algo, template[i].ksize, MAX_KEYLEN);
                        crypto_ahash_clear_flags(tfm, ~0);
                        memcpy(key, template[i].key, template[i].ksize);
                        ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
                                printk(KERN_ERR "alg: hash: setkey "
                                       "failed on chunking test %d "
                                       "for %s: ret=%d\n", j, algo, -ret);

                ahash_request_set_crypt(req, sg, result, template[i].psize);
                ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
                        pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",

                if (memcmp(result, template[i].digest,
                           crypto_ahash_digestsize(tfm))) {
                        printk(KERN_ERR "alg: hash: Chunking test %d "
                               "failed for %s\n", j, algo);
                        hexdump(result, crypto_ahash_digestsize(tfm));

        /* partial update exercise */
        for (i = 0; i < tcount; i++) {
                /* alignment tests are only done with contiguous buffers */
                if (align_offset != 0)

                if (template[i].np < 2)

                memset(result, 0, digest_size);

                memcpy(hash_buff, template[i].plaintext,
                sg_init_one(&sg[0], hash_buff, template[i].tap[0]);

                if (template[i].ksize) {
                        crypto_ahash_clear_flags(tfm, ~0);
                        if (template[i].ksize > MAX_KEYLEN) {
                                pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
                                       j, algo, template[i].ksize, MAX_KEYLEN);
                        memcpy(key, template[i].key, template[i].ksize);
                        ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
                                pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",

                ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
                ret = crypto_wait_req(crypto_ahash_init(req), &wait);
                        pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
                ret = crypto_wait_req(crypto_ahash_update(req), &wait);
                        pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",

                temp = template[i].tap[0];
                for (k = 1; k < template[i].np; k++) {
                        ret = ahash_partial_update(&req, tfm, &template[i],
                                hash_buff, k, temp, &sg[0], algo, result,
                                pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
                        temp += template[i].tap[k];
                ret = crypto_wait_req(crypto_ahash_final(req), &wait);
                        pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
                if (memcmp(result, template[i].digest,
                           crypto_ahash_digestsize(tfm))) {
                        pr_err("alg: hash: Partial Test %d failed for %s\n",
                        hexdump(result, crypto_ahash_digestsize(tfm));

        ahash_request_free(req);
        testmgr_free_buf(xbuf);

static int test_hash(struct crypto_ahash *tfm,
                     const struct hash_testvec *template,
                     unsigned int tcount, enum hash_test test_type)
        unsigned int alignmask;

        ret = __test_hash(tfm, template, tcount, test_type, 0);

        /* test unaligned buffers, check with one byte offset */
        ret = __test_hash(tfm, template, tcount, test_type, 1);

        alignmask = crypto_tfm_alg_alignmask(&tfm->base);

        /* Check if alignment mask for tfm is correctly set. */
        ret = __test_hash(tfm, template, tcount, test_type,
static int __test_aead(struct crypto_aead *tfm, int enc,
                       const struct aead_testvec *template, unsigned int tcount,
                       const bool diff_dst, const int align_offset)
        const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
        unsigned int i, j, k, n, temp;
        struct aead_request *req;
        struct scatterlist *sg;
        struct scatterlist *sgout;
        struct crypto_wait wait;
        unsigned int authsize, iv_len;
        char *xbuf[XBUFSIZE];
        char *xoutbuf[XBUFSIZE];
        char *axbuf[XBUFSIZE];

        iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
        key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
        if (testmgr_alloc_buf(xbuf))
        if (testmgr_alloc_buf(axbuf))
        if (diff_dst && testmgr_alloc_buf(xoutbuf))

        /* avoid "the frame size is larger than 1024 bytes" compiler warning */
        sg = kmalloc(array3_size(sizeof(*sg), 8, (diff_dst ? 4 : 2)),

        crypto_init_wait(&wait);

        req = aead_request_alloc(tfm, GFP_KERNEL);
                pr_err("alg: aead%s: Failed to allocate request for %s\n",
        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  crypto_req_done, &wait);

        iv_len = crypto_aead_ivsize(tfm);

        for (i = 0, j = 0; i < tcount; i++) {
                const char *input, *expected_output;
                unsigned int inlen, outlen;
                char *inbuf, *outbuf, *assocbuf;

                if (template[i].novrfy)
                        input = template[i].ptext;
                        inlen = template[i].plen;
                        expected_output = template[i].ctext;
                        outlen = template[i].clen;
                        input = template[i].ctext;
                        inlen = template[i].clen;
                        expected_output = template[i].ptext;
                        outlen = template[i].plen;
                /* some templates have no input data but they will
                 * touch input */
                inbuf = xbuf[0] + align_offset;
                assocbuf = axbuf[0];

                if (WARN_ON(align_offset + template[i].clen > PAGE_SIZE ||
                            template[i].alen > PAGE_SIZE))

                memcpy(inbuf, input, inlen);
                memcpy(assocbuf, template[i].assoc, template[i].alen);
                        memcpy(iv, template[i].iv, iv_len);
                        memset(iv, 0, iv_len);

                crypto_aead_clear_flags(tfm, ~0);
                        crypto_aead_set_flags(tfm,
                                              CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

                if (template[i].klen > MAX_KEYLEN) {
                        pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
                               d, j, algo, template[i].klen,
                memcpy(key, template[i].key, template[i].klen);

                ret = crypto_aead_setkey(tfm, key, template[i].klen);
                if (template[i].fail == !ret) {
                        pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
                               d, j, algo, crypto_aead_get_flags(tfm));

                authsize = template[i].clen - template[i].plen;
                ret = crypto_aead_setauthsize(tfm, authsize);
                        pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
                               d, authsize, j, algo);

                k = !!template[i].alen;
                sg_init_table(sg, k + 1);
                sg_set_buf(&sg[0], assocbuf, template[i].alen);
                sg_set_buf(&sg[k], inbuf, template[i].clen);

                        sg_init_table(sgout, k + 1);
                        sg_set_buf(&sgout[0], assocbuf, template[i].alen);

                        outbuf = xoutbuf[0] + align_offset;
                        sg_set_buf(&sgout[k], outbuf, template[i].clen);

                aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, inlen,

                aead_request_set_ad(req, template[i].alen);

                ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
                                      : crypto_aead_decrypt(req), &wait);

                        if (template[i].novrfy) {
                                /* verification was supposed to fail */
                                pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
                                /* so really, we got a bad message */
                        if (template[i].novrfy)
                                /* verification failure was expected */
                        pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
                               d, e, j, algo, -ret);

                if (memcmp(outbuf, expected_output, outlen)) {
                        pr_err("alg: aead%s: Test %d failed on %s for %s\n",
                        hexdump(outbuf, outlen);

        for (i = 0, j = 0; i < tcount; i++) {
                const char *input, *expected_output;
                unsigned int inlen, outlen;
                /* alignment tests are only done with contiguous buffers */
                if (align_offset != 0)

                if (!template[i].np)

                if (template[i].novrfy)
                        input = template[i].ptext;
                        inlen = template[i].plen;
                        expected_output = template[i].ctext;
                        outlen = template[i].clen;
                        input = template[i].ctext;
                        inlen = template[i].clen;
                        expected_output = template[i].ptext;
                        outlen = template[i].plen;

                        memcpy(iv, template[i].iv, iv_len);
                        memset(iv, 0, MAX_IVLEN);

                crypto_aead_clear_flags(tfm, ~0);
                        crypto_aead_set_flags(tfm,
                                              CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
                if (template[i].klen > MAX_KEYLEN) {
                        pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
                               d, j, algo, template[i].klen, MAX_KEYLEN);
                memcpy(key, template[i].key, template[i].klen);

                ret = crypto_aead_setkey(tfm, key, template[i].klen);
                if (template[i].fail == !ret) {
                        pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
                               d, j, algo, crypto_aead_get_flags(tfm));

                authsize = template[i].clen - template[i].plen;

                sg_init_table(sg, template[i].anp + template[i].np);
                        sg_init_table(sgout, template[i].anp + template[i].np);

                for (k = 0, temp = 0; k < template[i].anp; k++) {
                        if (WARN_ON(offset_in_page(IDX[k]) +
                                    template[i].atap[k] > PAGE_SIZE))
                        sg_set_buf(&sg[k],
                                   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
                                          offset_in_page(IDX[k]),
                                          template[i].assoc + temp,
                                          template[i].atap[k]),
                                   template[i].atap[k]);
                        if (diff_dst)
                                sg_set_buf(&sgout[k],
                                           axbuf[IDX[k] >> PAGE_SHIFT] +
                                           offset_in_page(IDX[k]),
                                           template[i].atap[k]);
                        temp += template[i].atap[k];

                for (k = 0, temp = 0; k < template[i].np; k++) {
                        n = template[i].tap[k];
                        if (k == template[i].np - 1 && !enc)

                        if (WARN_ON(offset_in_page(IDX[k]) + n > PAGE_SIZE))

                        q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
                        memcpy(q, input + temp, n);
                        sg_set_buf(&sg[template[i].anp + k], q, n);

                                q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
                                    offset_in_page(IDX[k]);

                                sg_set_buf(&sgout[template[i].anp + k], q, n);

                        if (k == template[i].np - 1 && enc)
                                if (offset_in_page(q) + n < PAGE_SIZE)

                ret = crypto_aead_setauthsize(tfm, authsize);
                        pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
                               d, authsize, j, algo);

                        if (WARN_ON(sg[template[i].anp + k - 1].offset +
                                    sg[template[i].anp + k - 1].length +
                                    authsize > PAGE_SIZE)) {

                                sgout[template[i].anp + k - 1].length +=
                        sg[template[i].anp + k - 1].length += authsize;

                aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,

                aead_request_set_ad(req, template[i].alen);

                ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
                                      : crypto_aead_decrypt(req), &wait);

                        if (template[i].novrfy) {
                                /* verification was supposed to fail */
                                pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
                                /* so really, we got a bad message */
                        if (template[i].novrfy)
                                /* verification failure was expected */
                        pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
                               d, e, j, algo, -ret);

                for (k = 0, temp = 0; k < template[i].np; k++) {
                                q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
                                    offset_in_page(IDX[k]);
                                q = xbuf[IDX[k] >> PAGE_SHIFT] +
                                    offset_in_page(IDX[k]);

                        n = template[i].tap[k];
                        if (k == template[i].np - 1 && enc)

                        if (memcmp(q, expected_output + temp, n)) {
                                pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",

                        if (k == template[i].np - 1 && !enc) {
                                if (!diff_dst && memcmp(q, input + temp + n,

                        for (n = 0; offset_in_page(q + n) && q[n]; n++)
                                pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
                                       d, j, e, k, algo, n);

                        temp += template[i].tap[k];

        aead_request_free(req);
        testmgr_free_buf(xoutbuf);
        testmgr_free_buf(axbuf);
        testmgr_free_buf(xbuf);
static int test_aead(struct crypto_aead *tfm, int enc,
                     const struct aead_testvec *template, unsigned int tcount)
        unsigned int alignmask;

        /* test 'dst == src' case */
        ret = __test_aead(tfm, enc, template, tcount, false, 0);

        /* test 'dst != src' case */
        ret = __test_aead(tfm, enc, template, tcount, true, 0);

        /* test unaligned buffers, check with one byte offset */
        ret = __test_aead(tfm, enc, template, tcount, true, 1);

        alignmask = crypto_tfm_alg_alignmask(&tfm->base);

        /* Check if alignment mask for tfm is correctly set. */
        ret = __test_aead(tfm, enc, template, tcount, true,

static int test_cipher(struct crypto_cipher *tfm, int enc,
                       const struct cipher_testvec *template,
                       unsigned int tcount)
        const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
        unsigned int i, j, k;
        const char *input, *result;
        char *xbuf[XBUFSIZE];

        if (testmgr_alloc_buf(xbuf))

        for (i = 0; i < tcount; i++) {

                if (fips_enabled && template[i].fips_skip)

                input = enc ? template[i].ptext : template[i].ctext;
                result = enc ? template[i].ctext : template[i].ptext;

                if (WARN_ON(template[i].len > PAGE_SIZE))

                memcpy(data, input, template[i].len);

                crypto_cipher_clear_flags(tfm, ~0);
                        crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

                ret = crypto_cipher_setkey(tfm, template[i].key,
                if (template[i].fail == !ret) {
                        printk(KERN_ERR "alg: cipher: setkey failed "
                               "on test %d for %s: flags=%x\n", j,
                               algo, crypto_cipher_get_flags(tfm));

                for (k = 0; k < template[i].len;
                     k += crypto_cipher_blocksize(tfm)) {
                                crypto_cipher_encrypt_one(tfm, data + k,
                                crypto_cipher_decrypt_one(tfm, data + k,

                if (memcmp(q, result, template[i].len)) {
                        printk(KERN_ERR "alg: cipher: Test %d failed "
                               "on %s for %s\n", j, e, algo);
                        hexdump(q, template[i].len);

        testmgr_free_buf(xbuf);
static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
                           const struct cipher_testvec *template,
                           unsigned int tcount,
                           const bool diff_dst, const int align_offset)
                crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
        unsigned int i, j, k, n, temp;
        struct skcipher_request *req;
        struct scatterlist sg[8];
        struct scatterlist sgout[8];
        struct crypto_wait wait;
        const char *input, *result;
        char *xbuf[XBUFSIZE];
        char *xoutbuf[XBUFSIZE];
        unsigned int ivsize = crypto_skcipher_ivsize(tfm);

        if (testmgr_alloc_buf(xbuf))

        if (diff_dst && testmgr_alloc_buf(xoutbuf))

        crypto_init_wait(&wait);

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
                pr_err("alg: skcipher%s: Failed to allocate request for %s\n",

        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      crypto_req_done, &wait);

        for (i = 0; i < tcount; i++) {
                if (template[i].np && !template[i].also_non_np)

                if (fips_enabled && template[i].fips_skip)

                if (template[i].iv && !(template[i].generates_iv && enc))
                        memcpy(iv, template[i].iv, ivsize);
                        memset(iv, 0, MAX_IVLEN);

                input = enc ? template[i].ptext : template[i].ctext;
                result = enc ? template[i].ctext : template[i].ptext;

                if (WARN_ON(align_offset + template[i].len > PAGE_SIZE))

                data += align_offset;
                memcpy(data, input, template[i].len);

                crypto_skcipher_clear_flags(tfm, ~0);
                        crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

                ret = crypto_skcipher_setkey(tfm, template[i].key,
                if (template[i].fail == !ret) {
                        pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
                               d, j, algo, crypto_skcipher_get_flags(tfm));

                sg_init_one(&sg[0], data, template[i].len);
                        data += align_offset;
                        sg_init_one(&sgout[0], data, template[i].len);

                skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
                                           template[i].len, iv);
                ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
                                      crypto_skcipher_decrypt(req), &wait);

                        pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
                               d, e, j, algo, -ret);

                if (memcmp(q, result, template[i].len)) {
                        pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
                        hexdump(q, template[i].len);

                if (template[i].generates_iv && enc &&
                    memcmp(iv, template[i].iv, crypto_skcipher_ivsize(tfm))) {
                        pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
                        hexdump(iv, crypto_skcipher_ivsize(tfm));

        for (i = 0; i < tcount; i++) {
                /* alignment tests are only done with contiguous buffers */
                if (align_offset != 0)

                if (!template[i].np)

                if (fips_enabled && template[i].fips_skip)

                if (template[i].iv && !(template[i].generates_iv && enc))
                        memcpy(iv, template[i].iv, ivsize);
                        memset(iv, 0, MAX_IVLEN);

                input = enc ? template[i].ptext : template[i].ctext;
                result = enc ? template[i].ctext : template[i].ptext;

                crypto_skcipher_clear_flags(tfm, ~0);
                        crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

                ret = crypto_skcipher_setkey(tfm, template[i].key,
                if (template[i].fail == !ret) {
                        pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
                               d, j, algo, crypto_skcipher_get_flags(tfm));

                sg_init_table(sg, template[i].np);
                        sg_init_table(sgout, template[i].np);
                for (k = 0; k < template[i].np; k++) {
                        if (WARN_ON(offset_in_page(IDX[k]) +
                                    template[i].tap[k] > PAGE_SIZE))

                        q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);

                        memcpy(q, input + temp, template[i].tap[k]);

                        if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
                                q[template[i].tap[k]] = 0;

                        sg_set_buf(&sg[k], q, template[i].tap[k]);
                                q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
                                    offset_in_page(IDX[k]);

                                sg_set_buf(&sgout[k], q, template[i].tap[k]);

                                memset(q, 0, template[i].tap[k]);
                                if (offset_in_page(q) +
                                    template[i].tap[k] < PAGE_SIZE)
                                        q[template[i].tap[k]] = 0;

                        temp += template[i].tap[k];

                skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
                                           template[i].len, iv);

                ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
                                      crypto_skcipher_decrypt(req), &wait);

                        pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
                               d, e, j, algo, -ret);

                for (k = 0; k < template[i].np; k++) {
                                q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
                                    offset_in_page(IDX[k]);
                                q = xbuf[IDX[k] >> PAGE_SHIFT] +
                                    offset_in_page(IDX[k]);

                        if (memcmp(q, result + temp, template[i].tap[k])) {
                                pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
                                hexdump(q, template[i].tap[k]);

                        q += template[i].tap[k];
                        for (n = 0; offset_in_page(q + n) && q[n]; n++)
                                pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
                                       d, j, e, k, algo, n);

                        temp += template[i].tap[k];

        skcipher_request_free(req);
        testmgr_free_buf(xoutbuf);
        testmgr_free_buf(xbuf);

static int test_skcipher(struct crypto_skcipher *tfm, int enc,
                         const struct cipher_testvec *template,
                         unsigned int tcount)
        unsigned int alignmask;

        /* test 'dst == src' case */
        ret = __test_skcipher(tfm, enc, template, tcount, false, 0);

        /* test 'dst != src' case */
        ret = __test_skcipher(tfm, enc, template, tcount, true, 0);

        /* test unaligned buffers, check with one byte offset */
        ret = __test_skcipher(tfm, enc, template, tcount, true, 1);

        alignmask = crypto_tfm_alg_alignmask(&tfm->base);

        /* Check if alignment mask for tfm is correctly set. */
        ret = __test_skcipher(tfm, enc, template, tcount, true,
static int test_comp(struct crypto_comp *tfm,
                     const struct comp_testvec *ctemplate,
                     const struct comp_testvec *dtemplate,
                     int ctcount, int dtcount)
        const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
        char *output, *decomp_output;

        output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);

        decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
        if (!decomp_output) {

        for (i = 0; i < ctcount; i++) {
                unsigned int dlen = COMP_BUF_SIZE;

                memset(output, 0, COMP_BUF_SIZE);
                memset(decomp_output, 0, COMP_BUF_SIZE);

                ilen = ctemplate[i].inlen;
                ret = crypto_comp_compress(tfm, ctemplate[i].input,
                                           ilen, output, &dlen);
                        printk(KERN_ERR "alg: comp: compression failed "
                               "on test %d for %s: ret=%d\n", i + 1, algo,

                dlen = COMP_BUF_SIZE;
                ret = crypto_comp_decompress(tfm, output,
                                             ilen, decomp_output, &dlen);
                        pr_err("alg: comp: decompression failed on compression test %d for %s: ret=%d\n",
                if (dlen != ctemplate[i].inlen) {
                        printk(KERN_ERR "alg: comp: Compression test %d "
                               "failed for %s: output len = %d\n", i + 1, algo,

                if (memcmp(decomp_output, ctemplate[i].input,
                           ctemplate[i].inlen)) {
                        pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
                        hexdump(decomp_output, dlen);

        for (i = 0; i < dtcount; i++) {
                unsigned int dlen = COMP_BUF_SIZE;

                memset(decomp_output, 0, COMP_BUF_SIZE);

                ilen = dtemplate[i].inlen;
                ret = crypto_comp_decompress(tfm, dtemplate[i].input,
                                             ilen, decomp_output, &dlen);
                        printk(KERN_ERR "alg: comp: decompression failed "
                               "on test %d for %s: ret=%d\n", i + 1, algo,

                if (dlen != dtemplate[i].outlen) {
                        printk(KERN_ERR "alg: comp: Decompression test %d "
                               "failed for %s: output len = %d\n", i + 1, algo,

                if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
                        printk(KERN_ERR "alg: comp: Decompression test %d "
                               "failed for %s\n", i + 1, algo);
                        hexdump(decomp_output, dlen);

        kfree(decomp_output);

static int test_acomp(struct crypto_acomp *tfm,
                      const struct comp_testvec *ctemplate,
                      const struct comp_testvec *dtemplate,
                      int ctcount, int dtcount)
        const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
        char *output, *decomp_out;
        struct scatterlist src, dst;
        struct acomp_req *req;
        struct crypto_wait wait;

        output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);

        decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);

        for (i = 0; i < ctcount; i++) {
                unsigned int dlen = COMP_BUF_SIZE;
                int ilen = ctemplate[i].inlen;

                input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);

                memset(output, 0, dlen);
                crypto_init_wait(&wait);
                sg_init_one(&src, input_vec, ilen);
                sg_init_one(&dst, output, dlen);

                req = acomp_request_alloc(tfm);
                        pr_err("alg: acomp: request alloc failed for %s\n",

                acomp_request_set_params(req, &src, &dst, ilen, dlen);
                acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           crypto_req_done, &wait);

                ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
                        pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
                        acomp_request_free(req);

                dlen = COMP_BUF_SIZE;
                sg_init_one(&src, output, ilen);
                sg_init_one(&dst, decomp_out, dlen);
                crypto_init_wait(&wait);
                acomp_request_set_params(req, &src, &dst, ilen, dlen);

                ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
                        pr_err("alg: acomp: decompression failed on compression test %d for %s: ret=%d\n",
                        acomp_request_free(req);

                if (req->dlen != ctemplate[i].inlen) {
                        pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
                               i + 1, algo, req->dlen);
                        acomp_request_free(req);

                if (memcmp(input_vec, decomp_out, req->dlen)) {
                        pr_err("alg: acomp: Compression test %d failed for %s\n",
                        hexdump(output, req->dlen);
                        acomp_request_free(req);

                acomp_request_free(req);

        for (i = 0; i < dtcount; i++) {
                unsigned int dlen = COMP_BUF_SIZE;
                int ilen = dtemplate[i].inlen;

                input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);

                memset(output, 0, dlen);
                crypto_init_wait(&wait);
                sg_init_one(&src, input_vec, ilen);
                sg_init_one(&dst, output, dlen);

                req = acomp_request_alloc(tfm);
                        pr_err("alg: acomp: request alloc failed for %s\n",

                acomp_request_set_params(req, &src, &dst, ilen, dlen);
                acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           crypto_req_done, &wait);

                ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
                        pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
                        acomp_request_free(req);

                if (req->dlen != dtemplate[i].outlen) {
                        pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
                               i + 1, algo, req->dlen);
                        acomp_request_free(req);

                if (memcmp(output, dtemplate[i].output, req->dlen)) {
                        pr_err("alg: acomp: Decompression test %d failed for %s\n",
                        hexdump(output, req->dlen);
                        acomp_request_free(req);

                acomp_request_free(req);

static int test_cprng(struct crypto_rng *tfm,
                      const struct cprng_testvec *template,
                      unsigned int tcount)
        const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
        int err = 0, i, j, seedsize;

        seedsize = crypto_rng_seedsize(tfm);

        seed = kmalloc(seedsize, GFP_KERNEL);
                printk(KERN_ERR "alg: cprng: Failed to allocate seed space "

        for (i = 0; i < tcount; i++) {
                memset(result, 0, 32);

                memcpy(seed, template[i].v, template[i].vlen);
                memcpy(seed + template[i].vlen, template[i].key,
                memcpy(seed + template[i].vlen + template[i].klen,
                       template[i].dt, template[i].dtlen);
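                /* (The seed is thus the concatenation V || key || DT, the
                 * layout the underlying CPRNG's reset expects.) */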
                err = crypto_rng_reset(tfm, seed, seedsize);
                        printk(KERN_ERR "alg: cprng: Failed to reset rng "

                for (j = 0; j < template[i].loops; j++) {
                        err = crypto_rng_get_bytes(tfm, result,
                                printk(KERN_ERR "alg: cprng: Failed to obtain "
                                       "the correct amount of random data for "
                                       "%s (requested %d)\n", algo,

                err = memcmp(result, template[i].result,
                        printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
                        hexdump(result, template[i].rlen);

static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
        const struct aead_test_suite *suite = &desc->suite.aead;
        struct crypto_aead *tfm;

        tfm = crypto_alloc_aead(driver, type, mask);
                printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
                       "%ld\n", driver, PTR_ERR(tfm));
                return PTR_ERR(tfm);

        err = test_aead(tfm, ENCRYPT, suite->vecs, suite->count);
                err = test_aead(tfm, DECRYPT, suite->vecs, suite->count);

        crypto_free_aead(tfm);

static int alg_test_cipher(const struct alg_test_desc *desc,
                           const char *driver, u32 type, u32 mask)
        const struct cipher_test_suite *suite = &desc->suite.cipher;
        struct crypto_cipher *tfm;

        tfm = crypto_alloc_cipher(driver, type, mask);
                printk(KERN_ERR "alg: cipher: Failed to load transform for "
                       "%s: %ld\n", driver, PTR_ERR(tfm));
                return PTR_ERR(tfm);

        err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
                err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);

        crypto_free_cipher(tfm);

static int alg_test_skcipher(const struct alg_test_desc *desc,
                             const char *driver, u32 type, u32 mask)
        const struct cipher_test_suite *suite = &desc->suite.cipher;
        struct crypto_skcipher *tfm;

        tfm = crypto_alloc_skcipher(driver, type, mask);
                printk(KERN_ERR "alg: skcipher: Failed to load transform for "
                       "%s: %ld\n", driver, PTR_ERR(tfm));
                return PTR_ERR(tfm);

        err = test_skcipher(tfm, ENCRYPT, suite->vecs, suite->count);
                err = test_skcipher(tfm, DECRYPT, suite->vecs, suite->count);

        crypto_free_skcipher(tfm);

static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
        struct crypto_comp *comp;
        struct crypto_acomp *acomp;
        u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;

        if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
                acomp = crypto_alloc_acomp(driver, type, mask);
                if (IS_ERR(acomp)) {
                        pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
                               driver, PTR_ERR(acomp));
                        return PTR_ERR(acomp);
                err = test_acomp(acomp, desc->suite.comp.comp.vecs,
                                 desc->suite.comp.decomp.vecs,
                                 desc->suite.comp.comp.count,
                                 desc->suite.comp.decomp.count);
                crypto_free_acomp(acomp);
                comp = crypto_alloc_comp(driver, type, mask);
                        pr_err("alg: comp: Failed to load transform for %s: %ld\n",
                               driver, PTR_ERR(comp));
                        return PTR_ERR(comp);

                err = test_comp(comp, desc->suite.comp.comp.vecs,
                                desc->suite.comp.decomp.vecs,
                                desc->suite.comp.comp.count,
                                desc->suite.comp.decomp.count);

                crypto_free_comp(comp);

static int __alg_test_hash(const struct hash_testvec *template,
                           unsigned int tcount, const char *driver,
        struct crypto_ahash *tfm;

        tfm = crypto_alloc_ahash(driver, type, mask);
                printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
                       "%ld\n", driver, PTR_ERR(tfm));
                return PTR_ERR(tfm);

        err = test_hash(tfm, template, tcount, HASH_TEST_DIGEST);
                err = test_hash(tfm, template, tcount, HASH_TEST_FINAL);
                err = test_hash(tfm, template, tcount, HASH_TEST_FINUP);
        crypto_free_ahash(tfm);

static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
        const struct hash_testvec *template = desc->suite.hash.vecs;
        unsigned int tcount = desc->suite.hash.count;
        unsigned int nr_unkeyed, nr_keyed;

        /*
         * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
         * first, before setting a key on the tfm.  To make this easier, we
         * require that the unkeyed test vectors (if any) are listed first.
         */

        for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
                if (template[nr_unkeyed].ksize)
        for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
                if (!template[nr_unkeyed + nr_keyed].ksize) {
                        pr_err("alg: hash: test vectors for %s out of order, "
                               "unkeyed ones must come first\n", desc->alg);

        err = __alg_test_hash(template, nr_unkeyed, driver, type, mask);
        template += nr_unkeyed;

        if (!err && nr_keyed)
                err = __alg_test_hash(template, nr_keyed, driver, type, mask);
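        /* (For example, with a CRYPTO_ALG_OPTIONAL_KEY hash the vector list
         * is all unkeyed vectors followed by all keyed ones; the two halves
         * are run as separate passes above.) */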
static int alg_test_crc32c(const struct alg_test_desc *desc,
                           const char *driver, u32 type, u32 mask)
        struct crypto_shash *tfm;

        err = alg_test_hash(desc, driver, type, mask);

        tfm = crypto_alloc_shash(driver, type, mask);
                if (PTR_ERR(tfm) == -ENOENT) {
                        /*
                         * This crc32c implementation is only available through
                         * ahash API, not the shash API, so the remaining part
                         * of the test is not applicable to it.
                         */
                printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
                       "%ld\n", driver, PTR_ERR(tfm));
                return PTR_ERR(tfm);

                SHASH_DESC_ON_STACK(shash, tfm);
                u32 *ctx = (u32 *)shash_desc_ctx(shash);
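                /* The check below expects that final() on a seeded desc ctx
                 * simply returns the bitwise inverse of the seeded partial
                 * CRC state, in little-endian byte order. */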
                err = crypto_shash_final(shash, (u8 *)&val);
                        printk(KERN_ERR "alg: crc32c: Operation failed for "
                               "%s: %d\n", driver, err);

                if (val != cpu_to_le32(~420553207)) {
                        pr_err("alg: crc32c: Test failed for %s: %u\n",
                               driver, le32_to_cpu(val));

        crypto_free_shash(tfm);

static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
        struct crypto_rng *rng;

        rng = crypto_alloc_rng(driver, type, mask);
                printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
                       "%ld\n", driver, PTR_ERR(rng));
                return PTR_ERR(rng);

        err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);

        crypto_free_rng(rng);

static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
                          const char *driver, u32 type, u32 mask)
        struct crypto_rng *drng;
        struct drbg_test_data test_data;
        struct drbg_string addtl, pers, testentropy;
        unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

        drng = crypto_alloc_rng(driver, type, mask);
                printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "

        test_data.testentropy = &testentropy;
        drbg_string_fill(&testentropy, test->entropy, test->entropylen);
        drbg_string_fill(&pers, test->pers, test->perslen);
        ret = crypto_drbg_reset_test(drng, &pers, &test_data);
                printk(KERN_ERR "alg: drbg: Failed to reset rng\n");

        drbg_string_fill(&addtl, test->addtla, test->addtllen);
                drbg_string_fill(&testentropy, test->entpra, test->entprlen);
                ret = crypto_drbg_get_bytes_addtl_test(drng,
                        buf, test->expectedlen, &addtl, &test_data);
                ret = crypto_drbg_get_bytes_addtl(drng,
                        buf, test->expectedlen, &addtl);
                printk(KERN_ERR "alg: drbg: could not obtain random data for "
                       "driver %s\n", driver);

        drbg_string_fill(&addtl, test->addtlb, test->addtllen);
                drbg_string_fill(&testentropy, test->entprb, test->entprlen);
                ret = crypto_drbg_get_bytes_addtl_test(drng,
                        buf, test->expectedlen, &addtl, &test_data);
                ret = crypto_drbg_get_bytes_addtl(drng,
                        buf, test->expectedlen, &addtl);
                printk(KERN_ERR "alg: drbg: could not obtain random data for "
                       "driver %s\n", driver);

        ret = memcmp(test->expected, buf, test->expectedlen);

        crypto_free_rng(drng);

static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
        const struct drbg_testvec *template = desc->suite.drbg.vecs;
        unsigned int tcount = desc->suite.drbg.count;

        if (memcmp(driver, "drbg_pr_", 8) == 0)

        for (i = 0; i < tcount; i++) {
                err = drbg_cavs_test(&template[i], pr, driver, type, mask);
                        printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
        struct kpp_request *req;
        void *input_buf = NULL;
        void *output_buf = NULL;
        void *a_public = NULL;
        void *shared_secret = NULL;
        struct crypto_wait wait;
        unsigned int out_len_max;
        struct scatterlist src, dst;

        req = kpp_request_alloc(tfm, GFP_KERNEL);

        crypto_init_wait(&wait);

        err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);

        out_len_max = crypto_kpp_maxsize(tfm);
        output_buf = kzalloc(out_len_max, GFP_KERNEL);

        /* Use appropriate parameter as base */
        kpp_request_set_input(req, NULL, 0);
        sg_init_one(&dst, output_buf, out_len_max);
        kpp_request_set_output(req, &dst, out_len_max);
        kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                 crypto_req_done, &wait);

        /* Compute party A's public key */
        err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
                pr_err("alg: %s: Party A: generate public key test failed. err %d\n",

        /* Save party A's public key */
        a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);

        /* Verify calculated public key */
        if (memcmp(vec->expected_a_public, sg_virt(req->dst),
                   vec->expected_a_public_size)) {
                pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
        /* Calculate the shared secret by using the counterpart's (B's) public key. */
        input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);

        sg_init_one(&src, input_buf, vec->b_public_size);
        sg_init_one(&dst, output_buf, out_len_max);
        kpp_request_set_input(req, &src, vec->b_public_size);
        kpp_request_set_output(req, &dst, out_len_max);
        kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                 crypto_req_done, &wait);
        err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
                pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",

        /* Save the shared secret obtained by party A */
        a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL);
        /*
         * Calculate party B's shared secret by using party A's
         * public key.
         */
                err = crypto_kpp_set_secret(tfm, vec->b_secret,
                                            vec->b_secret_size);

                sg_init_one(&src, a_public, vec->expected_a_public_size);
                sg_init_one(&dst, output_buf, out_len_max);
                kpp_request_set_input(req, &src, vec->expected_a_public_size);
                kpp_request_set_output(req, &dst, out_len_max);
                kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                         crypto_req_done, &wait);
                err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
                        pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",

                shared_secret = a_ss;
                shared_secret = (void *)vec->expected_ss;
        /*
         * Verify the shared secret, from which the user will derive a secret
         * key using whatever hash or KDF they have chosen.
         */
        if (memcmp(shared_secret, sg_virt(req->dst),
                   vec->expected_ss_size)) {
                pr_err("alg: %s: compute shared secret test failed. Invalid output\n",

        kpp_request_free(req);
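/*
 * In short: party A's public key is generated from the test secret, A then
 * computes the shared secret from B's public key, and (when b_secret is
 * supplied) B recomputes the same secret from A's public key; each result is
 * checked against the expected shared secret.
 */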
static int test_kpp(struct crypto_kpp *tfm, const char *alg,
                    const struct kpp_testvec *vecs, unsigned int tcount)

        for (i = 0; i < tcount; i++) {
                ret = do_test_kpp(tfm, vecs++, alg);
                        pr_err("alg: %s: test failed on vector %d, err=%d\n",

static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
        struct crypto_kpp *tfm;

        tfm = crypto_alloc_kpp(driver, type, mask);
                pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
                       driver, PTR_ERR(tfm));
                return PTR_ERR(tfm);
        if (desc->suite.kpp.vecs)
                err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
                               desc->suite.kpp.count);

        crypto_free_kpp(tfm);

static int test_akcipher_one(struct crypto_akcipher *tfm,
                             const struct akcipher_testvec *vecs)
        char *xbuf[XBUFSIZE];
        struct akcipher_request *req;
        void *outbuf_enc = NULL;
        void *outbuf_dec = NULL;
        struct crypto_wait wait;
        unsigned int out_len_max, out_len = 0;
        struct scatterlist src, dst, src_tab[2];
        unsigned int m_size, c_size;

        if (testmgr_alloc_buf(xbuf))

        req = akcipher_request_alloc(tfm, GFP_KERNEL);

        crypto_init_wait(&wait);

        if (vecs->public_key_vec)
                err = crypto_akcipher_set_pub_key(tfm, vecs->key,
                err = crypto_akcipher_set_priv_key(tfm, vecs->key,

        out_len_max = crypto_akcipher_maxsize(tfm);
2715 * First run test which do not require a private key, such as
2716 * encrypt or verify.
2718 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2722 if (!vecs->siggen_sigver_test) {
2724 m_size = vecs->m_size;
2726 c_size = vecs->c_size;
2729 /* Swap args so we could keep plaintext (digest)
2730 * in vecs->m, and cooked signature in vecs->c.
2732 m = vecs->c; /* signature */
2733 m_size = vecs->c_size;
2734 c = vecs->m; /* digest */
2735 c_size = vecs->m_size;
2739 if (WARN_ON(m_size > PAGE_SIZE))
2741 memcpy(xbuf[0], m, m_size);
2743 sg_init_table(src_tab, 2);
2744 sg_set_buf(&src_tab[0], xbuf[0], 8);
2745 sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
2746 sg_init_one(&dst, outbuf_enc, out_len_max);
2747 akcipher_request_set_crypt(req, src_tab, &dst, m_size,
2749 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2750 crypto_req_done, &wait);
2752 err = crypto_wait_req(vecs->siggen_sigver_test ?
2753 /* Run asymmetric signature verification */
2754 crypto_akcipher_verify(req) :
2755 /* Run asymmetric encrypt */
2756 crypto_akcipher_encrypt(req), &wait);
2758 pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
2761 if (req->dst_len != c_size) {
2762 pr_err("alg: akcipher: %s test failed. Invalid output len\n",
2767 /* verify that encrypted message is equal to expected */
2768 if (memcmp(c, outbuf_enc, c_size)) {
2769 pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
2770 hexdump(outbuf_enc, c_size);
2776 * Don't invoke (decrypt or sign) test which require a private key
2777 * for vectors with only a public key.
2779 if (vecs->public_key_vec) {
2783 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2789 op = vecs->siggen_sigver_test ? "sign" : "decrypt";
2790 if (WARN_ON(c_size > PAGE_SIZE))
2792 memcpy(xbuf[0], c, c_size);
2794 sg_init_one(&src, xbuf[0], c_size);
2795 sg_init_one(&dst, outbuf_dec, out_len_max);
2796 crypto_init_wait(&wait);
2797 akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);
2799 err = crypto_wait_req(vecs->siggen_sigver_test ?
2800 /* Run asymmetric signature generation */
2801 crypto_akcipher_sign(req) :
2802 /* Run asymmetric decrypt */
2803 crypto_akcipher_decrypt(req), &wait);
2805 pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
2808 out_len = req->dst_len;
2809 if (out_len < m_size) {
2810 pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
2815 /* verify that decrypted message is equal to the original msg */
2816 if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
2817 memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
2818 pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
2819 hexdump(outbuf_dec, out_len);
2826 akcipher_request_free(req);
2828 testmgr_free_buf(xbuf);
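/*
 * Run every akcipher test vector through test_akcipher_one(), reporting
 * the failing vector number and the driver name on the first failure.
 */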
static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
			 const struct akcipher_testvec *vecs,
			 unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
	int ret, i;

	for (i = 0; i < tcount; i++) {
		ret = test_akcipher_one(tfm, vecs++);
		if (!ret)
			continue;

		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
		       i + 1, algo, ret);
		return ret;
	}
	return 0;
}
static int alg_test_akcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_akcipher *tfm;
	int err = 0;

	tfm = crypto_alloc_akcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	if (desc->suite.akcipher.vecs)
		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
				    desc->suite.akcipher.count);

	crypto_free_akcipher(tfm);
	return err;
}
static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}
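/*
 * __VECS() pairs a test-vector template array from testmgr.h with its
 * element count, keeping each suite initializer below in sync with the
 * size of its underlying array.
 */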
#define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }

/* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = {
2883 .alg = "adiantum(xchacha12,aes)",
2884 .test = alg_test_skcipher,
2886 .cipher = __VECS(adiantum_xchacha12_aes_tv_template)
2889 .alg = "adiantum(xchacha20,aes)",
2890 .test = alg_test_skcipher,
2892 .cipher = __VECS(adiantum_xchacha20_aes_tv_template)
2896 .test = alg_test_aead,
2898 .aead = __VECS(aegis128_tv_template)
2902 .test = alg_test_aead,
2904 .aead = __VECS(aegis128l_tv_template)
2908 .test = alg_test_aead,
2910 .aead = __VECS(aegis256_tv_template)
2913 .alg = "ansi_cprng",
2914 .test = alg_test_cprng,
2916 .cprng = __VECS(ansi_cprng_aes_tv_template)
2919 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2920 .test = alg_test_aead,
2922 .aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
2925 .alg = "authenc(hmac(sha1),cbc(aes))",
2926 .test = alg_test_aead,
2929 .aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
2932 .alg = "authenc(hmac(sha1),cbc(des))",
2933 .test = alg_test_aead,
2935 .aead = __VECS(hmac_sha1_des_cbc_tv_temp)
2938 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2939 .test = alg_test_aead,
2942 .aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
2945 .alg = "authenc(hmac(sha1),ctr(aes))",
2946 .test = alg_test_null,
2949 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2950 .test = alg_test_aead,
2952 .aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
2955 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2956 .test = alg_test_null,
2959 .alg = "authenc(hmac(sha224),cbc(des))",
2960 .test = alg_test_aead,
2962 .aead = __VECS(hmac_sha224_des_cbc_tv_temp)
2965 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2966 .test = alg_test_aead,
2969 .aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
2972 .alg = "authenc(hmac(sha256),cbc(aes))",
2973 .test = alg_test_aead,
2976 .aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
2979 .alg = "authenc(hmac(sha256),cbc(des))",
2980 .test = alg_test_aead,
2982 .aead = __VECS(hmac_sha256_des_cbc_tv_temp)
2985 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2986 .test = alg_test_aead,
2989 .aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
2992 .alg = "authenc(hmac(sha256),ctr(aes))",
2993 .test = alg_test_null,
2996 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2997 .test = alg_test_null,
3000 .alg = "authenc(hmac(sha384),cbc(des))",
3001 .test = alg_test_aead,
3003 .aead = __VECS(hmac_sha384_des_cbc_tv_temp)
3006 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
3007 .test = alg_test_aead,
3010 .aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
3013 .alg = "authenc(hmac(sha384),ctr(aes))",
3014 .test = alg_test_null,
3017 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
3018 .test = alg_test_null,
3021 .alg = "authenc(hmac(sha512),cbc(aes))",
3023 .test = alg_test_aead,
3025 .aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
3028 .alg = "authenc(hmac(sha512),cbc(des))",
3029 .test = alg_test_aead,
3031 .aead = __VECS(hmac_sha512_des_cbc_tv_temp)
3034 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
3035 .test = alg_test_aead,
3038 .aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
3041 .alg = "authenc(hmac(sha512),ctr(aes))",
3042 .test = alg_test_null,
3045 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
3046 .test = alg_test_null,
3050 .test = alg_test_skcipher,
3053 .cipher = __VECS(aes_cbc_tv_template)
3056 .alg = "cbc(anubis)",
3057 .test = alg_test_skcipher,
3059 .cipher = __VECS(anubis_cbc_tv_template)
3062 .alg = "cbc(blowfish)",
3063 .test = alg_test_skcipher,
3065 .cipher = __VECS(bf_cbc_tv_template)
3068 .alg = "cbc(camellia)",
3069 .test = alg_test_skcipher,
3071 .cipher = __VECS(camellia_cbc_tv_template)
3074 .alg = "cbc(cast5)",
3075 .test = alg_test_skcipher,
3077 .cipher = __VECS(cast5_cbc_tv_template)
3080 .alg = "cbc(cast6)",
3081 .test = alg_test_skcipher,
3083 .cipher = __VECS(cast6_cbc_tv_template)
3087 .test = alg_test_skcipher,
3089 .cipher = __VECS(des_cbc_tv_template)
3092 .alg = "cbc(des3_ede)",
3093 .test = alg_test_skcipher,
3096 .cipher = __VECS(des3_ede_cbc_tv_template)
		/* Same as cbc(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 * instead of by value.
		 */
		.alg = "cbc(paes)",
		.test = alg_test_null,
3106 .alg = "cbc(serpent)",
3107 .test = alg_test_skcipher,
3109 .cipher = __VECS(serpent_cbc_tv_template)
3113 .test = alg_test_skcipher,
3115 .cipher = __VECS(sm4_cbc_tv_template)
3118 .alg = "cbc(twofish)",
3119 .test = alg_test_skcipher,
3121 .cipher = __VECS(tf_cbc_tv_template)
3124 .alg = "cbcmac(aes)",
3126 .test = alg_test_hash,
3128 .hash = __VECS(aes_cbcmac_tv_template)
3132 .test = alg_test_aead,
3135 .aead = __VECS(aes_ccm_tv_template)
3139 .test = alg_test_skcipher,
3142 .cipher = __VECS(aes_cfb_tv_template)
3146 .test = alg_test_skcipher,
3148 .cipher = __VECS(chacha20_tv_template)
3153 .test = alg_test_hash,
3155 .hash = __VECS(aes_cmac128_tv_template)
3158 .alg = "cmac(des3_ede)",
3160 .test = alg_test_hash,
3162 .hash = __VECS(des3_ede_cmac64_tv_template)
3165 .alg = "compress_null",
3166 .test = alg_test_null,
3169 .test = alg_test_hash,
3172 .hash = __VECS(crc32_tv_template)
3176 .test = alg_test_crc32c,
3179 .hash = __VECS(crc32c_tv_template)
3183 .test = alg_test_hash,
3186 .hash = __VECS(crct10dif_tv_template)
3190 .test = alg_test_skcipher,
3193 .cipher = __VECS(aes_ctr_tv_template)
3196 .alg = "ctr(blowfish)",
3197 .test = alg_test_skcipher,
3199 .cipher = __VECS(bf_ctr_tv_template)
3202 .alg = "ctr(camellia)",
3203 .test = alg_test_skcipher,
3205 .cipher = __VECS(camellia_ctr_tv_template)
3208 .alg = "ctr(cast5)",
3209 .test = alg_test_skcipher,
3211 .cipher = __VECS(cast5_ctr_tv_template)
3214 .alg = "ctr(cast6)",
3215 .test = alg_test_skcipher,
3217 .cipher = __VECS(cast6_ctr_tv_template)
3221 .test = alg_test_skcipher,
3223 .cipher = __VECS(des_ctr_tv_template)
3226 .alg = "ctr(des3_ede)",
3227 .test = alg_test_skcipher,
3230 .cipher = __VECS(des3_ede_ctr_tv_template)
		/* Same as ctr(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 * instead of by value.
		 */
		.alg = "ctr(paes)",
		.test = alg_test_null,
3240 .alg = "ctr(serpent)",
3241 .test = alg_test_skcipher,
3243 .cipher = __VECS(serpent_ctr_tv_template)
3247 .test = alg_test_skcipher,
3249 .cipher = __VECS(sm4_ctr_tv_template)
3252 .alg = "ctr(twofish)",
3253 .test = alg_test_skcipher,
3255 .cipher = __VECS(tf_ctr_tv_template)
3258 .alg = "cts(cbc(aes))",
3259 .test = alg_test_skcipher,
3262 .cipher = __VECS(cts_mode_tv_template)
3266 .test = alg_test_comp,
3270 .comp = __VECS(deflate_comp_tv_template),
3271 .decomp = __VECS(deflate_decomp_tv_template)
3276 .test = alg_test_kpp,
3279 .kpp = __VECS(dh_tv_template)
3282 .alg = "digest_null",
3283 .test = alg_test_null,
3285 .alg = "drbg_nopr_ctr_aes128",
3286 .test = alg_test_drbg,
3289 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
3292 .alg = "drbg_nopr_ctr_aes192",
3293 .test = alg_test_drbg,
3296 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
3299 .alg = "drbg_nopr_ctr_aes256",
3300 .test = alg_test_drbg,
3303 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
3307 * There is no need to specifically test the DRBG with every
3308 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
3310 .alg = "drbg_nopr_hmac_sha1",
3312 .test = alg_test_null,
3314 .alg = "drbg_nopr_hmac_sha256",
3315 .test = alg_test_drbg,
3318 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
3321 /* covered by drbg_nopr_hmac_sha256 test */
3322 .alg = "drbg_nopr_hmac_sha384",
3324 .test = alg_test_null,
3326 .alg = "drbg_nopr_hmac_sha512",
3327 .test = alg_test_null,
3330 .alg = "drbg_nopr_sha1",
3332 .test = alg_test_null,
3334 .alg = "drbg_nopr_sha256",
3335 .test = alg_test_drbg,
3338 .drbg = __VECS(drbg_nopr_sha256_tv_template)
3341 /* covered by drbg_nopr_sha256 test */
3342 .alg = "drbg_nopr_sha384",
3344 .test = alg_test_null,
3346 .alg = "drbg_nopr_sha512",
3348 .test = alg_test_null,
3350 .alg = "drbg_pr_ctr_aes128",
3351 .test = alg_test_drbg,
3354 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
3357 /* covered by drbg_pr_ctr_aes128 test */
3358 .alg = "drbg_pr_ctr_aes192",
3360 .test = alg_test_null,
3362 .alg = "drbg_pr_ctr_aes256",
3364 .test = alg_test_null,
3366 .alg = "drbg_pr_hmac_sha1",
3368 .test = alg_test_null,
3370 .alg = "drbg_pr_hmac_sha256",
3371 .test = alg_test_drbg,
3374 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
3377 /* covered by drbg_pr_hmac_sha256 test */
3378 .alg = "drbg_pr_hmac_sha384",
3380 .test = alg_test_null,
3382 .alg = "drbg_pr_hmac_sha512",
3383 .test = alg_test_null,
3386 .alg = "drbg_pr_sha1",
3388 .test = alg_test_null,
3390 .alg = "drbg_pr_sha256",
3391 .test = alg_test_drbg,
3394 .drbg = __VECS(drbg_pr_sha256_tv_template)
3397 /* covered by drbg_pr_sha256 test */
3398 .alg = "drbg_pr_sha384",
3400 .test = alg_test_null,
3402 .alg = "drbg_pr_sha512",
3404 .test = alg_test_null,
3407 .test = alg_test_skcipher,
3410 .cipher = __VECS(aes_tv_template)
3413 .alg = "ecb(anubis)",
3414 .test = alg_test_skcipher,
3416 .cipher = __VECS(anubis_tv_template)
3420 .test = alg_test_skcipher,
3422 .cipher = __VECS(arc4_tv_template)
3425 .alg = "ecb(blowfish)",
3426 .test = alg_test_skcipher,
3428 .cipher = __VECS(bf_tv_template)
3431 .alg = "ecb(camellia)",
3432 .test = alg_test_skcipher,
3434 .cipher = __VECS(camellia_tv_template)
3437 .alg = "ecb(cast5)",
3438 .test = alg_test_skcipher,
3440 .cipher = __VECS(cast5_tv_template)
3443 .alg = "ecb(cast6)",
3444 .test = alg_test_skcipher,
3446 .cipher = __VECS(cast6_tv_template)
3449 .alg = "ecb(cipher_null)",
3450 .test = alg_test_null,
3454 .test = alg_test_skcipher,
3456 .cipher = __VECS(des_tv_template)
3459 .alg = "ecb(des3_ede)",
3460 .test = alg_test_skcipher,
3463 .cipher = __VECS(des3_ede_tv_template)
3466 .alg = "ecb(fcrypt)",
3467 .test = alg_test_skcipher,
3470 .vecs = fcrypt_pcbc_tv_template,
3475 .alg = "ecb(khazad)",
3476 .test = alg_test_skcipher,
3478 .cipher = __VECS(khazad_tv_template)
		/* Same as ecb(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 * instead of by value.
		 */
		.alg = "ecb(paes)",
		.test = alg_test_null,
3489 .test = alg_test_skcipher,
3491 .cipher = __VECS(seed_tv_template)
3494 .alg = "ecb(serpent)",
3495 .test = alg_test_skcipher,
3497 .cipher = __VECS(serpent_tv_template)
3501 .test = alg_test_skcipher,
3503 .cipher = __VECS(sm4_tv_template)
3507 .test = alg_test_skcipher,
3509 .cipher = __VECS(tea_tv_template)
3512 .alg = "ecb(tnepres)",
3513 .test = alg_test_skcipher,
3515 .cipher = __VECS(tnepres_tv_template)
3518 .alg = "ecb(twofish)",
3519 .test = alg_test_skcipher,
3521 .cipher = __VECS(tf_tv_template)
3525 .test = alg_test_skcipher,
3527 .cipher = __VECS(xeta_tv_template)
3531 .test = alg_test_skcipher,
3533 .cipher = __VECS(xtea_tv_template)
3537 .test = alg_test_kpp,
3540 .kpp = __VECS(ecdh_tv_template)
3544 .test = alg_test_aead,
3547 .aead = __VECS(aes_gcm_tv_template)
3551 .test = alg_test_hash,
3554 .hash = __VECS(ghash_tv_template)
3558 .test = alg_test_hash,
3560 .hash = __VECS(hmac_md5_tv_template)
3563 .alg = "hmac(rmd128)",
3564 .test = alg_test_hash,
3566 .hash = __VECS(hmac_rmd128_tv_template)
3569 .alg = "hmac(rmd160)",
3570 .test = alg_test_hash,
3572 .hash = __VECS(hmac_rmd160_tv_template)
3575 .alg = "hmac(sha1)",
3576 .test = alg_test_hash,
3579 .hash = __VECS(hmac_sha1_tv_template)
3582 .alg = "hmac(sha224)",
3583 .test = alg_test_hash,
3586 .hash = __VECS(hmac_sha224_tv_template)
3589 .alg = "hmac(sha256)",
3590 .test = alg_test_hash,
3593 .hash = __VECS(hmac_sha256_tv_template)
3596 .alg = "hmac(sha3-224)",
3597 .test = alg_test_hash,
3600 .hash = __VECS(hmac_sha3_224_tv_template)
3603 .alg = "hmac(sha3-256)",
3604 .test = alg_test_hash,
3607 .hash = __VECS(hmac_sha3_256_tv_template)
3610 .alg = "hmac(sha3-384)",
3611 .test = alg_test_hash,
3614 .hash = __VECS(hmac_sha3_384_tv_template)
3617 .alg = "hmac(sha3-512)",
3618 .test = alg_test_hash,
3621 .hash = __VECS(hmac_sha3_512_tv_template)
3624 .alg = "hmac(sha384)",
3625 .test = alg_test_hash,
3628 .hash = __VECS(hmac_sha384_tv_template)
3631 .alg = "hmac(sha512)",
3632 .test = alg_test_hash,
3635 .hash = __VECS(hmac_sha512_tv_template)
3638 .alg = "hmac(streebog256)",
3639 .test = alg_test_hash,
3641 .hash = __VECS(hmac_streebog256_tv_template)
3644 .alg = "hmac(streebog512)",
3645 .test = alg_test_hash,
3647 .hash = __VECS(hmac_streebog512_tv_template)
3650 .alg = "jitterentropy_rng",
3652 .test = alg_test_null,
3655 .test = alg_test_skcipher,
3658 .cipher = __VECS(aes_kw_tv_template)
3662 .test = alg_test_skcipher,
3664 .cipher = __VECS(aes_lrw_tv_template)
3667 .alg = "lrw(camellia)",
3668 .test = alg_test_skcipher,
3670 .cipher = __VECS(camellia_lrw_tv_template)
3673 .alg = "lrw(cast6)",
3674 .test = alg_test_skcipher,
3676 .cipher = __VECS(cast6_lrw_tv_template)
3679 .alg = "lrw(serpent)",
3680 .test = alg_test_skcipher,
3682 .cipher = __VECS(serpent_lrw_tv_template)
3685 .alg = "lrw(twofish)",
3686 .test = alg_test_skcipher,
3688 .cipher = __VECS(tf_lrw_tv_template)
3692 .test = alg_test_comp,
3696 .comp = __VECS(lz4_comp_tv_template),
3697 .decomp = __VECS(lz4_decomp_tv_template)
3702 .test = alg_test_comp,
3706 .comp = __VECS(lz4hc_comp_tv_template),
3707 .decomp = __VECS(lz4hc_decomp_tv_template)
3712 .test = alg_test_comp,
3716 .comp = __VECS(lzo_comp_tv_template),
3717 .decomp = __VECS(lzo_decomp_tv_template)
3722 .test = alg_test_hash,
3724 .hash = __VECS(md4_tv_template)
3728 .test = alg_test_hash,
3730 .hash = __VECS(md5_tv_template)
3733 .alg = "michael_mic",
3734 .test = alg_test_hash,
3736 .hash = __VECS(michael_mic_tv_template)
3740 .test = alg_test_aead,
3742 .aead = __VECS(morus1280_tv_template)
3746 .test = alg_test_aead,
3748 .aead = __VECS(morus640_tv_template)
3751 .alg = "nhpoly1305",
3752 .test = alg_test_hash,
3754 .hash = __VECS(nhpoly1305_tv_template)
3758 .test = alg_test_skcipher,
3761 .cipher = __VECS(aes_ofb_tv_template)
		/* Same as ofb(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 * instead of by value.
		 */
		.alg = "ofb(paes)",
		.test = alg_test_null,
3771 .alg = "pcbc(fcrypt)",
3772 .test = alg_test_skcipher,
3774 .cipher = __VECS(fcrypt_pcbc_tv_template)
3777 .alg = "pkcs1pad(rsa,sha224)",
3778 .test = alg_test_null,
3781 .alg = "pkcs1pad(rsa,sha256)",
3782 .test = alg_test_akcipher,
3785 .akcipher = __VECS(pkcs1pad_rsa_tv_template)
3788 .alg = "pkcs1pad(rsa,sha384)",
3789 .test = alg_test_null,
3792 .alg = "pkcs1pad(rsa,sha512)",
3793 .test = alg_test_null,
3797 .test = alg_test_hash,
3799 .hash = __VECS(poly1305_tv_template)
3802 .alg = "rfc3686(ctr(aes))",
3803 .test = alg_test_skcipher,
3806 .cipher = __VECS(aes_ctr_rfc3686_tv_template)
3809 .alg = "rfc4106(gcm(aes))",
3810 .test = alg_test_aead,
3813 .aead = __VECS(aes_gcm_rfc4106_tv_template)
3816 .alg = "rfc4309(ccm(aes))",
3817 .test = alg_test_aead,
3820 .aead = __VECS(aes_ccm_rfc4309_tv_template)
3823 .alg = "rfc4543(gcm(aes))",
3824 .test = alg_test_aead,
3826 .aead = __VECS(aes_gcm_rfc4543_tv_template)
3829 .alg = "rfc7539(chacha20,poly1305)",
3830 .test = alg_test_aead,
3832 .aead = __VECS(rfc7539_tv_template)
3835 .alg = "rfc7539esp(chacha20,poly1305)",
3836 .test = alg_test_aead,
3838 .aead = __VECS(rfc7539esp_tv_template)
3842 .test = alg_test_hash,
3844 .hash = __VECS(rmd128_tv_template)
3848 .test = alg_test_hash,
3850 .hash = __VECS(rmd160_tv_template)
3854 .test = alg_test_hash,
3856 .hash = __VECS(rmd256_tv_template)
3860 .test = alg_test_hash,
3862 .hash = __VECS(rmd320_tv_template)
3866 .test = alg_test_akcipher,
3869 .akcipher = __VECS(rsa_tv_template)
3873 .test = alg_test_skcipher,
3875 .cipher = __VECS(salsa20_stream_tv_template)
3879 .test = alg_test_hash,
3882 .hash = __VECS(sha1_tv_template)
3886 .test = alg_test_hash,
3889 .hash = __VECS(sha224_tv_template)
3893 .test = alg_test_hash,
3896 .hash = __VECS(sha256_tv_template)
3900 .test = alg_test_hash,
3903 .hash = __VECS(sha3_224_tv_template)
3907 .test = alg_test_hash,
3910 .hash = __VECS(sha3_256_tv_template)
3914 .test = alg_test_hash,
3917 .hash = __VECS(sha3_384_tv_template)
3921 .test = alg_test_hash,
3924 .hash = __VECS(sha3_512_tv_template)
3928 .test = alg_test_hash,
3931 .hash = __VECS(sha384_tv_template)
3935 .test = alg_test_hash,
3938 .hash = __VECS(sha512_tv_template)
3942 .test = alg_test_hash,
3944 .hash = __VECS(sm3_tv_template)
3947 .alg = "streebog256",
3948 .test = alg_test_hash,
3950 .hash = __VECS(streebog256_tv_template)
3953 .alg = "streebog512",
3954 .test = alg_test_hash,
3956 .hash = __VECS(streebog512_tv_template)
3960 .test = alg_test_hash,
3962 .hash = __VECS(tgr128_tv_template)
3966 .test = alg_test_hash,
3968 .hash = __VECS(tgr160_tv_template)
3972 .test = alg_test_hash,
3974 .hash = __VECS(tgr192_tv_template)
3977 .alg = "vmac64(aes)",
3978 .test = alg_test_hash,
3980 .hash = __VECS(vmac64_aes_tv_template)
3984 .test = alg_test_hash,
3986 .hash = __VECS(wp256_tv_template)
3990 .test = alg_test_hash,
3992 .hash = __VECS(wp384_tv_template)
3996 .test = alg_test_hash,
3998 .hash = __VECS(wp512_tv_template)
4002 .test = alg_test_hash,
4004 .hash = __VECS(aes_xcbc128_tv_template)
4008 .test = alg_test_skcipher,
4010 .cipher = __VECS(xchacha12_tv_template)
4014 .test = alg_test_skcipher,
4016 .cipher = __VECS(xchacha20_tv_template)
4020 .test = alg_test_skcipher,
4023 .cipher = __VECS(aes_xts_tv_template)
4026 .alg = "xts(camellia)",
4027 .test = alg_test_skcipher,
4029 .cipher = __VECS(camellia_xts_tv_template)
4032 .alg = "xts(cast6)",
4033 .test = alg_test_skcipher,
4035 .cipher = __VECS(cast6_xts_tv_template)
		/* Same as xts(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 * instead of by value.
		 */
		.alg = "xts(paes)",
		.test = alg_test_null,
4045 .alg = "xts(serpent)",
4046 .test = alg_test_skcipher,
4048 .cipher = __VECS(serpent_xts_tv_template)
4051 .alg = "xts(twofish)",
4052 .test = alg_test_skcipher,
4054 .cipher = __VECS(tf_xts_tv_template)
4057 .alg = "xts4096(paes)",
4058 .test = alg_test_null,
4061 .alg = "xts512(paes)",
4062 .test = alg_test_null,
4065 .alg = "zlib-deflate",
4066 .test = alg_test_comp,
4070 .comp = __VECS(zlib_deflate_comp_tv_template),
4071 .decomp = __VECS(zlib_deflate_decomp_tv_template)
	}, {
		.alg = "zstd",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = __VECS(zstd_comp_tv_template),
				.decomp = __VECS(zstd_decomp_tv_template)
			}
		}
	}
};
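/*
 * alg_find_test() below binary-searches alg_test_descs[], so the array
 * must stay sorted by algorithm name and contain no duplicates; both
 * properties are verified once at boot by the following checks.
 */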
static void alg_check_test_descs_order(void)
{
	int i;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}
static void alg_check_testvec_configs(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
		WARN_ON(!valid_testvec_config(
				&default_cipher_testvec_configs[i]));

	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++)
		WARN_ON(!valid_testvec_config(
				&default_hash_testvec_configs[i]));
}
static void testmgr_onetime_init(void)
{
	alg_check_test_descs_order();
	alg_check_testvec_configs();

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	pr_warn("alg: extra crypto tests enabled. This is intended for developer use only.\n");
#endif
}
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}
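/*
 * An algorithm may have a test entry under its generic name (alg), its
 * driver-specific name (driver), or both, so both names are looked up.
 * Bare block ciphers have no entries of their own: the name is wrapped
 * in "ecb(...)" and the matching entry is run through alg_test_cipher().
 */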
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	DO_ONCE(testmgr_onetime_init);

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);
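/*
 * A minimal usage sketch; the "param" fields below are illustrative
 * only and not defined in this file (in-tree, the crypto manager's
 * probe thread is the caller):
 *
 *	rc = alg_test(param->driver, param->alg, param->type, param->mask);
 *	if (rc)
 *		(the instance failed its self-tests)
 */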