Diffstat (limited to 'crypto/testmgr.c')
 -rw-r--r--  crypto/testmgr.c | 1256
 1 file changed, 1140 insertions(+), 116 deletions(-)
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index 7620bfce92f..498649ac195 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -6,6 +6,13 @@
  * Copyright (c) 2007 Nokia Siemens Networks
  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  *
+ * Updated RFC4106 AES-GCM testing.
+ *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
+ *             Adrian Hoban <adrian.hoban@intel.com>
+ *             Gabriele Paoloni <gabriele.paoloni@intel.com>
+ *             Tadeusz Struk (tadeusz.struk@intel.com)
+ *    Copyright (c) 2010, Intel Corporation.
+ *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
  * Software Foundation; either version 2 of the License, or (at your option)
@@ -22,6 +29,17 @@
 #include <crypto/rng.h>
 
 #include "internal.h"
+
+#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
+
+/* a perfect nop */
+int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
+{
+	return 0;
+}
+
+#else
+
 #include "testmgr.h"
 
 /*
@@ -153,8 +171,22 @@ static void testmgr_free_buf(char *buf[XBUFSIZE])
 		free_page((unsigned long)buf[i]);
 }
 
-static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
-		     unsigned int tcount)
+static int do_one_async_hash_op(struct ahash_request *req,
+				struct tcrypt_result *tr,
+				int ret)
+{
+	if (ret == -EINPROGRESS || ret == -EBUSY) {
+		ret = wait_for_completion_interruptible(&tr->completion);
+		if (!ret)
+			ret = tr->err;
+		reinit_completion(&tr->completion);
+	}
+	return ret;
+}
+
+static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
+		       unsigned int tcount, bool use_digest,
+		       const int align_offset)
 {
 	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
 	unsigned int i, j, k, temp;
@@ -185,10 +217,15 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 		if (template[i].np)
 			continue;
 
+		ret = -EINVAL;
+		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
+			goto out;
+
 		j++;
 		memset(result, 0, 64);
 
 		hash_buff = xbuf[0];
+		hash_buff += align_offset;
 
 		memcpy(hash_buff, template[i].plaintext, template[i].psize);
 		sg_init_one(&sg[0], hash_buff, template[i].psize);
@@ -206,23 +243,36 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 		}
 
 		ahash_request_set_crypt(req, sg, result, template[i].psize);
-		ret = crypto_ahash_digest(req);
-		switch (ret) {
-		case 0:
-			break;
-		case -EINPROGRESS:
-		case -EBUSY:
-			ret = wait_for_completion_interruptible(
-				&tresult.completion);
-			if (!ret && !(ret = tresult.err)) {
-				INIT_COMPLETION(tresult.completion);
-				break;
+		if (use_digest) {
+			ret = do_one_async_hash_op(req, &tresult,
+						   crypto_ahash_digest(req));
+			if (ret) {
+				pr_err("alg: hash: digest failed on test %d "
+				       "for %s: ret=%d\n", j, algo, -ret);
+				goto out;
+			}
+		} else {
+			ret = do_one_async_hash_op(req, &tresult,
+						   crypto_ahash_init(req));
+			if (ret) {
+				pr_err("alt: hash: init failed on test %d "
+				       "for %s: ret=%d\n", j, algo, -ret);
+				goto out;
+			}
+			ret = do_one_async_hash_op(req, &tresult,
+						   crypto_ahash_update(req));
+			if (ret) {
+				pr_err("alt: hash: update failed on test %d "
+				       "for %s: ret=%d\n", j, algo, -ret);
+				goto out;
+			}
+			ret = do_one_async_hash_op(req, &tresult,
+						   crypto_ahash_final(req));
+			if (ret) {
+				pr_err("alt: hash: final failed on test %d "
+				       "for %s: ret=%d\n", j, algo, -ret);
+				goto out;
 			}
-			/* fall through */
-		default:
-			printk(KERN_ERR "alg: hash: digest failed on test %d "
-			       "for %s: ret=%d\n", j, algo, -ret);
-			goto out;
 		}
 
 		if (memcmp(result,
template[i].digest, @@ -237,6 +287,10 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, j = 0; for (i = 0; i < tcount; i++) { + /* alignment tests are only done with continuous buffers */ + if (align_offset != 0) + break; + if (template[i].np) { j++; memset(result, 0, 64); @@ -282,7 +336,7 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, ret = wait_for_completion_interruptible( &tresult.completion); if (!ret && !(ret = tresult.err)) { - INIT_COMPLETION(tresult.completion); + reinit_completion(&tresult.completion); break; } /* fall through */ @@ -314,8 +368,36 @@ out_nobuf: return ret; } -static int test_aead(struct crypto_aead *tfm, int enc, - struct aead_testvec *template, unsigned int tcount) +static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, + unsigned int tcount, bool use_digest) +{ + unsigned int alignmask; + int ret; + + ret = __test_hash(tfm, template, tcount, use_digest, 0); + if (ret) + return ret; + + /* test unaligned buffers, check with one byte offset */ + ret = __test_hash(tfm, template, tcount, use_digest, 1); + if (ret) + return ret; + + alignmask = crypto_tfm_alg_alignmask(&tfm->base); + if (alignmask) { + /* Check if alignment mask for tfm is correctly set. */ + ret = __test_hash(tfm, template, tcount, use_digest, + alignmask + 1); + if (ret) + return ret; + } + + return 0; +} + +static int __test_aead(struct crypto_aead *tfm, int enc, + struct aead_testvec *template, unsigned int tcount, + const bool diff_dst, const int align_offset) { const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); unsigned int i, j, k, n, temp; @@ -323,21 +405,41 @@ static int test_aead(struct crypto_aead *tfm, int enc, char *q; char *key; struct aead_request *req; - struct scatterlist sg[8]; - struct scatterlist asg[8]; - const char *e; + struct scatterlist *sg; + struct scatterlist *asg; + struct scatterlist *sgout; + const char *e, *d; struct tcrypt_result result; unsigned int authsize; void *input; + void *output; void *assoc; - char iv[MAX_IVLEN]; + char *iv; char *xbuf[XBUFSIZE]; + char *xoutbuf[XBUFSIZE]; char *axbuf[XBUFSIZE]; + iv = kzalloc(MAX_IVLEN, GFP_KERNEL); + if (!iv) + return ret; if (testmgr_alloc_buf(xbuf)) goto out_noxbuf; if (testmgr_alloc_buf(axbuf)) goto out_noaxbuf; + if (diff_dst && testmgr_alloc_buf(xoutbuf)) + goto out_nooutbuf; + + /* avoid "the frame size is larger than 1024 bytes" compiler warning */ + sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 
3 : 2), GFP_KERNEL); + if (!sg) + goto out_nosg; + asg = &sg[8]; + sgout = &asg[8]; + + if (diff_dst) + d = "-ddst"; + else + d = ""; if (enc == ENCRYPT) e = "encryption"; @@ -348,8 +450,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, req = aead_request_alloc(tfm, GFP_KERNEL); if (!req) { - printk(KERN_ERR "alg: aead: Failed to allocate request for " - "%s\n", algo); + pr_err("alg: aead%s: Failed to allocate request for %s\n", + d, algo); goto out; } @@ -360,15 +462,16 @@ static int test_aead(struct crypto_aead *tfm, int enc, if (!template[i].np) { j++; - /* some tepmplates have no input data but they will + /* some templates have no input data but they will * touch input */ input = xbuf[0]; + input += align_offset; assoc = axbuf[0]; ret = -EINVAL; - if (WARN_ON(template[i].ilen > PAGE_SIZE || - template[i].alen > PAGE_SIZE)) + if (WARN_ON(align_offset + template[i].ilen > + PAGE_SIZE || template[i].alen > PAGE_SIZE)) goto out; memcpy(input, template[i].input, template[i].ilen); @@ -388,9 +491,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, ret = crypto_aead_setkey(tfm, key, template[i].klen); if (!ret == template[i].fail) { - printk(KERN_ERR "alg: aead: setkey failed on " - "test %d for %s: flags=%x\n", j, algo, - crypto_aead_get_flags(tfm)); + pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", + d, j, algo, crypto_aead_get_flags(tfm)); goto out; } else if (ret) continue; @@ -398,18 +500,27 @@ static int test_aead(struct crypto_aead *tfm, int enc, authsize = abs(template[i].rlen - template[i].ilen); ret = crypto_aead_setauthsize(tfm, authsize); if (ret) { - printk(KERN_ERR "alg: aead: Failed to set " - "authsize to %u on test %d for %s\n", - authsize, j, algo); + pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", + d, authsize, j, algo); goto out; } - sg_init_one(&sg[0], input, - template[i].ilen + (enc ? authsize : 0)); + if (diff_dst) { + output = xoutbuf[0]; + output += align_offset; + sg_init_one(&sg[0], input, template[i].ilen); + sg_init_one(&sgout[0], output, + template[i].rlen); + } else { + sg_init_one(&sg[0], input, + template[i].ilen + + (enc ? authsize : 0)); + output = input; + } sg_init_one(&asg[0], assoc, template[i].alen); - aead_request_set_crypt(req, sg, sg, + aead_request_set_crypt(req, sg, (diff_dst) ? 
sgout : sg, template[i].ilen, iv); aead_request_set_assoc(req, asg, template[i].alen); @@ -422,10 +533,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, case 0: if (template[i].novrfy) { /* verification was supposed to fail */ - printk(KERN_ERR "alg: aead: %s failed " - "on test %d for %s: ret was 0, " - "expected -EBADMSG\n", - e, j, algo); + pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", + d, e, j, algo); /* so really, we got a bad message */ ret = -EBADMSG; goto out; @@ -436,7 +545,7 @@ static int test_aead(struct crypto_aead *tfm, int enc, ret = wait_for_completion_interruptible( &result.completion); if (!ret && !(ret = result.err)) { - INIT_COMPLETION(result.completion); + reinit_completion(&result.completion); break; } case -EBADMSG: @@ -445,15 +554,15 @@ static int test_aead(struct crypto_aead *tfm, int enc, continue; /* fall through */ default: - printk(KERN_ERR "alg: aead: %s failed on test " - "%d for %s: ret=%d\n", e, j, algo, -ret); + pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", + d, e, j, algo, -ret); goto out; } - q = input; + q = output; if (memcmp(q, template[i].result, template[i].rlen)) { - printk(KERN_ERR "alg: aead: Test %d failed on " - "%s for %s\n", j, e, algo); + pr_err("alg: aead%s: Test %d failed on %s for %s\n", + d, j, e, algo); hexdump(q, template[i].rlen); ret = -EINVAL; goto out; @@ -462,6 +571,10 @@ static int test_aead(struct crypto_aead *tfm, int enc, } for (i = 0, j = 0; i < tcount; i++) { + /* alignment tests are only done with continuous buffers */ + if (align_offset != 0) + break; + if (template[i].np) { j++; @@ -478,9 +591,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, ret = crypto_aead_setkey(tfm, key, template[i].klen); if (!ret == template[i].fail) { - printk(KERN_ERR "alg: aead: setkey failed on " - "chunk test %d for %s: flags=%x\n", j, - algo, crypto_aead_get_flags(tfm)); + pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", + d, j, algo, crypto_aead_get_flags(tfm)); goto out; } else if (ret) continue; @@ -489,6 +601,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, ret = -EINVAL; sg_init_table(sg, template[i].np); + if (diff_dst) + sg_init_table(sgout, template[i].np); for (k = 0, temp = 0; k < template[i].np; k++) { if (WARN_ON(offset_in_page(IDX[k]) + template[i].tap[k] > PAGE_SIZE)) @@ -500,21 +614,31 @@ static int test_aead(struct crypto_aead *tfm, int enc, memcpy(q, template[i].input + temp, template[i].tap[k]); + sg_set_buf(&sg[k], q, template[i].tap[k]); + + if (diff_dst) { + q = xoutbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + + memset(q, 0, template[i].tap[k]); + + sg_set_buf(&sgout[k], q, + template[i].tap[k]); + } + n = template[i].tap[k]; if (k == template[i].np - 1 && enc) n += authsize; if (offset_in_page(q) + n < PAGE_SIZE) q[n] = 0; - sg_set_buf(&sg[k], q, template[i].tap[k]); temp += template[i].tap[k]; } ret = crypto_aead_setauthsize(tfm, authsize); if (ret) { - printk(KERN_ERR "alg: aead: Failed to set " - "authsize to %u on chunk test %d for " - "%s\n", authsize, j, algo); + pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", + d, authsize, j, algo); goto out; } @@ -526,7 +650,10 @@ static int test_aead(struct crypto_aead *tfm, int enc, goto out; } - sg[k - 1].length += authsize; + if (diff_dst) + sgout[k - 1].length += authsize; + else + sg[k - 1].length += authsize; } sg_init_table(asg, template[i].anp); @@ -544,7 +671,7 @@ static int test_aead(struct crypto_aead *tfm, int enc, temp 
+= template[i].atap[k]; } - aead_request_set_crypt(req, sg, sg, + aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, template[i].ilen, iv); @@ -558,10 +685,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, case 0: if (template[i].novrfy) { /* verification was supposed to fail */ - printk(KERN_ERR "alg: aead: %s failed " - "on chunk test %d for %s: ret " - "was 0, expected -EBADMSG\n", - e, j, algo); + pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", + d, e, j, algo); /* so really, we got a bad message */ ret = -EBADMSG; goto out; @@ -572,7 +697,7 @@ static int test_aead(struct crypto_aead *tfm, int enc, ret = wait_for_completion_interruptible( &result.completion); if (!ret && !(ret = result.err)) { - INIT_COMPLETION(result.completion); + reinit_completion(&result.completion); break; } case -EBADMSG: @@ -581,32 +706,35 @@ static int test_aead(struct crypto_aead *tfm, int enc, continue; /* fall through */ default: - printk(KERN_ERR "alg: aead: %s failed on " - "chunk test %d for %s: ret=%d\n", e, j, - algo, -ret); + pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", + d, e, j, algo, -ret); goto out; } ret = -EINVAL; for (k = 0, temp = 0; k < template[i].np; k++) { - q = xbuf[IDX[k] >> PAGE_SHIFT] + - offset_in_page(IDX[k]); + if (diff_dst) + q = xoutbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + else + q = xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); n = template[i].tap[k]; if (k == template[i].np - 1) n += enc ? authsize : -authsize; if (memcmp(q, template[i].result + temp, n)) { - printk(KERN_ERR "alg: aead: Chunk " - "test %d failed on %s at page " - "%u for %s\n", j, e, k, algo); + pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", + d, j, e, k, algo); hexdump(q, n); goto out; } q += n; if (k == template[i].np - 1 && !enc) { - if (memcmp(q, template[i].input + + if (!diff_dst && + memcmp(q, template[i].input + temp + n, authsize)) n = authsize; else @@ -617,11 +745,8 @@ static int test_aead(struct crypto_aead *tfm, int enc, ; } if (n) { - printk(KERN_ERR "alg: aead: Result " - "buffer corruption in chunk " - "test %d on %s at page %u for " - "%s: %u bytes:\n", j, e, k, - algo, n); + pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", + d, j, e, k, algo, n); hexdump(q, n); goto out; } @@ -635,13 +760,52 @@ static int test_aead(struct crypto_aead *tfm, int enc, out: aead_request_free(req); + kfree(sg); +out_nosg: + if (diff_dst) + testmgr_free_buf(xoutbuf); +out_nooutbuf: testmgr_free_buf(axbuf); out_noaxbuf: testmgr_free_buf(xbuf); out_noxbuf: + kfree(iv); return ret; } +static int test_aead(struct crypto_aead *tfm, int enc, + struct aead_testvec *template, unsigned int tcount) +{ + unsigned int alignmask; + int ret; + + /* test 'dst == src' case */ + ret = __test_aead(tfm, enc, template, tcount, false, 0); + if (ret) + return ret; + + /* test 'dst != src' case */ + ret = __test_aead(tfm, enc, template, tcount, true, 0); + if (ret) + return ret; + + /* test unaligned buffers, check with one byte offset */ + ret = __test_aead(tfm, enc, template, tcount, true, 1); + if (ret) + return ret; + + alignmask = crypto_tfm_alg_alignmask(&tfm->base); + if (alignmask) { + /* Check if alignment mask for tfm is correctly set. 
*/ + ret = __test_aead(tfm, enc, template, tcount, true, + alignmask + 1); + if (ret) + return ret; + } + + return 0; +} + static int test_cipher(struct crypto_cipher *tfm, int enc, struct cipher_testvec *template, unsigned int tcount) { @@ -717,8 +881,9 @@ out_nobuf: return ret; } -static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, - struct cipher_testvec *template, unsigned int tcount) +static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc, + struct cipher_testvec *template, unsigned int tcount, + const bool diff_dst, const int align_offset) { const char *algo = crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm)); @@ -726,16 +891,26 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, char *q; struct ablkcipher_request *req; struct scatterlist sg[8]; - const char *e; + struct scatterlist sgout[8]; + const char *e, *d; struct tcrypt_result result; void *data; char iv[MAX_IVLEN]; char *xbuf[XBUFSIZE]; + char *xoutbuf[XBUFSIZE]; int ret = -ENOMEM; if (testmgr_alloc_buf(xbuf)) goto out_nobuf; + if (diff_dst && testmgr_alloc_buf(xoutbuf)) + goto out_nooutbuf; + + if (diff_dst) + d = "-ddst"; + else + d = ""; + if (enc == ENCRYPT) e = "encryption"; else @@ -745,8 +920,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, req = ablkcipher_request_alloc(tfm, GFP_KERNEL); if (!req) { - printk(KERN_ERR "alg: skcipher: Failed to allocate request " - "for %s\n", algo); + pr_err("alg: skcipher%s: Failed to allocate request for %s\n", + d, algo); goto out; } @@ -760,14 +935,16 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, else memset(iv, 0, MAX_IVLEN); - if (!(template[i].np)) { + if (!(template[i].np) || (template[i].also_non_np)) { j++; ret = -EINVAL; - if (WARN_ON(template[i].ilen > PAGE_SIZE)) + if (WARN_ON(align_offset + template[i].ilen > + PAGE_SIZE)) goto out; data = xbuf[0]; + data += align_offset; memcpy(data, template[i].input, template[i].ilen); crypto_ablkcipher_clear_flags(tfm, ~0); @@ -778,16 +955,22 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, ret = crypto_ablkcipher_setkey(tfm, template[i].key, template[i].klen); if (!ret == template[i].fail) { - printk(KERN_ERR "alg: skcipher: setkey failed " - "on test %d for %s: flags=%x\n", j, - algo, crypto_ablkcipher_get_flags(tfm)); + pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", + d, j, algo, + crypto_ablkcipher_get_flags(tfm)); goto out; } else if (ret) continue; sg_init_one(&sg[0], data, template[i].ilen); + if (diff_dst) { + data = xoutbuf[0]; + data += align_offset; + sg_init_one(&sgout[0], data, template[i].ilen); + } - ablkcipher_request_set_crypt(req, sg, sg, + ablkcipher_request_set_crypt(req, sg, + (diff_dst) ? sgout : sg, template[i].ilen, iv); ret = enc ? 
crypto_ablkcipher_encrypt(req) : @@ -801,21 +984,20 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, ret = wait_for_completion_interruptible( &result.completion); if (!ret && !((ret = result.err))) { - INIT_COMPLETION(result.completion); + reinit_completion(&result.completion); break; } /* fall through */ default: - printk(KERN_ERR "alg: skcipher: %s failed on " - "test %d for %s: ret=%d\n", e, j, algo, - -ret); + pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", + d, e, j, algo, -ret); goto out; } q = data; if (memcmp(q, template[i].result, template[i].rlen)) { - printk(KERN_ERR "alg: skcipher: Test %d " - "failed on %s for %s\n", j, e, algo); + pr_err("alg: skcipher%s: Test %d failed on %s for %s\n", + d, j, e, algo); hexdump(q, template[i].rlen); ret = -EINVAL; goto out; @@ -825,6 +1007,9 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, j = 0; for (i = 0; i < tcount; i++) { + /* alignment tests are only done with continuous buffers */ + if (align_offset != 0) + break; if (template[i].iv) memcpy(iv, template[i].iv, MAX_IVLEN); @@ -842,9 +1027,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, ret = crypto_ablkcipher_setkey(tfm, template[i].key, template[i].klen); if (!ret == template[i].fail) { - printk(KERN_ERR "alg: skcipher: setkey failed " - "on chunk test %d for %s: flags=%x\n", - j, algo, + pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", + d, j, algo, crypto_ablkcipher_get_flags(tfm)); goto out; } else if (ret) @@ -853,6 +1037,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, temp = 0; ret = -EINVAL; sg_init_table(sg, template[i].np); + if (diff_dst) + sg_init_table(sgout, template[i].np); for (k = 0; k < template[i].np; k++) { if (WARN_ON(offset_in_page(IDX[k]) + template[i].tap[k] > PAGE_SIZE)) @@ -869,11 +1055,24 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, q[template[i].tap[k]] = 0; sg_set_buf(&sg[k], q, template[i].tap[k]); + if (diff_dst) { + q = xoutbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + + sg_set_buf(&sgout[k], q, + template[i].tap[k]); + + memset(q, 0, template[i].tap[k]); + if (offset_in_page(q) + + template[i].tap[k] < PAGE_SIZE) + q[template[i].tap[k]] = 0; + } temp += template[i].tap[k]; } - ablkcipher_request_set_crypt(req, sg, sg, + ablkcipher_request_set_crypt(req, sg, + (diff_dst) ? sgout : sg, template[i].ilen, iv); ret = enc ? 
@@ -888,28 +1087,30 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, ret = wait_for_completion_interruptible( &result.completion); if (!ret && !((ret = result.err))) { - INIT_COMPLETION(result.completion); + reinit_completion(&result.completion); break; } /* fall through */ default: - printk(KERN_ERR "alg: skcipher: %s failed on " - "chunk test %d for %s: ret=%d\n", e, j, - algo, -ret); + pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", + d, e, j, algo, -ret); goto out; } temp = 0; ret = -EINVAL; for (k = 0; k < template[i].np; k++) { - q = xbuf[IDX[k] >> PAGE_SHIFT] + - offset_in_page(IDX[k]); + if (diff_dst) + q = xoutbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); + else + q = xbuf[IDX[k] >> PAGE_SHIFT] + + offset_in_page(IDX[k]); if (memcmp(q, template[i].result + temp, template[i].tap[k])) { - printk(KERN_ERR "alg: skcipher: Chunk " - "test %d failed on %s at page " - "%u for %s\n", j, e, k, algo); + pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", + d, j, e, k, algo); hexdump(q, template[i].tap[k]); goto out; } @@ -918,11 +1119,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, for (n = 0; offset_in_page(q + n) && q[n]; n++) ; if (n) { - printk(KERN_ERR "alg: skcipher: " - "Result buffer corruption in " - "chunk test %d on %s at page " - "%u for %s: %u bytes:\n", j, e, - k, algo, n); + pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", + d, j, e, k, algo, n); hexdump(q, n); goto out; } @@ -935,11 +1133,47 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, out: ablkcipher_request_free(req); + if (diff_dst) + testmgr_free_buf(xoutbuf); +out_nooutbuf: testmgr_free_buf(xbuf); out_nobuf: return ret; } +static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, + struct cipher_testvec *template, unsigned int tcount) +{ + unsigned int alignmask; + int ret; + + /* test 'dst == src' case */ + ret = __test_skcipher(tfm, enc, template, tcount, false, 0); + if (ret) + return ret; + + /* test 'dst != src' case */ + ret = __test_skcipher(tfm, enc, template, tcount, true, 0); + if (ret) + return ret; + + /* test unaligned buffers, check with one byte offset */ + ret = __test_skcipher(tfm, enc, template, tcount, true, 1); + if (ret) + return ret; + + alignmask = crypto_tfm_alg_alignmask(&tfm->base); + if (alignmask) { + /* Check if alignment mask for tfm is correctly set. */ + ret = __test_skcipher(tfm, enc, template, tcount, true, + alignmask + 1); + if (ret) + return ret; + } + + return 0; +} + static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, struct comp_testvec *dtemplate, int ctcount, int dtcount) { @@ -1402,7 +1636,11 @@ static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, return PTR_ERR(tfm); } - err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count); + err = test_hash(tfm, desc->suite.hash.vecs, + desc->suite.hash.count, true); + if (!err) + err = test_hash(tfm, desc->suite.hash.vecs, + desc->suite.hash.count, false); crypto_free_ahash(tfm); return err; @@ -1477,9 +1715,93 @@ static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, return err; } +static int alg_test_null(const struct alg_test_desc *desc, + const char *driver, u32 type, u32 mask) +{ + return 0; +} + /* Please keep this list sorted by algorithm name. 
*/ static const struct alg_test_desc alg_test_descs[] = { { + .alg = "__cbc-cast5-avx", + .test = alg_test_null, + }, { + .alg = "__cbc-cast6-avx", + .test = alg_test_null, + }, { + .alg = "__cbc-serpent-avx", + .test = alg_test_null, + }, { + .alg = "__cbc-serpent-avx2", + .test = alg_test_null, + }, { + .alg = "__cbc-serpent-sse2", + .test = alg_test_null, + }, { + .alg = "__cbc-twofish-avx", + .test = alg_test_null, + }, { + .alg = "__driver-cbc-aes-aesni", + .test = alg_test_null, + .fips_allowed = 1, + }, { + .alg = "__driver-cbc-camellia-aesni", + .test = alg_test_null, + }, { + .alg = "__driver-cbc-camellia-aesni-avx2", + .test = alg_test_null, + }, { + .alg = "__driver-cbc-cast5-avx", + .test = alg_test_null, + }, { + .alg = "__driver-cbc-cast6-avx", + .test = alg_test_null, + }, { + .alg = "__driver-cbc-serpent-avx", + .test = alg_test_null, + }, { + .alg = "__driver-cbc-serpent-avx2", + .test = alg_test_null, + }, { + .alg = "__driver-cbc-serpent-sse2", + .test = alg_test_null, + }, { + .alg = "__driver-cbc-twofish-avx", + .test = alg_test_null, + }, { + .alg = "__driver-ecb-aes-aesni", + .test = alg_test_null, + .fips_allowed = 1, + }, { + .alg = "__driver-ecb-camellia-aesni", + .test = alg_test_null, + }, { + .alg = "__driver-ecb-camellia-aesni-avx2", + .test = alg_test_null, + }, { + .alg = "__driver-ecb-cast5-avx", + .test = alg_test_null, + }, { + .alg = "__driver-ecb-cast6-avx", + .test = alg_test_null, + }, { + .alg = "__driver-ecb-serpent-avx", + .test = alg_test_null, + }, { + .alg = "__driver-ecb-serpent-avx2", + .test = alg_test_null, + }, { + .alg = "__driver-ecb-serpent-sse2", + .test = alg_test_null, + }, { + .alg = "__driver-ecb-twofish-avx", + .test = alg_test_null, + }, { + .alg = "__ghash-pclmulqdqni", + .test = alg_test_null, + .fips_allowed = 1, + }, { .alg = "ansi_cprng", .test = alg_test_cprng, .fips_allowed = 1, @@ -1490,6 +1812,224 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "authenc(hmac(md5),ecb(cipher_null))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = hmac_md5_ecb_cipher_null_enc_tv_template, + .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS + }, + .dec = { + .vecs = hmac_md5_ecb_cipher_null_dec_tv_template, + .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "authenc(hmac(sha1),cbc(aes))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha1_aes_cbc_enc_tv_temp, + .count = + HMAC_SHA1_AES_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha1),cbc(des))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha1_des_cbc_enc_tv_temp, + .count = + HMAC_SHA1_DES_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha1),cbc(des3_ede))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha1_des3_ede_cbc_enc_tv_temp, + .count = + HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha1),ecb(cipher_null))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha1_ecb_cipher_null_enc_tv_temp, + .count = + HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC + }, + .dec = { + .vecs = + hmac_sha1_ecb_cipher_null_dec_tv_temp, + .count = + HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha224),cbc(des))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + 
hmac_sha224_des_cbc_enc_tv_temp, + .count = + HMAC_SHA224_DES_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha224),cbc(des3_ede))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha224_des3_ede_cbc_enc_tv_temp, + .count = + HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha256),cbc(aes))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha256_aes_cbc_enc_tv_temp, + .count = + HMAC_SHA256_AES_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha256),cbc(des))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha256_des_cbc_enc_tv_temp, + .count = + HMAC_SHA256_DES_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha256),cbc(des3_ede))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha256_des3_ede_cbc_enc_tv_temp, + .count = + HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha384),cbc(des))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha384_des_cbc_enc_tv_temp, + .count = + HMAC_SHA384_DES_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha384),cbc(des3_ede))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha384_des3_ede_cbc_enc_tv_temp, + .count = + HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha512),cbc(aes))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha512_aes_cbc_enc_tv_temp, + .count = + HMAC_SHA512_AES_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha512),cbc(des))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha512_des_cbc_enc_tv_temp, + .count = + HMAC_SHA512_DES_CBC_ENC_TEST_VEC + } + } + } + }, { + .alg = "authenc(hmac(sha512),cbc(des3_ede))", + .test = alg_test_aead, + .fips_allowed = 1, + .suite = { + .aead = { + .enc = { + .vecs = + hmac_sha512_des3_ede_cbc_enc_tv_temp, + .count = + HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC + } + } + } + }, { .alg = "cbc(aes)", .test = alg_test_skcipher, .fips_allowed = 1, @@ -1551,6 +2091,36 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "cbc(cast5)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast5_cbc_enc_tv_template, + .count = CAST5_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast5_cbc_dec_tv_template, + .count = CAST5_CBC_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "cbc(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_cbc_enc_tv_template, + .count = CAST6_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_cbc_dec_tv_template, + .count = CAST6_CBC_DEC_TEST_VECTORS + } + } + } + }, { .alg = "cbc(des)", .test = alg_test_skcipher, .suite = { @@ -1582,6 +2152,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "cbc(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = serpent_cbc_enc_tv_template, + .count = SERPENT_CBC_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_cbc_dec_tv_template, + .count = SERPENT_CBC_DEC_TEST_VECTORS + } + } + } + }, { .alg = "cbc(twofish)", .test = alg_test_skcipher, .suite = { @@ -1613,6 +2198,27 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + 
.alg = "cmac(aes)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = aes_cmac128_tv_template, + .count = CMAC_AES_TEST_VECTORS + } + } + }, { + .alg = "cmac(des3_ede)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = des3_ede_cmac64_tv_template, + .count = CMAC_DES3_EDE_TEST_VECTORS + } + } + }, { + .alg = "compress_null", + .test = alg_test_null, + }, { .alg = "crc32c", .test = alg_test_crc32c, .fips_allowed = 1, @@ -1623,6 +2229,65 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "crct10dif", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = crct10dif_tv_template, + .count = CRCT10DIF_TEST_VECTORS + } + } + }, { + .alg = "cryptd(__driver-cbc-aes-aesni)", + .test = alg_test_null, + .fips_allowed = 1, + }, { + .alg = "cryptd(__driver-cbc-camellia-aesni)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-cbc-serpent-avx2)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-ecb-aes-aesni)", + .test = alg_test_null, + .fips_allowed = 1, + }, { + .alg = "cryptd(__driver-ecb-camellia-aesni)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-ecb-cast5-avx)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-ecb-cast6-avx)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-ecb-serpent-avx)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-ecb-serpent-avx2)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-ecb-serpent-sse2)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-ecb-twofish-avx)", + .test = alg_test_null, + }, { + .alg = "cryptd(__driver-gcm-aes-aesni)", + .test = alg_test_null, + .fips_allowed = 1, + }, { + .alg = "cryptd(__ghash-pclmulqdqni)", + .test = alg_test_null, + .fips_allowed = 1, + }, { .alg = "ctr(aes)", .test = alg_test_skcipher, .fips_allowed = 1, @@ -1639,6 +2304,126 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "ctr(blowfish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = bf_ctr_enc_tv_template, + .count = BF_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = bf_ctr_dec_tv_template, + .count = BF_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(camellia)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = camellia_ctr_enc_tv_template, + .count = CAMELLIA_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = camellia_ctr_dec_tv_template, + .count = CAMELLIA_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(cast5)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast5_ctr_enc_tv_template, + .count = CAST5_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast5_ctr_dec_tv_template, + .count = CAST5_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_ctr_enc_tv_template, + .count = CAST6_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_ctr_dec_tv_template, + .count = CAST6_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(des)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = des_ctr_enc_tv_template, + .count = DES_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = des_ctr_dec_tv_template, + .count = DES_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(des3_ede)", + .test = alg_test_skcipher, + .suite = { + .cipher 
= { + .enc = { + .vecs = des3_ede_ctr_enc_tv_template, + .count = DES3_EDE_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = des3_ede_ctr_dec_tv_template, + .count = DES3_EDE_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = serpent_ctr_enc_tv_template, + .count = SERPENT_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_ctr_dec_tv_template, + .count = SERPENT_CTR_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "ctr(twofish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tf_ctr_enc_tv_template, + .count = TF_CTR_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tf_ctr_dec_tv_template, + .count = TF_CTR_DEC_TEST_VECTORS + } + } + } + }, { .alg = "cts(cbc(aes))", .test = alg_test_skcipher, .suite = { @@ -1656,6 +2441,7 @@ static const struct alg_test_desc alg_test_descs[] = { }, { .alg = "deflate", .test = alg_test_comp, + .fips_allowed = 1, .suite = { .comp = { .comp = { @@ -1669,6 +2455,13 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "digest_null", + .test = alg_test_null, + }, { + .alg = "ecb(__aes-aesni)", + .test = alg_test_null, + .fips_allowed = 1, + }, { .alg = "ecb(aes)", .test = alg_test_skcipher, .fips_allowed = 1, @@ -1775,6 +2568,9 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "ecb(cipher_null)", + .test = alg_test_null, + }, { .alg = "ecb(des)", .test = alg_test_skcipher, .fips_allowed = 1, @@ -1807,6 +2603,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "ecb(fcrypt)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = fcrypt_pcbc_enc_tv_template, + .count = 1 + }, + .dec = { + .vecs = fcrypt_pcbc_dec_tv_template, + .count = 1 + } + } + } + }, { .alg = "ecb(khazad)", .test = alg_test_skcipher, .suite = { @@ -1945,6 +2756,7 @@ static const struct alg_test_desc alg_test_descs[] = { }, { .alg = "ghash", .test = alg_test_hash, + .fips_allowed = 1, .suite = { .hash = { .vecs = ghash_tv_template, @@ -1952,6 +2764,15 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "hmac(crc32)", + .test = alg_test_hash, + .suite = { + .hash = { + .vecs = bfin_crc_tv_template, + .count = BFIN_CRC_TEST_VECTORS + } + } + }, { .alg = "hmac(md5)", .test = alg_test_hash, .suite = { @@ -2044,8 +2865,69 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "lrw(camellia)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = camellia_lrw_enc_tv_template, + .count = CAMELLIA_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = camellia_lrw_dec_tv_template, + .count = CAMELLIA_LRW_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "lrw(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_lrw_enc_tv_template, + .count = CAST6_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_lrw_dec_tv_template, + .count = CAST6_LRW_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "lrw(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = serpent_lrw_enc_tv_template, + .count = SERPENT_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_lrw_dec_tv_template, + .count = SERPENT_LRW_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "lrw(twofish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tf_lrw_enc_tv_template, + .count = TF_LRW_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tf_lrw_dec_tv_template, + .count = TF_LRW_DEC_TEST_VECTORS + } + } 
+ } + }, { .alg = "lzo", .test = alg_test_comp, + .fips_allowed = 1, .suite = { .comp = { .comp = { @@ -2086,6 +2968,22 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "ofb(aes)", + .test = alg_test_skcipher, + .fips_allowed = 1, + .suite = { + .cipher = { + .enc = { + .vecs = aes_ofb_enc_tv_template, + .count = AES_OFB_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_ofb_dec_tv_template, + .count = AES_OFB_DEC_TEST_VECTORS + } + } + } + }, { .alg = "pcbc(fcrypt)", .test = alg_test_skcipher, .suite = { @@ -2117,6 +3015,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "rfc4106(gcm(aes))", + .test = alg_test_aead, + .suite = { + .aead = { + .enc = { + .vecs = aes_gcm_rfc4106_enc_tv_template, + .count = AES_GCM_4106_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_gcm_rfc4106_dec_tv_template, + .count = AES_GCM_4106_DEC_TEST_VECTORS + } + } + } + }, { .alg = "rfc4309(ccm(aes))", .test = alg_test_aead, .fips_allowed = 1, @@ -2133,6 +3046,21 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "rfc4543(gcm(aes))", + .test = alg_test_aead, + .suite = { + .aead = { + .enc = { + .vecs = aes_gcm_rfc4543_enc_tv_template, + .count = AES_GCM_4543_ENC_TEST_VECTORS + }, + .dec = { + .vecs = aes_gcm_rfc4543_dec_tv_template, + .count = AES_GCM_4543_DEC_TEST_VECTORS + }, + } + } + }, { .alg = "rmd128", .test = alg_test_hash, .suite = { @@ -2304,6 +3232,7 @@ static const struct alg_test_desc alg_test_descs[] = { }, { .alg = "xts(aes)", .test = alg_test_skcipher, + .fips_allowed = 1, .suite = { .cipher = { .enc = { @@ -2317,8 +3246,69 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "xts(camellia)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = camellia_xts_enc_tv_template, + .count = CAMELLIA_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = camellia_xts_dec_tv_template, + .count = CAMELLIA_XTS_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "xts(cast6)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = cast6_xts_enc_tv_template, + .count = CAST6_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = cast6_xts_dec_tv_template, + .count = CAST6_XTS_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "xts(serpent)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = serpent_xts_enc_tv_template, + .count = SERPENT_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = serpent_xts_dec_tv_template, + .count = SERPENT_XTS_DEC_TEST_VECTORS + } + } + } + }, { + .alg = "xts(twofish)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = { + .vecs = tf_xts_enc_tv_template, + .count = TF_XTS_ENC_TEST_VECTORS + }, + .dec = { + .vecs = tf_xts_dec_tv_template, + .count = TF_XTS_DEC_TEST_VECTORS + } + } + } + }, { .alg = "zlib", .test = alg_test_pcomp, + .fips_allowed = 1, .suite = { .pcomp = { .comp = { @@ -2334,6 +3324,35 @@ static const struct alg_test_desc alg_test_descs[] = { } }; +static bool alg_test_descs_checked; + +static void alg_test_descs_check_order(void) +{ + int i; + + /* only check once */ + if (alg_test_descs_checked) + return; + + alg_test_descs_checked = true; + + for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { + int diff = strcmp(alg_test_descs[i - 1].alg, + alg_test_descs[i].alg); + + if (WARN_ON(diff > 0)) { + pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", + alg_test_descs[i - 1].alg, + alg_test_descs[i].alg); + } + + if (WARN_ON(diff == 0)) { + pr_warn("testmgr: duplicate alg_test_descs entry: 
'%s'\n",
+				alg_test_descs[i].alg);
+		}
+	}
+}
+
 static int alg_find_test(const char *alg)
 {
 	int start = 0;
@@ -2365,6 +3384,8 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
 	int j;
 	int rc;
 
+	alg_test_descs_check_order();
+
 	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
 		char nalg[CRYPTO_MAX_ALG_NAME];
 
@@ -2396,7 +3417,7 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
 	if (i >= 0)
 		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
 					     type, mask);
-	if (j >= 0)
+	if (j >= 0 && j != i)
 		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
 					     type, mask);
 
@@ -2405,8 +3426,8 @@ test_done:
 		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
 
 	if (fips_enabled && !rc)
-		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
-		       driver, alg);
+		pr_info(KERN_INFO "alg: self-tests for %s (%s) passed\n",
+			driver, alg);
 
 	return rc;
 
@@ -2416,4 +3437,7 @@ notest:
 non_fips_alg:
 	return -EINVAL;
 }
+
+#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
+
 EXPORT_SYMBOL_GPL(alg_test);
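The do_one_async_hash_op() helper added by this patch only makes sense together with the completion plumbing that already lives in testmgr.c but falls outside the diff. The sketch below is illustrative only and not part of the patch: the tcrypt_result/tcrypt_complete definitions are assumed to match the existing (unchanged, therefore not shown) helpers in testmgr.c, and example_hash_one_buffer() is a hypothetical caller showing how the init/update/final path that the patch starts exercising is driven through the async completion wait.

/* Illustrative sketch -- not part of the diff above. */
#include <linux/completion.h>
#include <linux/scatterlist.h>
#include <crypto/hash.h>

/* Assumed to match the pre-existing helpers in testmgr.c. */
struct tcrypt_result {
	struct completion completion;
	int err;
};

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	/* -EINPROGRESS only means the request was queued; wait for the
	 * final completion callback before recording the result. */
	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

/* Hypothetical caller: drives the init/update/final path through
 * do_one_async_hash_op(), which sleeps on the completion whenever the
 * operation returns -EINPROGRESS or -EBUSY. */
static int example_hash_one_buffer(struct crypto_ahash *tfm,
				   struct scatterlist *sg,
				   unsigned int len, u8 *out)
{
	struct tcrypt_result tresult;
	struct ahash_request *req;
	int ret;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);
	ahash_request_set_crypt(req, sg, out, len);

	ret = do_one_async_hash_op(req, &tresult, crypto_ahash_init(req));
	if (!ret)
		ret = do_one_async_hash_op(req, &tresult,
					   crypto_ahash_update(req));
	if (!ret)
		ret = do_one_async_hash_op(req, &tresult,
					   crypto_ahash_final(req));

	ahash_request_free(req);
	return ret;
}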
