| author | Linus Torvalds <torvalds@linux-foundation.org> | 2013-07-05 12:12:33 -0700 |
|---|---|---|
| committer | Linus Torvalds <torvalds@linux-foundation.org> | 2013-07-05 12:12:33 -0700 |
| commit | b2c311075db578f1433d9b303698491bfa21279a (patch) | |
| tree | 41d5f1b5ad6f45be7211f524328de81f7e9754be /crypto/testmgr.c | |
| parent | 45175476ae2dbebc860d5cf486f2916044343513 (diff) | |
| parent | 02c0241b600e4ab8a732c89749e252165145d60c (diff) | |
| download | op-kernel-dev-b2c311075db578f1433d9b303698491bfa21279a.zip op-kernel-dev-b2c311075db578f1433d9b303698491bfa21279a.tar.gz | |
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:
- Do not idle omap device between crypto operations in one session.
- Added sha224/sha384 shims for SSSE3.
- More optimisations for camellia-aesni-avx2.
- Removed defunct blowfish/twofish AVX2 implementations.
- Added unaligned buffer self-tests (see the sketch after the commit log below).
- Added PCLMULQDQ optimisation for CRCT10DIF.
- Added support for Freescale's DCP co-processor.
- Misc fixes.
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (44 commits)
crypto: testmgr - test hash implementations with unaligned buffers
crypto: testmgr - test AEADs with unaligned buffers
crypto: testmgr - test skciphers with unaligned buffers
crypto: testmgr - check that entries in alg_test_descs are in correct order
Revert "crypto: twofish - add AVX2/x86_64 assembler implementation of twofish cipher"
Revert "crypto: blowfish - add AVX2/x86_64 implementation of blowfish cipher"
crypto: camellia-aesni-avx2 - tune assembly code for more performance
hwrng: bcm2835 - fix MODULE_LICENSE tag
hwrng: nomadik - use clk_prepare_enable()
crypto: picoxcell - replace strict_strtoul() with kstrtoul()
crypto: dcp - Staticize local symbols
crypto: dcp - Use NULL instead of 0
crypto: dcp - Use devm_* APIs
crypto: dcp - Remove redundant platform_set_drvdata()
hwrng: use platform_{get,set}_drvdata()
crypto: omap-aes - Don't idle/start AES device between Encrypt operations
crypto: crct10dif - Use PTR_RET
crypto: ux500 - Cocci spatch "resource_size.spatch"
crypto: sha256_ssse3 - add sha224 support
crypto: sha512_ssse3 - add sha384 support
...
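The unaligned-buffer self-tests work by re-running each test vector with the input placed at a one-byte offset and at an offset of alignmask + 1 inside the test page, so an implementation that silently assumes aligned buffers (or advertises the wrong alignmask) produces a mismatch. Below is a minimal user-space sketch of that idea, not the kernel code itself; PAGE_SIZE here is a local constant, and ALIGNMASK and process_buf() are illustrative stand-ins for a driver's alignmask and transform.

```c
/*
 * Minimal sketch of the unaligned-buffer test idea: run the same
 * operation at offset 0, offset 1, and offset ALIGNMASK + 1 inside a
 * page-sized buffer and compare the results against a reference.
 * process_buf() and ALIGNMASK are illustrative, not kernel APIs.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define PAGE_SIZE 4096
#define ALIGNMASK 15	/* pretend the "driver" claims 16-byte alignment */

/* Toy transform standing in for a hash/cipher; a correct implementation
 * must return the same result no matter how its input is aligned. */
static uint32_t process_buf(const uint8_t *buf, size_t len)
{
	uint32_t acc = 0;

	while (len--)
		acc = acc * 31 + *buf++;
	return acc;
}

static int test_one_offset(const uint8_t *vec, size_t len, size_t offset,
			   uint32_t expected)
{
	uint8_t page[PAGE_SIZE];

	if (offset + len > PAGE_SIZE)
		return -1;	/* mirrors the WARN_ON() checks in the patch */

	memcpy(page + offset, vec, len);	/* place the vector at an odd offset */
	return process_buf(page + offset, len) == expected ? 0 : -1;
}

int main(void)
{
	static const uint8_t vec[] = "abcdbcdecdefdefgefghfghighijhijk";
	uint32_t expected = process_buf(vec, sizeof(vec) - 1);
	size_t offsets[] = { 0, 1, ALIGNMASK + 1 };	/* same offsets testmgr now uses */
	size_t i;

	for (i = 0; i < sizeof(offsets) / sizeof(offsets[0]); i++)
		if (test_one_offset(vec, sizeof(vec) - 1, offsets[i], expected))
			printf("mismatch at offset %zu\n", offsets[i]);
	return 0;
}
```

In the diff below, the same pattern shows up as a new align_offset parameter on __test_hash(), __test_aead() and __test_skcipher(), with the test_*() wrappers calling them at offsets 0, 1 and alignmask + 1.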
Diffstat (limited to 'crypto/testmgr.c')
-rw-r--r-- | crypto/testmgr.c | 176 |
1 file changed, 140 insertions(+), 36 deletions(-)
```diff
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index 5823735..2f00607 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -184,8 +184,9 @@ static int do_one_async_hash_op(struct ahash_request *req,
 	return ret;
 }
 
-static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
-		     unsigned int tcount, bool use_digest)
+static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
+		       unsigned int tcount, bool use_digest,
+		       const int align_offset)
 {
 	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
 	unsigned int i, j, k, temp;
@@ -216,10 +217,15 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 		if (template[i].np)
 			continue;
 
+		ret = -EINVAL;
+		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
+			goto out;
+
 		j++;
 		memset(result, 0, 64);
 
 		hash_buff = xbuf[0];
+		hash_buff += align_offset;
 		memcpy(hash_buff, template[i].plaintext, template[i].psize);
 
 		sg_init_one(&sg[0], hash_buff, template[i].psize);
@@ -281,6 +287,10 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 
 	j = 0;
 	for (i = 0; i < tcount; i++) {
+		/* alignment tests are only done with continuous buffers */
+		if (align_offset != 0)
+			break;
+
 		if (template[i].np) {
 			j++;
 			memset(result, 0, 64);
@@ -358,9 +368,36 @@ out_nobuf:
 	return ret;
 }
 
+static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
+		     unsigned int tcount, bool use_digest)
+{
+	unsigned int alignmask;
+	int ret;
+
+	ret = __test_hash(tfm, template, tcount, use_digest, 0);
+	if (ret)
+		return ret;
+
+	/* test unaligned buffers, check with one byte offset */
+	ret = __test_hash(tfm, template, tcount, use_digest, 1);
+	if (ret)
+		return ret;
+
+	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
+	if (alignmask) {
+		/* Check if alignment mask for tfm is correctly set. */
+		ret = __test_hash(tfm, template, tcount, use_digest,
+				  alignmask + 1);
+		if (ret)
+			return ret;
+	}
+
+	return 0;
+}
+
 static int __test_aead(struct crypto_aead *tfm, int enc,
 		       struct aead_testvec *template, unsigned int tcount,
-		       const bool diff_dst)
+		       const bool diff_dst, const int align_offset)
 {
 	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
 	unsigned int i, j, k, n, temp;
@@ -423,15 +460,16 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 
 		if (!template[i].np) {
 			j++;
 
-			/* some tepmplates have no input data but they will
+			/* some templates have no input data but they will
 			 * touch input */
 			input = xbuf[0];
+			input += align_offset;
 			assoc = axbuf[0];
 
 			ret = -EINVAL;
-			if (WARN_ON(template[i].ilen > PAGE_SIZE ||
-				    template[i].alen > PAGE_SIZE))
+			if (WARN_ON(align_offset + template[i].ilen >
+				    PAGE_SIZE || template[i].alen > PAGE_SIZE))
 				goto out;
 
 			memcpy(input, template[i].input, template[i].ilen);
@@ -470,6 +508,7 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 
 			if (diff_dst) {
 				output = xoutbuf[0];
+				output += align_offset;
 				sg_init_one(&sgout[0], output,
 					    template[i].ilen +
 						(enc ? authsize : 0));
@@ -530,6 +569,10 @@ static int __test_aead(struct crypto_aead *tfm, int enc,
 	}
 
 	for (i = 0, j = 0; i < tcount; i++) {
+		/* alignment tests are only done with continuous buffers */
+		if (align_offset != 0)
+			break;
+
 		if (template[i].np) {
 			j++;
 
@@ -732,15 +775,34 @@ out_noxbuf:
 static int test_aead(struct crypto_aead *tfm, int enc,
 		     struct aead_testvec *template, unsigned int tcount)
 {
+	unsigned int alignmask;
 	int ret;
 
 	/* test 'dst == src' case */
-	ret = __test_aead(tfm, enc, template, tcount, false);
+	ret = __test_aead(tfm, enc, template, tcount, false, 0);
 	if (ret)
 		return ret;
 
 	/* test 'dst != src' case */
-	return __test_aead(tfm, enc, template, tcount, true);
+	ret = __test_aead(tfm, enc, template, tcount, true, 0);
+	if (ret)
+		return ret;
+
+	/* test unaligned buffers, check with one byte offset */
+	ret = __test_aead(tfm, enc, template, tcount, true, 1);
+	if (ret)
+		return ret;
+
+	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
+	if (alignmask) {
+		/* Check if alignment mask for tfm is correctly set. */
+		ret = __test_aead(tfm, enc, template, tcount, true,
+				  alignmask + 1);
+		if (ret)
+			return ret;
+	}
+
+	return 0;
 }
 
 static int test_cipher(struct crypto_cipher *tfm, int enc,
@@ -820,7 +882,7 @@ out_nobuf:
 
 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
 			   struct cipher_testvec *template, unsigned int tcount,
-			   const bool diff_dst)
+			   const bool diff_dst, const int align_offset)
 {
 	const char *algo =
 		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
@@ -876,10 +938,12 @@ static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
 		j++;
 
 		ret = -EINVAL;
-		if (WARN_ON(template[i].ilen > PAGE_SIZE))
+		if (WARN_ON(align_offset + template[i].ilen >
+			    PAGE_SIZE))
 			goto out;
 
 		data = xbuf[0];
+		data += align_offset;
 		memcpy(data, template[i].input, template[i].ilen);
 
 		crypto_ablkcipher_clear_flags(tfm, ~0);
@@ -900,6 +964,7 @@ static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
 		sg_init_one(&sg[0], data, template[i].ilen);
 		if (diff_dst) {
 			data = xoutbuf[0];
+			data += align_offset;
 			sg_init_one(&sgout[0], data, template[i].ilen);
 		}
 
@@ -941,6 +1006,9 @@ static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
 
 	j = 0;
 	for (i = 0; i < tcount; i++) {
+		/* alignment tests are only done with continuous buffers */
+		if (align_offset != 0)
+			break;
 
 		if (template[i].iv)
 			memcpy(iv, template[i].iv, MAX_IVLEN);
@@ -1075,15 +1143,34 @@ out_nobuf:
 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
 			 struct cipher_testvec *template, unsigned int tcount)
 {
+	unsigned int alignmask;
 	int ret;
 
 	/* test 'dst == src' case */
-	ret = __test_skcipher(tfm, enc, template, tcount, false);
+	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
 	if (ret)
 		return ret;
 
 	/* test 'dst != src' case */
-	return __test_skcipher(tfm, enc, template, tcount, true);
+	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
+	if (ret)
+		return ret;
+
+	/* test unaligned buffers, check with one byte offset */
+	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
+	if (ret)
+		return ret;
+
+	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
+	if (alignmask) {
+		/* Check if alignment mask for tfm is correctly set. */
+		ret = __test_skcipher(tfm, enc, template, tcount, true,
+				      alignmask + 1);
+		if (ret)
+			return ret;
+	}
+
+	return 0;
 }
 
 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
@@ -1654,16 +1741,10 @@ static const struct alg_test_desc alg_test_descs[] = {
 		.alg = "__cbc-twofish-avx",
 		.test = alg_test_null,
 	}, {
-		.alg = "__cbc-twofish-avx2",
-		.test = alg_test_null,
-	}, {
 		.alg = "__driver-cbc-aes-aesni",
 		.test = alg_test_null,
 		.fips_allowed = 1,
 	}, {
-		.alg = "__driver-cbc-blowfish-avx2",
-		.test = alg_test_null,
-	}, {
 		.alg = "__driver-cbc-camellia-aesni",
 		.test = alg_test_null,
 	}, {
@@ -1688,16 +1769,10 @@ static const struct alg_test_desc alg_test_descs[] = {
 		.alg = "__driver-cbc-twofish-avx",
 		.test = alg_test_null,
 	}, {
-		.alg = "__driver-cbc-twofish-avx2",
-		.test = alg_test_null,
-	}, {
 		.alg = "__driver-ecb-aes-aesni",
 		.test = alg_test_null,
 		.fips_allowed = 1,
 	}, {
-		.alg = "__driver-ecb-blowfish-avx2",
-		.test = alg_test_null,
-	}, {
 		.alg = "__driver-ecb-camellia-aesni",
 		.test = alg_test_null,
 	}, {
@@ -1722,9 +1797,6 @@ static const struct alg_test_desc alg_test_descs[] = {
 		.alg = "__driver-ecb-twofish-avx",
 		.test = alg_test_null,
 	}, {
-		.alg = "__driver-ecb-twofish-avx2",
-		.test = alg_test_null,
-	}, {
 		.alg = "__ghash-pclmulqdqni",
 		.test = alg_test_null,
 		.fips_allowed = 1,
@@ -1974,12 +2046,19 @@ static const struct alg_test_desc alg_test_descs[] = {
 			}
 		}
 	}, {
-		.alg = "cryptd(__driver-cbc-aes-aesni)",
-		.test = alg_test_null,
+		.alg = "crct10dif",
+		.test = alg_test_hash,
 		.fips_allowed = 1,
+		.suite = {
+			.hash = {
+				.vecs = crct10dif_tv_template,
+				.count = CRCT10DIF_TEST_VECTORS
+			}
+		}
 	}, {
-		.alg = "cryptd(__driver-cbc-blowfish-avx2)",
+		.alg = "cryptd(__driver-cbc-aes-aesni)",
 		.test = alg_test_null,
+		.fips_allowed = 1,
 	}, {
 		.alg = "cryptd(__driver-cbc-camellia-aesni)",
 		.test = alg_test_null,
@@ -1994,9 +2073,6 @@ static const struct alg_test_desc alg_test_descs[] = {
 		.test = alg_test_null,
 		.fips_allowed = 1,
 	}, {
-		.alg = "cryptd(__driver-ecb-blowfish-avx2)",
-		.test = alg_test_null,
-	}, {
 		.alg = "cryptd(__driver-ecb-camellia-aesni)",
 		.test = alg_test_null,
 	}, {
@@ -2021,9 +2097,6 @@ static const struct alg_test_desc alg_test_descs[] = {
 		.alg = "cryptd(__driver-ecb-twofish-avx)",
 		.test = alg_test_null,
 	}, {
-		.alg = "cryptd(__driver-ecb-twofish-avx2)",
-		.test = alg_test_null,
-	}, {
 		.alg = "cryptd(__driver-gcm-aes-aesni)",
 		.test = alg_test_null,
 		.fips_allowed = 1,
@@ -3068,6 +3141,35 @@ static const struct alg_test_desc alg_test_descs[] = {
 	}
 };
 
+static bool alg_test_descs_checked;
+
+static void alg_test_descs_check_order(void)
+{
+	int i;
+
+	/* only check once */
+	if (alg_test_descs_checked)
+		return;
+
+	alg_test_descs_checked = true;
+
+	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
+		int diff = strcmp(alg_test_descs[i - 1].alg,
+				  alg_test_descs[i].alg);
+
+		if (WARN_ON(diff > 0)) {
+			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
+				alg_test_descs[i - 1].alg,
+				alg_test_descs[i].alg);
+		}
+
+		if (WARN_ON(diff == 0)) {
+			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
+				alg_test_descs[i].alg);
+		}
+	}
+}
+
 static int alg_find_test(const char *alg)
 {
 	int start = 0;
@@ -3099,6 +3201,8 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
 	int j;
 	int rc;
 
+	alg_test_descs_check_order();
+
 	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
 		char nalg[CRYPTO_MAX_ALG_NAME];
 
```
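The alg_test_descs_check_order() addition guards the binary search that alg_find_test() performs over alg_test_descs[]: if an entry is out of alphabetical order or duplicated, lookups can miss it and the algorithm would silently go untested. Below is a small stand-alone sketch of that lookup and the order check; the table contents are a toy example, not the real alg_test_descs[], and printf stands in for WARN_ON()/pr_warn().

```c
/*
 * Sketch of why alg_test_descs must stay sorted: lookups are done by
 * binary search over the name, so a misplaced entry becomes unreachable.
 */
#include <stdio.h>
#include <string.h>

struct test_desc {
	const char *alg;
};

static const struct test_desc descs[] = {
	{ "cbc(aes)" },
	{ "crc32c" },
	{ "crct10dif" },	/* new entries must be inserted in sorted order */
	{ "sha256" },
};

/* Mirrors the shape of alg_find_test(): binary search by name. */
static int find_test(const char *alg)
{
	int start = 0;
	int end = sizeof(descs) / sizeof(descs[0]);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}
		if (diff < 0) {
			start = i + 1;
			continue;
		}
		return i;
	}
	return -1;
}

/* Same invariant the new check enforces: each name must compare
 * strictly greater than its predecessor. */
static void check_order(void)
{
	size_t i;

	for (i = 1; i < sizeof(descs) / sizeof(descs[0]); i++)
		if (strcmp(descs[i - 1].alg, descs[i].alg) >= 0)
			printf("entry '%s' is out of order or duplicated\n",
			       descs[i].alg);
}

int main(void)
{
	check_order();
	printf("crct10dif -> index %d\n", find_test("crct10dif"));
	return 0;
}
```

With the check in place, a misplaced or duplicate entry triggers a WARN_ON() and a pr_warn() the first time alg_test() runs, instead of the corresponding self-test being skipped without notice.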